1 #ifndef _G_KERNEL_CHANNEL_NVOC_H_
2 #define _G_KERNEL_CHANNEL_NVOC_H_
3 #include "nvoc/runtime.h"
4 
5 #ifdef __cplusplus
6 extern "C" {
7 #endif
8 
9 /*
10  * SPDX-FileCopyrightText: Copyright (c) 2020-2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
11  * SPDX-License-Identifier: MIT
12  *
13  * Permission is hereby granted, free of charge, to any person obtaining a
14  * copy of this software and associated documentation files (the "Software"),
15  * to deal in the Software without restriction, including without limitation
16  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
17  * and/or sell copies of the Software, and to permit persons to whom the
18  * Software is furnished to do so, subject to the following conditions:
19  *
20  * The above copyright notice and this permission notice shall be included in
21  * all copies or substantial portions of the Software.
22  *
23  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
26  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
28  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
29  * DEALINGS IN THE SOFTWARE.
30  */
31 
32 #include "g_kernel_channel_nvoc.h"
33 
34 #ifndef KERNEL_CHANNEL_H
35 #define KERNEL_CHANNEL_H
36 
37 #include "core/core.h"
38 #include "os/os.h"
39 #include "resserv/resserv.h"
40 #include "nvoc/prelude.h"
41 #include "gpu/gpu_resource.h"
42 #include "kernel/gpu/gpu_engine_type.h"
43 #include "kernel/gpu/fifo/kernel_ctxshare.h"
44 #include "kernel/gpu/fifo/kernel_fifo.h"
45 #include "kernel/gpu/gr/kernel_graphics_context.h"
46 #include "kernel/gpu/intr/intr_service.h"
47 #include "kernel/gpu/mig_mgr/kernel_mig_manager.h"
48 
49 #include "ctrl/ctrl0090.h"
50 #include "ctrl/ctrl208f/ctrl208ffifo.h"
51 #include "ctrl/ctrl506f.h"
52 #include "ctrl/ctrl906f.h"
53 #include "ctrl/ctrla06f.h"
54 #include "ctrl/ctrla16f.h"
55 #include "ctrl/ctrla26f.h"
56 #include "ctrl/ctrlb06f.h"
57 #include "ctrl/ctrlc06f.h"
58 #include "ctrl/ctrlc36f.h"
59 #include "ctrl/ctrlc56f.h"
60 
61 #include "cc_drv.h"
62 
63 struct OBJGPU;
64 
65 #ifndef __NVOC_CLASS_OBJGPU_TYPEDEF__
66 #define __NVOC_CLASS_OBJGPU_TYPEDEF__
67 typedef struct OBJGPU OBJGPU;
68 #endif /* __NVOC_CLASS_OBJGPU_TYPEDEF__ */
69 
70 #ifndef __nvoc_class_id_OBJGPU
71 #define __nvoc_class_id_OBJGPU 0x7ef3cb
72 #endif /* __nvoc_class_id_OBJGPU */
73 
74 
75 struct UserInfo;
76 
77 #ifndef __NVOC_CLASS_UserInfo_TYPEDEF__
78 #define __NVOC_CLASS_UserInfo_TYPEDEF__
79 typedef struct UserInfo UserInfo;
80 #endif /* __NVOC_CLASS_UserInfo_TYPEDEF__ */
81 
82 #ifndef __nvoc_class_id_UserInfo
83 #define __nvoc_class_id_UserInfo 0x21d236
84 #endif /* __nvoc_class_id_UserInfo */
85 
86 
87 /*!
88  * @brief Type of hErrorContext or hEccErrorContext
89  *
90  * This is RPCed to GSP in #NV_CHANNEL_ALLOC_PARAMS.internalFlags
91  * along with the actual memdesc in
92  * #NV_CHANNEL_ALLOC_PARAMS.errorNotifierMem and
93  * #NV_CHANNEL_ALLOC_PARAMS.eccErrorNotifierMem.
94  */
95 typedef enum {
96     /*!
97      * Initial state as passed in NV_CHANNEL_ALLOC_PARAMS by
98      * kernel CPU-RM clients.
99      */
100     ERROR_NOTIFIER_TYPE_UNKNOWN = 0,
101     /*! @brief Error notifier is explicitly not set.
102      *
103      * The corresponding hErrorContext or hEccErrorContext must be
104      * NV01_NULL_OBJECT.
105      */
106     ERROR_NOTIFIER_TYPE_NONE,
107     /*! @brief Error notifier is a ContextDma */
108     ERROR_NOTIFIER_TYPE_CTXDMA,
109     /*! @brief Error notifier is a NvNotification array in sysmem/vidmem */
110     ERROR_NOTIFIER_TYPE_MEMORY
111 } ErrorNotifierType;
112 
113 //
114 // Iterates over the ChannelDescendants on a channel
115 // Uses an RS_ORDERED_ITERATOR and filters it by EngineID / ClassID
116 //
117 typedef struct {
118     RS_ORDERED_ITERATOR rsIter;
119     RM_ENGINE_TYPE engineID;
120     NvU32 classID;
121 } KernelChannelChildIterator;
122 
123 typedef enum
124 {
125     CHANNEL_CLASS_TYPE_DMA,
126     CHANNEL_CLASS_TYPE_GPFIFO,
127 } CHANNEL_CLASS_TYPE;
128 
129 //
130 // Channel class info structure.
131 //
132 // Filled in by CliGetChannelClassInfo() routine.
133 //
134 typedef struct
135 {
136     NvU32 notifiersMaxCount;    // max# of notifiers for class
137     NvU32 eventActionDisable;   // event disable action cmd value
138     NvU32 eventActionSingle;    // event single-shot enable action cmd value
139     NvU32 eventActionRepeat;    // event repeat enable action cmd value
140     NvU32 rcNotifierIndex;      // RC notifier index differs depending on the channel class
141     CHANNEL_CLASS_TYPE classType;
142 } CLI_CHANNEL_CLASS_INFO;
143 
144 void CliGetChannelClassInfo(NvU32, CLI_CHANNEL_CLASS_INFO*);
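//
// Illustrative usage sketch (not part of the generated header): a caller looks
// up the per-class notifier/event properties by channel class number. The
// class number shown here is assumed to come from the caller.
//
//     CLI_CHANNEL_CLASS_INFO classInfo;
//     CliGetChannelClassInfo(channelClassNum, &classInfo);
//     if (classInfo.classType == CHANNEL_CLASS_TYPE_GPFIFO)
//     {
//         // classInfo.rcNotifierIndex selects the RC notifier slot for this class
//     }
//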
145 
146 /*!
147  * This structure represents an iterator for all objects
148  * with given class number or engine tag on a channel or TSG.
149  * It is created by function @ref kchannelGetChildIterOverGroup.
150  */
151 typedef struct
152 {
153     NvU32 engDesc;
154     NvU32 classNum;
155 
156     //
157     // During iteration, a copy of the current channel/TSG as well as the
158     // next object node to start iterating from is tracked.
159     //
160     CHANNEL_NODE channelNode;
161     KernelChannelChildIterator kchannelIter;
162 } KernelChannelChildIterOverGroup;
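//
// Usage sketch (illustrative, not generated code): an iterator of this type is
// initialized by kchannelGetChildIterOverGroup() (argument order assumed here)
// and then advanced repeatedly; each step filters the embedded
// KernelChannelChildIterator by the requested engDesc/classNum while walking
// every channel of the TSG tracked through channelNode. The "advance" helper
// is declared elsewhere in this header and is not reproduced here.
//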
163 
164 typedef struct _def_instance_block
165 {
166     MEMORY_DESCRIPTOR  *pInstanceBlockDesc;
167     MEMORY_DESCRIPTOR  *pRamfcDesc;
168     /*!
169      * Used only for Suspend Resume RM internal channel.
170      * Will be moved to the Host context RL infolist.
171      */
172     MEMORY_DESCRIPTOR  *pRLMemDesc;
173 } FIFO_INSTANCE_BLOCK;
174 
175 /* Bitfields in NV_CHANNEL_ALLOC_PARAMS.internalFlags */
176 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_PRIVILEGE                       1:0
177 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_PRIVILEGE_USER                  0x0
178 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_PRIVILEGE_ADMIN                 0x1
179 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_PRIVILEGE_KERNEL                0x2
180 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ERROR_NOTIFIER_TYPE             3:2
181 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ERROR_NOTIFIER_TYPE_UNKNOWN     ERROR_NOTIFIER_TYPE_UNKNOWN
182 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ERROR_NOTIFIER_TYPE_NONE        ERROR_NOTIFIER_TYPE_NONE
183 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ERROR_NOTIFIER_TYPE_CTXDMA      ERROR_NOTIFIER_TYPE_CTXDMA
184 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ERROR_NOTIFIER_TYPE_MEMORY      ERROR_NOTIFIER_TYPE_MEMORY
185 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ECC_ERROR_NOTIFIER_TYPE         5:4
186 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ECC_ERROR_NOTIFIER_TYPE_UNKNOWN ERROR_NOTIFIER_TYPE_UNKNOWN
187 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ECC_ERROR_NOTIFIER_TYPE_NONE    ERROR_NOTIFIER_TYPE_NONE
188 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ECC_ERROR_NOTIFIER_TYPE_CTXDMA  ERROR_NOTIFIER_TYPE_CTXDMA
189 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ECC_ERROR_NOTIFIER_TYPE_MEMORY  ERROR_NOTIFIER_TYPE_MEMORY
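//
// Illustrative sketch (not generated code): these field/value pairs follow the
// usual DRF naming convention, so internalFlags is typically packed and
// unpacked with the nvmisc.h DRF helpers, along the lines of:
//
//     NvU32 internalFlags = 0;
//     internalFlags = FLD_SET_DRF(_KERNELCHANNEL, _ALLOC_INTERNALFLAGS,
//                                 _PRIVILEGE, _KERNEL, internalFlags);
//     internalFlags = FLD_SET_DRF(_KERNELCHANNEL, _ALLOC_INTERNALFLAGS,
//                                 _ERROR_NOTIFIER_TYPE, _MEMORY, internalFlags);
//
//     ErrorNotifierType notifierType = (ErrorNotifierType)
//         DRF_VAL(_KERNELCHANNEL, _ALLOC_INTERNALFLAGS, _ERROR_NOTIFIER_TYPE,
//                 internalFlags);
//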
190 
191 /*!
192  * Class for the kernel side of a Channel object.
193  */
194 
195 // Private field names are wrapped in PRIVATE_FIELD, which does nothing for
196 // the matching C source file, but causes diagnostics to be issued if another
197 // source file references the field.
198 #ifdef NVOC_KERNEL_CHANNEL_H_PRIVATE_ACCESS_ALLOWED
199 #define PRIVATE_FIELD(x) x
200 #else
201 #define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
202 #endif
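//
// Illustrative note: a field written in the NVOC source as, say,
//     NvU32 PRIVATE_FIELD(refCount);
// expands to a plain "NvU32 refCount;" in the matching C source file (which
// enables NVOC_KERNEL_CHANNEL_H_PRIVATE_ACCESS_ALLOWED) and to a
// diagnostic-generating name via NVOC_PRIVATE_FIELD() in every other
// translation unit.
//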
203 
204 struct KernelChannel {
205     const struct NVOC_RTTI *__nvoc_rtti;
206     struct GpuResource __nvoc_base_GpuResource;
207     struct Notifier __nvoc_base_Notifier;
208     struct Object *__nvoc_pbase_Object;
209     struct RsResource *__nvoc_pbase_RsResource;
210     struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
211     struct RmResource *__nvoc_pbase_RmResource;
212     struct GpuResource *__nvoc_pbase_GpuResource;
213     struct INotifier *__nvoc_pbase_INotifier;
214     struct Notifier *__nvoc_pbase_Notifier;
215     struct KernelChannel *__nvoc_pbase_KernelChannel;
216     NV_STATUS (*__kchannelMap__)(struct KernelChannel *, CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, RsCpuMapping *);
217     NV_STATUS (*__kchannelUnmap__)(struct KernelChannel *, CALL_CONTEXT *, RsCpuMapping *);
218     NV_STATUS (*__kchannelGetMapAddrSpace__)(struct KernelChannel *, CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
219     NV_STATUS (*__kchannelGetMemInterMapParams__)(struct KernelChannel *, RMRES_MEM_INTER_MAP_PARAMS *);
220     NV_STATUS (*__kchannelCheckMemInterUnmap__)(struct KernelChannel *, NvBool);
221     NV_STATUS (*__kchannelCreateUserMemDesc__)(struct OBJGPU *, struct KernelChannel *);
222     NvBool (*__kchannelIsUserdAddrSizeValid__)(struct KernelChannel *, NvU32, NvU32);
223     NV_STATUS (*__kchannelCtrlCmdResetIsolatedChannel__)(struct KernelChannel *, NV506F_CTRL_CMD_RESET_ISOLATED_CHANNEL_PARAMS *);
224     NV_STATUS (*__kchannelCtrlCmdInternalResetIsolatedChannel__)(struct KernelChannel *, NV506F_CTRL_CMD_INTERNAL_RESET_ISOLATED_CHANNEL_PARAMS *);
225     NV_STATUS (*__kchannelCtrlCmdGetClassEngineid__)(struct KernelChannel *, NV906F_CTRL_GET_CLASS_ENGINEID_PARAMS *);
226     NV_STATUS (*__kchannelCtrlCmdResetChannel__)(struct KernelChannel *, NV906F_CTRL_CMD_RESET_CHANNEL_PARAMS *);
227     NV_STATUS (*__kchannelCtrlCmdGetDeferRCState__)(struct KernelChannel *, NV906F_CTRL_CMD_GET_DEFER_RC_STATE_PARAMS *);
228     NV_STATUS (*__kchannelCtrlCmdGetMmuFaultInfo__)(struct KernelChannel *, NV906F_CTRL_GET_MMU_FAULT_INFO_PARAMS *);
229     NV_STATUS (*__kchannelCtrlCmdEventSetNotification__)(struct KernelChannel *, NV906F_CTRL_EVENT_SET_NOTIFICATION_PARAMS *);
230     NV_STATUS (*__kchannelCtrlCmdGetClassEngineidA06F__)(struct KernelChannel *, NVA06F_CTRL_GET_CLASS_ENGINEID_PARAMS *);
231     NV_STATUS (*__kchannelCtrlCmdResetChannelA06F__)(struct KernelChannel *, NVA06F_CTRL_CMD_RESET_CHANNEL_PARAMS *);
232     NV_STATUS (*__kchannelCtrlCmdGpFifoSchedule__)(struct KernelChannel *, NVA06F_CTRL_GPFIFO_SCHEDULE_PARAMS *);
233     NV_STATUS (*__kchannelCtrlCmdBind__)(struct KernelChannel *, NVA06F_CTRL_BIND_PARAMS *);
234     NV_STATUS (*__kchannelCtrlCmdGetMmuFaultInfoA06F__)(struct KernelChannel *, NVA06F_CTRL_GET_MMU_FAULT_INFO_PARAMS *);
235     NV_STATUS (*__kchannelCtrlCmdSetErrorNotifier__)(struct KernelChannel *, NVA06F_CTRL_SET_ERROR_NOTIFIER_PARAMS *);
236     NV_STATUS (*__kchannelCtrlCmdSetInterleaveLevel__)(struct KernelChannel *, NVA06F_CTRL_INTERLEAVE_LEVEL_PARAMS *);
237     NV_STATUS (*__kchannelCtrlCmdRestartRunlist__)(struct KernelChannel *, NVA06F_CTRL_RESTART_RUNLIST_PARAMS *);
238     NV_STATUS (*__kchannelCtrlCmdGetClassEngineidA16F__)(struct KernelChannel *, NVA16F_CTRL_GET_CLASS_ENGINEID_PARAMS *);
239     NV_STATUS (*__kchannelCtrlCmdResetChannelA16F__)(struct KernelChannel *, NVA16F_CTRL_CMD_RESET_CHANNEL_PARAMS *);
240     NV_STATUS (*__kchannelCtrlCmdGpFifoScheduleA16F__)(struct KernelChannel *, NVA16F_CTRL_GPFIFO_SCHEDULE_PARAMS *);
241     NV_STATUS (*__kchannelCtrlCmdGetClassEngineidA26F__)(struct KernelChannel *, NVA26F_CTRL_GET_CLASS_ENGINEID_PARAMS *);
242     NV_STATUS (*__kchannelCtrlCmdResetChannelA26F__)(struct KernelChannel *, NVA26F_CTRL_CMD_RESET_CHANNEL_PARAMS *);
243     NV_STATUS (*__kchannelFCtrlCmdGpFifoScheduleA26F__)(struct KernelChannel *, NVA26F_CTRL_GPFIFO_SCHEDULE_PARAMS *);
244     NV_STATUS (*__kchannelCtrlCmdGetClassEngineidB06F__)(struct KernelChannel *, NVB06F_CTRL_GET_CLASS_ENGINEID_PARAMS *);
245     NV_STATUS (*__kchannelCtrlCmdResetChannelB06F__)(struct KernelChannel *, NVB06F_CTRL_CMD_RESET_CHANNEL_PARAMS *);
246     NV_STATUS (*__kchannelCtrlCmdGpFifoScheduleB06F__)(struct KernelChannel *, NVB06F_CTRL_GPFIFO_SCHEDULE_PARAMS *);
247     NV_STATUS (*__kchannelCtrlCmdBindB06F__)(struct KernelChannel *, NVB06F_CTRL_BIND_PARAMS *);
248     NV_STATUS (*__kchannelCtrlCmdGetEngineCtxSize__)(struct KernelChannel *, NVB06F_CTRL_GET_ENGINE_CTX_SIZE_PARAMS *);
249     NV_STATUS (*__kchannelCtrlCmdGetEngineCtxData__)(struct KernelChannel *, NVB06F_CTRL_GET_ENGINE_CTX_DATA_PARAMS *);
250     NV_STATUS (*__kchannelCtrlCmdMigrateEngineCtxData__)(struct KernelChannel *, NVB06F_CTRL_MIGRATE_ENGINE_CTX_DATA_PARAMS *);
251     NV_STATUS (*__kchannelCtrlCmdGetEngineCtxState__)(struct KernelChannel *, NVB06F_CTRL_GET_ENGINE_CTX_STATE_PARAMS *);
252     NV_STATUS (*__kchannelCtrlCmdGetChannelHwState__)(struct KernelChannel *, NVB06F_CTRL_GET_CHANNEL_HW_STATE_PARAMS *);
253     NV_STATUS (*__kchannelCtrlCmdSetChannelHwState__)(struct KernelChannel *, NVB06F_CTRL_SET_CHANNEL_HW_STATE_PARAMS *);
254     NV_STATUS (*__kchannelCtrlCmdSaveEngineCtxData__)(struct KernelChannel *, NVB06F_CTRL_SAVE_ENGINE_CTX_DATA_PARAMS *);
255     NV_STATUS (*__kchannelCtrlCmdRestoreEngineCtxData__)(struct KernelChannel *, NVB06F_CTRL_RESTORE_ENGINE_CTX_DATA_PARAMS *);
256     NV_STATUS (*__kchannelCtrlCmdGetClassEngineidC06F__)(struct KernelChannel *, NVC06F_CTRL_GET_CLASS_ENGINEID_PARAMS *);
257     NV_STATUS (*__kchannelCtrlCmdResetChannelC06F__)(struct KernelChannel *, NVC06F_CTRL_CMD_RESET_CHANNEL_PARAMS *);
258     NV_STATUS (*__kchannelCtrlCmdGpFifoScheduleC06F__)(struct KernelChannel *, NVC06F_CTRL_GPFIFO_SCHEDULE_PARAMS *);
259     NV_STATUS (*__kchannelCtrlCmdBindC06F__)(struct KernelChannel *, NVC06F_CTRL_BIND_PARAMS *);
260     NV_STATUS (*__kchannelCtrlCmdGetClassEngineidC36F__)(struct KernelChannel *, NVC36F_CTRL_GET_CLASS_ENGINEID_PARAMS *);
261     NV_STATUS (*__kchannelCtrlCmdResetChannelC36F__)(struct KernelChannel *, NVC36F_CTRL_CMD_RESET_CHANNEL_PARAMS *);
262     NV_STATUS (*__kchannelCtrlCmdGpFifoScheduleC36F__)(struct KernelChannel *, NVC36F_CTRL_GPFIFO_SCHEDULE_PARAMS *);
263     NV_STATUS (*__kchannelCtrlCmdBindC36F__)(struct KernelChannel *, NVC36F_CTRL_BIND_PARAMS *);
264     NV_STATUS (*__kchannelCtrlCmdGpfifoGetWorkSubmitToken__)(struct KernelChannel *, NVC36F_CTRL_CMD_GPFIFO_GET_WORK_SUBMIT_TOKEN_PARAMS *);
265     NV_STATUS (*__kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer__)(struct KernelChannel *, NVC36F_CTRL_GPFIFO_UPDATE_FAULT_METHOD_BUFFER_PARAMS *);
266     NV_STATUS (*__kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex__)(struct KernelChannel *, NVC36F_CTRL_GPFIFO_SET_WORK_SUBMIT_TOKEN_NOTIF_INDEX_PARAMS *);
267     NV_STATUS (*__kchannelCtrlCmdStopChannel__)(struct KernelChannel *, NVA06F_CTRL_STOP_CHANNEL_PARAMS *);
268     NV_STATUS (*__kchannelCtrlCmdGetKmb__)(struct KernelChannel *, NVC56F_CTRL_CMD_GET_KMB_PARAMS *);
269     NV_STATUS (*__kchannelCtrlRotateSecureChannelIv__)(struct KernelChannel *, NVC56F_CTRL_ROTATE_SECURE_CHANNEL_IV_PARAMS *);
270     NV_STATUS (*__kchannelSetEncryptionStatsBuffer__)(struct OBJGPU *, struct KernelChannel *, MEMORY_DESCRIPTOR *, NvBool);
271     NV_STATUS (*__kchannelCtrlGetTpcPartitionMode__)(struct KernelChannel *, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *);
272     NV_STATUS (*__kchannelCtrlSetTpcPartitionMode__)(struct KernelChannel *, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *);
273     NV_STATUS (*__kchannelCtrlGetMMUDebugMode__)(struct KernelChannel *, NV0090_CTRL_GET_MMU_DEBUG_MODE_PARAMS *);
274     NV_STATUS (*__kchannelCtrlProgramVidmemPromote__)(struct KernelChannel *, NV0090_CTRL_PROGRAM_VIDMEM_PROMOTE_PARAMS *);
275     NV_STATUS (*__kchannelRetrieveKmb__)(struct OBJGPU *, struct KernelChannel *, ROTATE_IV_TYPE, NvBool, CC_KMB *);
276     NV_STATUS (*__kchannelSetKeyRotationNotifier__)(struct OBJGPU *, struct KernelChannel *, NvBool);
277     NvBool (*__kchannelShareCallback__)(struct KernelChannel *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
278     NV_STATUS (*__kchannelGetOrAllocNotifShare__)(struct KernelChannel *, NvHandle, NvHandle, struct NotifShare **);
279     NV_STATUS (*__kchannelMapTo__)(struct KernelChannel *, RS_RES_MAP_TO_PARAMS *);
280     void (*__kchannelSetNotificationShare__)(struct KernelChannel *, struct NotifShare *);
281     NvU32 (*__kchannelGetRefCount__)(struct KernelChannel *);
282     void (*__kchannelAddAdditionalDependants__)(struct RsClient *, struct KernelChannel *, RsResourceRef *);
283     NV_STATUS (*__kchannelControl_Prologue__)(struct KernelChannel *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
284     NV_STATUS (*__kchannelGetRegBaseOffsetAndSize__)(struct KernelChannel *, struct OBJGPU *, NvU32 *, NvU32 *);
285     NV_STATUS (*__kchannelInternalControlForward__)(struct KernelChannel *, NvU32, void *, NvU32);
286     NV_STATUS (*__kchannelUnmapFrom__)(struct KernelChannel *, RS_RES_UNMAP_FROM_PARAMS *);
287     void (*__kchannelControl_Epilogue__)(struct KernelChannel *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
288     NvHandle (*__kchannelGetInternalObjectHandle__)(struct KernelChannel *);
289     NV_STATUS (*__kchannelControl__)(struct KernelChannel *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
290     NV_STATUS (*__kchannelGetMemoryMappingDescriptor__)(struct KernelChannel *, struct MEMORY_DESCRIPTOR **);
291     NV_STATUS (*__kchannelControlFilter__)(struct KernelChannel *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
292     NV_STATUS (*__kchannelUnregisterEvent__)(struct KernelChannel *, NvHandle, NvHandle, NvHandle, NvHandle);
293     NV_STATUS (*__kchannelControlSerialization_Prologue__)(struct KernelChannel *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
294     NvBool (*__kchannelCanCopy__)(struct KernelChannel *);
295     NvBool (*__kchannelIsPartialUnmapSupported__)(struct KernelChannel *);
296     void (*__kchannelPreDestruct__)(struct KernelChannel *);
297     NV_STATUS (*__kchannelIsDuplicate__)(struct KernelChannel *, NvHandle, NvBool *);
298     void (*__kchannelControlSerialization_Epilogue__)(struct KernelChannel *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
299     PEVENTNOTIFICATION *(*__kchannelGetNotificationListPtr__)(struct KernelChannel *);
300     struct NotifShare *(*__kchannelGetNotificationShare__)(struct KernelChannel *);
301     NvBool (*__kchannelAccessCallback__)(struct KernelChannel *, struct RsClient *, void *, RsAccessRight);
302     NvU16 nextObjectClassID;
303     struct KernelChannel *pNextBindKernelChannel;
304     FIFO_MMU_EXCEPTION_DATA *pMmuExceptionData;
305     NvHandle hErrorContext;
306     MEMORY_DESCRIPTOR *pErrContextMemDesc;
307     ErrorNotifierType errorContextType;
308     NvU64 errorContextOffset;
309     NvHandle hEccErrorContext;
310     MEMORY_DESCRIPTOR *pEccErrContextMemDesc;
311     ErrorNotifierType eccErrorContextType;
312     NvU64 eccErrorContextOffset;
313     struct UserInfo *pUserInfo;
314     NvHandle hVASpace;
315     struct OBJVASPACE *pVAS;
316     NvHandle hKernelGraphicsContext;
317     NvU8 privilegeLevel;
318     NvU32 runlistId;
319     NvU32 ChID;
320     struct KernelChannelGroupApi *pKernelChannelGroupApi;
321     struct KernelCtxShareApi *pKernelCtxShareApi;
322     NvU32 refCount;
323     NvBool bIsContextBound;
324     FIFO_INSTANCE_BLOCK *pFifoHalData[8];
325     MEMORY_DESCRIPTOR *pInstSubDeviceMemDesc[8];
326     MEMORY_DESCRIPTOR *pUserdSubDeviceMemDesc[8];
327     NvBool bClientAllocatedUserD;
328     NvU32 swState[8];
329     NvBool bIsRcPending[8];
330     NvU32 ProcessID;
331     NvU32 SubProcessID;
332     NvU32 bcStateCurrent;
333     NvU32 notifyIndex[3];
334     NvU32 *pNotifyActions;
335     NvU64 userdLength;
336     NvBool bSkipCtxBufferAlloc;
337     NvU32 subctxId;
338     NvU32 cid;
339     struct MIG_INSTANCE_REF partitionRef;
340     NvU32 runqueue;
341     RM_ENGINE_TYPE engineType;
342     CC_KMB clientKmb;
343     MEMORY_DESCRIPTOR *pEncStatsBufMemDesc;
344     CC_CRYPTOBUNDLE_STATS *pEncStatsBuf;
345     MEMORY_DESCRIPTOR *pKeyRotationNotifierMemDesc;
346     NvNotification *pKeyRotationNotifier;
347     NvBool bCCSecureChannel;
348     NvBool bUseScrubKey;
349 };
350 
351 #ifndef __NVOC_CLASS_KernelChannel_TYPEDEF__
352 #define __NVOC_CLASS_KernelChannel_TYPEDEF__
353 typedef struct KernelChannel KernelChannel;
354 #endif /* __NVOC_CLASS_KernelChannel_TYPEDEF__ */
355 
356 #ifndef __nvoc_class_id_KernelChannel
357 #define __nvoc_class_id_KernelChannel 0x5d8d70
358 #endif /* __nvoc_class_id_KernelChannel */
359 
360 extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelChannel;
361 
362 #define __staticCast_KernelChannel(pThis) \
363     ((pThis)->__nvoc_pbase_KernelChannel)
364 
365 #ifdef __nvoc_kernel_channel_h_disabled
366 #define __dynamicCast_KernelChannel(pThis) ((KernelChannel*)NULL)
367 #else //__nvoc_kernel_channel_h_disabled
368 #define __dynamicCast_KernelChannel(pThis) \
369     ((KernelChannel*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelChannel)))
370 #endif //__nvoc_kernel_channel_h_disabled
371 
372 
373 NV_STATUS __nvoc_objCreateDynamic_KernelChannel(KernelChannel**, Dynamic*, NvU32, va_list);
374 
375 NV_STATUS __nvoc_objCreate_KernelChannel(KernelChannel**, Dynamic*, NvU32, CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
376 #define __objCreate_KernelChannel(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
377     __nvoc_objCreate_KernelChannel((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
378 
379 #define kchannelMap(pKernelChannel, pCallContext, pParams, pCpuMapping) kchannelMap_DISPATCH(pKernelChannel, pCallContext, pParams, pCpuMapping)
380 #define kchannelUnmap(pKernelChannel, pCallContext, pCpuMapping) kchannelUnmap_DISPATCH(pKernelChannel, pCallContext, pCpuMapping)
381 #define kchannelGetMapAddrSpace(pKernelChannel, pCallContext, mapFlags, pAddrSpace) kchannelGetMapAddrSpace_DISPATCH(pKernelChannel, pCallContext, mapFlags, pAddrSpace)
382 #define kchannelGetMemInterMapParams(pKernelChannel, pParams) kchannelGetMemInterMapParams_DISPATCH(pKernelChannel, pParams)
383 #define kchannelCheckMemInterUnmap(pKernelChannel, bSubdeviceHandleProvided) kchannelCheckMemInterUnmap_DISPATCH(pKernelChannel, bSubdeviceHandleProvided)
384 #define kchannelCreateUserMemDesc(pGpu, arg0) kchannelCreateUserMemDesc_DISPATCH(pGpu, arg0)
385 #define kchannelCreateUserMemDesc_HAL(pGpu, arg0) kchannelCreateUserMemDesc_DISPATCH(pGpu, arg0)
386 #define kchannelIsUserdAddrSizeValid(pKernelChannel, userdAddrLo, userdAddrHi) kchannelIsUserdAddrSizeValid_DISPATCH(pKernelChannel, userdAddrLo, userdAddrHi)
387 #define kchannelIsUserdAddrSizeValid_HAL(pKernelChannel, userdAddrLo, userdAddrHi) kchannelIsUserdAddrSizeValid_DISPATCH(pKernelChannel, userdAddrLo, userdAddrHi)
388 #define kchannelCtrlCmdResetIsolatedChannel(pKernelChannel, pResetParams) kchannelCtrlCmdResetIsolatedChannel_DISPATCH(pKernelChannel, pResetParams)
389 #define kchannelCtrlCmdInternalResetIsolatedChannel(pKernelChannel, pResetParams) kchannelCtrlCmdInternalResetIsolatedChannel_DISPATCH(pKernelChannel, pResetParams)
390 #define kchannelCtrlCmdGetClassEngineid(pKernelChannel, pParams) kchannelCtrlCmdGetClassEngineid_DISPATCH(pKernelChannel, pParams)
391 #define kchannelCtrlCmdResetChannel(pKernelChannel, pResetChannelParams) kchannelCtrlCmdResetChannel_DISPATCH(pKernelChannel, pResetChannelParams)
392 #define kchannelCtrlCmdGetDeferRCState(pKernelChannel, pStateParams) kchannelCtrlCmdGetDeferRCState_DISPATCH(pKernelChannel, pStateParams)
393 #define kchannelCtrlCmdGetMmuFaultInfo(pKernelChannel, pFaultInfoParams) kchannelCtrlCmdGetMmuFaultInfo_DISPATCH(pKernelChannel, pFaultInfoParams)
394 #define kchannelCtrlCmdEventSetNotification(pKernelChannel, pSetEventParams) kchannelCtrlCmdEventSetNotification_DISPATCH(pKernelChannel, pSetEventParams)
395 #define kchannelCtrlCmdGetClassEngineidA06F(pKernelChannel, pParams) kchannelCtrlCmdGetClassEngineidA06F_DISPATCH(pKernelChannel, pParams)
396 #define kchannelCtrlCmdResetChannelA06F(pKernelChannel, pResetChannelParams) kchannelCtrlCmdResetChannelA06F_DISPATCH(pKernelChannel, pResetChannelParams)
397 #define kchannelCtrlCmdGpFifoSchedule(pKernelChannel, pSchedParams) kchannelCtrlCmdGpFifoSchedule_DISPATCH(pKernelChannel, pSchedParams)
398 #define kchannelCtrlCmdBind(pKernelChannel, pParams) kchannelCtrlCmdBind_DISPATCH(pKernelChannel, pParams)
399 #define kchannelCtrlCmdGetMmuFaultInfoA06F(pKernelChannel, pFaultInfoParams) kchannelCtrlCmdGetMmuFaultInfoA06F_DISPATCH(pKernelChannel, pFaultInfoParams)
400 #define kchannelCtrlCmdSetErrorNotifier(pKernelChannel, pSetErrorNotifierParams) kchannelCtrlCmdSetErrorNotifier_DISPATCH(pKernelChannel, pSetErrorNotifierParams)
401 #define kchannelCtrlCmdSetInterleaveLevel(pKernelChannel, pParams) kchannelCtrlCmdSetInterleaveLevel_DISPATCH(pKernelChannel, pParams)
402 #define kchannelCtrlCmdRestartRunlist(pKernelChannel, pParams) kchannelCtrlCmdRestartRunlist_DISPATCH(pKernelChannel, pParams)
403 #define kchannelCtrlCmdGetClassEngineidA16F(pKernelChannel, pParams) kchannelCtrlCmdGetClassEngineidA16F_DISPATCH(pKernelChannel, pParams)
404 #define kchannelCtrlCmdResetChannelA16F(pKernelChannel, pResetChannelParams) kchannelCtrlCmdResetChannelA16F_DISPATCH(pKernelChannel, pResetChannelParams)
405 #define kchannelCtrlCmdGpFifoScheduleA16F(pKernelChannel, pSchedParams) kchannelCtrlCmdGpFifoScheduleA16F_DISPATCH(pKernelChannel, pSchedParams)
406 #define kchannelCtrlCmdGetClassEngineidA26F(pKernelChannel, pParams) kchannelCtrlCmdGetClassEngineidA26F_DISPATCH(pKernelChannel, pParams)
407 #define kchannelCtrlCmdResetChannelA26F(pKernelChannel, pResetChannelParams) kchannelCtrlCmdResetChannelA26F_DISPATCH(pKernelChannel, pResetChannelParams)
408 #define kchannelFCtrlCmdGpFifoScheduleA26F(pKernelChannel, pSchedParams) kchannelFCtrlCmdGpFifoScheduleA26F_DISPATCH(pKernelChannel, pSchedParams)
409 #define kchannelCtrlCmdGetClassEngineidB06F(pKernelChannel, pParams) kchannelCtrlCmdGetClassEngineidB06F_DISPATCH(pKernelChannel, pParams)
410 #define kchannelCtrlCmdResetChannelB06F(pKernelChannel, pResetChannelParams) kchannelCtrlCmdResetChannelB06F_DISPATCH(pKernelChannel, pResetChannelParams)
411 #define kchannelCtrlCmdGpFifoScheduleB06F(pKernelChannel, pSchedParams) kchannelCtrlCmdGpFifoScheduleB06F_DISPATCH(pKernelChannel, pSchedParams)
412 #define kchannelCtrlCmdBindB06F(pKernelChannel, pParams) kchannelCtrlCmdBindB06F_DISPATCH(pKernelChannel, pParams)
413 #define kchannelCtrlCmdGetEngineCtxSize(pKernelChannel, pCtxSizeParams) kchannelCtrlCmdGetEngineCtxSize_DISPATCH(pKernelChannel, pCtxSizeParams)
414 #define kchannelCtrlCmdGetEngineCtxData(pKernelChannel, pCtxBuffParams) kchannelCtrlCmdGetEngineCtxData_DISPATCH(pKernelChannel, pCtxBuffParams)
415 #define kchannelCtrlCmdMigrateEngineCtxData(pKernelChannel, pCtxBuffParams) kchannelCtrlCmdMigrateEngineCtxData_DISPATCH(pKernelChannel, pCtxBuffParams)
416 #define kchannelCtrlCmdGetEngineCtxState(pKernelChannel, pCtxStateParams) kchannelCtrlCmdGetEngineCtxState_DISPATCH(pKernelChannel, pCtxStateParams)
417 #define kchannelCtrlCmdGetChannelHwState(pKernelChannel, pParams) kchannelCtrlCmdGetChannelHwState_DISPATCH(pKernelChannel, pParams)
418 #define kchannelCtrlCmdSetChannelHwState(pKernelChannel, pParams) kchannelCtrlCmdSetChannelHwState_DISPATCH(pKernelChannel, pParams)
419 #define kchannelCtrlCmdSaveEngineCtxData(pKernelChannel, pCtxBuffParams) kchannelCtrlCmdSaveEngineCtxData_DISPATCH(pKernelChannel, pCtxBuffParams)
420 #define kchannelCtrlCmdRestoreEngineCtxData(pKernelChannel, pCtxBuffParams) kchannelCtrlCmdRestoreEngineCtxData_DISPATCH(pKernelChannel, pCtxBuffParams)
421 #define kchannelCtrlCmdGetClassEngineidC06F(pKernelChannel, pParams) kchannelCtrlCmdGetClassEngineidC06F_DISPATCH(pKernelChannel, pParams)
422 #define kchannelCtrlCmdResetChannelC06F(pKernelChannel, pResetChannelParams) kchannelCtrlCmdResetChannelC06F_DISPATCH(pKernelChannel, pResetChannelParams)
423 #define kchannelCtrlCmdGpFifoScheduleC06F(pKernelChannel, pSchedParams) kchannelCtrlCmdGpFifoScheduleC06F_DISPATCH(pKernelChannel, pSchedParams)
424 #define kchannelCtrlCmdBindC06F(pKernelChannel, pParams) kchannelCtrlCmdBindC06F_DISPATCH(pKernelChannel, pParams)
425 #define kchannelCtrlCmdGetClassEngineidC36F(pKernelChannel, pParams) kchannelCtrlCmdGetClassEngineidC36F_DISPATCH(pKernelChannel, pParams)
426 #define kchannelCtrlCmdResetChannelC36F(pKernelChannel, pResetChannelParams) kchannelCtrlCmdResetChannelC36F_DISPATCH(pKernelChannel, pResetChannelParams)
427 #define kchannelCtrlCmdGpFifoScheduleC36F(pKernelChannel, pSchedParams) kchannelCtrlCmdGpFifoScheduleC36F_DISPATCH(pKernelChannel, pSchedParams)
428 #define kchannelCtrlCmdBindC36F(pKernelChannel, pParams) kchannelCtrlCmdBindC36F_DISPATCH(pKernelChannel, pParams)
429 #define kchannelCtrlCmdGpfifoGetWorkSubmitToken(pKernelChannel, pTokenParams) kchannelCtrlCmdGpfifoGetWorkSubmitToken_DISPATCH(pKernelChannel, pTokenParams)
430 #define kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer(pKernelChannel, pFaultMthdBufferParams) kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer_DISPATCH(pKernelChannel, pFaultMthdBufferParams)
431 #define kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex(pKernelChannel, pParams) kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex_DISPATCH(pKernelChannel, pParams)
432 #define kchannelCtrlCmdStopChannel(pKernelChannel, pStopChannelParams) kchannelCtrlCmdStopChannel_DISPATCH(pKernelChannel, pStopChannelParams)
433 #define kchannelCtrlCmdGetKmb(pKernelChannel, pGetKmbParams) kchannelCtrlCmdGetKmb_DISPATCH(pKernelChannel, pGetKmbParams)
434 #define kchannelCtrlCmdGetKmb_HAL(pKernelChannel, pGetKmbParams) kchannelCtrlCmdGetKmb_DISPATCH(pKernelChannel, pGetKmbParams)
435 #define kchannelCtrlRotateSecureChannelIv(pKernelChannel, pRotateIvParams) kchannelCtrlRotateSecureChannelIv_DISPATCH(pKernelChannel, pRotateIvParams)
436 #define kchannelCtrlRotateSecureChannelIv_HAL(pKernelChannel, pRotateIvParams) kchannelCtrlRotateSecureChannelIv_DISPATCH(pKernelChannel, pRotateIvParams)
437 #define kchannelSetEncryptionStatsBuffer(pGpu, pKernelChannel, pMemDesc, bSet) kchannelSetEncryptionStatsBuffer_DISPATCH(pGpu, pKernelChannel, pMemDesc, bSet)
438 #define kchannelSetEncryptionStatsBuffer_HAL(pGpu, pKernelChannel, pMemDesc, bSet) kchannelSetEncryptionStatsBuffer_DISPATCH(pGpu, pKernelChannel, pMemDesc, bSet)
439 #define kchannelCtrlGetTpcPartitionMode(pKernelChannel, pParams) kchannelCtrlGetTpcPartitionMode_DISPATCH(pKernelChannel, pParams)
440 #define kchannelCtrlSetTpcPartitionMode(pKernelChannel, pParams) kchannelCtrlSetTpcPartitionMode_DISPATCH(pKernelChannel, pParams)
441 #define kchannelCtrlGetMMUDebugMode(pKernelChannel, pParams) kchannelCtrlGetMMUDebugMode_DISPATCH(pKernelChannel, pParams)
442 #define kchannelCtrlProgramVidmemPromote(pKernelChannel, pParams) kchannelCtrlProgramVidmemPromote_DISPATCH(pKernelChannel, pParams)
443 #define kchannelRetrieveKmb(pGpu, pKernelChannel, rotateOperation, includeSecrets, keyMaterialBundle) kchannelRetrieveKmb_DISPATCH(pGpu, pKernelChannel, rotateOperation, includeSecrets, keyMaterialBundle)
444 #define kchannelRetrieveKmb_HAL(pGpu, pKernelChannel, rotateOperation, includeSecrets, keyMaterialBundle) kchannelRetrieveKmb_DISPATCH(pGpu, pKernelChannel, rotateOperation, includeSecrets, keyMaterialBundle)
445 #define kchannelSetKeyRotationNotifier(pGpu, pKernelChannel, bSet) kchannelSetKeyRotationNotifier_DISPATCH(pGpu, pKernelChannel, bSet)
446 #define kchannelSetKeyRotationNotifier_HAL(pGpu, pKernelChannel, bSet) kchannelSetKeyRotationNotifier_DISPATCH(pGpu, pKernelChannel, bSet)
447 #define kchannelShareCallback(pGpuResource, pInvokingClient, pParentRef, pSharePolicy) kchannelShareCallback_DISPATCH(pGpuResource, pInvokingClient, pParentRef, pSharePolicy)
448 #define kchannelGetOrAllocNotifShare(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare) kchannelGetOrAllocNotifShare_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare)
449 #define kchannelMapTo(pResource, pParams) kchannelMapTo_DISPATCH(pResource, pParams)
450 #define kchannelSetNotificationShare(pNotifier, pNotifShare) kchannelSetNotificationShare_DISPATCH(pNotifier, pNotifShare)
451 #define kchannelGetRefCount(pResource) kchannelGetRefCount_DISPATCH(pResource)
452 #define kchannelAddAdditionalDependants(pClient, pResource, pReference) kchannelAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
453 #define kchannelControl_Prologue(pResource, pCallContext, pParams) kchannelControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
454 #define kchannelGetRegBaseOffsetAndSize(pGpuResource, pGpu, pOffset, pSize) kchannelGetRegBaseOffsetAndSize_DISPATCH(pGpuResource, pGpu, pOffset, pSize)
455 #define kchannelInternalControlForward(pGpuResource, command, pParams, size) kchannelInternalControlForward_DISPATCH(pGpuResource, command, pParams, size)
456 #define kchannelUnmapFrom(pResource, pParams) kchannelUnmapFrom_DISPATCH(pResource, pParams)
457 #define kchannelControl_Epilogue(pResource, pCallContext, pParams) kchannelControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
458 #define kchannelGetInternalObjectHandle(pGpuResource) kchannelGetInternalObjectHandle_DISPATCH(pGpuResource)
459 #define kchannelControl(pGpuResource, pCallContext, pParams) kchannelControl_DISPATCH(pGpuResource, pCallContext, pParams)
460 #define kchannelGetMemoryMappingDescriptor(pRmResource, ppMemDesc) kchannelGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
461 #define kchannelControlFilter(pResource, pCallContext, pParams) kchannelControlFilter_DISPATCH(pResource, pCallContext, pParams)
462 #define kchannelUnregisterEvent(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent) kchannelUnregisterEvent_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent)
463 #define kchannelControlSerialization_Prologue(pResource, pCallContext, pParams) kchannelControlSerialization_Prologue_DISPATCH(pResource, pCallContext, pParams)
464 #define kchannelCanCopy(pResource) kchannelCanCopy_DISPATCH(pResource)
465 #define kchannelIsPartialUnmapSupported(pResource) kchannelIsPartialUnmapSupported_DISPATCH(pResource)
466 #define kchannelPreDestruct(pResource) kchannelPreDestruct_DISPATCH(pResource)
467 #define kchannelIsDuplicate(pResource, hMemory, pDuplicate) kchannelIsDuplicate_DISPATCH(pResource, hMemory, pDuplicate)
468 #define kchannelControlSerialization_Epilogue(pResource, pCallContext, pParams) kchannelControlSerialization_Epilogue_DISPATCH(pResource, pCallContext, pParams)
469 #define kchannelGetNotificationListPtr(pNotifier) kchannelGetNotificationListPtr_DISPATCH(pNotifier)
470 #define kchannelGetNotificationShare(pNotifier) kchannelGetNotificationShare_DISPATCH(pNotifier)
471 #define kchannelAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) kchannelAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
472 NV_STATUS kchannelNotifyRc_IMPL(struct KernelChannel *pKernelChannel);
473 
474 
475 #ifdef __nvoc_kernel_channel_h_disabled
476 static inline NV_STATUS kchannelNotifyRc(struct KernelChannel *pKernelChannel) {
477     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
478     return NV_ERR_NOT_SUPPORTED;
479 }
480 #else //__nvoc_kernel_channel_h_disabled
481 #define kchannelNotifyRc(pKernelChannel) kchannelNotifyRc_IMPL(pKernelChannel)
482 #endif //__nvoc_kernel_channel_h_disabled
483 
484 #define kchannelNotifyRc_HAL(pKernelChannel) kchannelNotifyRc(pKernelChannel)
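//
// The block above is the recurring shape for non-virtual KernelChannel methods
// in this header: callers use the unsuffixed (or _HAL) macro, which routes to
// the real *_IMPL function when the class is enabled, and to an asserting stub
// (NV_ERR_NOT_SUPPORTED here) when __nvoc_kernel_channel_h_disabled is defined.
// Illustrative call site (not generated code):
//
//     NV_STATUS status = kchannelNotifyRc_HAL(pKernelChannel);
//     if (status != NV_OK)
//     {
//         // handle the failed RC notification
//     }
//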
485 
486 NvBool kchannelIsSchedulable_IMPL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
487 
488 
489 #ifdef __nvoc_kernel_channel_h_disabled
490 static inline NvBool kchannelIsSchedulable(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
491     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
492     return NV_FALSE;
493 }
494 #else //__nvoc_kernel_channel_h_disabled
495 #define kchannelIsSchedulable(pGpu, pKernelChannel) kchannelIsSchedulable_IMPL(pGpu, pKernelChannel)
496 #endif //__nvoc_kernel_channel_h_disabled
497 
498 #define kchannelIsSchedulable_HAL(pGpu, pKernelChannel) kchannelIsSchedulable(pGpu, pKernelChannel)
499 
500 NV_STATUS kchannelAllocMem_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 Flags, NvU32 verifFlags);
501 
502 
503 #ifdef __nvoc_kernel_channel_h_disabled
504 static inline NV_STATUS kchannelAllocMem(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 Flags, NvU32 verifFlags) {
505     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
506     return NV_ERR_NOT_SUPPORTED;
507 }
508 #else //__nvoc_kernel_channel_h_disabled
509 #define kchannelAllocMem(pGpu, pKernelChannel, Flags, verifFlags) kchannelAllocMem_GM107(pGpu, pKernelChannel, Flags, verifFlags)
510 #endif //__nvoc_kernel_channel_h_disabled
511 
512 #define kchannelAllocMem_HAL(pGpu, pKernelChannel, Flags, verifFlags) kchannelAllocMem(pGpu, pKernelChannel, Flags, verifFlags)
513 
514 void kchannelDestroyMem_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
515 
516 
517 #ifdef __nvoc_kernel_channel_h_disabled
518 static inline void kchannelDestroyMem(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
519     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
520 }
521 #else //__nvoc_kernel_channel_h_disabled
522 #define kchannelDestroyMem(pGpu, pKernelChannel) kchannelDestroyMem_GM107(pGpu, pKernelChannel)
523 #endif //__nvoc_kernel_channel_h_disabled
524 
525 #define kchannelDestroyMem_HAL(pGpu, pKernelChannel) kchannelDestroyMem(pGpu, pKernelChannel)
526 
527 NV_STATUS kchannelGetChannelPhysicalState_KERNEL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NV208F_CTRL_FIFO_GET_CHANNEL_STATE_PARAMS *pChannelStateParams);
528 
529 
530 #ifdef __nvoc_kernel_channel_h_disabled
531 static inline NV_STATUS kchannelGetChannelPhysicalState(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NV208F_CTRL_FIFO_GET_CHANNEL_STATE_PARAMS *pChannelStateParams) {
532     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
533     return NV_ERR_NOT_SUPPORTED;
534 }
535 #else //__nvoc_kernel_channel_h_disabled
536 #define kchannelGetChannelPhysicalState(pGpu, pKernelChannel, pChannelStateParams) kchannelGetChannelPhysicalState_KERNEL(pGpu, pKernelChannel, pChannelStateParams)
537 #endif //__nvoc_kernel_channel_h_disabled
538 
539 #define kchannelGetChannelPhysicalState_HAL(pGpu, pKernelChannel, pChannelStateParams) kchannelGetChannelPhysicalState(pGpu, pKernelChannel, pChannelStateParams)
540 
541 static inline NvU32 kchannelEmbedRunlistID_13cd8d(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
542     NV_ASSERT_PRECOMP(0);
543     return 0;
544 }
545 
546 
547 #ifdef __nvoc_kernel_channel_h_disabled
548 static inline NvU32 kchannelEmbedRunlistID(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
549     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
550     return 0;
551 }
552 #else //__nvoc_kernel_channel_h_disabled
553 #define kchannelEmbedRunlistID(pGpu, pKernelChannel) kchannelEmbedRunlistID_13cd8d(pGpu, pKernelChannel)
554 #endif //__nvoc_kernel_channel_h_disabled
555 
556 #define kchannelEmbedRunlistID_HAL(pGpu, pKernelChannel) kchannelEmbedRunlistID(pGpu, pKernelChannel)
557 
558 NV_STATUS kchannelAllocHwID_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle hClient, NvU32 Flags, NvU32 verifFlags2, NvU32 ChID);
559 
560 
561 #ifdef __nvoc_kernel_channel_h_disabled
562 static inline NV_STATUS kchannelAllocHwID(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle hClient, NvU32 Flags, NvU32 verifFlags2, NvU32 ChID) {
563     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
564     return NV_ERR_NOT_SUPPORTED;
565 }
566 #else //__nvoc_kernel_channel_h_disabled
567 #define kchannelAllocHwID(pGpu, pKernelChannel, hClient, Flags, verifFlags2, ChID) kchannelAllocHwID_GM107(pGpu, pKernelChannel, hClient, Flags, verifFlags2, ChID)
568 #endif //__nvoc_kernel_channel_h_disabled
569 
570 #define kchannelAllocHwID_HAL(pGpu, pKernelChannel, hClient, Flags, verifFlags2, ChID) kchannelAllocHwID(pGpu, pKernelChannel, hClient, Flags, verifFlags2, ChID)
571 
572 NV_STATUS kchannelFreeHwID_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
573 
574 
575 #ifdef __nvoc_kernel_channel_h_disabled
576 static inline NV_STATUS kchannelFreeHwID(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
577     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
578     return NV_ERR_NOT_SUPPORTED;
579 }
580 #else //__nvoc_kernel_channel_h_disabled
581 #define kchannelFreeHwID(pGpu, pKernelChannel) kchannelFreeHwID_GM107(pGpu, pKernelChannel)
582 #endif //__nvoc_kernel_channel_h_disabled
583 
584 #define kchannelFreeHwID_HAL(pGpu, pKernelChannel) kchannelFreeHwID(pGpu, pKernelChannel)
585 
586 NV_STATUS kchannelGetUserdInfo_GM107(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvU64 *userBase, NvU64 *offset, NvU64 *length);
587 
588 
589 #ifdef __nvoc_kernel_channel_h_disabled
590 static inline NV_STATUS kchannelGetUserdInfo(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvU64 *userBase, NvU64 *offset, NvU64 *length) {
591     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
592     return NV_ERR_NOT_SUPPORTED;
593 }
594 #else //__nvoc_kernel_channel_h_disabled
595 #define kchannelGetUserdInfo(pGpu, arg0, userBase, offset, length) kchannelGetUserdInfo_GM107(pGpu, arg0, userBase, offset, length)
596 #endif //__nvoc_kernel_channel_h_disabled
597 
598 #define kchannelGetUserdInfo_HAL(pGpu, arg0, userBase, offset, length) kchannelGetUserdInfo(pGpu, arg0, userBase, offset, length)
599 
600 NV_STATUS kchannelGetUserdBar1MapOffset_GM107(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvU64 *bar1Offset, NvU32 *bar1MapSize);
601 
602 
603 #ifdef __nvoc_kernel_channel_h_disabled
604 static inline NV_STATUS kchannelGetUserdBar1MapOffset(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvU64 *bar1Offset, NvU32 *bar1MapSize) {
605     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
606     return NV_ERR_NOT_SUPPORTED;
607 }
608 #else //__nvoc_kernel_channel_h_disabled
609 #define kchannelGetUserdBar1MapOffset(pGpu, arg0, bar1Offset, bar1MapSize) kchannelGetUserdBar1MapOffset_GM107(pGpu, arg0, bar1Offset, bar1MapSize)
610 #endif //__nvoc_kernel_channel_h_disabled
611 
612 #define kchannelGetUserdBar1MapOffset_HAL(pGpu, arg0, bar1Offset, bar1MapSize) kchannelGetUserdBar1MapOffset(pGpu, arg0, bar1Offset, bar1MapSize)
613 
614 NV_STATUS kchannelCreateUserdMemDescBc_GV100(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle arg0, NvHandle *arg1, NvU64 *arg2);
615 
616 
617 #ifdef __nvoc_kernel_channel_h_disabled
618 static inline NV_STATUS kchannelCreateUserdMemDescBc(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle arg0, NvHandle *arg1, NvU64 *arg2) {
619     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
620     return NV_ERR_NOT_SUPPORTED;
621 }
622 #else //__nvoc_kernel_channel_h_disabled
623 #define kchannelCreateUserdMemDescBc(pGpu, pKernelChannel, arg0, arg1, arg2) kchannelCreateUserdMemDescBc_GV100(pGpu, pKernelChannel, arg0, arg1, arg2)
624 #endif //__nvoc_kernel_channel_h_disabled
625 
626 #define kchannelCreateUserdMemDescBc_HAL(pGpu, pKernelChannel, arg0, arg1, arg2) kchannelCreateUserdMemDescBc(pGpu, pKernelChannel, arg0, arg1, arg2)
627 
628 NV_STATUS kchannelCreateUserdMemDesc_GV100(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvHandle arg1, NvHandle arg2, NvU64 arg3, NvU64 *arg4, NvU32 *arg5);
629 
630 
631 #ifdef __nvoc_kernel_channel_h_disabled
632 static inline NV_STATUS kchannelCreateUserdMemDesc(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvHandle arg1, NvHandle arg2, NvU64 arg3, NvU64 *arg4, NvU32 *arg5) {
633     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
634     return NV_ERR_NOT_SUPPORTED;
635 }
636 #else //__nvoc_kernel_channel_h_disabled
637 #define kchannelCreateUserdMemDesc(pGpu, arg0, arg1, arg2, arg3, arg4, arg5) kchannelCreateUserdMemDesc_GV100(pGpu, arg0, arg1, arg2, arg3, arg4, arg5)
638 #endif //__nvoc_kernel_channel_h_disabled
639 
640 #define kchannelCreateUserdMemDesc_HAL(pGpu, arg0, arg1, arg2, arg3, arg4, arg5) kchannelCreateUserdMemDesc(pGpu, arg0, arg1, arg2, arg3, arg4, arg5)
641 
642 void kchannelDestroyUserdMemDesc_GV100(struct OBJGPU *pGpu, struct KernelChannel *arg0);
643 
644 
645 #ifdef __nvoc_kernel_channel_h_disabled
646 static inline void kchannelDestroyUserdMemDesc(struct OBJGPU *pGpu, struct KernelChannel *arg0) {
647     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
648 }
649 #else //__nvoc_kernel_channel_h_disabled
650 #define kchannelDestroyUserdMemDesc(pGpu, arg0) kchannelDestroyUserdMemDesc_GV100(pGpu, arg0)
651 #endif //__nvoc_kernel_channel_h_disabled
652 
653 #define kchannelDestroyUserdMemDesc_HAL(pGpu, arg0) kchannelDestroyUserdMemDesc(pGpu, arg0)
654 
655 NV_STATUS kchannelGetEngine_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 *engDesc);
656 
657 
658 #ifdef __nvoc_kernel_channel_h_disabled
659 static inline NV_STATUS kchannelGetEngine(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 *engDesc) {
660     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
661     return NV_ERR_NOT_SUPPORTED;
662 }
663 #else //__nvoc_kernel_channel_h_disabled
664 #define kchannelGetEngine(pGpu, pKernelChannel, engDesc) kchannelGetEngine_GM107(pGpu, pKernelChannel, engDesc)
665 #endif //__nvoc_kernel_channel_h_disabled
666 
667 #define kchannelGetEngine_HAL(pGpu, pKernelChannel, engDesc) kchannelGetEngine(pGpu, pKernelChannel, engDesc)
668 
669 static inline NV_STATUS kchannelFwdToInternalCtrl_56cd7a(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 internalCmd, RmCtrlParams *pRmCtrlParams) {
670     return NV_OK;
671 }
672 
673 
674 #ifdef __nvoc_kernel_channel_h_disabled
675 static inline NV_STATUS kchannelFwdToInternalCtrl(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 internalCmd, RmCtrlParams *pRmCtrlParams) {
676     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
677     return NV_ERR_NOT_SUPPORTED;
678 }
679 #else //__nvoc_kernel_channel_h_disabled
680 #define kchannelFwdToInternalCtrl(pGpu, pKernelChannel, internalCmd, pRmCtrlParams) kchannelFwdToInternalCtrl_56cd7a(pGpu, pKernelChannel, internalCmd, pRmCtrlParams)
681 #endif //__nvoc_kernel_channel_h_disabled
682 
683 #define kchannelFwdToInternalCtrl_HAL(pGpu, pKernelChannel, internalCmd, pRmCtrlParams) kchannelFwdToInternalCtrl(pGpu, pKernelChannel, internalCmd, pRmCtrlParams)
684 
685 static inline NV_STATUS kchannelAllocChannel_56cd7a(struct KernelChannel *pKernelChannel, NV_CHANNEL_ALLOC_PARAMS *pChannelGpfifoParams) {
686     return NV_OK;
687 }
688 
689 
690 #ifdef __nvoc_kernel_channel_h_disabled
691 static inline NV_STATUS kchannelAllocChannel(struct KernelChannel *pKernelChannel, NV_CHANNEL_ALLOC_PARAMS *pChannelGpfifoParams) {
692     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
693     return NV_ERR_NOT_SUPPORTED;
694 }
695 #else //__nvoc_kernel_channel_h_disabled
696 #define kchannelAllocChannel(pKernelChannel, pChannelGpfifoParams) kchannelAllocChannel_56cd7a(pKernelChannel, pChannelGpfifoParams)
697 #endif //__nvoc_kernel_channel_h_disabled
698 
699 #define kchannelAllocChannel_HAL(pKernelChannel, pChannelGpfifoParams) kchannelAllocChannel(pKernelChannel, pChannelGpfifoParams)
700 
701 static inline NvBool kchannelIsValid_cbe027(struct KernelChannel *pKernelChannel) {
702     return ((NvBool)(0 == 0));
703 }
704 
705 
706 #ifdef __nvoc_kernel_channel_h_disabled
707 static inline NvBool kchannelIsValid(struct KernelChannel *pKernelChannel) {
708     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
709     return NV_FALSE;
710 }
711 #else //__nvoc_kernel_channel_h_disabled
712 #define kchannelIsValid(pKernelChannel) kchannelIsValid_cbe027(pKernelChannel)
713 #endif //__nvoc_kernel_channel_h_disabled
714 
715 #define kchannelIsValid_HAL(pKernelChannel) kchannelIsValid(pKernelChannel)
716 
717 NV_STATUS kchannelGetClassEngineID_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle handle, NvU32 *classEngineID, NvU32 *classID, RM_ENGINE_TYPE *rmEngineID);
718 
719 
720 #ifdef __nvoc_kernel_channel_h_disabled
721 static inline NV_STATUS kchannelGetClassEngineID(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle handle, NvU32 *classEngineID, NvU32 *classID, RM_ENGINE_TYPE *rmEngineID) {
722     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
723     return NV_ERR_NOT_SUPPORTED;
724 }
725 #else //__nvoc_kernel_channel_h_disabled
726 #define kchannelGetClassEngineID(pGpu, pKernelChannel, handle, classEngineID, classID, rmEngineID) kchannelGetClassEngineID_GM107(pGpu, pKernelChannel, handle, classEngineID, classID, rmEngineID)
727 #endif //__nvoc_kernel_channel_h_disabled
728 
729 #define kchannelGetClassEngineID_HAL(pGpu, pKernelChannel, handle, classEngineID, classID, rmEngineID) kchannelGetClassEngineID(pGpu, pKernelChannel, handle, classEngineID, classID, rmEngineID)
730 
731 NV_STATUS kchannelEnableVirtualContext_GM107(struct KernelChannel *arg0);
732 
733 
734 #ifdef __nvoc_kernel_channel_h_disabled
735 static inline NV_STATUS kchannelEnableVirtualContext(struct KernelChannel *arg0) {
736     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
737     return NV_ERR_NOT_SUPPORTED;
738 }
739 #else //__nvoc_kernel_channel_h_disabled
740 #define kchannelEnableVirtualContext(arg0) kchannelEnableVirtualContext_GM107(arg0)
741 #endif //__nvoc_kernel_channel_h_disabled
742 
743 #define kchannelEnableVirtualContext_HAL(arg0) kchannelEnableVirtualContext(arg0)
744 
745 static inline NV_STATUS kchannelRotateSecureChannelIv_46f6a7(struct KernelChannel *pKernelChannel, ROTATE_IV_TYPE rotateOperation, NvU32 *encryptIv, NvU32 *decryptIv) {
746     return NV_ERR_NOT_SUPPORTED;
747 }
748 
749 
750 #ifdef __nvoc_kernel_channel_h_disabled
751 static inline NV_STATUS kchannelRotateSecureChannelIv(struct KernelChannel *pKernelChannel, ROTATE_IV_TYPE rotateOperation, NvU32 *encryptIv, NvU32 *decryptIv) {
752     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
753     return NV_ERR_NOT_SUPPORTED;
754 }
755 #else //__nvoc_kernel_channel_h_disabled
756 #define kchannelRotateSecureChannelIv(pKernelChannel, rotateOperation, encryptIv, decryptIv) kchannelRotateSecureChannelIv_46f6a7(pKernelChannel, rotateOperation, encryptIv, decryptIv)
757 #endif //__nvoc_kernel_channel_h_disabled
758 
759 #define kchannelRotateSecureChannelIv_HAL(pKernelChannel, rotateOperation, encryptIv, decryptIv) kchannelRotateSecureChannelIv(pKernelChannel, rotateOperation, encryptIv, decryptIv)
760 
761 NV_STATUS kchannelMap_IMPL(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping);
762 
763 static inline NV_STATUS kchannelMap_DISPATCH(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
764     return pKernelChannel->__kchannelMap__(pKernelChannel, pCallContext, pParams, pCpuMapping);
765 }
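//
// Illustrative note: each *_DISPATCH inline in this header forwards through the
// per-object function pointer installed in the NVOC vtable, so a call such as
//
//     status = kchannelMap(pKernelChannel, pCallContext, pParams, pCpuMapping);
//
// expands via the kchannelMap() macro above to
// pKernelChannel->__kchannelMap__(...), which normally lands in
// kchannelMap_IMPL (or whichever override NVOC wired in).
//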
766 
767 NV_STATUS kchannelUnmap_IMPL(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping);
768 
769 static inline NV_STATUS kchannelUnmap_DISPATCH(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
770     return pKernelChannel->__kchannelUnmap__(pKernelChannel, pCallContext, pCpuMapping);
771 }
772 
773 NV_STATUS kchannelGetMapAddrSpace_IMPL(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace);
774 
775 static inline NV_STATUS kchannelGetMapAddrSpace_DISPATCH(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
776     return pKernelChannel->__kchannelGetMapAddrSpace__(pKernelChannel, pCallContext, mapFlags, pAddrSpace);
777 }
778 
779 NV_STATUS kchannelGetMemInterMapParams_IMPL(struct KernelChannel *pKernelChannel, RMRES_MEM_INTER_MAP_PARAMS *pParams);
780 
781 static inline NV_STATUS kchannelGetMemInterMapParams_DISPATCH(struct KernelChannel *pKernelChannel, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
782     return pKernelChannel->__kchannelGetMemInterMapParams__(pKernelChannel, pParams);
783 }
784 
785 NV_STATUS kchannelCheckMemInterUnmap_IMPL(struct KernelChannel *pKernelChannel, NvBool bSubdeviceHandleProvided);
786 
787 static inline NV_STATUS kchannelCheckMemInterUnmap_DISPATCH(struct KernelChannel *pKernelChannel, NvBool bSubdeviceHandleProvided) {
788     return pKernelChannel->__kchannelCheckMemInterUnmap__(pKernelChannel, bSubdeviceHandleProvided);
789 }
790 
791 NV_STATUS kchannelCreateUserMemDesc_GM107(struct OBJGPU *pGpu, struct KernelChannel *arg0);
792 
793 NV_STATUS kchannelCreateUserMemDesc_GA10B(struct OBJGPU *pGpu, struct KernelChannel *arg0);
794 
795 static inline NV_STATUS kchannelCreateUserMemDesc_DISPATCH(struct OBJGPU *pGpu, struct KernelChannel *arg0) {
796     return arg0->__kchannelCreateUserMemDesc__(pGpu, arg0);
797 }
798 
799 NvBool kchannelIsUserdAddrSizeValid_GV100(struct KernelChannel *pKernelChannel, NvU32 userdAddrLo, NvU32 userdAddrHi);
800 
801 NvBool kchannelIsUserdAddrSizeValid_GA100(struct KernelChannel *pKernelChannel, NvU32 userdAddrLo, NvU32 userdAddrHi);
802 
803 NvBool kchannelIsUserdAddrSizeValid_GH100(struct KernelChannel *pKernelChannel, NvU32 userdAddrLo, NvU32 userdAddrHi);
804 
kchannelIsUserdAddrSizeValid_DISPATCH(struct KernelChannel * pKernelChannel,NvU32 userdAddrLo,NvU32 userdAddrHi)805 static inline NvBool kchannelIsUserdAddrSizeValid_DISPATCH(struct KernelChannel *pKernelChannel, NvU32 userdAddrLo, NvU32 userdAddrHi) {
806     return pKernelChannel->__kchannelIsUserdAddrSizeValid__(pKernelChannel, userdAddrLo, userdAddrHi);
807 }
808 
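/*!
 * Per-channel RM control handlers. Each NV*06F control command gets an _IMPL
 * entry point plus a _DISPATCH inline that routes through the object's
 * function pointer, mirroring the pattern above.
 */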
NV_STATUS kchannelCtrlCmdResetIsolatedChannel_IMPL(struct KernelChannel *pKernelChannel, NV506F_CTRL_CMD_RESET_ISOLATED_CHANNEL_PARAMS *pResetParams);

static inline NV_STATUS kchannelCtrlCmdResetIsolatedChannel_DISPATCH(struct KernelChannel *pKernelChannel, NV506F_CTRL_CMD_RESET_ISOLATED_CHANNEL_PARAMS *pResetParams) {
    return pKernelChannel->__kchannelCtrlCmdResetIsolatedChannel__(pKernelChannel, pResetParams);
}

NV_STATUS kchannelCtrlCmdInternalResetIsolatedChannel_IMPL(struct KernelChannel *pKernelChannel, NV506F_CTRL_CMD_INTERNAL_RESET_ISOLATED_CHANNEL_PARAMS *pResetParams);

static inline NV_STATUS kchannelCtrlCmdInternalResetIsolatedChannel_DISPATCH(struct KernelChannel *pKernelChannel, NV506F_CTRL_CMD_INTERNAL_RESET_ISOLATED_CHANNEL_PARAMS *pResetParams) {
    return pKernelChannel->__kchannelCtrlCmdInternalResetIsolatedChannel__(pKernelChannel, pResetParams);
}

NV_STATUS kchannelCtrlCmdGetClassEngineid_IMPL(struct KernelChannel *pKernelChannel, NV906F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams);

static inline NV_STATUS kchannelCtrlCmdGetClassEngineid_DISPATCH(struct KernelChannel *pKernelChannel, NV906F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdGetClassEngineid__(pKernelChannel, pParams);
}

NV_STATUS kchannelCtrlCmdResetChannel_IMPL(struct KernelChannel *pKernelChannel, NV906F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams);

static inline NV_STATUS kchannelCtrlCmdResetChannel_DISPATCH(struct KernelChannel *pKernelChannel, NV906F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
    return pKernelChannel->__kchannelCtrlCmdResetChannel__(pKernelChannel, pResetChannelParams);
}

NV_STATUS kchannelCtrlCmdGetDeferRCState_IMPL(struct KernelChannel *pKernelChannel, NV906F_CTRL_CMD_GET_DEFER_RC_STATE_PARAMS *pStateParams);

static inline NV_STATUS kchannelCtrlCmdGetDeferRCState_DISPATCH(struct KernelChannel *pKernelChannel, NV906F_CTRL_CMD_GET_DEFER_RC_STATE_PARAMS *pStateParams) {
    return pKernelChannel->__kchannelCtrlCmdGetDeferRCState__(pKernelChannel, pStateParams);
}

NV_STATUS kchannelCtrlCmdGetMmuFaultInfo_IMPL(struct KernelChannel *pKernelChannel, NV906F_CTRL_GET_MMU_FAULT_INFO_PARAMS *pFaultInfoParams);

static inline NV_STATUS kchannelCtrlCmdGetMmuFaultInfo_DISPATCH(struct KernelChannel *pKernelChannel, NV906F_CTRL_GET_MMU_FAULT_INFO_PARAMS *pFaultInfoParams) {
    return pKernelChannel->__kchannelCtrlCmdGetMmuFaultInfo__(pKernelChannel, pFaultInfoParams);
}

NV_STATUS kchannelCtrlCmdEventSetNotification_IMPL(struct KernelChannel *pKernelChannel, NV906F_CTRL_EVENT_SET_NOTIFICATION_PARAMS *pSetEventParams);

static inline NV_STATUS kchannelCtrlCmdEventSetNotification_DISPATCH(struct KernelChannel *pKernelChannel, NV906F_CTRL_EVENT_SET_NOTIFICATION_PARAMS *pSetEventParams) {
    return pKernelChannel->__kchannelCtrlCmdEventSetNotification__(pKernelChannel, pSetEventParams);
}
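/*!
 * Class-specific aliases (A06F, A16F, A26F, B06F, C06F, C36F). The inlines
 * with hashed suffixes such as _6a9a13, _ef73a1 and _6546a6 are generated
 * thunks that simply forward the per-class control call to the shared
 * implementation (e.g. kchannelCtrlCmdGetClassEngineid), so one body serves
 * every GPFIFO class.
 */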
static inline NV_STATUS kchannelCtrlCmdGetClassEngineidA06F_6a9a13(struct KernelChannel *pKernelChannel, NVA06F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
    return kchannelCtrlCmdGetClassEngineid(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdGetClassEngineidA06F_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdGetClassEngineidA06F__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdResetChannelA06F_ef73a1(struct KernelChannel *pKernelChannel, NVA06F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
    return kchannelCtrlCmdResetChannel(pKernelChannel, pResetChannelParams);
}

static inline NV_STATUS kchannelCtrlCmdResetChannelA06F_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
    return pKernelChannel->__kchannelCtrlCmdResetChannelA06F__(pKernelChannel, pResetChannelParams);
}

NV_STATUS kchannelCtrlCmdGpFifoSchedule_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams);

static inline NV_STATUS kchannelCtrlCmdGpFifoSchedule_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
    return pKernelChannel->__kchannelCtrlCmdGpFifoSchedule__(pKernelChannel, pSchedParams);
}

NV_STATUS kchannelCtrlCmdBind_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_BIND_PARAMS *pParams);

static inline NV_STATUS kchannelCtrlCmdBind_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_BIND_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdBind__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdGetMmuFaultInfoA06F_a7f9ac(struct KernelChannel *pKernelChannel, NVA06F_CTRL_GET_MMU_FAULT_INFO_PARAMS *pFaultInfoParams) {
    return kchannelCtrlCmdGetMmuFaultInfo(pKernelChannel, pFaultInfoParams);
}

static inline NV_STATUS kchannelCtrlCmdGetMmuFaultInfoA06F_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_GET_MMU_FAULT_INFO_PARAMS *pFaultInfoParams) {
    return pKernelChannel->__kchannelCtrlCmdGetMmuFaultInfoA06F__(pKernelChannel, pFaultInfoParams);
}

NV_STATUS kchannelCtrlCmdSetErrorNotifier_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_SET_ERROR_NOTIFIER_PARAMS *pSetErrorNotifierParams);

static inline NV_STATUS kchannelCtrlCmdSetErrorNotifier_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_SET_ERROR_NOTIFIER_PARAMS *pSetErrorNotifierParams) {
    return pKernelChannel->__kchannelCtrlCmdSetErrorNotifier__(pKernelChannel, pSetErrorNotifierParams);
}

NV_STATUS kchannelCtrlCmdSetInterleaveLevel_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_INTERLEAVE_LEVEL_PARAMS *pParams);

static inline NV_STATUS kchannelCtrlCmdSetInterleaveLevel_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_INTERLEAVE_LEVEL_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdSetInterleaveLevel__(pKernelChannel, pParams);
}

NV_STATUS kchannelCtrlCmdRestartRunlist_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_RESTART_RUNLIST_PARAMS *pParams);

static inline NV_STATUS kchannelCtrlCmdRestartRunlist_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_RESTART_RUNLIST_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdRestartRunlist__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdGetClassEngineidA16F_6a9a13(struct KernelChannel *pKernelChannel, NVA16F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
    return kchannelCtrlCmdGetClassEngineid(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdGetClassEngineidA16F_DISPATCH(struct KernelChannel *pKernelChannel, NVA16F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdGetClassEngineidA16F__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdResetChannelA16F_ef73a1(struct KernelChannel *pKernelChannel, NVA16F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
    return kchannelCtrlCmdResetChannel(pKernelChannel, pResetChannelParams);
}

static inline NV_STATUS kchannelCtrlCmdResetChannelA16F_DISPATCH(struct KernelChannel *pKernelChannel, NVA16F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
    return pKernelChannel->__kchannelCtrlCmdResetChannelA16F__(pKernelChannel, pResetChannelParams);
}

static inline NV_STATUS kchannelCtrlCmdGpFifoScheduleA16F_6546a6(struct KernelChannel *pKernelChannel, NVA16F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
    return kchannelCtrlCmdGpFifoSchedule(pKernelChannel, pSchedParams);
}

static inline NV_STATUS kchannelCtrlCmdGpFifoScheduleA16F_DISPATCH(struct KernelChannel *pKernelChannel, NVA16F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
    return pKernelChannel->__kchannelCtrlCmdGpFifoScheduleA16F__(pKernelChannel, pSchedParams);
}

static inline NV_STATUS kchannelCtrlCmdGetClassEngineidA26F_6a9a13(struct KernelChannel *pKernelChannel, NVA26F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
    return kchannelCtrlCmdGetClassEngineid(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdGetClassEngineidA26F_DISPATCH(struct KernelChannel *pKernelChannel, NVA26F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdGetClassEngineidA26F__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdResetChannelA26F_ef73a1(struct KernelChannel *pKernelChannel, NVA26F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
    return kchannelCtrlCmdResetChannel(pKernelChannel, pResetChannelParams);
}

static inline NV_STATUS kchannelCtrlCmdResetChannelA26F_DISPATCH(struct KernelChannel *pKernelChannel, NVA26F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
    return pKernelChannel->__kchannelCtrlCmdResetChannelA26F__(pKernelChannel, pResetChannelParams);
}

static inline NV_STATUS kchannelFCtrlCmdGpFifoScheduleA26F_6546a6(struct KernelChannel *pKernelChannel, NVA26F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
    return kchannelCtrlCmdGpFifoSchedule(pKernelChannel, pSchedParams);
}

static inline NV_STATUS kchannelFCtrlCmdGpFifoScheduleA26F_DISPATCH(struct KernelChannel *pKernelChannel, NVA26F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
    return pKernelChannel->__kchannelFCtrlCmdGpFifoScheduleA26F__(pKernelChannel, pSchedParams);
}

static inline NV_STATUS kchannelCtrlCmdGetClassEngineidB06F_6a9a13(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
    return kchannelCtrlCmdGetClassEngineid(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdGetClassEngineidB06F_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdGetClassEngineidB06F__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdResetChannelB06F_ef73a1(struct KernelChannel *pKernelChannel, NVB06F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
    return kchannelCtrlCmdResetChannel(pKernelChannel, pResetChannelParams);
}

static inline NV_STATUS kchannelCtrlCmdResetChannelB06F_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
    return pKernelChannel->__kchannelCtrlCmdResetChannelB06F__(pKernelChannel, pResetChannelParams);
}

static inline NV_STATUS kchannelCtrlCmdGpFifoScheduleB06F_6546a6(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
    return kchannelCtrlCmdGpFifoSchedule(pKernelChannel, pSchedParams);
}

static inline NV_STATUS kchannelCtrlCmdGpFifoScheduleB06F_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
    return pKernelChannel->__kchannelCtrlCmdGpFifoScheduleB06F__(pKernelChannel, pSchedParams);
}

static inline NV_STATUS kchannelCtrlCmdBindB06F_2c1c21(struct KernelChannel *pKernelChannel, NVB06F_CTRL_BIND_PARAMS *pParams) {
    return kchannelCtrlCmdBind(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdBindB06F_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_BIND_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdBindB06F__(pKernelChannel, pParams);
}

NV_STATUS kchannelCtrlCmdGetEngineCtxSize_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_SIZE_PARAMS *pCtxSizeParams);

static inline NV_STATUS kchannelCtrlCmdGetEngineCtxSize_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_SIZE_PARAMS *pCtxSizeParams) {
    return pKernelChannel->__kchannelCtrlCmdGetEngineCtxSize__(pKernelChannel, pCtxSizeParams);
}

NV_STATUS kchannelCtrlCmdGetEngineCtxData_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams);

static inline NV_STATUS kchannelCtrlCmdGetEngineCtxData_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams) {
    return pKernelChannel->__kchannelCtrlCmdGetEngineCtxData__(pKernelChannel, pCtxBuffParams);
}

NV_STATUS kchannelCtrlCmdMigrateEngineCtxData_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_MIGRATE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams);

static inline NV_STATUS kchannelCtrlCmdMigrateEngineCtxData_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_MIGRATE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams) {
    return pKernelChannel->__kchannelCtrlCmdMigrateEngineCtxData__(pKernelChannel, pCtxBuffParams);
}

NV_STATUS kchannelCtrlCmdGetEngineCtxState_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_STATE_PARAMS *pCtxStateParams);

static inline NV_STATUS kchannelCtrlCmdGetEngineCtxState_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_STATE_PARAMS *pCtxStateParams) {
    return pKernelChannel->__kchannelCtrlCmdGetEngineCtxState__(pKernelChannel, pCtxStateParams);
}

NV_STATUS kchannelCtrlCmdGetChannelHwState_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_CHANNEL_HW_STATE_PARAMS *pParams);

static inline NV_STATUS kchannelCtrlCmdGetChannelHwState_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_CHANNEL_HW_STATE_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdGetChannelHwState__(pKernelChannel, pParams);
}

NV_STATUS kchannelCtrlCmdSetChannelHwState_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_SET_CHANNEL_HW_STATE_PARAMS *pParams);

static inline NV_STATUS kchannelCtrlCmdSetChannelHwState_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_SET_CHANNEL_HW_STATE_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdSetChannelHwState__(pKernelChannel, pParams);
}

NV_STATUS kchannelCtrlCmdSaveEngineCtxData_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_SAVE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams);

static inline NV_STATUS kchannelCtrlCmdSaveEngineCtxData_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_SAVE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams) {
    return pKernelChannel->__kchannelCtrlCmdSaveEngineCtxData__(pKernelChannel, pCtxBuffParams);
}

NV_STATUS kchannelCtrlCmdRestoreEngineCtxData_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_RESTORE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams);

static inline NV_STATUS kchannelCtrlCmdRestoreEngineCtxData_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_RESTORE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams) {
    return pKernelChannel->__kchannelCtrlCmdRestoreEngineCtxData__(pKernelChannel, pCtxBuffParams);
}

static inline NV_STATUS kchannelCtrlCmdGetClassEngineidC06F_6a9a13(struct KernelChannel *pKernelChannel, NVC06F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
    return kchannelCtrlCmdGetClassEngineid(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdGetClassEngineidC06F_DISPATCH(struct KernelChannel *pKernelChannel, NVC06F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdGetClassEngineidC06F__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdResetChannelC06F_ef73a1(struct KernelChannel *pKernelChannel, NVC06F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
    return kchannelCtrlCmdResetChannel(pKernelChannel, pResetChannelParams);
}

static inline NV_STATUS kchannelCtrlCmdResetChannelC06F_DISPATCH(struct KernelChannel *pKernelChannel, NVC06F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
    return pKernelChannel->__kchannelCtrlCmdResetChannelC06F__(pKernelChannel, pResetChannelParams);
}

static inline NV_STATUS kchannelCtrlCmdGpFifoScheduleC06F_6546a6(struct KernelChannel *pKernelChannel, NVC06F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
    return kchannelCtrlCmdGpFifoSchedule(pKernelChannel, pSchedParams);
}

static inline NV_STATUS kchannelCtrlCmdGpFifoScheduleC06F_DISPATCH(struct KernelChannel *pKernelChannel, NVC06F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
    return pKernelChannel->__kchannelCtrlCmdGpFifoScheduleC06F__(pKernelChannel, pSchedParams);
}

static inline NV_STATUS kchannelCtrlCmdBindC06F_2c1c21(struct KernelChannel *pKernelChannel, NVC06F_CTRL_BIND_PARAMS *pParams) {
    return kchannelCtrlCmdBind(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdBindC06F_DISPATCH(struct KernelChannel *pKernelChannel, NVC06F_CTRL_BIND_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdBindC06F__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdGetClassEngineidC36F_6a9a13(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
    return kchannelCtrlCmdGetClassEngineid(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdGetClassEngineidC36F_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdGetClassEngineidC36F__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdResetChannelC36F_ef73a1(struct KernelChannel *pKernelChannel, NVC36F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
    return kchannelCtrlCmdResetChannel(pKernelChannel, pResetChannelParams);
}

static inline NV_STATUS kchannelCtrlCmdResetChannelC36F_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
    return pKernelChannel->__kchannelCtrlCmdResetChannelC36F__(pKernelChannel, pResetChannelParams);
}

static inline NV_STATUS kchannelCtrlCmdGpFifoScheduleC36F_6546a6(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
    return kchannelCtrlCmdGpFifoSchedule(pKernelChannel, pSchedParams);
}

static inline NV_STATUS kchannelCtrlCmdGpFifoScheduleC36F_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
    return pKernelChannel->__kchannelCtrlCmdGpFifoScheduleC36F__(pKernelChannel, pSchedParams);
}

static inline NV_STATUS kchannelCtrlCmdBindC36F_2c1c21(struct KernelChannel *pKernelChannel, NVC36F_CTRL_BIND_PARAMS *pParams) {
    return kchannelCtrlCmdBind(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdBindC36F_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_BIND_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdBindC36F__(pKernelChannel, pParams);
}

NV_STATUS kchannelCtrlCmdGpfifoGetWorkSubmitToken_IMPL(struct KernelChannel *pKernelChannel, NVC36F_CTRL_CMD_GPFIFO_GET_WORK_SUBMIT_TOKEN_PARAMS *pTokenParams);

static inline NV_STATUS kchannelCtrlCmdGpfifoGetWorkSubmitToken_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_CMD_GPFIFO_GET_WORK_SUBMIT_TOKEN_PARAMS *pTokenParams) {
    return pKernelChannel->__kchannelCtrlCmdGpfifoGetWorkSubmitToken__(pKernelChannel, pTokenParams);
}

NV_STATUS kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer_IMPL(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GPFIFO_UPDATE_FAULT_METHOD_BUFFER_PARAMS *pFaultMthdBufferParams);

static inline NV_STATUS kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GPFIFO_UPDATE_FAULT_METHOD_BUFFER_PARAMS *pFaultMthdBufferParams) {
    return pKernelChannel->__kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer__(pKernelChannel, pFaultMthdBufferParams);
}

NV_STATUS kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex_IMPL(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GPFIFO_SET_WORK_SUBMIT_TOKEN_NOTIF_INDEX_PARAMS *pParams);

static inline NV_STATUS kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GPFIFO_SET_WORK_SUBMIT_TOKEN_NOTIF_INDEX_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex__(pKernelChannel, pParams);
}

NV_STATUS kchannelCtrlCmdStopChannel_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_STOP_CHANNEL_PARAMS *pStopChannelParams);

static inline NV_STATUS kchannelCtrlCmdStopChannel_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_STOP_CHANNEL_PARAMS *pStopChannelParams) {
    return pKernelChannel->__kchannelCtrlCmdStopChannel__(pKernelChannel, pStopChannelParams);
}
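/*!
 * Confidential Compute controls (NVC56F key material bundle and secure
 * channel IV rotation) and NV0090 controls forwarded to the channel's
 * KernelGraphicsContext. The _46f6a7 variants are stubs returning
 * NV_ERR_NOT_SUPPORTED, the _56cd7a variants are no-ops returning NV_OK,
 * and the _KERNEL variants are the real CPU-RM implementations; which one
 * is bound presumably depends on the build/runtime configuration selected
 * by NVOC.
 */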
static inline NV_STATUS kchannelCtrlCmdGetKmb_46f6a7(struct KernelChannel *pKernelChannel, NVC56F_CTRL_CMD_GET_KMB_PARAMS *pGetKmbParams) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kchannelCtrlCmdGetKmb_KERNEL(struct KernelChannel *pKernelChannel, NVC56F_CTRL_CMD_GET_KMB_PARAMS *pGetKmbParams);

static inline NV_STATUS kchannelCtrlCmdGetKmb_DISPATCH(struct KernelChannel *pKernelChannel, NVC56F_CTRL_CMD_GET_KMB_PARAMS *pGetKmbParams) {
    return pKernelChannel->__kchannelCtrlCmdGetKmb__(pKernelChannel, pGetKmbParams);
}

static inline NV_STATUS kchannelCtrlRotateSecureChannelIv_46f6a7(struct KernelChannel *pKernelChannel, NVC56F_CTRL_ROTATE_SECURE_CHANNEL_IV_PARAMS *pRotateIvParams) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kchannelCtrlRotateSecureChannelIv_KERNEL(struct KernelChannel *pKernelChannel, NVC56F_CTRL_ROTATE_SECURE_CHANNEL_IV_PARAMS *pRotateIvParams);

static inline NV_STATUS kchannelCtrlRotateSecureChannelIv_DISPATCH(struct KernelChannel *pKernelChannel, NVC56F_CTRL_ROTATE_SECURE_CHANNEL_IV_PARAMS *pRotateIvParams) {
    return pKernelChannel->__kchannelCtrlRotateSecureChannelIv__(pKernelChannel, pRotateIvParams);
}

NV_STATUS kchannelSetEncryptionStatsBuffer_KERNEL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, MEMORY_DESCRIPTOR *pMemDesc, NvBool bSet);

static inline NV_STATUS kchannelSetEncryptionStatsBuffer_56cd7a(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, MEMORY_DESCRIPTOR *pMemDesc, NvBool bSet) {
    return NV_OK;
}

static inline NV_STATUS kchannelSetEncryptionStatsBuffer_DISPATCH(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, MEMORY_DESCRIPTOR *pMemDesc, NvBool bSet) {
    return pKernelChannel->__kchannelSetEncryptionStatsBuffer__(pGpu, pKernelChannel, pMemDesc, bSet);
}

static inline NV_STATUS kchannelCtrlGetTpcPartitionMode_a094e1(struct KernelChannel *pKernelChannel, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *pParams) {
    return kgrctxCtrlHandle(resservGetTlsCallContext(), pKernelChannel->hKernelGraphicsContext);
}

static inline NV_STATUS kchannelCtrlGetTpcPartitionMode_DISPATCH(struct KernelChannel *pKernelChannel, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlGetTpcPartitionMode__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlSetTpcPartitionMode_a094e1(struct KernelChannel *pKernelChannel, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *pParams) {
    return kgrctxCtrlHandle(resservGetTlsCallContext(), pKernelChannel->hKernelGraphicsContext);
}

static inline NV_STATUS kchannelCtrlSetTpcPartitionMode_DISPATCH(struct KernelChannel *pKernelChannel, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlSetTpcPartitionMode__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlGetMMUDebugMode_a094e1(struct KernelChannel *pKernelChannel, NV0090_CTRL_GET_MMU_DEBUG_MODE_PARAMS *pParams) {
    return kgrctxCtrlHandle(resservGetTlsCallContext(), pKernelChannel->hKernelGraphicsContext);
}

static inline NV_STATUS kchannelCtrlGetMMUDebugMode_DISPATCH(struct KernelChannel *pKernelChannel, NV0090_CTRL_GET_MMU_DEBUG_MODE_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlGetMMUDebugMode__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlProgramVidmemPromote_a094e1(struct KernelChannel *pKernelChannel, NV0090_CTRL_PROGRAM_VIDMEM_PROMOTE_PARAMS *pParams) {
    return kgrctxCtrlHandle(resservGetTlsCallContext(), pKernelChannel->hKernelGraphicsContext);
}

static inline NV_STATUS kchannelCtrlProgramVidmemPromote_DISPATCH(struct KernelChannel *pKernelChannel, NV0090_CTRL_PROGRAM_VIDMEM_PROMOTE_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlProgramVidmemPromote__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelRetrieveKmb_56cd7a(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, ROTATE_IV_TYPE rotateOperation, NvBool includeSecrets, CC_KMB *keyMaterialBundle) {
    return NV_OK;
}

NV_STATUS kchannelRetrieveKmb_KERNEL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, ROTATE_IV_TYPE rotateOperation, NvBool includeSecrets, CC_KMB *keyMaterialBundle);

static inline NV_STATUS kchannelRetrieveKmb_DISPATCH(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, ROTATE_IV_TYPE rotateOperation, NvBool includeSecrets, CC_KMB *keyMaterialBundle) {
    return pKernelChannel->__kchannelRetrieveKmb__(pGpu, pKernelChannel, rotateOperation, includeSecrets, keyMaterialBundle);
}

NV_STATUS kchannelSetKeyRotationNotifier_KERNEL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bSet);

static inline NV_STATUS kchannelSetKeyRotationNotifier_56cd7a(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bSet) {
    return NV_OK;
}

static inline NV_STATUS kchannelSetKeyRotationNotifier_DISPATCH(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bSet) {
    return pKernelChannel->__kchannelSetKeyRotationNotifier__(pGpu, pKernelChannel, bSet);
}
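/*!
 * Boilerplate dispatchers for the inherited RsResource / GpuResource /
 * RmResource / Notifier interfaces. These are pure forwarding thunks with no
 * channel-specific logic of their own.
 */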
static inline NvBool kchannelShareCallback_DISPATCH(struct KernelChannel *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
    return pGpuResource->__kchannelShareCallback__(pGpuResource, pInvokingClient, pParentRef, pSharePolicy);
}

static inline NV_STATUS kchannelGetOrAllocNotifShare_DISPATCH(struct KernelChannel *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, struct NotifShare **ppNotifShare) {
    return pNotifier->__kchannelGetOrAllocNotifShare__(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare);
}

static inline NV_STATUS kchannelMapTo_DISPATCH(struct KernelChannel *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
    return pResource->__kchannelMapTo__(pResource, pParams);
}

static inline void kchannelSetNotificationShare_DISPATCH(struct KernelChannel *pNotifier, struct NotifShare *pNotifShare) {
    pNotifier->__kchannelSetNotificationShare__(pNotifier, pNotifShare);
}

static inline NvU32 kchannelGetRefCount_DISPATCH(struct KernelChannel *pResource) {
    return pResource->__kchannelGetRefCount__(pResource);
}

static inline void kchannelAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct KernelChannel *pResource, RsResourceRef *pReference) {
    pResource->__kchannelAddAdditionalDependants__(pClient, pResource, pReference);
}

static inline NV_STATUS kchannelControl_Prologue_DISPATCH(struct KernelChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pResource->__kchannelControl_Prologue__(pResource, pCallContext, pParams);
}

static inline NV_STATUS kchannelGetRegBaseOffsetAndSize_DISPATCH(struct KernelChannel *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
    return pGpuResource->__kchannelGetRegBaseOffsetAndSize__(pGpuResource, pGpu, pOffset, pSize);
}

static inline NV_STATUS kchannelInternalControlForward_DISPATCH(struct KernelChannel *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
    return pGpuResource->__kchannelInternalControlForward__(pGpuResource, command, pParams, size);
}

static inline NV_STATUS kchannelUnmapFrom_DISPATCH(struct KernelChannel *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
    return pResource->__kchannelUnmapFrom__(pResource, pParams);
}

static inline void kchannelControl_Epilogue_DISPATCH(struct KernelChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    pResource->__kchannelControl_Epilogue__(pResource, pCallContext, pParams);
}

static inline NvHandle kchannelGetInternalObjectHandle_DISPATCH(struct KernelChannel *pGpuResource) {
    return pGpuResource->__kchannelGetInternalObjectHandle__(pGpuResource);
}

static inline NV_STATUS kchannelControl_DISPATCH(struct KernelChannel *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pGpuResource->__kchannelControl__(pGpuResource, pCallContext, pParams);
}

static inline NV_STATUS kchannelGetMemoryMappingDescriptor_DISPATCH(struct KernelChannel *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
    return pRmResource->__kchannelGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}

static inline NV_STATUS kchannelControlFilter_DISPATCH(struct KernelChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pResource->__kchannelControlFilter__(pResource, pCallContext, pParams);
}

static inline NV_STATUS kchannelUnregisterEvent_DISPATCH(struct KernelChannel *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, NvHandle hEventClient, NvHandle hEvent) {
    return pNotifier->__kchannelUnregisterEvent__(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent);
}

static inline NV_STATUS kchannelControlSerialization_Prologue_DISPATCH(struct KernelChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pResource->__kchannelControlSerialization_Prologue__(pResource, pCallContext, pParams);
}

static inline NvBool kchannelCanCopy_DISPATCH(struct KernelChannel *pResource) {
    return pResource->__kchannelCanCopy__(pResource);
}

static inline NvBool kchannelIsPartialUnmapSupported_DISPATCH(struct KernelChannel *pResource) {
    return pResource->__kchannelIsPartialUnmapSupported__(pResource);
}

static inline void kchannelPreDestruct_DISPATCH(struct KernelChannel *pResource) {
    pResource->__kchannelPreDestruct__(pResource);
}

static inline NV_STATUS kchannelIsDuplicate_DISPATCH(struct KernelChannel *pResource, NvHandle hMemory, NvBool *pDuplicate) {
    return pResource->__kchannelIsDuplicate__(pResource, hMemory, pDuplicate);
}

static inline void kchannelControlSerialization_Epilogue_DISPATCH(struct KernelChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    pResource->__kchannelControlSerialization_Epilogue__(pResource, pCallContext, pParams);
}

static inline PEVENTNOTIFICATION *kchannelGetNotificationListPtr_DISPATCH(struct KernelChannel *pNotifier) {
    return pNotifier->__kchannelGetNotificationListPtr__(pNotifier);
}

static inline struct NotifShare *kchannelGetNotificationShare_DISPATCH(struct KernelChannel *pNotifier) {
    return pNotifier->__kchannelGetNotificationShare__(pNotifier);
}

static inline NvBool kchannelAccessCallback_DISPATCH(struct KernelChannel *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
    return pResource->__kchannelAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
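/*!
 * Simple inline accessors for KernelChannel fields. Note that the generated
 * code spells out macro expansions literally: ((void *)0) is NULL, and
 * 4294967295U (the maximum NvU32 value) is used as the "no channel"
 * sentinel returned by kchannelGetDebugTag() when the pointer is NULL.
 */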
static inline NvU32 kchannelGetDebugTag(const struct KernelChannel *pKernelChannel) {
    if (pKernelChannel == ((void *)0))
        return 4294967295U;
    return pKernelChannel->ChID;
}

static inline NvBool kchannelIsCtxBufferAllocSkipped(struct KernelChannel *pKernelChannel) {
    return pKernelChannel->bSkipCtxBufferAlloc;
}

static inline NvU32 kchannelGetSubctxId(struct KernelChannel *pKernelChannel) {
    return pKernelChannel->subctxId;
}

static inline NvU32 kchannelGetCid(struct KernelChannel *pKernelChannel) {
    return pKernelChannel->cid;
}

static inline struct MIG_INSTANCE_REF *kchannelGetMIGReference(struct KernelChannel *pKernelChannel) {
    return &pKernelChannel->partitionRef;
}

static inline NvU32 kchannelGetRunqueue(struct KernelChannel *pKernelChannel) {
    return pKernelChannel->runqueue;
}

static inline NvU32 kchannelGetRunlistId(struct KernelChannel *pKernelChannel) {
    return pKernelChannel->runlistId;
}

static inline void kchannelSetRunlistId(struct KernelChannel *pKernelChannel, NvU32 runlistId) {
    pKernelChannel->runlistId = runlistId;
}

static inline RM_ENGINE_TYPE kchannelGetEngineType(struct KernelChannel *pKernelChannel) {
    return pKernelChannel->engineType;
}

NV_STATUS kchannelConstruct_IMPL(struct KernelChannel *arg_pKernelChannel, CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);

#define __nvoc_kchannelConstruct(arg_pKernelChannel, arg_pCallContext, arg_pParams) kchannelConstruct_IMPL(arg_pKernelChannel, arg_pCallContext, arg_pParams)
void kchannelDestruct_IMPL(struct KernelChannel *pResource);

#define __nvoc_kchannelDestruct(pResource) kchannelDestruct_IMPL(pResource)
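/*!
 * The remaining helpers are compiled in one of two forms: when
 * __nvoc_kernel_channel_h_disabled is defined, each wrapper becomes a stub
 * that asserts and fails (or returns NV_FALSE for the predicate helpers);
 * otherwise it is a plain macro that expands to the _IMPL symbol. A caller
 * therefore writes the unsuffixed name and never references _IMPL directly,
 * e.g. (hypothetical, assuming a constructed pKernelChannel and pObject):
 *
 *     NV_STATUS status = kchannelRegisterChild(pKernelChannel, pObject);
 *     if (status != NV_OK)
 *         return status;
 */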
1347 NV_STATUS kchannelRegisterChild_IMPL(struct KernelChannel *pKernelChannel, ChannelDescendant *pObject);
1348 
1349 #ifdef __nvoc_kernel_channel_h_disabled
kchannelRegisterChild(struct KernelChannel * pKernelChannel,ChannelDescendant * pObject)1350 static inline NV_STATUS kchannelRegisterChild(struct KernelChannel *pKernelChannel, ChannelDescendant *pObject) {
1351     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1352     return NV_ERR_NOT_SUPPORTED;
1353 }
1354 #else //__nvoc_kernel_channel_h_disabled
1355 #define kchannelRegisterChild(pKernelChannel, pObject) kchannelRegisterChild_IMPL(pKernelChannel, pObject)
1356 #endif //__nvoc_kernel_channel_h_disabled
1357 
1358 NV_STATUS kchannelDeregisterChild_IMPL(struct KernelChannel *pKernelChannel, ChannelDescendant *pObject);
1359 
1360 #ifdef __nvoc_kernel_channel_h_disabled
kchannelDeregisterChild(struct KernelChannel * pKernelChannel,ChannelDescendant * pObject)1361 static inline NV_STATUS kchannelDeregisterChild(struct KernelChannel *pKernelChannel, ChannelDescendant *pObject) {
1362     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1363     return NV_ERR_NOT_SUPPORTED;
1364 }
1365 #else //__nvoc_kernel_channel_h_disabled
1366 #define kchannelDeregisterChild(pKernelChannel, pObject) kchannelDeregisterChild_IMPL(pKernelChannel, pObject)
1367 #endif //__nvoc_kernel_channel_h_disabled
1368 
1369 void kchannelNotifyEvent_IMPL(struct KernelChannel *pKernelChannel, NvU32 notifyIndex, NvU32 info32, NvU16 info16, void *pNotifyParams, NvU32 notifyParamsSize);
1370 
1371 #ifdef __nvoc_kernel_channel_h_disabled
kchannelNotifyEvent(struct KernelChannel * pKernelChannel,NvU32 notifyIndex,NvU32 info32,NvU16 info16,void * pNotifyParams,NvU32 notifyParamsSize)1372 static inline void kchannelNotifyEvent(struct KernelChannel *pKernelChannel, NvU32 notifyIndex, NvU32 info32, NvU16 info16, void *pNotifyParams, NvU32 notifyParamsSize) {
1373     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1374 }
1375 #else //__nvoc_kernel_channel_h_disabled
1376 #define kchannelNotifyEvent(pKernelChannel, notifyIndex, info32, info16, pNotifyParams, notifyParamsSize) kchannelNotifyEvent_IMPL(pKernelChannel, notifyIndex, info32, info16, pNotifyParams, notifyParamsSize)
1377 #endif //__nvoc_kernel_channel_h_disabled
1378 
1379 NV_STATUS kchannelUpdateNotifierMem_IMPL(struct KernelChannel *pKernelChannel, NvU32 notifyIndex, NvU32 info32, NvU16 info16, NvU32 notifierStatus);
1380 
1381 #ifdef __nvoc_kernel_channel_h_disabled
kchannelUpdateNotifierMem(struct KernelChannel * pKernelChannel,NvU32 notifyIndex,NvU32 info32,NvU16 info16,NvU32 notifierStatus)1382 static inline NV_STATUS kchannelUpdateNotifierMem(struct KernelChannel *pKernelChannel, NvU32 notifyIndex, NvU32 info32, NvU16 info16, NvU32 notifierStatus) {
1383     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1384     return NV_ERR_NOT_SUPPORTED;
1385 }
1386 #else //__nvoc_kernel_channel_h_disabled
1387 #define kchannelUpdateNotifierMem(pKernelChannel, notifyIndex, info32, info16, notifierStatus) kchannelUpdateNotifierMem_IMPL(pKernelChannel, notifyIndex, info32, info16, notifierStatus)
1388 #endif //__nvoc_kernel_channel_h_disabled
1389 
1390 NvBool kchannelCheckIsUserMode_IMPL(struct KernelChannel *pKernelChannel);
1391 
1392 #ifdef __nvoc_kernel_channel_h_disabled
kchannelCheckIsUserMode(struct KernelChannel * pKernelChannel)1393 static inline NvBool kchannelCheckIsUserMode(struct KernelChannel *pKernelChannel) {
1394     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1395     return NV_FALSE;
1396 }
1397 #else //__nvoc_kernel_channel_h_disabled
1398 #define kchannelCheckIsUserMode(pKernelChannel) kchannelCheckIsUserMode_IMPL(pKernelChannel)
1399 #endif //__nvoc_kernel_channel_h_disabled

NvBool kchannelCheckIsKernel_IMPL(struct KernelChannel *pKernelChannel);

#ifdef __nvoc_kernel_channel_h_disabled
static inline NvBool kchannelCheckIsKernel(struct KernelChannel *pKernelChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelCheckIsKernel(pKernelChannel) kchannelCheckIsKernel_IMPL(pKernelChannel)
#endif //__nvoc_kernel_channel_h_disabled

NvBool kchannelCheckIsAdmin_IMPL(struct KernelChannel *pKernelChannel);

#ifdef __nvoc_kernel_channel_h_disabled
static inline NvBool kchannelCheckIsAdmin(struct KernelChannel *pKernelChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelCheckIsAdmin(pKernelChannel) kchannelCheckIsAdmin_IMPL(pKernelChannel)
#endif //__nvoc_kernel_channel_h_disabled

NV_STATUS kchannelBindToRunlist_IMPL(struct KernelChannel *pKernelChannel, RM_ENGINE_TYPE localRmEngineType, ENGDESCRIPTOR engineDesc);

#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelBindToRunlist(struct KernelChannel *pKernelChannel, RM_ENGINE_TYPE localRmEngineType, ENGDESCRIPTOR engineDesc) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelBindToRunlist(pKernelChannel, localRmEngineType, engineDesc) kchannelBindToRunlist_IMPL(pKernelChannel, localRmEngineType, engineDesc)
#endif //__nvoc_kernel_channel_h_disabled

NV_STATUS kchannelSetEngineContextMemDesc_IMPL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine, MEMORY_DESCRIPTOR *pMemDesc);

#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelSetEngineContextMemDesc(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine, MEMORY_DESCRIPTOR *pMemDesc) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelSetEngineContextMemDesc(pGpu, pKernelChannel, engine, pMemDesc) kchannelSetEngineContextMemDesc_IMPL(pGpu, pKernelChannel, engine, pMemDesc)
#endif //__nvoc_kernel_channel_h_disabled

NV_STATUS kchannelMapEngineCtxBuf_IMPL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine);

#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelMapEngineCtxBuf(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelMapEngineCtxBuf(pGpu, pKernelChannel, engine) kchannelMapEngineCtxBuf_IMPL(pGpu, pKernelChannel, engine)
#endif //__nvoc_kernel_channel_h_disabled

NV_STATUS kchannelUnmapEngineCtxBuf_IMPL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine);

#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelUnmapEngineCtxBuf(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelUnmapEngineCtxBuf(pGpu, pKernelChannel, engine) kchannelUnmapEngineCtxBuf_IMPL(pGpu, pKernelChannel, engine)
#endif //__nvoc_kernel_channel_h_disabled

NV_STATUS kchannelCheckBcStateCurrent_IMPL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);

#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelCheckBcStateCurrent(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelCheckBcStateCurrent(pGpu, pKernelChannel) kchannelCheckBcStateCurrent_IMPL(pGpu, pKernelChannel)
#endif //__nvoc_kernel_channel_h_disabled

NV_STATUS kchannelUpdateWorkSubmitTokenNotifIndex_IMPL(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvU32 index);

#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelUpdateWorkSubmitTokenNotifIndex(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvU32 index) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelUpdateWorkSubmitTokenNotifIndex(pGpu, arg0, index) kchannelUpdateWorkSubmitTokenNotifIndex_IMPL(pGpu, arg0, index)
#endif //__nvoc_kernel_channel_h_disabled

NV_STATUS kchannelNotifyWorkSubmitToken_IMPL(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvU32 token);

#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelNotifyWorkSubmitToken(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvU32 token) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelNotifyWorkSubmitToken(pGpu, arg0, token) kchannelNotifyWorkSubmitToken_IMPL(pGpu, arg0, token)
#endif //__nvoc_kernel_channel_h_disabled

NV_STATUS kchannelMapUserD_IMPL(struct OBJGPU *pGpu, struct KernelChannel *arg0, RS_PRIV_LEVEL arg1, NvU64 arg2, NvU32 arg3, NvP64 *arg4, NvP64 *arg5);

#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelMapUserD(struct OBJGPU *pGpu, struct KernelChannel *arg0, RS_PRIV_LEVEL arg1, NvU64 arg2, NvU32 arg3, NvP64 *arg4, NvP64 *arg5) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelMapUserD(pGpu, arg0, arg1, arg2, arg3, arg4, arg5) kchannelMapUserD_IMPL(pGpu, arg0, arg1, arg2, arg3, arg4, arg5)
#endif //__nvoc_kernel_channel_h_disabled

void kchannelUnmapUserD_IMPL(struct OBJGPU *pGpu, struct KernelChannel *arg0, RS_PRIV_LEVEL arg1, NvP64 *arg2, NvP64 *arg3);

#ifdef __nvoc_kernel_channel_h_disabled
static inline void kchannelUnmapUserD(struct OBJGPU *pGpu, struct KernelChannel *arg0, RS_PRIV_LEVEL arg1, NvP64 *arg2, NvP64 *arg3) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelUnmapUserD(pGpu, arg0, arg1, arg2, arg3) kchannelUnmapUserD_IMPL(pGpu, arg0, arg1, arg2, arg3)
#endif //__nvoc_kernel_channel_h_disabled

void kchannelFillMmuExceptionInfo_IMPL(struct KernelChannel *pKernelChannel, FIFO_MMU_EXCEPTION_DATA *arg0);

#ifdef __nvoc_kernel_channel_h_disabled
static inline void kchannelFillMmuExceptionInfo(struct KernelChannel *pKernelChannel, FIFO_MMU_EXCEPTION_DATA *arg0) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelFillMmuExceptionInfo(pKernelChannel, arg0) kchannelFillMmuExceptionInfo_IMPL(pKernelChannel, arg0)
#endif //__nvoc_kernel_channel_h_disabled

void kchannelFreeMmuExceptionInfo_IMPL(struct KernelChannel *pKernelChannel);

#ifdef __nvoc_kernel_channel_h_disabled
static inline void kchannelFreeMmuExceptionInfo(struct KernelChannel *pKernelChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelFreeMmuExceptionInfo(pKernelChannel) kchannelFreeMmuExceptionInfo_IMPL(pKernelChannel)
#endif //__nvoc_kernel_channel_h_disabled

NV_STATUS kchannelGetFromDualHandle_IMPL(struct RsClient *arg0, NvHandle arg1, struct KernelChannel **arg2);

#define kchannelGetFromDualHandle(arg0, arg1, arg2) kchannelGetFromDualHandle_IMPL(arg0, arg1, arg2)
NV_STATUS kchannelGetFromDualHandleRestricted_IMPL(struct RsClient *arg0, NvHandle arg1, struct KernelChannel **arg2);

#define kchannelGetFromDualHandleRestricted(arg0, arg1, arg2) kchannelGetFromDualHandleRestricted_IMPL(arg0, arg1, arg2)
NvU32 kchannelGetGfid_IMPL(struct KernelChannel *pKernelChannel);

#ifdef __nvoc_kernel_channel_h_disabled
static inline NvU32 kchannelGetGfid(struct KernelChannel *pKernelChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return 0;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelGetGfid(pKernelChannel) kchannelGetGfid_IMPL(pKernelChannel)
#endif //__nvoc_kernel_channel_h_disabled

#undef PRIVATE_FIELD

#ifndef NVOC_KERNEL_CHANNEL_H_PRIVATE_ACCESS_ALLOWED
#undef kchannelRetrieveKmb
NV_STATUS NVOC_PRIVATE_FUNCTION(kchannelRetrieveKmb)(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, ROTATE_IV_TYPE rotateOperation, NvBool includeSecrets, CC_KMB *keyMaterialBundle);

#undef kchannelRetrieveKmb_HAL
NV_STATUS NVOC_PRIVATE_FUNCTION(kchannelRetrieveKmb_HAL)(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, ROTATE_IV_TYPE rotateOperation, NvBool includeSecrets, CC_KMB *keyMaterialBundle);

#undef kchannelSetKeyRotationNotifier
NV_STATUS NVOC_PRIVATE_FUNCTION(kchannelSetKeyRotationNotifier)(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bSet);

#undef kchannelSetKeyRotationNotifier_HAL
NV_STATUS NVOC_PRIVATE_FUNCTION(kchannelSetKeyRotationNotifier_HAL)(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bSet);

#ifndef __nvoc_kernel_channel_h_disabled
#undef kchannelRotateSecureChannelIv
NV_STATUS NVOC_PRIVATE_FUNCTION(kchannelRotateSecureChannelIv)(struct KernelChannel *pKernelChannel, ROTATE_IV_TYPE rotateOperation, NvU32 *encryptIv, NvU32 *decryptIv);
#endif //__nvoc_kernel_channel_h_disabled

#endif // NVOC_KERNEL_CHANNEL_H_PRIVATE_ACCESS_ALLOWED


RS_ORDERED_ITERATOR kchannelGetIter(
    struct RsClient *pClient,
    RsResourceRef *pScopeRef);

NV_STATUS kchannelGetNextKernelChannel(
    struct OBJGPU *pGpu,
    CHANNEL_ITERATOR *pIt,
    struct KernelChannel **ppKernelChannel);
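
/*
 * Example (illustrative sketch only): walking every KernelChannel visible to a
 * GPU with kchannelGetNextKernelChannel(). It assumes `it` has already been
 * initialized by the owning KernelFifo's channel-iterator helper, which is not
 * declared in this header; only the loop shape below follows from the
 * declaration above.
 *
 *   CHANNEL_ITERATOR it;                       // initialization elided
 *   struct KernelChannel *pKernelChannel = NULL;
 *
 *   while (kchannelGetNextKernelChannel(pGpu, &it, &pKernelChannel) == NV_OK)
 *   {
 *       // pKernelChannel is valid here; e.g., query its GFID
 *       NvU32 gfid = kchannelGetGfid(pKernelChannel);
 *       (void)gfid;
 *   }
 */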

NV_STATUS CliGetKernelChannelWithDevice(struct RsClient       *pClient,
                                        NvHandle        hParent,
                                        NvHandle        hKernelChannel,
                                        struct KernelChannel **ppKernelChannel);

NV_STATUS CliGetKernelChannel(struct RsClient       *pClient,
                              NvHandle        hKernelChannel,
                              struct KernelChannel **ppKernelChannel);
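
/*
 * Example (illustrative sketch only): resolving a channel handle under a
 * client. `pClient` and `hChannel` are placeholder caller-supplied values; on
 * NV_OK the output pointer refers to the resolved KernelChannel.
 *
 *   struct KernelChannel *pKernelChannel = NULL;
 *   NV_STATUS status = CliGetKernelChannel(pClient, hChannel, &pKernelChannel);
 *   if (status != NV_OK)
 *       return status;   // handle did not resolve to a KernelChannel
 */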

/*!
 * @brief Helper to get type and memdesc of a channel notifier (memory/ctxdma)
 */
NV_STATUS kchannelGetNotifierInfo(struct OBJGPU *pGpu,
                                  Device *pDevice,
                                  NvHandle hErrorContext,
                                  MEMORY_DESCRIPTOR **ppMemDesc,
                                  ErrorNotifierType *pNotifierType,
                                  NvU64 *pOffset);
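
/*
 * Example (illustrative sketch only): looking up the error-notifier backing
 * for a channel. `hErrorContext` is whatever handle the client passed at
 * channel allocation; the memdesc, notifier type, and offset are filled in on
 * NV_OK.
 *
 *   MEMORY_DESCRIPTOR *pMemDesc = NULL;
 *   ErrorNotifierType  notifierType;
 *   NvU64              offset = 0;
 *
 *   NV_STATUS status = kchannelGetNotifierInfo(pGpu, pDevice, hErrorContext,
 *                                              &pMemDesc, &notifierType, &offset);
 *   if (status != NV_OK)
 *       return status;
 */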

// Utils to iterate over ChannelDescendants on one Channel
void kchannelGetChildIterator(struct KernelChannel *pKernelChannel,
                              NvU32 classID,
                              RM_ENGINE_TYPE engineID,
                              KernelChannelChildIterator *pIter);
ChannelDescendant *kchannelGetNextChild(KernelChannelChildIterator *pIter);
// Simpler function to call if you just need one result
ChannelDescendant *kchannelGetOneChild(struct KernelChannel *pKernelChannel,
                                       NvU32 classID,
                                       NvU32 engineID);
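
/*
 * Example (illustrative sketch only): visiting each ChannelDescendant of a
 * given class on one channel. `classId` and `rmEngineType` are caller-chosen
 * filter values; the walk ends when kchannelGetNextChild() returns NULL.
 *
 *   KernelChannelChildIterator iter;
 *   ChannelDescendant *pChild;
 *
 *   kchannelGetChildIterator(pKernelChannel, classId, rmEngineType, &iter);
 *   while ((pChild = kchannelGetNextChild(&iter)) != NULL)
 *   {
 *       // inspect or act on pChild
 *   }
 */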

// Utils to iterate over ChannelDescendants on all Channels in the same ChannelGroup
void kchannelGetChildIterOverGroup(struct KernelChannel *pKernelChannel,
                                   NvU32 classNum,
                                   NvU32 engDesc,
                                   KernelChannelChildIterOverGroup *pIt);
ChannelDescendant *kchannelGetNextChildOverGroup(KernelChannelChildIterOverGroup *pIt);
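
/*
 * Example (illustrative sketch only): the same walk, but across every channel
 * in the channel group. `classNum` and `engDesc` mirror the parameters
 * declared above.
 *
 *   KernelChannelChildIterOverGroup groupIter;
 *   ChannelDescendant *pChild;
 *
 *   kchannelGetChildIterOverGroup(pKernelChannel, classNum, engDesc, &groupIter);
 *   while ((pChild = kchannelGetNextChildOverGroup(&groupIter)) != NULL)
 *   {
 *       // inspect or act on pChild
 *   }
 */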

NV_STATUS kchannelFindChildByHandle(struct KernelChannel *pKernelChannel, NvHandle hResource, ChannelDescendant **ppObject);

// Bitmap for KernelChannel->swState
#define KERNEL_CHANNEL_SW_STATE_CPU_MAP                    NVBIT(0) // UserD is mapped
#define KERNEL_CHANNEL_SW_STATE_RUNLIST_SET                NVBIT(1) // RunlistId is set
#define KERNEL_CHANNEL_SW_STATE_DISABLED_FOR_KEY_ROTATION  NVBIT(2) // Disabled for key rotation
#define KERNEL_CHANNEL_SW_STATE_ENABLE_AFTER_KEY_ROTATION  NVBIT(3) // RM should enable after key rotation

NvBool kchannelIsCpuMapped(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
void kchannelSetCpuMapped(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bCpuMapped);
NvBool kchannelIsRunlistSet(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
void kchannelSetRunlistSet(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bRunlistSet);
NvBool kchannelIsDisabledForKeyRotation(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
void kchannelDisableForKeyRotation(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bDisable);
NvBool kchannelIsEnableAfterKeyRotation(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
void kchannelEnableAfterKeyRotation(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bEnable);
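
/*
 * Example (illustrative sketch only): the accessors above wrap the swState
 * bits defined above, e.g. marking UserD as CPU-mapped after a successful map
 * and checking the flag before attempting to map again.
 *
 *   if (!kchannelIsCpuMapped(pGpu, pKernelChannel))
 *   {
 *       // ... perform the UserD mapping ...
 *       kchannelSetCpuMapped(pGpu, pKernelChannel, NV_TRUE);
 *   }
 */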

#endif // KERNEL_CHANNEL_H

#ifdef __cplusplus
} // extern "C"
#endif

#endif // _G_KERNEL_CHANNEL_NVOC_H_