1
2 #ifndef _G_KERNEL_CHANNEL_NVOC_H_
3 #define _G_KERNEL_CHANNEL_NVOC_H_
4 #include "nvoc/runtime.h"
5
6 // Version of generated metadata structures
7 #ifdef NVOC_METADATA_VERSION
8 #undef NVOC_METADATA_VERSION
9 #endif
10 #define NVOC_METADATA_VERSION 0
11
12 #ifdef __cplusplus
13 extern "C" {
14 #endif
15
16 /*
17 * SPDX-FileCopyrightText: Copyright (c) 2020-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
18 * SPDX-License-Identifier: MIT
19 *
20 * Permission is hereby granted, free of charge, to any person obtaining a
21 * copy of this software and associated documentation files (the "Software"),
22 * to deal in the Software without restriction, including without limitation
23 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
24 * and/or sell copies of the Software, and to permit persons to whom the
25 * Software is furnished to do so, subject to the following conditions:
26 *
27 * The above copyright notice and this permission notice shall be included in
28 * all copies or substantial portions of the Software.
29 *
30 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
31 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
32 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
33 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
34 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
35 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
36 * DEALINGS IN THE SOFTWARE.
37 */
38
39 #pragma once
40 #include "g_kernel_channel_nvoc.h"
41
42 #ifndef KERNEL_CHANNEL_H
43 #define KERNEL_CHANNEL_H
44
45 #include "core/core.h"
46 #include "resserv/resserv.h"
47 #include "nvoc/prelude.h"
48 #include "gpu/gpu_resource.h"
49 #include "kernel/gpu/gpu_engine_type.h"
50 #include "kernel/gpu/fifo/kernel_ctxshare.h"
51 #include "kernel/gpu/fifo/kernel_fifo.h"
52 #include "kernel/gpu/gr/kernel_graphics_context.h"
53 #include "kernel/gpu/intr/intr_service.h"
54 #include "kernel/gpu/mig_mgr/kernel_mig_manager.h"
55
56 #include "ctrl/ctrl0090.h"
57 #include "ctrl/ctrl208f/ctrl208ffifo.h"
58 #include "ctrl/ctrl506f.h"
59 #include "ctrl/ctrl906f.h"
60 #include "ctrl/ctrla06f.h"
61 #include "ctrl/ctrla16f.h"
62 #include "ctrl/ctrla26f.h"
63 #include "ctrl/ctrlb06f.h"
64 #include "ctrl/ctrlc06f.h"
65 #include "ctrl/ctrlc36f.h"
66 #include "ctrl/ctrlc56f.h"
67
68 #include "cc_drv.h"
69
70
71 struct OBJGPU;
72
73 #ifndef __NVOC_CLASS_OBJGPU_TYPEDEF__
74 #define __NVOC_CLASS_OBJGPU_TYPEDEF__
75 typedef struct OBJGPU OBJGPU;
76 #endif /* __NVOC_CLASS_OBJGPU_TYPEDEF__ */
77
78 #ifndef __nvoc_class_id_OBJGPU
79 #define __nvoc_class_id_OBJGPU 0x7ef3cb
80 #endif /* __nvoc_class_id_OBJGPU */
81
82
83
84 struct UserInfo;
85
86 #ifndef __NVOC_CLASS_UserInfo_TYPEDEF__
87 #define __NVOC_CLASS_UserInfo_TYPEDEF__
88 typedef struct UserInfo UserInfo;
89 #endif /* __NVOC_CLASS_UserInfo_TYPEDEF__ */
90
91 #ifndef __nvoc_class_id_UserInfo
92 #define __nvoc_class_id_UserInfo 0x21d236
93 #endif /* __nvoc_class_id_UserInfo */
94
95
/*!
 * @brief Type of hErrorContext or hEccErrorContext
 *
 * This is RPCed to GSP in #NV_CHANNEL_ALLOC_PARAMS.internalFlags
 * along with the actual memdesc in
 * #NV_CHANNEL_ALLOC_PARAMS.errorNotifierMem and
 * #NV_CHANNEL_ALLOC_PARAMS.eccErrorNotifierMem.
 *
 * NOTE: the numeric values are packed into the 2-bit
 * NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_*ERROR_NOTIFIER_TYPE fields below,
 * so they must stay within 0..3 and keep their current ordering.
 */
typedef enum {
    /*!
     * Initial state as passed in NV_CHANNEL_ALLOC_PARAMS by
     * kernel CPU-RM clients.
     */
    ERROR_NOTIFIER_TYPE_UNKNOWN = 0,
    /*! @brief Error notifier is explicitly not set.
     *
     * The corresponding hErrorContext or hEccErrorContext must be
     * NV01_NULL_OBJECT.
     */
    ERROR_NOTIFIER_TYPE_NONE,
    /*! @brief Error notifier is a ContextDma */
    ERROR_NOTIFIER_TYPE_CTXDMA,
    /*! @brief Error notifier is a NvNotification array in sysmem/vidmem */
    ERROR_NOTIFIER_TYPE_MEMORY
} ErrorNotifierType;
121
//
// Iterates over the ChannelDescendants on a channel
// Uses an RS_ORDERED_ITERATOR and filters it by EngineID / ClassID
//
typedef struct {
    RS_ORDERED_ITERATOR rsIter;   // underlying ordered resource-server iterator
    RM_ENGINE_TYPE engineID;      // filter: only children on this engine
    NvU32 classID;                // filter: only children of this class
} KernelChannelChildIterator;
131
// Discriminates the two families of channel classes; reported per class
// in CLI_CHANNEL_CLASS_INFO::classType by CliGetChannelClassInfo().
typedef enum
{
    CHANNEL_CLASS_TYPE_DMA,
    CHANNEL_CLASS_TYPE_GPFIFO,
} CHANNEL_CLASS_TYPE;
137
//
// Channel class info structure.
//
// Filled in by CliGetChannelClassInfo() routine.
//
typedef struct
{
    NvU32 notifiersMaxCount;  // max# of notifiers for class
    NvU32 eventActionDisable; // event disable action cmd value
    NvU32 eventActionSingle;  // event single-shot enable action cmd value
    NvU32 eventActionRepeat;  // event repeat enable action cmd value
    NvU32 rcNotifierIndex;    // RC notifier index differs depending on the channel class
    CHANNEL_CLASS_TYPE classType; // DMA vs GPFIFO family (see CHANNEL_CLASS_TYPE)
} CLI_CHANNEL_CLASS_INFO;
152
153 void CliGetChannelClassInfo(NvU32, CLI_CHANNEL_CLASS_INFO*);
154
/*!
 * This structure represents an iterator for all objects
 * with given class number or engine tag on a channel or TSG.
 * It is created by function @ref kchannelGetChildIterOverGroup.
 */
typedef struct
{
    NvU32 engDesc;   // engine tag to filter on
    NvU32 classNum;  // class number to filter on

    //
    // During iteration, a copy of the current channel/TSG as well as the
    // next object node to start iterating from is tracked.
    //
    CHANNEL_NODE channelNode;
    KernelChannelChildIterator kchannelIter; // per-channel child iterator for the current node
} KernelChannelChildIterOverGroup;
172
// Per-subdevice FIFO instance-memory descriptors for a channel
// (stored in KernelChannel::pFifoHalData[]).
typedef struct _def_instance_block
{
    MEMORY_DESCRIPTOR *pInstanceBlockDesc; // channel instance block memory
    MEMORY_DESCRIPTOR *pRamfcDesc;         // RAMFC area — presumably the HW FIFO context; confirm
    /*!
     * Used only for Suspend Resume RM internal channel.
     * Will be moved to the Host context RL infolist.
     */
    MEMORY_DESCRIPTOR *pRLMemDesc;
} FIFO_INSTANCE_BLOCK;
183
/* Bitfields in NV_CHANNEL_ALLOC_PARAMS.internalFlags */
// NOTE: the "high:low" values are DRF-style bit-range definitions consumed by
// the NV field-extraction macros; the *ERROR_NOTIFIER_TYPE field values map
// 1:1 onto the ErrorNotifierType enum above (2 bits each).
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_PRIVILEGE 1:0
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_PRIVILEGE_USER 0x0
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_PRIVILEGE_ADMIN 0x1
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_PRIVILEGE_KERNEL 0x2
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ERROR_NOTIFIER_TYPE 3:2
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ERROR_NOTIFIER_TYPE_UNKNOWN ERROR_NOTIFIER_TYPE_UNKNOWN
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ERROR_NOTIFIER_TYPE_NONE ERROR_NOTIFIER_TYPE_NONE
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ERROR_NOTIFIER_TYPE_CTXDMA ERROR_NOTIFIER_TYPE_CTXDMA
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ERROR_NOTIFIER_TYPE_MEMORY ERROR_NOTIFIER_TYPE_MEMORY
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ECC_ERROR_NOTIFIER_TYPE 5:4
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ECC_ERROR_NOTIFIER_TYPE_UNKNOWN ERROR_NOTIFIER_TYPE_UNKNOWN
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ECC_ERROR_NOTIFIER_TYPE_NONE ERROR_NOTIFIER_TYPE_NONE
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ECC_ERROR_NOTIFIER_TYPE_CTXDMA ERROR_NOTIFIER_TYPE_CTXDMA
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ECC_ERROR_NOTIFIER_TYPE_MEMORY ERROR_NOTIFIER_TYPE_MEMORY

// Channel is created by GSP RM
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_GSP_OWNED 6:6
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_GSP_OWNED_NO 0x0
#define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_GSP_OWNED_YES 0x1
204
/*!
 * Class for the kernel side of a Channel object.
 *
 * NVOC-generated layout: base-class instances first, then precomputed
 * ancestor pointers for staticCast, then the per-object vtable, then data
 * members. Field order is generated — do not reorder by hand.
 */

// Private field names are wrapped in PRIVATE_FIELD, which does nothing for
// the matching C source file, but causes diagnostics to be issued if another
// source file references the field.
#ifdef NVOC_KERNEL_CHANNEL_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif


struct KernelChannel {

    // Metadata
    const struct NVOC_RTTI *__nvoc_rtti;

    // Parent (i.e. superclass or base class) object pointers
    struct GpuResource __nvoc_base_GpuResource;
    struct Notifier __nvoc_base_Notifier;

    // Ancestor object pointers for `staticCast` feature
    struct Object *__nvoc_pbase_Object; // obj super^4
    struct RsResource *__nvoc_pbase_RsResource; // res super^3
    struct RmResourceCommon *__nvoc_pbase_RmResourceCommon; // rmrescmn super^3
    struct RmResource *__nvoc_pbase_RmResource; // rmres super^2
    struct GpuResource *__nvoc_pbase_GpuResource; // gpures super
    struct INotifier *__nvoc_pbase_INotifier; // inotify super^2
    struct Notifier *__nvoc_pbase_Notifier; // notify super
    struct KernelChannel *__nvoc_pbase_KernelChannel; // kchannel

    // Vtable with 66 per-object function pointers
    NV_STATUS (*__kchannelMap__)(struct KernelChannel * /*this*/, CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, RsCpuMapping *); // virtual override (res) base (gpures)
    NV_STATUS (*__kchannelUnmap__)(struct KernelChannel * /*this*/, CALL_CONTEXT *, RsCpuMapping *); // virtual override (res) base (gpures)
    NV_STATUS (*__kchannelGetMapAddrSpace__)(struct KernelChannel * /*this*/, CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *); // virtual override (gpures) base (gpures)
    NV_STATUS (*__kchannelGetMemInterMapParams__)(struct KernelChannel * /*this*/, RMRES_MEM_INTER_MAP_PARAMS *); // virtual override (rmres) base (gpures)
    NV_STATUS (*__kchannelCheckMemInterUnmap__)(struct KernelChannel * /*this*/, NvBool); // virtual override (rmres) base (gpures)
    NV_STATUS (*__kchannelCreateUserMemDesc__)(struct OBJGPU *, struct KernelChannel * /*this*/); // halified (2 hals)
    NvBool (*__kchannelIsUserdAddrSizeValid__)(struct KernelChannel * /*this*/, NvU32, NvU32); // halified (3 hals) body
    NV_STATUS (*__kchannelCtrlCmdResetIsolatedChannel__)(struct KernelChannel * /*this*/, NV506F_CTRL_CMD_RESET_ISOLATED_CHANNEL_PARAMS *); // exported (id=0x506f0105)
    NV_STATUS (*__kchannelCtrlCmdInternalResetIsolatedChannel__)(struct KernelChannel * /*this*/, NV506F_CTRL_CMD_INTERNAL_RESET_ISOLATED_CHANNEL_PARAMS *); // exported (id=0x506f0106)
    NV_STATUS (*__kchannelCtrlCmdGetClassEngineid__)(struct KernelChannel * /*this*/, NV906F_CTRL_GET_CLASS_ENGINEID_PARAMS *); // exported (id=0x906f0101)
    NV_STATUS (*__kchannelCtrlCmdResetChannel__)(struct KernelChannel * /*this*/, NV906F_CTRL_CMD_RESET_CHANNEL_PARAMS *); // exported (id=0x906f0102)
    NV_STATUS (*__kchannelCtrlCmdGetDeferRCState__)(struct KernelChannel * /*this*/, NV906F_CTRL_CMD_GET_DEFER_RC_STATE_PARAMS *); // exported (id=0x906f0105)
    NV_STATUS (*__kchannelCtrlCmdGetMmuFaultInfo__)(struct KernelChannel * /*this*/, NV906F_CTRL_GET_MMU_FAULT_INFO_PARAMS *); // exported (id=0x906f0106)
    NV_STATUS (*__kchannelCtrlCmdEventSetNotification__)(struct KernelChannel * /*this*/, NV906F_CTRL_EVENT_SET_NOTIFICATION_PARAMS *); // exported (id=0x906f0203)
    NV_STATUS (*__kchannelCtrlCmdGpFifoSchedule__)(struct KernelChannel * /*this*/, NVA06F_CTRL_GPFIFO_SCHEDULE_PARAMS *); // exported (id=0xa06f0103)
    NV_STATUS (*__kchannelCtrlCmdBind__)(struct KernelChannel * /*this*/, NVA06F_CTRL_BIND_PARAMS *); // exported (id=0xa06f0104)
    NV_STATUS (*__kchannelCtrlCmdSetErrorNotifier__)(struct KernelChannel * /*this*/, NVA06F_CTRL_SET_ERROR_NOTIFIER_PARAMS *); // exported (id=0xa06f0108)
    NV_STATUS (*__kchannelCtrlCmdSetInterleaveLevel__)(struct KernelChannel * /*this*/, NVA06F_CTRL_INTERLEAVE_LEVEL_PARAMS *); // exported (id=0xa06f0109)
    NV_STATUS (*__kchannelCtrlCmdGetContextId__)(struct KernelChannel * /*this*/, NVA06F_CTRL_GET_CONTEXT_ID_PARAMS *); // exported (id=0xa06f0113)
    NV_STATUS (*__kchannelCtrlCmdRestartRunlist__)(struct KernelChannel * /*this*/, NVA06F_CTRL_RESTART_RUNLIST_PARAMS *); // exported (id=0xa06f0111)
    NV_STATUS (*__kchannelCtrlCmdGetEngineCtxSize__)(struct KernelChannel * /*this*/, NVB06F_CTRL_GET_ENGINE_CTX_SIZE_PARAMS *); // exported (id=0xb06f010b)
    NV_STATUS (*__kchannelCtrlCmdGetEngineCtxData__)(struct KernelChannel * /*this*/, NVB06F_CTRL_GET_ENGINE_CTX_DATA_PARAMS *); // exported (id=0xb06f010c)
    NV_STATUS (*__kchannelCtrlCmdMigrateEngineCtxData__)(struct KernelChannel * /*this*/, NVB06F_CTRL_MIGRATE_ENGINE_CTX_DATA_PARAMS *); // exported (id=0xb06f010d)
    NV_STATUS (*__kchannelCtrlCmdGetEngineCtxState__)(struct KernelChannel * /*this*/, NVB06F_CTRL_GET_ENGINE_CTX_STATE_PARAMS *); // exported (id=0xb06f010e)
    NV_STATUS (*__kchannelCtrlCmdGetChannelHwState__)(struct KernelChannel * /*this*/, NVB06F_CTRL_GET_CHANNEL_HW_STATE_PARAMS *); // exported (id=0xb06f010f)
    NV_STATUS (*__kchannelCtrlCmdSetChannelHwState__)(struct KernelChannel * /*this*/, NVB06F_CTRL_SET_CHANNEL_HW_STATE_PARAMS *); // exported (id=0xb06f0110)
    NV_STATUS (*__kchannelCtrlCmdSaveEngineCtxData__)(struct KernelChannel * /*this*/, NVB06F_CTRL_SAVE_ENGINE_CTX_DATA_PARAMS *); // exported (id=0xb06f0111)
    NV_STATUS (*__kchannelCtrlCmdRestoreEngineCtxData__)(struct KernelChannel * /*this*/, NVB06F_CTRL_RESTORE_ENGINE_CTX_DATA_PARAMS *); // exported (id=0xb06f0112)
    NV_STATUS (*__kchannelCtrlCmdGpfifoGetWorkSubmitToken__)(struct KernelChannel * /*this*/, NVC36F_CTRL_CMD_GPFIFO_GET_WORK_SUBMIT_TOKEN_PARAMS *); // exported (id=0xc36f0108)
    NV_STATUS (*__kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer__)(struct KernelChannel * /*this*/, NVC36F_CTRL_GPFIFO_UPDATE_FAULT_METHOD_BUFFER_PARAMS *); // exported (id=0xc36f0109)
    NV_STATUS (*__kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex__)(struct KernelChannel * /*this*/, NVC36F_CTRL_GPFIFO_SET_WORK_SUBMIT_TOKEN_NOTIF_INDEX_PARAMS *); // exported (id=0xc36f010a)
    NV_STATUS (*__kchannelCtrlCmdStopChannel__)(struct KernelChannel * /*this*/, NVA06F_CTRL_STOP_CHANNEL_PARAMS *); // exported (id=0xa06f0112)
    NV_STATUS (*__kchannelCtrlCmdGetKmb__)(struct KernelChannel * /*this*/, NVC56F_CTRL_CMD_GET_KMB_PARAMS *); // halified (2 hals) exported (id=0xc56f010b) body
    NV_STATUS (*__kchannelCtrlRotateSecureChannelIv__)(struct KernelChannel * /*this*/, NVC56F_CTRL_ROTATE_SECURE_CHANNEL_IV_PARAMS *); // halified (2 hals) exported (id=0xc56f010c) body
    NV_STATUS (*__kchannelSetEncryptionStatsBuffer__)(struct OBJGPU *, struct KernelChannel * /*this*/, MEMORY_DESCRIPTOR *, NvBool); // halified (2 hals) body
    NV_STATUS (*__kchannelCtrlGetTpcPartitionMode__)(struct KernelChannel * /*this*/, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *); // inline exported (id=0x900103) body
    NV_STATUS (*__kchannelCtrlSetTpcPartitionMode__)(struct KernelChannel * /*this*/, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *); // inline exported (id=0x900101) body
    NV_STATUS (*__kchannelCtrlGetMMUDebugMode__)(struct KernelChannel * /*this*/, NV0090_CTRL_GET_MMU_DEBUG_MODE_PARAMS *); // inline exported (id=0x900105) body
    NV_STATUS (*__kchannelCtrlProgramVidmemPromote__)(struct KernelChannel * /*this*/, NV0090_CTRL_PROGRAM_VIDMEM_PROMOTE_PARAMS *); // inline exported (id=0x900107) body
    NV_STATUS (*__kchannelRetrieveKmb__)(struct OBJGPU *, struct KernelChannel * /*this*/, ROTATE_IV_TYPE, NvBool, CC_KMB *); // halified (2 hals) body
    NV_STATUS (*__kchannelSetKeyRotationNotifier__)(struct OBJGPU *, struct KernelChannel * /*this*/, NvBool); // halified (2 hals) body
    NV_STATUS (*__kchannelControl__)(struct KernelChannel * /*this*/, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *); // virtual inherited (gpures) base (gpures)
    NvBool (*__kchannelShareCallback__)(struct KernelChannel * /*this*/, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *); // virtual inherited (gpures) base (gpures)
    NV_STATUS (*__kchannelGetRegBaseOffsetAndSize__)(struct KernelChannel * /*this*/, struct OBJGPU *, NvU32 *, NvU32 *); // virtual inherited (gpures) base (gpures)
    NV_STATUS (*__kchannelInternalControlForward__)(struct KernelChannel * /*this*/, NvU32, void *, NvU32); // virtual inherited (gpures) base (gpures)
    NvHandle (*__kchannelGetInternalObjectHandle__)(struct KernelChannel * /*this*/); // virtual inherited (gpures) base (gpures)
    NvBool (*__kchannelAccessCallback__)(struct KernelChannel * /*this*/, struct RsClient *, void *, RsAccessRight); // virtual inherited (rmres) base (gpures)
    NV_STATUS (*__kchannelGetMemoryMappingDescriptor__)(struct KernelChannel * /*this*/, struct MEMORY_DESCRIPTOR **); // virtual inherited (rmres) base (gpures)
    NV_STATUS (*__kchannelControlSerialization_Prologue__)(struct KernelChannel * /*this*/, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *); // virtual inherited (rmres) base (gpures)
    void (*__kchannelControlSerialization_Epilogue__)(struct KernelChannel * /*this*/, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *); // virtual inherited (rmres) base (gpures)
    NV_STATUS (*__kchannelControl_Prologue__)(struct KernelChannel * /*this*/, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *); // virtual inherited (rmres) base (gpures)
    void (*__kchannelControl_Epilogue__)(struct KernelChannel * /*this*/, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *); // virtual inherited (rmres) base (gpures)
    NvBool (*__kchannelCanCopy__)(struct KernelChannel * /*this*/); // virtual inherited (res) base (gpures)
    NV_STATUS (*__kchannelIsDuplicate__)(struct KernelChannel * /*this*/, NvHandle, NvBool *); // virtual inherited (res) base (gpures)
    void (*__kchannelPreDestruct__)(struct KernelChannel * /*this*/); // virtual inherited (res) base (gpures)
    NV_STATUS (*__kchannelControlFilter__)(struct KernelChannel * /*this*/, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *); // virtual inherited (res) base (gpures)
    NvBool (*__kchannelIsPartialUnmapSupported__)(struct KernelChannel * /*this*/); // inline virtual inherited (res) base (gpures) body
    NV_STATUS (*__kchannelMapTo__)(struct KernelChannel * /*this*/, RS_RES_MAP_TO_PARAMS *); // virtual inherited (res) base (gpures)
    NV_STATUS (*__kchannelUnmapFrom__)(struct KernelChannel * /*this*/, RS_RES_UNMAP_FROM_PARAMS *); // virtual inherited (res) base (gpures)
    NvU32 (*__kchannelGetRefCount__)(struct KernelChannel * /*this*/); // virtual inherited (res) base (gpures)
    void (*__kchannelAddAdditionalDependants__)(struct RsClient *, struct KernelChannel * /*this*/, RsResourceRef *); // virtual inherited (res) base (gpures)
    PEVENTNOTIFICATION * (*__kchannelGetNotificationListPtr__)(struct KernelChannel * /*this*/); // virtual inherited (notify) base (notify)
    struct NotifShare * (*__kchannelGetNotificationShare__)(struct KernelChannel * /*this*/); // virtual inherited (notify) base (notify)
    void (*__kchannelSetNotificationShare__)(struct KernelChannel * /*this*/, struct NotifShare *); // virtual inherited (notify) base (notify)
    NV_STATUS (*__kchannelUnregisterEvent__)(struct KernelChannel * /*this*/, NvHandle, NvHandle, NvHandle, NvHandle); // virtual inherited (notify) base (notify)
    NV_STATUS (*__kchannelGetOrAllocNotifShare__)(struct KernelChannel * /*this*/, NvHandle, NvHandle, struct NotifShare **); // virtual inherited (notify) base (notify)

    // Data members
    NvU16 nextObjectClassID;
    struct KernelChannel *pNextBindKernelChannel;
    FIFO_MMU_EXCEPTION_DATA *pMmuExceptionData;
    // Error-notifier context; its kind is described by ErrorNotifierType
    // and RPCed to GSP via the internalFlags fields above.
    NvHandle hErrorContext;
    MEMORY_DESCRIPTOR *pErrContextMemDesc;
    ErrorNotifierType errorContextType;
    NvU64 errorContextOffset;
    // ECC error-notifier context (same scheme as the non-ECC fields above)
    NvHandle hEccErrorContext;
    MEMORY_DESCRIPTOR *pEccErrContextMemDesc;
    ErrorNotifierType eccErrorContextType;
    NvU64 eccErrorContextOffset;
    struct UserInfo *pUserInfo;
    NvHandle hVASpace;
    struct OBJVASPACE *pVAS;
    NvHandle hKernelGraphicsContext;
    NvU8 privilegeLevel; // one of NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_PRIVILEGE_*
    NvU32 runlistId;
    NvU32 ChID;
    struct KernelChannelGroupApi *pKernelChannelGroupApi;
    struct KernelCtxShareApi *pKernelCtxShareApi;
    NvU32 refCount;
    NvBool bGspOwned; // NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_GSP_OWNED: channel created by GSP RM
    NvBool bIsContextBound;
    // Per-subdevice arrays (8 entries — presumably the max subdevice count; confirm)
    FIFO_INSTANCE_BLOCK *pFifoHalData[8];
    MEMORY_DESCRIPTOR *pInstSubDeviceMemDesc[8];
    MEMORY_DESCRIPTOR *pUserdSubDeviceMemDesc[8];
    NvBool bClientAllocatedUserD;
    NvU32 swState[8];
    NvBool bIsRcPending[8];
    NvU32 ProcessID;
    NvU32 SubProcessID;
    NvU32 bcStateCurrent;
    NvU32 notifyIndex[3];
    NvU32 *pNotifyActions;
    NvU64 userdLength;
    NvBool bSkipCtxBufferAlloc;
    NvU32 subctxId;
    NvU32 cid;
    struct MIG_INSTANCE_REF partitionRef;
    NvU32 runqueue;
    RM_ENGINE_TYPE engineType;
    // NOTE(review): CC_* fields appear to belong to Confidential Computing
    // (types from cc_drv.h) — confirm semantics against cc_drv.h.
    CC_KMB clientKmb;
    MEMORY_DESCRIPTOR *pEncStatsBufMemDesc;
    CC_CRYPTOBUNDLE_STATS *pEncStatsBuf;
    MEMORY_DESCRIPTOR *pKeyRotationNotifierMemDesc;
    NvNotification *pKeyRotationNotifier;
    NvBool bCCSecureChannel;
    NvBool bUseScrubKey;
};
356
#ifndef __NVOC_CLASS_KernelChannel_TYPEDEF__
#define __NVOC_CLASS_KernelChannel_TYPEDEF__
typedef struct KernelChannel KernelChannel;
#endif /* __NVOC_CLASS_KernelChannel_TYPEDEF__ */

#ifndef __nvoc_class_id_KernelChannel
#define __nvoc_class_id_KernelChannel 0x5d8d70
#endif /* __nvoc_class_id_KernelChannel */

// Casting support
extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelChannel;

// staticCast: compile-time upcast via the precomputed ancestor pointer.
#define __staticCast_KernelChannel(pThis) \
    ((pThis)->__nvoc_pbase_KernelChannel)

// dynamicCast: runtime RTTI-checked cast; stubbed to NULL when this class
// header is disabled.
#ifdef __nvoc_kernel_channel_h_disabled
#define __dynamicCast_KernelChannel(pThis) ((KernelChannel*)NULL)
#else //__nvoc_kernel_channel_h_disabled
#define __dynamicCast_KernelChannel(pThis) \
    ((KernelChannel*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelChannel)))
#endif //__nvoc_kernel_channel_h_disabled

NV_STATUS __nvoc_objCreateDynamic_KernelChannel(KernelChannel**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_KernelChannel(KernelChannel**, Dynamic*, NvU32, CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_KernelChannel(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
    __nvoc_objCreate_KernelChannel((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
384
385
// Wrapper macros
// NVOC dispatch pattern: for each method, <fn>_FNPTR(obj) yields the
// per-object function pointer, <fn>(...) forwards to the generated
// <fn>_DISPATCH helper, and <fn>_HAL aliases the same dispatch for
// HAL-split ("halified") implementations.
#define kchannelMap_FNPTR(pKernelChannel) pKernelChannel->__kchannelMap__
#define kchannelMap(pKernelChannel, pCallContext, pParams, pCpuMapping) kchannelMap_DISPATCH(pKernelChannel, pCallContext, pParams, pCpuMapping)
#define kchannelUnmap_FNPTR(pKernelChannel) pKernelChannel->__kchannelUnmap__
#define kchannelUnmap(pKernelChannel, pCallContext, pCpuMapping) kchannelUnmap_DISPATCH(pKernelChannel, pCallContext, pCpuMapping)
#define kchannelGetMapAddrSpace_FNPTR(pKernelChannel) pKernelChannel->__kchannelGetMapAddrSpace__
#define kchannelGetMapAddrSpace(pKernelChannel, pCallContext, mapFlags, pAddrSpace) kchannelGetMapAddrSpace_DISPATCH(pKernelChannel, pCallContext, mapFlags, pAddrSpace)
#define kchannelGetMemInterMapParams_FNPTR(pKernelChannel) pKernelChannel->__kchannelGetMemInterMapParams__
#define kchannelGetMemInterMapParams(pKernelChannel, pParams) kchannelGetMemInterMapParams_DISPATCH(pKernelChannel, pParams)
#define kchannelCheckMemInterUnmap_FNPTR(pKernelChannel) pKernelChannel->__kchannelCheckMemInterUnmap__
#define kchannelCheckMemInterUnmap(pKernelChannel, bSubdeviceHandleProvided) kchannelCheckMemInterUnmap_DISPATCH(pKernelChannel, bSubdeviceHandleProvided)
#define kchannelCreateUserMemDesc_FNPTR(arg_this) arg_this->__kchannelCreateUserMemDesc__
#define kchannelCreateUserMemDesc(pGpu, arg_this) kchannelCreateUserMemDesc_DISPATCH(pGpu, arg_this)
#define kchannelCreateUserMemDesc_HAL(pGpu, arg_this) kchannelCreateUserMemDesc_DISPATCH(pGpu, arg_this)
#define kchannelIsUserdAddrSizeValid_FNPTR(pKernelChannel) pKernelChannel->__kchannelIsUserdAddrSizeValid__
#define kchannelIsUserdAddrSizeValid(pKernelChannel, userdAddrLo, userdAddrHi) kchannelIsUserdAddrSizeValid_DISPATCH(pKernelChannel, userdAddrLo, userdAddrHi)
#define kchannelIsUserdAddrSizeValid_HAL(pKernelChannel, userdAddrLo, userdAddrHi) kchannelIsUserdAddrSizeValid_DISPATCH(pKernelChannel, userdAddrLo, userdAddrHi)
#define kchannelCtrlCmdResetIsolatedChannel_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdResetIsolatedChannel__
#define kchannelCtrlCmdResetIsolatedChannel(pKernelChannel, pResetParams) kchannelCtrlCmdResetIsolatedChannel_DISPATCH(pKernelChannel, pResetParams)
#define kchannelCtrlCmdInternalResetIsolatedChannel_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdInternalResetIsolatedChannel__
#define kchannelCtrlCmdInternalResetIsolatedChannel(pKernelChannel, pResetParams) kchannelCtrlCmdInternalResetIsolatedChannel_DISPATCH(pKernelChannel, pResetParams)
#define kchannelCtrlCmdGetClassEngineid_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdGetClassEngineid__
#define kchannelCtrlCmdGetClassEngineid(pKernelChannel, pParams) kchannelCtrlCmdGetClassEngineid_DISPATCH(pKernelChannel, pParams)
#define kchannelCtrlCmdResetChannel_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdResetChannel__
#define kchannelCtrlCmdResetChannel(pKernelChannel, pResetChannelParams) kchannelCtrlCmdResetChannel_DISPATCH(pKernelChannel, pResetChannelParams)
#define kchannelCtrlCmdGetDeferRCState_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdGetDeferRCState__
#define kchannelCtrlCmdGetDeferRCState(pKernelChannel, pStateParams) kchannelCtrlCmdGetDeferRCState_DISPATCH(pKernelChannel, pStateParams)
#define kchannelCtrlCmdGetMmuFaultInfo_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdGetMmuFaultInfo__
#define kchannelCtrlCmdGetMmuFaultInfo(pKernelChannel, pFaultInfoParams) kchannelCtrlCmdGetMmuFaultInfo_DISPATCH(pKernelChannel, pFaultInfoParams)
#define kchannelCtrlCmdEventSetNotification_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdEventSetNotification__
#define kchannelCtrlCmdEventSetNotification(pKernelChannel, pSetEventParams) kchannelCtrlCmdEventSetNotification_DISPATCH(pKernelChannel, pSetEventParams)
#define kchannelCtrlCmdGpFifoSchedule_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdGpFifoSchedule__
#define kchannelCtrlCmdGpFifoSchedule(pKernelChannel, pSchedParams) kchannelCtrlCmdGpFifoSchedule_DISPATCH(pKernelChannel, pSchedParams)
#define kchannelCtrlCmdBind_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdBind__
#define kchannelCtrlCmdBind(pKernelChannel, pParams) kchannelCtrlCmdBind_DISPATCH(pKernelChannel, pParams)
#define kchannelCtrlCmdSetErrorNotifier_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdSetErrorNotifier__
#define kchannelCtrlCmdSetErrorNotifier(pKernelChannel, pSetErrorNotifierParams) kchannelCtrlCmdSetErrorNotifier_DISPATCH(pKernelChannel, pSetErrorNotifierParams)
#define kchannelCtrlCmdSetInterleaveLevel_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdSetInterleaveLevel__
#define kchannelCtrlCmdSetInterleaveLevel(pKernelChannel, pParams) kchannelCtrlCmdSetInterleaveLevel_DISPATCH(pKernelChannel, pParams)
#define kchannelCtrlCmdGetContextId_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdGetContextId__
#define kchannelCtrlCmdGetContextId(pKernelChannel, pParams) kchannelCtrlCmdGetContextId_DISPATCH(pKernelChannel, pParams)
#define kchannelCtrlCmdRestartRunlist_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdRestartRunlist__
#define kchannelCtrlCmdRestartRunlist(pKernelChannel, pParams) kchannelCtrlCmdRestartRunlist_DISPATCH(pKernelChannel, pParams)
#define kchannelCtrlCmdGetEngineCtxSize_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdGetEngineCtxSize__
#define kchannelCtrlCmdGetEngineCtxSize(pKernelChannel, pCtxSizeParams) kchannelCtrlCmdGetEngineCtxSize_DISPATCH(pKernelChannel, pCtxSizeParams)
#define kchannelCtrlCmdGetEngineCtxData_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdGetEngineCtxData__
#define kchannelCtrlCmdGetEngineCtxData(pKernelChannel, pCtxBuffParams) kchannelCtrlCmdGetEngineCtxData_DISPATCH(pKernelChannel, pCtxBuffParams)
#define kchannelCtrlCmdMigrateEngineCtxData_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdMigrateEngineCtxData__
#define kchannelCtrlCmdMigrateEngineCtxData(pKernelChannel, pCtxBuffParams) kchannelCtrlCmdMigrateEngineCtxData_DISPATCH(pKernelChannel, pCtxBuffParams)
#define kchannelCtrlCmdGetEngineCtxState_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdGetEngineCtxState__
#define kchannelCtrlCmdGetEngineCtxState(pKernelChannel, pCtxStateParams) kchannelCtrlCmdGetEngineCtxState_DISPATCH(pKernelChannel, pCtxStateParams)
#define kchannelCtrlCmdGetChannelHwState_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdGetChannelHwState__
#define kchannelCtrlCmdGetChannelHwState(pKernelChannel, pParams) kchannelCtrlCmdGetChannelHwState_DISPATCH(pKernelChannel, pParams)
#define kchannelCtrlCmdSetChannelHwState_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdSetChannelHwState__
#define kchannelCtrlCmdSetChannelHwState(pKernelChannel, pParams) kchannelCtrlCmdSetChannelHwState_DISPATCH(pKernelChannel, pParams)
#define kchannelCtrlCmdSaveEngineCtxData_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdSaveEngineCtxData__
#define kchannelCtrlCmdSaveEngineCtxData(pKernelChannel, pCtxBuffParams) kchannelCtrlCmdSaveEngineCtxData_DISPATCH(pKernelChannel, pCtxBuffParams)
#define kchannelCtrlCmdRestoreEngineCtxData_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdRestoreEngineCtxData__
#define kchannelCtrlCmdRestoreEngineCtxData(pKernelChannel, pCtxBuffParams) kchannelCtrlCmdRestoreEngineCtxData_DISPATCH(pKernelChannel, pCtxBuffParams)
#define kchannelCtrlCmdGpfifoGetWorkSubmitToken_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdGpfifoGetWorkSubmitToken__
#define kchannelCtrlCmdGpfifoGetWorkSubmitToken(pKernelChannel, pTokenParams) kchannelCtrlCmdGpfifoGetWorkSubmitToken_DISPATCH(pKernelChannel, pTokenParams)
#define kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer__
#define kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer(pKernelChannel, pFaultMthdBufferParams) kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer_DISPATCH(pKernelChannel, pFaultMthdBufferParams)
#define kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex__
#define kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex(pKernelChannel, pParams) kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex_DISPATCH(pKernelChannel, pParams)
#define kchannelCtrlCmdStopChannel_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdStopChannel__
#define kchannelCtrlCmdStopChannel(pKernelChannel, pStopChannelParams) kchannelCtrlCmdStopChannel_DISPATCH(pKernelChannel, pStopChannelParams)
#define kchannelCtrlCmdGetKmb_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlCmdGetKmb__
#define kchannelCtrlCmdGetKmb(pKernelChannel, pGetKmbParams) kchannelCtrlCmdGetKmb_DISPATCH(pKernelChannel, pGetKmbParams)
#define kchannelCtrlCmdGetKmb_HAL(pKernelChannel, pGetKmbParams) kchannelCtrlCmdGetKmb_DISPATCH(pKernelChannel, pGetKmbParams)
#define kchannelCtrlRotateSecureChannelIv_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlRotateSecureChannelIv__
#define kchannelCtrlRotateSecureChannelIv(pKernelChannel, pRotateIvParams) kchannelCtrlRotateSecureChannelIv_DISPATCH(pKernelChannel, pRotateIvParams)
#define kchannelCtrlRotateSecureChannelIv_HAL(pKernelChannel, pRotateIvParams) kchannelCtrlRotateSecureChannelIv_DISPATCH(pKernelChannel, pRotateIvParams)
#define kchannelSetEncryptionStatsBuffer_FNPTR(pKernelChannel) pKernelChannel->__kchannelSetEncryptionStatsBuffer__
#define kchannelSetEncryptionStatsBuffer(pGpu, pKernelChannel, pMemDesc, bSet) kchannelSetEncryptionStatsBuffer_DISPATCH(pGpu, pKernelChannel, pMemDesc, bSet)
#define kchannelSetEncryptionStatsBuffer_HAL(pGpu, pKernelChannel, pMemDesc, bSet) kchannelSetEncryptionStatsBuffer_DISPATCH(pGpu, pKernelChannel, pMemDesc, bSet)
#define kchannelCtrlGetTpcPartitionMode_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlGetTpcPartitionMode__
#define kchannelCtrlGetTpcPartitionMode(pKernelChannel, pParams) kchannelCtrlGetTpcPartitionMode_DISPATCH(pKernelChannel, pParams)
#define kchannelCtrlSetTpcPartitionMode_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlSetTpcPartitionMode__
#define kchannelCtrlSetTpcPartitionMode(pKernelChannel, pParams) kchannelCtrlSetTpcPartitionMode_DISPATCH(pKernelChannel, pParams)
#define kchannelCtrlGetMMUDebugMode_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlGetMMUDebugMode__
#define kchannelCtrlGetMMUDebugMode(pKernelChannel, pParams) kchannelCtrlGetMMUDebugMode_DISPATCH(pKernelChannel, pParams)
#define kchannelCtrlProgramVidmemPromote_FNPTR(pKernelChannel) pKernelChannel->__kchannelCtrlProgramVidmemPromote__
#define kchannelCtrlProgramVidmemPromote(pKernelChannel, pParams) kchannelCtrlProgramVidmemPromote_DISPATCH(pKernelChannel, pParams)
#define kchannelRetrieveKmb_FNPTR(pKernelChannel) pKernelChannel->__kchannelRetrieveKmb__
#define kchannelRetrieveKmb(pGpu, pKernelChannel, rotateOperation, includeSecrets, keyMaterialBundle) kchannelRetrieveKmb_DISPATCH(pGpu, pKernelChannel, rotateOperation, includeSecrets, keyMaterialBundle)
#define kchannelRetrieveKmb_HAL(pGpu, pKernelChannel, rotateOperation, includeSecrets, keyMaterialBundle) kchannelRetrieveKmb_DISPATCH(pGpu, pKernelChannel, rotateOperation, includeSecrets, keyMaterialBundle)
#define kchannelSetKeyRotationNotifier_FNPTR(pKernelChannel) pKernelChannel->__kchannelSetKeyRotationNotifier__
#define kchannelSetKeyRotationNotifier(pGpu, pKernelChannel, bSet) kchannelSetKeyRotationNotifier_DISPATCH(pGpu, pKernelChannel, bSet)
#define kchannelSetKeyRotationNotifier_HAL(pGpu, pKernelChannel, bSet) kchannelSetKeyRotationNotifier_DISPATCH(pGpu, pKernelChannel, bSet)
476 #define kchannelControl_FNPTR(pGpuResource) pGpuResource->__nvoc_base_GpuResource.__gpuresControl__
477 #define kchannelControl(pGpuResource, pCallContext, pParams) kchannelControl_DISPATCH(pGpuResource, pCallContext, pParams)
478 #define kchannelShareCallback_FNPTR(pGpuResource) pGpuResource->__nvoc_base_GpuResource.__gpuresShareCallback__
479 #define kchannelShareCallback(pGpuResource, pInvokingClient, pParentRef, pSharePolicy) kchannelShareCallback_DISPATCH(pGpuResource, pInvokingClient, pParentRef, pSharePolicy)
480 #define kchannelGetRegBaseOffsetAndSize_FNPTR(pGpuResource) pGpuResource->__nvoc_base_GpuResource.__gpuresGetRegBaseOffsetAndSize__
481 #define kchannelGetRegBaseOffsetAndSize(pGpuResource, pGpu, pOffset, pSize) kchannelGetRegBaseOffsetAndSize_DISPATCH(pGpuResource, pGpu, pOffset, pSize)
482 #define kchannelInternalControlForward_FNPTR(pGpuResource) pGpuResource->__nvoc_base_GpuResource.__gpuresInternalControlForward__
483 #define kchannelInternalControlForward(pGpuResource, command, pParams, size) kchannelInternalControlForward_DISPATCH(pGpuResource, command, pParams, size)
484 #define kchannelGetInternalObjectHandle_FNPTR(pGpuResource) pGpuResource->__nvoc_base_GpuResource.__gpuresGetInternalObjectHandle__
485 #define kchannelGetInternalObjectHandle(pGpuResource) kchannelGetInternalObjectHandle_DISPATCH(pGpuResource)
486 #define kchannelAccessCallback_FNPTR(pResource) pResource->__nvoc_base_GpuResource.__nvoc_base_RmResource.__rmresAccessCallback__
487 #define kchannelAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) kchannelAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
488 #define kchannelGetMemoryMappingDescriptor_FNPTR(pRmResource) pRmResource->__nvoc_base_GpuResource.__nvoc_base_RmResource.__rmresGetMemoryMappingDescriptor__
489 #define kchannelGetMemoryMappingDescriptor(pRmResource, ppMemDesc) kchannelGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
490 #define kchannelControlSerialization_Prologue_FNPTR(pResource) pResource->__nvoc_base_GpuResource.__nvoc_base_RmResource.__rmresControlSerialization_Prologue__
491 #define kchannelControlSerialization_Prologue(pResource, pCallContext, pParams) kchannelControlSerialization_Prologue_DISPATCH(pResource, pCallContext, pParams)
492 #define kchannelControlSerialization_Epilogue_FNPTR(pResource) pResource->__nvoc_base_GpuResource.__nvoc_base_RmResource.__rmresControlSerialization_Epilogue__
493 #define kchannelControlSerialization_Epilogue(pResource, pCallContext, pParams) kchannelControlSerialization_Epilogue_DISPATCH(pResource, pCallContext, pParams)
494 #define kchannelControl_Prologue_FNPTR(pResource) pResource->__nvoc_base_GpuResource.__nvoc_base_RmResource.__rmresControl_Prologue__
495 #define kchannelControl_Prologue(pResource, pCallContext, pParams) kchannelControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
496 #define kchannelControl_Epilogue_FNPTR(pResource) pResource->__nvoc_base_GpuResource.__nvoc_base_RmResource.__rmresControl_Epilogue__
497 #define kchannelControl_Epilogue(pResource, pCallContext, pParams) kchannelControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
498 #define kchannelCanCopy_FNPTR(pResource) pResource->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__resCanCopy__
499 #define kchannelCanCopy(pResource) kchannelCanCopy_DISPATCH(pResource)
500 #define kchannelIsDuplicate_FNPTR(pResource) pResource->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__resIsDuplicate__
501 #define kchannelIsDuplicate(pResource, hMemory, pDuplicate) kchannelIsDuplicate_DISPATCH(pResource, hMemory, pDuplicate)
502 #define kchannelPreDestruct_FNPTR(pResource) pResource->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__resPreDestruct__
503 #define kchannelPreDestruct(pResource) kchannelPreDestruct_DISPATCH(pResource)
504 #define kchannelControlFilter_FNPTR(pResource) pResource->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__resControlFilter__
505 #define kchannelControlFilter(pResource, pCallContext, pParams) kchannelControlFilter_DISPATCH(pResource, pCallContext, pParams)
506 #define kchannelIsPartialUnmapSupported_FNPTR(pResource) pResource->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__resIsPartialUnmapSupported__
507 #define kchannelIsPartialUnmapSupported(pResource) kchannelIsPartialUnmapSupported_DISPATCH(pResource)
508 #define kchannelMapTo_FNPTR(pResource) pResource->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__resMapTo__
509 #define kchannelMapTo(pResource, pParams) kchannelMapTo_DISPATCH(pResource, pParams)
510 #define kchannelUnmapFrom_FNPTR(pResource) pResource->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__resUnmapFrom__
511 #define kchannelUnmapFrom(pResource, pParams) kchannelUnmapFrom_DISPATCH(pResource, pParams)
512 #define kchannelGetRefCount_FNPTR(pResource) pResource->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__resGetRefCount__
513 #define kchannelGetRefCount(pResource) kchannelGetRefCount_DISPATCH(pResource)
514 #define kchannelAddAdditionalDependants_FNPTR(pResource) pResource->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__resAddAdditionalDependants__
515 #define kchannelAddAdditionalDependants(pClient, pResource, pReference) kchannelAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
516 #define kchannelGetNotificationListPtr_FNPTR(pNotifier) pNotifier->__nvoc_base_Notifier.__notifyGetNotificationListPtr__
517 #define kchannelGetNotificationListPtr(pNotifier) kchannelGetNotificationListPtr_DISPATCH(pNotifier)
518 #define kchannelGetNotificationShare_FNPTR(pNotifier) pNotifier->__nvoc_base_Notifier.__notifyGetNotificationShare__
519 #define kchannelGetNotificationShare(pNotifier) kchannelGetNotificationShare_DISPATCH(pNotifier)
520 #define kchannelSetNotificationShare_FNPTR(pNotifier) pNotifier->__nvoc_base_Notifier.__notifySetNotificationShare__
521 #define kchannelSetNotificationShare(pNotifier, pNotifShare) kchannelSetNotificationShare_DISPATCH(pNotifier, pNotifShare)
522 #define kchannelUnregisterEvent_FNPTR(pNotifier) pNotifier->__nvoc_base_Notifier.__notifyUnregisterEvent__
523 #define kchannelUnregisterEvent(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent) kchannelUnregisterEvent_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent)
524 #define kchannelGetOrAllocNotifShare_FNPTR(pNotifier) pNotifier->__nvoc_base_Notifier.__notifyGetOrAllocNotifShare__
525 #define kchannelGetOrAllocNotifShare(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare) kchannelGetOrAllocNotifShare_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare)
526
// Dispatch functions
//
// NVOC-generated virtual-call thunks: each *_DISPATCH helper invokes the
// implementation currently installed in the object's function-pointer slot
// (e.g. __kchannelMap__) so that callers never touch the slots directly.
// This first group covers CPU mapping and memory-descriptor virtuals.
static inline NV_STATUS kchannelMap_DISPATCH(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
    return pKernelChannel->__kchannelMap__(pKernelChannel, pCallContext, pParams, pCpuMapping);
}

static inline NV_STATUS kchannelUnmap_DISPATCH(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
    return pKernelChannel->__kchannelUnmap__(pKernelChannel, pCallContext, pCpuMapping);
}

static inline NV_STATUS kchannelGetMapAddrSpace_DISPATCH(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
    return pKernelChannel->__kchannelGetMapAddrSpace__(pKernelChannel, pCallContext, mapFlags, pAddrSpace);
}

static inline NV_STATUS kchannelGetMemInterMapParams_DISPATCH(struct KernelChannel *pKernelChannel, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
    return pKernelChannel->__kchannelGetMemInterMapParams__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCheckMemInterUnmap_DISPATCH(struct KernelChannel *pKernelChannel, NvBool bSubdeviceHandleProvided) {
    return pKernelChannel->__kchannelCheckMemInterUnmap__(pKernelChannel, bSubdeviceHandleProvided);
}

// Note: NVOC names the object parameter "arg_this" for this virtual.
static inline NV_STATUS kchannelCreateUserMemDesc_DISPATCH(struct OBJGPU *pGpu, struct KernelChannel *arg_this) {
    return arg_this->__kchannelCreateUserMemDesc__(pGpu, arg_this);
}

// Validates a USERD address split into low/high 32-bit halves.
static inline NvBool kchannelIsUserdAddrSizeValid_DISPATCH(struct KernelChannel *pKernelChannel, NvU32 userdAddrLo, NvU32 userdAddrHi) {
    return pKernelChannel->__kchannelIsUserdAddrSizeValid__(pKernelChannel, userdAddrLo, userdAddrHi);
}
555
// RM control-command dispatch thunks. The parameter-struct prefix encodes the
// class that exports each control (NV506F/NV906F/NVA06F/NVB06F/NVC36F/NVC56F).
static inline NV_STATUS kchannelCtrlCmdResetIsolatedChannel_DISPATCH(struct KernelChannel *pKernelChannel, NV506F_CTRL_CMD_RESET_ISOLATED_CHANNEL_PARAMS *pResetParams) {
    return pKernelChannel->__kchannelCtrlCmdResetIsolatedChannel__(pKernelChannel, pResetParams);
}

static inline NV_STATUS kchannelCtrlCmdInternalResetIsolatedChannel_DISPATCH(struct KernelChannel *pKernelChannel, NV506F_CTRL_CMD_INTERNAL_RESET_ISOLATED_CHANNEL_PARAMS *pResetParams) {
    return pKernelChannel->__kchannelCtrlCmdInternalResetIsolatedChannel__(pKernelChannel, pResetParams);
}

static inline NV_STATUS kchannelCtrlCmdGetClassEngineid_DISPATCH(struct KernelChannel *pKernelChannel, NV906F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdGetClassEngineid__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdResetChannel_DISPATCH(struct KernelChannel *pKernelChannel, NV906F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
    return pKernelChannel->__kchannelCtrlCmdResetChannel__(pKernelChannel, pResetChannelParams);
}

static inline NV_STATUS kchannelCtrlCmdGetDeferRCState_DISPATCH(struct KernelChannel *pKernelChannel, NV906F_CTRL_CMD_GET_DEFER_RC_STATE_PARAMS *pStateParams) {
    return pKernelChannel->__kchannelCtrlCmdGetDeferRCState__(pKernelChannel, pStateParams);
}

static inline NV_STATUS kchannelCtrlCmdGetMmuFaultInfo_DISPATCH(struct KernelChannel *pKernelChannel, NV906F_CTRL_GET_MMU_FAULT_INFO_PARAMS *pFaultInfoParams) {
    return pKernelChannel->__kchannelCtrlCmdGetMmuFaultInfo__(pKernelChannel, pFaultInfoParams);
}

static inline NV_STATUS kchannelCtrlCmdEventSetNotification_DISPATCH(struct KernelChannel *pKernelChannel, NV906F_CTRL_EVENT_SET_NOTIFICATION_PARAMS *pSetEventParams) {
    return pKernelChannel->__kchannelCtrlCmdEventSetNotification__(pKernelChannel, pSetEventParams);
}

static inline NV_STATUS kchannelCtrlCmdGpFifoSchedule_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
    return pKernelChannel->__kchannelCtrlCmdGpFifoSchedule__(pKernelChannel, pSchedParams);
}

static inline NV_STATUS kchannelCtrlCmdBind_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_BIND_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdBind__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdSetErrorNotifier_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_SET_ERROR_NOTIFIER_PARAMS *pSetErrorNotifierParams) {
    return pKernelChannel->__kchannelCtrlCmdSetErrorNotifier__(pKernelChannel, pSetErrorNotifierParams);
}

static inline NV_STATUS kchannelCtrlCmdSetInterleaveLevel_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_INTERLEAVE_LEVEL_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdSetInterleaveLevel__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdGetContextId_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_GET_CONTEXT_ID_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdGetContextId__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdRestartRunlist_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_RESTART_RUNLIST_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdRestartRunlist__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdGetEngineCtxSize_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_SIZE_PARAMS *pCtxSizeParams) {
    return pKernelChannel->__kchannelCtrlCmdGetEngineCtxSize__(pKernelChannel, pCtxSizeParams);
}

static inline NV_STATUS kchannelCtrlCmdGetEngineCtxData_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams) {
    return pKernelChannel->__kchannelCtrlCmdGetEngineCtxData__(pKernelChannel, pCtxBuffParams);
}

static inline NV_STATUS kchannelCtrlCmdMigrateEngineCtxData_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_MIGRATE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams) {
    return pKernelChannel->__kchannelCtrlCmdMigrateEngineCtxData__(pKernelChannel, pCtxBuffParams);
}

static inline NV_STATUS kchannelCtrlCmdGetEngineCtxState_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_STATE_PARAMS *pCtxStateParams) {
    return pKernelChannel->__kchannelCtrlCmdGetEngineCtxState__(pKernelChannel, pCtxStateParams);
}

static inline NV_STATUS kchannelCtrlCmdGetChannelHwState_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_CHANNEL_HW_STATE_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdGetChannelHwState__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdSetChannelHwState_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_SET_CHANNEL_HW_STATE_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdSetChannelHwState__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdSaveEngineCtxData_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_SAVE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams) {
    return pKernelChannel->__kchannelCtrlCmdSaveEngineCtxData__(pKernelChannel, pCtxBuffParams);
}

static inline NV_STATUS kchannelCtrlCmdRestoreEngineCtxData_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_RESTORE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams) {
    return pKernelChannel->__kchannelCtrlCmdRestoreEngineCtxData__(pKernelChannel, pCtxBuffParams);
}

static inline NV_STATUS kchannelCtrlCmdGpfifoGetWorkSubmitToken_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_CMD_GPFIFO_GET_WORK_SUBMIT_TOKEN_PARAMS *pTokenParams) {
    return pKernelChannel->__kchannelCtrlCmdGpfifoGetWorkSubmitToken__(pKernelChannel, pTokenParams);
}

static inline NV_STATUS kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GPFIFO_UPDATE_FAULT_METHOD_BUFFER_PARAMS *pFaultMthdBufferParams) {
    return pKernelChannel->__kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer__(pKernelChannel, pFaultMthdBufferParams);
}

static inline NV_STATUS kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GPFIFO_SET_WORK_SUBMIT_TOKEN_NOTIF_INDEX_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlCmdStopChannel_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_STOP_CHANNEL_PARAMS *pStopChannelParams) {
    return pKernelChannel->__kchannelCtrlCmdStopChannel__(pKernelChannel, pStopChannelParams);
}

static inline NV_STATUS kchannelCtrlCmdGetKmb_DISPATCH(struct KernelChannel *pKernelChannel, NVC56F_CTRL_CMD_GET_KMB_PARAMS *pGetKmbParams) {
    return pKernelChannel->__kchannelCtrlCmdGetKmb__(pKernelChannel, pGetKmbParams);
}
659
// Confidential-Compute (IV rotation, KMB, encryption stats, key-rotation
// notifier) and NV0090 engine-control dispatch thunks.
static inline NV_STATUS kchannelCtrlRotateSecureChannelIv_DISPATCH(struct KernelChannel *pKernelChannel, NVC56F_CTRL_ROTATE_SECURE_CHANNEL_IV_PARAMS *pRotateIvParams) {
    return pKernelChannel->__kchannelCtrlRotateSecureChannelIv__(pKernelChannel, pRotateIvParams);
}

// bSet selects install (NV_TRUE) vs. teardown (NV_FALSE) of the buffer —
// NOTE(review): inferred from the macro shape; confirm in the implementation.
static inline NV_STATUS kchannelSetEncryptionStatsBuffer_DISPATCH(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, MEMORY_DESCRIPTOR *pMemDesc, NvBool bSet) {
    return pKernelChannel->__kchannelSetEncryptionStatsBuffer__(pGpu, pKernelChannel, pMemDesc, bSet);
}

static inline NV_STATUS kchannelCtrlGetTpcPartitionMode_DISPATCH(struct KernelChannel *pKernelChannel, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlGetTpcPartitionMode__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlSetTpcPartitionMode_DISPATCH(struct KernelChannel *pKernelChannel, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlSetTpcPartitionMode__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlGetMMUDebugMode_DISPATCH(struct KernelChannel *pKernelChannel, NV0090_CTRL_GET_MMU_DEBUG_MODE_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlGetMMUDebugMode__(pKernelChannel, pParams);
}

static inline NV_STATUS kchannelCtrlProgramVidmemPromote_DISPATCH(struct KernelChannel *pKernelChannel, NV0090_CTRL_PROGRAM_VIDMEM_PROMOTE_PARAMS *pParams) {
    return pKernelChannel->__kchannelCtrlProgramVidmemPromote__(pKernelChannel, pParams);
}

// Fetches the channel's key material bundle into keyMaterialBundle.
static inline NV_STATUS kchannelRetrieveKmb_DISPATCH(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, ROTATE_IV_TYPE rotateOperation, NvBool includeSecrets, CC_KMB *keyMaterialBundle) {
    return pKernelChannel->__kchannelRetrieveKmb__(pGpu, pKernelChannel, rotateOperation, includeSecrets, keyMaterialBundle);
}

static inline NV_STATUS kchannelSetKeyRotationNotifier_DISPATCH(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bSet) {
    return pKernelChannel->__kchannelSetKeyRotationNotifier__(pGpu, pKernelChannel, bSet);
}
691
// Thunks for virtuals inherited from the GpuResource/RmResource bases; the
// corresponding _FNPTR macros above show these slots live in the embedded
// __nvoc_base_GpuResource structure.
static inline NV_STATUS kchannelControl_DISPATCH(struct KernelChannel *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pGpuResource->__kchannelControl__(pGpuResource, pCallContext, pParams);
}

static inline NvBool kchannelShareCallback_DISPATCH(struct KernelChannel *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
    return pGpuResource->__kchannelShareCallback__(pGpuResource, pInvokingClient, pParentRef, pSharePolicy);
}

static inline NV_STATUS kchannelGetRegBaseOffsetAndSize_DISPATCH(struct KernelChannel *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
    return pGpuResource->__kchannelGetRegBaseOffsetAndSize__(pGpuResource, pGpu, pOffset, pSize);
}

static inline NV_STATUS kchannelInternalControlForward_DISPATCH(struct KernelChannel *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
    return pGpuResource->__kchannelInternalControlForward__(pGpuResource, command, pParams, size);
}

static inline NvHandle kchannelGetInternalObjectHandle_DISPATCH(struct KernelChannel *pGpuResource) {
    return pGpuResource->__kchannelGetInternalObjectHandle__(pGpuResource);
}

static inline NvBool kchannelAccessCallback_DISPATCH(struct KernelChannel *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
    return pResource->__kchannelAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
715
// Thunks for virtuals inherited from RmResource/RsResource. The Epilogue and
// PreDestruct variants return void; all others propagate the slot's result.
static inline NV_STATUS kchannelGetMemoryMappingDescriptor_DISPATCH(struct KernelChannel *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
    return pRmResource->__kchannelGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}

static inline NV_STATUS kchannelControlSerialization_Prologue_DISPATCH(struct KernelChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pResource->__kchannelControlSerialization_Prologue__(pResource, pCallContext, pParams);
}

static inline void kchannelControlSerialization_Epilogue_DISPATCH(struct KernelChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    pResource->__kchannelControlSerialization_Epilogue__(pResource, pCallContext, pParams);
}

static inline NV_STATUS kchannelControl_Prologue_DISPATCH(struct KernelChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pResource->__kchannelControl_Prologue__(pResource, pCallContext, pParams);
}

static inline void kchannelControl_Epilogue_DISPATCH(struct KernelChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    pResource->__kchannelControl_Epilogue__(pResource, pCallContext, pParams);
}

static inline NvBool kchannelCanCopy_DISPATCH(struct KernelChannel *pResource) {
    return pResource->__kchannelCanCopy__(pResource);
}

static inline NV_STATUS kchannelIsDuplicate_DISPATCH(struct KernelChannel *pResource, NvHandle hMemory, NvBool *pDuplicate) {
    return pResource->__kchannelIsDuplicate__(pResource, hMemory, pDuplicate);
}

static inline void kchannelPreDestruct_DISPATCH(struct KernelChannel *pResource) {
    pResource->__kchannelPreDestruct__(pResource);
}

static inline NV_STATUS kchannelControlFilter_DISPATCH(struct KernelChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pResource->__kchannelControlFilter__(pResource, pCallContext, pParams);
}

static inline NvBool kchannelIsPartialUnmapSupported_DISPATCH(struct KernelChannel *pResource) {
    return pResource->__kchannelIsPartialUnmapSupported__(pResource);
}

static inline NV_STATUS kchannelMapTo_DISPATCH(struct KernelChannel *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
    return pResource->__kchannelMapTo__(pResource, pParams);
}

static inline NV_STATUS kchannelUnmapFrom_DISPATCH(struct KernelChannel *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
    return pResource->__kchannelUnmapFrom__(pResource, pParams);
}

static inline NvU32 kchannelGetRefCount_DISPATCH(struct KernelChannel *pResource) {
    return pResource->__kchannelGetRefCount__(pResource);
}

// Only thunk in this group whose first parameter is the client, not the
// resource; the dispatch slot is still read off pResource.
static inline void kchannelAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct KernelChannel *pResource, RsResourceRef *pReference) {
    pResource->__kchannelAddAdditionalDependants__(pClient, pResource, pReference);
}
771
kchannelGetNotificationListPtr_DISPATCH(struct KernelChannel * pNotifier)772 static inline PEVENTNOTIFICATION * kchannelGetNotificationListPtr_DISPATCH(struct KernelChannel *pNotifier) {
773 return pNotifier->__kchannelGetNotificationListPtr__(pNotifier);
774 }
775
kchannelGetNotificationShare_DISPATCH(struct KernelChannel * pNotifier)776 static inline struct NotifShare * kchannelGetNotificationShare_DISPATCH(struct KernelChannel *pNotifier) {
777 return pNotifier->__kchannelGetNotificationShare__(pNotifier);
778 }
779
kchannelSetNotificationShare_DISPATCH(struct KernelChannel * pNotifier,struct NotifShare * pNotifShare)780 static inline void kchannelSetNotificationShare_DISPATCH(struct KernelChannel *pNotifier, struct NotifShare *pNotifShare) {
781 pNotifier->__kchannelSetNotificationShare__(pNotifier, pNotifShare);
782 }
783
kchannelUnregisterEvent_DISPATCH(struct KernelChannel * pNotifier,NvHandle hNotifierClient,NvHandle hNotifierResource,NvHandle hEventClient,NvHandle hEvent)784 static inline NV_STATUS kchannelUnregisterEvent_DISPATCH(struct KernelChannel *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, NvHandle hEventClient, NvHandle hEvent) {
785 return pNotifier->__kchannelUnregisterEvent__(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent);
786 }
787
kchannelGetOrAllocNotifShare_DISPATCH(struct KernelChannel * pNotifier,NvHandle hNotifierClient,NvHandle hNotifierResource,struct NotifShare ** ppNotifShare)788 static inline NV_STATUS kchannelGetOrAllocNotifShare_DISPATCH(struct KernelChannel *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, struct NotifShare **ppNotifShare) {
789 return pNotifier->__kchannelGetOrAllocNotifShare__(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare);
790 }
791
//
// kchannelNotifyRc: resolves to kchannelNotifyRc_IMPL, or to an asserting
// NV_ERR_NOT_SUPPORTED stub when KernelChannel support is compiled out
// (__nvoc_kernel_channel_h_disabled). The _HAL alias maps to the same
// implementation for every HAL.
//
NV_STATUS kchannelNotifyRc_IMPL(struct KernelChannel *pKernelChannel);


#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelNotifyRc(struct KernelChannel *pKernelChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelNotifyRc(pKernelChannel) kchannelNotifyRc_IMPL(pKernelChannel)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelNotifyRc_HAL(pKernelChannel) kchannelNotifyRc(pKernelChannel)

//
// kchannelIsSchedulable: same compile-out pattern; the disabled stub
// reports NV_FALSE.
//
NvBool kchannelIsSchedulable_IMPL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);


#ifdef __nvoc_kernel_channel_h_disabled
static inline NvBool kchannelIsSchedulable(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelIsSchedulable(pGpu, pKernelChannel) kchannelIsSchedulable_IMPL(pGpu, pKernelChannel)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelIsSchedulable_HAL(pGpu, pKernelChannel) kchannelIsSchedulable(pGpu, pKernelChannel)
819
//
// kchannelAllocMem / kchannelDestroyMem: HAL entry points whose shared
// implementation is the _GM107 variant. When KernelChannel support is
// compiled out, an asserting stub is used instead.
//
NV_STATUS kchannelAllocMem_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 Flags, NvU32 verifFlags);


#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelAllocMem(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 Flags, NvU32 verifFlags) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelAllocMem(pGpu, pKernelChannel, Flags, verifFlags) kchannelAllocMem_GM107(pGpu, pKernelChannel, Flags, verifFlags)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelAllocMem_HAL(pGpu, pKernelChannel, Flags, verifFlags) kchannelAllocMem(pGpu, pKernelChannel, Flags, verifFlags)

void kchannelDestroyMem_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);


#ifdef __nvoc_kernel_channel_h_disabled
static inline void kchannelDestroyMem(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelDestroyMem(pGpu, pKernelChannel) kchannelDestroyMem_GM107(pGpu, pKernelChannel)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelDestroyMem_HAL(pGpu, pKernelChannel) kchannelDestroyMem(pGpu, pKernelChannel)

//
// kchannelGetChannelPhysicalState: kernel-RM implementation (_KERNEL
// suffix); fills NV208F_CTRL_FIFO_GET_CHANNEL_STATE_PARAMS.
//
NV_STATUS kchannelGetChannelPhysicalState_KERNEL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NV208F_CTRL_FIFO_GET_CHANNEL_STATE_PARAMS *pChannelStateParams);


#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelGetChannelPhysicalState(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NV208F_CTRL_FIFO_GET_CHANNEL_STATE_PARAMS *pChannelStateParams) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelGetChannelPhysicalState(pGpu, pKernelChannel, pChannelStateParams) kchannelGetChannelPhysicalState_KERNEL(pGpu, pKernelChannel, pChannelStateParams)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelGetChannelPhysicalState_HAL(pGpu, pKernelChannel, pChannelStateParams) kchannelGetChannelPhysicalState(pGpu, pKernelChannel, pChannelStateParams)
860
// kchannelEmbedRunlistID: the selected variant (_13cd8d) is an
// "unsupported" implementation — it asserts at precompile level and
// returns 0.
static inline NvU32 kchannelEmbedRunlistID_13cd8d(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
    NV_ASSERT_PRECOMP(0);
    return 0;
}


#ifdef __nvoc_kernel_channel_h_disabled
static inline NvU32 kchannelEmbedRunlistID(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return 0;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelEmbedRunlistID(pGpu, pKernelChannel) kchannelEmbedRunlistID_13cd8d(pGpu, pKernelChannel)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelEmbedRunlistID_HAL(pGpu, pKernelChannel) kchannelEmbedRunlistID(pGpu, pKernelChannel)

//
// kchannelAllocHwID / kchannelFreeHwID: allocate and release the hardware
// channel ID; _GM107 is the shared implementation for all HALs.
//
NV_STATUS kchannelAllocHwID_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle hClient, NvU32 Flags, NvU32 verifFlags2, NvU32 ChID);


#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelAllocHwID(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle hClient, NvU32 Flags, NvU32 verifFlags2, NvU32 ChID) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelAllocHwID(pGpu, pKernelChannel, hClient, Flags, verifFlags2, ChID) kchannelAllocHwID_GM107(pGpu, pKernelChannel, hClient, Flags, verifFlags2, ChID)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelAllocHwID_HAL(pGpu, pKernelChannel, hClient, Flags, verifFlags2, ChID) kchannelAllocHwID(pGpu, pKernelChannel, hClient, Flags, verifFlags2, ChID)

NV_STATUS kchannelFreeHwID_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);


#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelFreeHwID(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelFreeHwID(pGpu, pKernelChannel) kchannelFreeHwID_GM107(pGpu, pKernelChannel)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelFreeHwID_HAL(pGpu, pKernelChannel) kchannelFreeHwID(pGpu, pKernelChannel)
905
//
// USERD helpers. kchannelGetUserdInfo reports base/offset/length of the
// channel's USERD; kchannelGetUserdBar1MapOffset reports its BAR1 mapping
// offset and size. _GM107 is the shared implementation.
//
NV_STATUS kchannelGetUserdInfo_GM107(struct OBJGPU *pGpu, struct KernelChannel *arg2, NvU64 *userBase, NvU64 *offset, NvU64 *length);


#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelGetUserdInfo(struct OBJGPU *pGpu, struct KernelChannel *arg2, NvU64 *userBase, NvU64 *offset, NvU64 *length) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelGetUserdInfo(pGpu, arg2, userBase, offset, length) kchannelGetUserdInfo_GM107(pGpu, arg2, userBase, offset, length)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelGetUserdInfo_HAL(pGpu, arg2, userBase, offset, length) kchannelGetUserdInfo(pGpu, arg2, userBase, offset, length)

NV_STATUS kchannelGetUserdBar1MapOffset_GM107(struct OBJGPU *pGpu, struct KernelChannel *arg2, NvU64 *bar1Offset, NvU32 *bar1MapSize);


#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelGetUserdBar1MapOffset(struct OBJGPU *pGpu, struct KernelChannel *arg2, NvU64 *bar1Offset, NvU32 *bar1MapSize) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelGetUserdBar1MapOffset(pGpu, arg2, bar1Offset, bar1MapSize) kchannelGetUserdBar1MapOffset_GM107(pGpu, arg2, bar1Offset, bar1MapSize)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelGetUserdBar1MapOffset_HAL(pGpu, arg2, bar1Offset, bar1MapSize) kchannelGetUserdBar1MapOffset(pGpu, arg2, bar1Offset, bar1MapSize)

// kchannelCreateUserdMemDescBc: broadcast variant of USERD memdesc
// creation; _GV100 is the shared implementation.
NV_STATUS kchannelCreateUserdMemDescBc_GV100(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle arg3, NvHandle *arg4, NvU64 *arg5);


#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelCreateUserdMemDescBc(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle arg3, NvHandle *arg4, NvU64 *arg5) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelCreateUserdMemDescBc(pGpu, pKernelChannel, arg3, arg4, arg5) kchannelCreateUserdMemDescBc_GV100(pGpu, pKernelChannel, arg3, arg4, arg5)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelCreateUserdMemDescBc_HAL(pGpu, pKernelChannel, arg3, arg4, arg5) kchannelCreateUserdMemDescBc(pGpu, pKernelChannel, arg3, arg4, arg5)
947
//
// kchannelCreateUserdMemDesc / kchannelDestroyUserdMemDesc: unicast USERD
// memory-descriptor lifecycle; _GV100 is the shared implementation.
//
NV_STATUS kchannelCreateUserdMemDesc_GV100(struct OBJGPU *pGpu, struct KernelChannel *arg2, NvHandle arg3, NvHandle arg4, NvU64 arg5, NvU64 *arg6, NvU32 *arg7);


#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelCreateUserdMemDesc(struct OBJGPU *pGpu, struct KernelChannel *arg2, NvHandle arg3, NvHandle arg4, NvU64 arg5, NvU64 *arg6, NvU32 *arg7) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelCreateUserdMemDesc(pGpu, arg2, arg3, arg4, arg5, arg6, arg7) kchannelCreateUserdMemDesc_GV100(pGpu, arg2, arg3, arg4, arg5, arg6, arg7)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelCreateUserdMemDesc_HAL(pGpu, arg2, arg3, arg4, arg5, arg6, arg7) kchannelCreateUserdMemDesc(pGpu, arg2, arg3, arg4, arg5, arg6, arg7)

void kchannelDestroyUserdMemDesc_GV100(struct OBJGPU *pGpu, struct KernelChannel *arg2);


#ifdef __nvoc_kernel_channel_h_disabled
static inline void kchannelDestroyUserdMemDesc(struct OBJGPU *pGpu, struct KernelChannel *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelDestroyUserdMemDesc(pGpu, arg2) kchannelDestroyUserdMemDesc_GV100(pGpu, arg2)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelDestroyUserdMemDesc_HAL(pGpu, arg2) kchannelDestroyUserdMemDesc(pGpu, arg2)

// kchannelGetEngine: reports the engine descriptor (engDesc) associated
// with the channel; _GM107 is the shared implementation.
NV_STATUS kchannelGetEngine_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 *engDesc);


#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelGetEngine(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 *engDesc) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelGetEngine(pGpu, pKernelChannel, engDesc) kchannelGetEngine_GM107(pGpu, pKernelChannel, engDesc)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelGetEngine_HAL(pGpu, pKernelChannel, engDesc) kchannelGetEngine(pGpu, pKernelChannel, engDesc)
988
// kchannelFwdToInternalCtrl: the selected variant (_56cd7a) is a no-op
// that returns NV_OK without touching its arguments.
static inline NV_STATUS kchannelFwdToInternalCtrl_56cd7a(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 internalCmd, RmCtrlParams *pRmCtrlParams) {
    return NV_OK;
}


#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelFwdToInternalCtrl(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 internalCmd, RmCtrlParams *pRmCtrlParams) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelFwdToInternalCtrl(pGpu, pKernelChannel, internalCmd, pRmCtrlParams) kchannelFwdToInternalCtrl_56cd7a(pGpu, pKernelChannel, internalCmd, pRmCtrlParams)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelFwdToInternalCtrl_HAL(pGpu, pKernelChannel, internalCmd, pRmCtrlParams) kchannelFwdToInternalCtrl(pGpu, pKernelChannel, internalCmd, pRmCtrlParams)

// kchannelAllocChannel: the selected variant (_56cd7a) is likewise a
// no-op returning NV_OK.
static inline NV_STATUS kchannelAllocChannel_56cd7a(struct KernelChannel *pKernelChannel, NV_CHANNEL_ALLOC_PARAMS *pChannelGpfifoParams) {
    return NV_OK;
}


#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelAllocChannel(struct KernelChannel *pKernelChannel, NV_CHANNEL_ALLOC_PARAMS *pChannelGpfifoParams) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelAllocChannel(pKernelChannel, pChannelGpfifoParams) kchannelAllocChannel_56cd7a(pKernelChannel, pChannelGpfifoParams)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelAllocChannel_HAL(pKernelChannel, pChannelGpfifoParams) kchannelAllocChannel(pKernelChannel, pChannelGpfifoParams)
1020
kchannelIsValid_cbe027(struct KernelChannel * pKernelChannel)1021 static inline NvBool kchannelIsValid_cbe027(struct KernelChannel *pKernelChannel) {
1022 return ((NvBool)(0 == 0));
1023 }
1024
1025
// kchannelIsValid: resolves to the always-true _cbe027 variant above, or
// to an asserting NV_FALSE stub when KernelChannel support is compiled out.
#ifdef __nvoc_kernel_channel_h_disabled
static inline NvBool kchannelIsValid(struct KernelChannel *pKernelChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelIsValid(pKernelChannel) kchannelIsValid_cbe027(pKernelChannel)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelIsValid_HAL(pKernelChannel) kchannelIsValid(pKernelChannel)
1036
//
// kchannelGetClassEngineID: resolves a child-object handle to its class
// engine ID, class ID, and RM engine type; _GM107 is the shared
// implementation.
//
NV_STATUS kchannelGetClassEngineID_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle handle, NvU32 *classEngineID, NvU32 *classID, RM_ENGINE_TYPE *rmEngineID);


#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelGetClassEngineID(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle handle, NvU32 *classEngineID, NvU32 *classID, RM_ENGINE_TYPE *rmEngineID) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelGetClassEngineID(pGpu, pKernelChannel, handle, classEngineID, classID, rmEngineID) kchannelGetClassEngineID_GM107(pGpu, pKernelChannel, handle, classEngineID, classID, rmEngineID)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelGetClassEngineID_HAL(pGpu, pKernelChannel, handle, classEngineID, classID, rmEngineID) kchannelGetClassEngineID(pGpu, pKernelChannel, handle, classEngineID, classID, rmEngineID)

// kchannelEnableVirtualContext: _GM107 shared implementation; same
// compile-out stub pattern.
NV_STATUS kchannelEnableVirtualContext_GM107(struct KernelChannel *arg1);


#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelEnableVirtualContext(struct KernelChannel *arg1) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelEnableVirtualContext(arg1) kchannelEnableVirtualContext_GM107(arg1)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelEnableVirtualContext_HAL(arg1) kchannelEnableVirtualContext(arg1)
1064
// kchannelRotateSecureChannelIv: the selected variant (_46f6a7) reports
// NV_ERR_NOT_SUPPORTED — secure-channel IV rotation is not available in
// this configuration.
static inline NV_STATUS kchannelRotateSecureChannelIv_46f6a7(struct KernelChannel *pKernelChannel, ROTATE_IV_TYPE rotateOperation, NvU32 *encryptIv, NvU32 *decryptIv) {
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelRotateSecureChannelIv(struct KernelChannel *pKernelChannel, ROTATE_IV_TYPE rotateOperation, NvU32 *encryptIv, NvU32 *decryptIv) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelRotateSecureChannelIv(pKernelChannel, rotateOperation, encryptIv, decryptIv) kchannelRotateSecureChannelIv_46f6a7(pKernelChannel, rotateOperation, encryptIv, decryptIv)
#endif //__nvoc_kernel_channel_h_disabled

#define kchannelRotateSecureChannelIv_HAL(pKernelChannel, rotateOperation, encryptIv, decryptIv) kchannelRotateSecureChannelIv(pKernelChannel, rotateOperation, encryptIv, decryptIv)
1080
// Resource-mapping entry points (implemented out of line).
NV_STATUS kchannelMap_IMPL(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping);

NV_STATUS kchannelUnmap_IMPL(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping);

NV_STATUS kchannelGetMapAddrSpace_IMPL(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace);

NV_STATUS kchannelGetMemInterMapParams_IMPL(struct KernelChannel *pKernelChannel, RMRES_MEM_INTER_MAP_PARAMS *pParams);

NV_STATUS kchannelCheckMemInterUnmap_IMPL(struct KernelChannel *pKernelChannel, NvBool bSubdeviceHandleProvided);

// Per-architecture user memdesc creation variants.
NV_STATUS kchannelCreateUserMemDesc_GM107(struct OBJGPU *pGpu, struct KernelChannel *arg2);

NV_STATUS kchannelCreateUserMemDesc_GA10B(struct OBJGPU *pGpu, struct KernelChannel *arg2);

// Per-architecture USERD address/size validation variants.
NvBool kchannelIsUserdAddrSizeValid_GV100(struct KernelChannel *pKernelChannel, NvU32 userdAddrLo, NvU32 userdAddrHi);

NvBool kchannelIsUserdAddrSizeValid_GA100(struct KernelChannel *pKernelChannel, NvU32 userdAddrLo, NvU32 userdAddrHi);

NvBool kchannelIsUserdAddrSizeValid_GH100(struct KernelChannel *pKernelChannel, NvU32 userdAddrLo, NvU32 userdAddrHi);
1100
//
// RM control-command handlers, grouped by the class that defines each
// command's parameter struct (NV506F, NV906F, NVA06F, NVB06F, NVC36F).
// All are implemented out of line.
//

// NV506F (isolated-channel reset) controls.
NV_STATUS kchannelCtrlCmdResetIsolatedChannel_IMPL(struct KernelChannel *pKernelChannel, NV506F_CTRL_CMD_RESET_ISOLATED_CHANNEL_PARAMS *pResetParams);

NV_STATUS kchannelCtrlCmdInternalResetIsolatedChannel_IMPL(struct KernelChannel *pKernelChannel, NV506F_CTRL_CMD_INTERNAL_RESET_ISOLATED_CHANNEL_PARAMS *pResetParams);

// NV906F controls.
NV_STATUS kchannelCtrlCmdGetClassEngineid_IMPL(struct KernelChannel *pKernelChannel, NV906F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams);

NV_STATUS kchannelCtrlCmdResetChannel_IMPL(struct KernelChannel *pKernelChannel, NV906F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams);

NV_STATUS kchannelCtrlCmdGetDeferRCState_IMPL(struct KernelChannel *pKernelChannel, NV906F_CTRL_CMD_GET_DEFER_RC_STATE_PARAMS *pStateParams);

NV_STATUS kchannelCtrlCmdGetMmuFaultInfo_IMPL(struct KernelChannel *pKernelChannel, NV906F_CTRL_GET_MMU_FAULT_INFO_PARAMS *pFaultInfoParams);

NV_STATUS kchannelCtrlCmdEventSetNotification_IMPL(struct KernelChannel *pKernelChannel, NV906F_CTRL_EVENT_SET_NOTIFICATION_PARAMS *pSetEventParams);

// NVA06F (GPFIFO scheduling / binding / notifier) controls.
NV_STATUS kchannelCtrlCmdGpFifoSchedule_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams);

NV_STATUS kchannelCtrlCmdBind_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_BIND_PARAMS *pParams);

NV_STATUS kchannelCtrlCmdSetErrorNotifier_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_SET_ERROR_NOTIFIER_PARAMS *pSetErrorNotifierParams);

NV_STATUS kchannelCtrlCmdSetInterleaveLevel_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_INTERLEAVE_LEVEL_PARAMS *pParams);

NV_STATUS kchannelCtrlCmdGetContextId_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_GET_CONTEXT_ID_PARAMS *pParams);

NV_STATUS kchannelCtrlCmdRestartRunlist_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_RESTART_RUNLIST_PARAMS *pParams);

// NVB06F (engine context save/restore/migrate) controls.
NV_STATUS kchannelCtrlCmdGetEngineCtxSize_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_SIZE_PARAMS *pCtxSizeParams);

NV_STATUS kchannelCtrlCmdGetEngineCtxData_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams);

NV_STATUS kchannelCtrlCmdMigrateEngineCtxData_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_MIGRATE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams);

NV_STATUS kchannelCtrlCmdGetEngineCtxState_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_STATE_PARAMS *pCtxStateParams);

NV_STATUS kchannelCtrlCmdGetChannelHwState_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_CHANNEL_HW_STATE_PARAMS *pParams);

NV_STATUS kchannelCtrlCmdSetChannelHwState_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_SET_CHANNEL_HW_STATE_PARAMS *pParams);

NV_STATUS kchannelCtrlCmdSaveEngineCtxData_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_SAVE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams);

NV_STATUS kchannelCtrlCmdRestoreEngineCtxData_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_RESTORE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams);

// NVC36F (work-submit token / fault method buffer) controls.
NV_STATUS kchannelCtrlCmdGpfifoGetWorkSubmitToken_IMPL(struct KernelChannel *pKernelChannel, NVC36F_CTRL_CMD_GPFIFO_GET_WORK_SUBMIT_TOKEN_PARAMS *pTokenParams);

NV_STATUS kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer_IMPL(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GPFIFO_UPDATE_FAULT_METHOD_BUFFER_PARAMS *pFaultMthdBufferParams);

NV_STATUS kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex_IMPL(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GPFIFO_SET_WORK_SUBMIT_TOKEN_NOTIF_INDEX_PARAMS *pParams);

NV_STATUS kchannelCtrlCmdStopChannel_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_STOP_CHANNEL_PARAMS *pStopChannelParams);
1150
// Confidential-computing controls (NVC56F): each has an unsupported
// variant (_46f6a7, returns NV_ERR_NOT_SUPPORTED) and a kernel-RM
// implementation declared alongside.
static inline NV_STATUS kchannelCtrlCmdGetKmb_46f6a7(struct KernelChannel *pKernelChannel, NVC56F_CTRL_CMD_GET_KMB_PARAMS *pGetKmbParams) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kchannelCtrlCmdGetKmb_KERNEL(struct KernelChannel *pKernelChannel, NVC56F_CTRL_CMD_GET_KMB_PARAMS *pGetKmbParams);

static inline NV_STATUS kchannelCtrlRotateSecureChannelIv_46f6a7(struct KernelChannel *pKernelChannel, NVC56F_CTRL_ROTATE_SECURE_CHANNEL_IV_PARAMS *pRotateIvParams) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kchannelCtrlRotateSecureChannelIv_KERNEL(struct KernelChannel *pKernelChannel, NVC56F_CTRL_ROTATE_SECURE_CHANNEL_IV_PARAMS *pRotateIvParams);

// Encryption statistics buffer: _KERNEL does the work; _56cd7a is a
// no-op variant that returns NV_OK.
NV_STATUS kchannelSetEncryptionStatsBuffer_KERNEL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, MEMORY_DESCRIPTOR *pMemDesc, NvBool bSet);

static inline NV_STATUS kchannelSetEncryptionStatsBuffer_56cd7a(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, MEMORY_DESCRIPTOR *pMemDesc, NvBool bSet) {
    return NV_OK;
}
1168
//
// NV0090 controls forwarded to the channel's graphics context: each
// re-dispatches the control to hKernelGraphicsContext through
// kgrctxCtrlHandle using the TLS call context (pParams is not read here).
//
static inline NV_STATUS kchannelCtrlGetTpcPartitionMode_a094e1(struct KernelChannel *pKernelChannel, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *pParams) {
    return kgrctxCtrlHandle(resservGetTlsCallContext(), pKernelChannel->hKernelGraphicsContext);
}

static inline NV_STATUS kchannelCtrlSetTpcPartitionMode_a094e1(struct KernelChannel *pKernelChannel, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *pParams) {
    return kgrctxCtrlHandle(resservGetTlsCallContext(), pKernelChannel->hKernelGraphicsContext);
}

static inline NV_STATUS kchannelCtrlGetMMUDebugMode_a094e1(struct KernelChannel *pKernelChannel, NV0090_CTRL_GET_MMU_DEBUG_MODE_PARAMS *pParams) {
    return kgrctxCtrlHandle(resservGetTlsCallContext(), pKernelChannel->hKernelGraphicsContext);
}

static inline NV_STATUS kchannelCtrlProgramVidmemPromote_a094e1(struct KernelChannel *pKernelChannel, NV0090_CTRL_PROGRAM_VIDMEM_PROMOTE_PARAMS *pParams) {
    return kgrctxCtrlHandle(resservGetTlsCallContext(), pKernelChannel->hKernelGraphicsContext);
}
1184
// Key material bundle retrieval: _56cd7a is a no-op variant returning
// NV_OK; _KERNEL is the real kernel-RM implementation.
static inline NV_STATUS kchannelRetrieveKmb_56cd7a(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, ROTATE_IV_TYPE rotateOperation, NvBool includeSecrets, CC_KMB *keyMaterialBundle) {
    return NV_OK;
}

NV_STATUS kchannelRetrieveKmb_KERNEL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, ROTATE_IV_TYPE rotateOperation, NvBool includeSecrets, CC_KMB *keyMaterialBundle);

// Key-rotation notifier set/clear: same _KERNEL vs no-op _56cd7a split.
NV_STATUS kchannelSetKeyRotationNotifier_KERNEL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bSet);

static inline NV_STATUS kchannelSetKeyRotationNotifier_56cd7a(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bSet) {
    return NV_OK;
}
1196
kchannelGetDebugTag(const struct KernelChannel * pKernelChannel)1197 static inline NvU32 kchannelGetDebugTag(const struct KernelChannel *pKernelChannel) {
1198 if (pKernelChannel == ((void *)0))
1199 return 4294967295U;
1200 return pKernelChannel->ChID;
1201 }
1202
//
// Plain field accessors on KernelChannel. No locking or validation is
// performed; callers are responsible for holding whatever locks the
// surrounding code requires.
//

// Whether context-buffer allocation was skipped for this channel.
static inline NvBool kchannelIsCtxBufferAllocSkipped(struct KernelChannel *pKernelChannel) {
    return pKernelChannel->bSkipCtxBufferAlloc;
}

// Subcontext ID of this channel.
static inline NvU32 kchannelGetSubctxId(struct KernelChannel *pKernelChannel) {
    return pKernelChannel->subctxId;
}

// cid field of this channel.
static inline NvU32 kchannelGetCid(struct KernelChannel *pKernelChannel) {
    return pKernelChannel->cid;
}

// Pointer to the channel's MIG partition reference (embedded struct).
static inline struct MIG_INSTANCE_REF *kchannelGetMIGReference(struct KernelChannel *pKernelChannel) {
    return &pKernelChannel->partitionRef;
}

// Runqueue index of this channel.
static inline NvU32 kchannelGetRunqueue(struct KernelChannel *pKernelChannel) {
    return pKernelChannel->runqueue;
}

// Runlist ID getter/setter pair.
static inline NvU32 kchannelGetRunlistId(struct KernelChannel *pKernelChannel) {
    return pKernelChannel->runlistId;
}

static inline void kchannelSetRunlistId(struct KernelChannel *pKernelChannel, NvU32 runlistId) {
    pKernelChannel->runlistId = runlistId;
}

// RM engine type this channel was created against.
static inline RM_ENGINE_TYPE kchannelGetEngineType(struct KernelChannel *pKernelChannel) {
    return pKernelChannel->engineType;
}
1234
// NVOC constructor/destructor hooks: the __nvoc_* macros are what the
// NVOC runtime invokes; both map directly to the _IMPL functions.
NV_STATUS kchannelConstruct_IMPL(struct KernelChannel *arg_pKernelChannel, CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);

#define __nvoc_kchannelConstruct(arg_pKernelChannel, arg_pCallContext, arg_pParams) kchannelConstruct_IMPL(arg_pKernelChannel, arg_pCallContext, arg_pParams)
void kchannelDestruct_IMPL(struct KernelChannel *pResource);

#define __nvoc_kchannelDestruct(pResource) kchannelDestruct_IMPL(pResource)
// kchannelRegisterChild / kchannelDeregisterChild: track ChannelDescendant
// objects under this channel; same compile-out stub pattern as above.
NV_STATUS kchannelRegisterChild_IMPL(struct KernelChannel *pKernelChannel, ChannelDescendant *pObject);

#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelRegisterChild(struct KernelChannel *pKernelChannel, ChannelDescendant *pObject) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelRegisterChild(pKernelChannel, pObject) kchannelRegisterChild_IMPL(pKernelChannel, pObject)
#endif //__nvoc_kernel_channel_h_disabled

NV_STATUS kchannelDeregisterChild_IMPL(struct KernelChannel *pKernelChannel, ChannelDescendant *pObject);

#ifdef __nvoc_kernel_channel_h_disabled
static inline NV_STATUS kchannelDeregisterChild(struct KernelChannel *pKernelChannel, ChannelDescendant *pObject) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelDeregisterChild(pKernelChannel, pObject) kchannelDeregisterChild_IMPL(pKernelChannel, pObject)
#endif //__nvoc_kernel_channel_h_disabled
1262
1263 void kchannelNotifyEvent_IMPL(struct KernelChannel *pKernelChannel, NvU32 notifyIndex, NvU32 info32, NvU16 info16, void *pNotifyParams, NvU32 notifyParamsSize);
1264
1265 #ifdef __nvoc_kernel_channel_h_disabled
kchannelNotifyEvent(struct KernelChannel * pKernelChannel,NvU32 notifyIndex,NvU32 info32,NvU16 info16,void * pNotifyParams,NvU32 notifyParamsSize)1266 static inline void kchannelNotifyEvent(struct KernelChannel *pKernelChannel, NvU32 notifyIndex, NvU32 info32, NvU16 info16, void *pNotifyParams, NvU32 notifyParamsSize) {
1267 NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1268 }
1269 #else //__nvoc_kernel_channel_h_disabled
1270 #define kchannelNotifyEvent(pKernelChannel, notifyIndex, info32, info16, pNotifyParams, notifyParamsSize) kchannelNotifyEvent_IMPL(pKernelChannel, notifyIndex, info32, info16, pNotifyParams, notifyParamsSize)
1271 #endif //__nvoc_kernel_channel_h_disabled
1272
1273 NV_STATUS kchannelUpdateNotifierMem_IMPL(struct KernelChannel *pKernelChannel, NvU32 notifyIndex, NvU32 info32, NvU16 info16, NvU32 notifierStatus);
1274
1275 #ifdef __nvoc_kernel_channel_h_disabled
kchannelUpdateNotifierMem(struct KernelChannel * pKernelChannel,NvU32 notifyIndex,NvU32 info32,NvU16 info16,NvU32 notifierStatus)1276 static inline NV_STATUS kchannelUpdateNotifierMem(struct KernelChannel *pKernelChannel, NvU32 notifyIndex, NvU32 info32, NvU16 info16, NvU32 notifierStatus) {
1277 NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1278 return NV_ERR_NOT_SUPPORTED;
1279 }
1280 #else //__nvoc_kernel_channel_h_disabled
1281 #define kchannelUpdateNotifierMem(pKernelChannel, notifyIndex, info32, info16, notifierStatus) kchannelUpdateNotifierMem_IMPL(pKernelChannel, notifyIndex, info32, info16, notifierStatus)
1282 #endif //__nvoc_kernel_channel_h_disabled
1283
1284 NvBool kchannelCheckIsUserMode_IMPL(struct KernelChannel *pKernelChannel);
1285
1286 #ifdef __nvoc_kernel_channel_h_disabled
kchannelCheckIsUserMode(struct KernelChannel * pKernelChannel)1287 static inline NvBool kchannelCheckIsUserMode(struct KernelChannel *pKernelChannel) {
1288 NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1289 return NV_FALSE;
1290 }
1291 #else //__nvoc_kernel_channel_h_disabled
1292 #define kchannelCheckIsUserMode(pKernelChannel) kchannelCheckIsUserMode_IMPL(pKernelChannel)
1293 #endif //__nvoc_kernel_channel_h_disabled
1294
1295 NvBool kchannelCheckIsKernel_IMPL(struct KernelChannel *pKernelChannel);
1296
1297 #ifdef __nvoc_kernel_channel_h_disabled
kchannelCheckIsKernel(struct KernelChannel * pKernelChannel)1298 static inline NvBool kchannelCheckIsKernel(struct KernelChannel *pKernelChannel) {
1299 NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1300 return NV_FALSE;
1301 }
1302 #else //__nvoc_kernel_channel_h_disabled
1303 #define kchannelCheckIsKernel(pKernelChannel) kchannelCheckIsKernel_IMPL(pKernelChannel)
1304 #endif //__nvoc_kernel_channel_h_disabled
1305
1306 NvBool kchannelCheckIsAdmin_IMPL(struct KernelChannel *pKernelChannel);
1307
1308 #ifdef __nvoc_kernel_channel_h_disabled
kchannelCheckIsAdmin(struct KernelChannel * pKernelChannel)1309 static inline NvBool kchannelCheckIsAdmin(struct KernelChannel *pKernelChannel) {
1310 NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1311 return NV_FALSE;
1312 }
1313 #else //__nvoc_kernel_channel_h_disabled
1314 #define kchannelCheckIsAdmin(pKernelChannel) kchannelCheckIsAdmin_IMPL(pKernelChannel)
1315 #endif //__nvoc_kernel_channel_h_disabled
1316
1317 NV_STATUS kchannelBindToRunlist_IMPL(struct KernelChannel *pKernelChannel, RM_ENGINE_TYPE localRmEngineType, ENGDESCRIPTOR engineDesc);
1318
1319 #ifdef __nvoc_kernel_channel_h_disabled
kchannelBindToRunlist(struct KernelChannel * pKernelChannel,RM_ENGINE_TYPE localRmEngineType,ENGDESCRIPTOR engineDesc)1320 static inline NV_STATUS kchannelBindToRunlist(struct KernelChannel *pKernelChannel, RM_ENGINE_TYPE localRmEngineType, ENGDESCRIPTOR engineDesc) {
1321 NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1322 return NV_ERR_NOT_SUPPORTED;
1323 }
1324 #else //__nvoc_kernel_channel_h_disabled
1325 #define kchannelBindToRunlist(pKernelChannel, localRmEngineType, engineDesc) kchannelBindToRunlist_IMPL(pKernelChannel, localRmEngineType, engineDesc)
1326 #endif //__nvoc_kernel_channel_h_disabled
1327
1328 NV_STATUS kchannelSetEngineContextMemDesc_IMPL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine, MEMORY_DESCRIPTOR *pMemDesc);
1329
1330 #ifdef __nvoc_kernel_channel_h_disabled
kchannelSetEngineContextMemDesc(struct OBJGPU * pGpu,struct KernelChannel * pKernelChannel,NvU32 engine,MEMORY_DESCRIPTOR * pMemDesc)1331 static inline NV_STATUS kchannelSetEngineContextMemDesc(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine, MEMORY_DESCRIPTOR *pMemDesc) {
1332 NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1333 return NV_ERR_NOT_SUPPORTED;
1334 }
1335 #else //__nvoc_kernel_channel_h_disabled
1336 #define kchannelSetEngineContextMemDesc(pGpu, pKernelChannel, engine, pMemDesc) kchannelSetEngineContextMemDesc_IMPL(pGpu, pKernelChannel, engine, pMemDesc)
1337 #endif //__nvoc_kernel_channel_h_disabled
1338
1339 NV_STATUS kchannelMapEngineCtxBuf_IMPL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine);
1340
1341 #ifdef __nvoc_kernel_channel_h_disabled
kchannelMapEngineCtxBuf(struct OBJGPU * pGpu,struct KernelChannel * pKernelChannel,NvU32 engine)1342 static inline NV_STATUS kchannelMapEngineCtxBuf(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine) {
1343 NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1344 return NV_ERR_NOT_SUPPORTED;
1345 }
1346 #else //__nvoc_kernel_channel_h_disabled
1347 #define kchannelMapEngineCtxBuf(pGpu, pKernelChannel, engine) kchannelMapEngineCtxBuf_IMPL(pGpu, pKernelChannel, engine)
1348 #endif //__nvoc_kernel_channel_h_disabled
1349
1350 NV_STATUS kchannelUnmapEngineCtxBuf_IMPL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine);
1351
1352 #ifdef __nvoc_kernel_channel_h_disabled
kchannelUnmapEngineCtxBuf(struct OBJGPU * pGpu,struct KernelChannel * pKernelChannel,NvU32 engine)1353 static inline NV_STATUS kchannelUnmapEngineCtxBuf(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine) {
1354 NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1355 return NV_ERR_NOT_SUPPORTED;
1356 }
1357 #else //__nvoc_kernel_channel_h_disabled
1358 #define kchannelUnmapEngineCtxBuf(pGpu, pKernelChannel, engine) kchannelUnmapEngineCtxBuf_IMPL(pGpu, pKernelChannel, engine)
1359 #endif //__nvoc_kernel_channel_h_disabled
1360
1361 NV_STATUS kchannelCheckBcStateCurrent_IMPL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
1362
1363 #ifdef __nvoc_kernel_channel_h_disabled
kchannelCheckBcStateCurrent(struct OBJGPU * pGpu,struct KernelChannel * pKernelChannel)1364 static inline NV_STATUS kchannelCheckBcStateCurrent(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
1365 NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1366 return NV_ERR_NOT_SUPPORTED;
1367 }
1368 #else //__nvoc_kernel_channel_h_disabled
1369 #define kchannelCheckBcStateCurrent(pGpu, pKernelChannel) kchannelCheckBcStateCurrent_IMPL(pGpu, pKernelChannel)
1370 #endif //__nvoc_kernel_channel_h_disabled
1371
1372 NV_STATUS kchannelUpdateWorkSubmitTokenNotifIndex_IMPL(struct OBJGPU *pGpu, struct KernelChannel *arg2, NvU32 index);
1373
1374 #ifdef __nvoc_kernel_channel_h_disabled
kchannelUpdateWorkSubmitTokenNotifIndex(struct OBJGPU * pGpu,struct KernelChannel * arg2,NvU32 index)1375 static inline NV_STATUS kchannelUpdateWorkSubmitTokenNotifIndex(struct OBJGPU *pGpu, struct KernelChannel *arg2, NvU32 index) {
1376 NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1377 return NV_ERR_NOT_SUPPORTED;
1378 }
1379 #else //__nvoc_kernel_channel_h_disabled
1380 #define kchannelUpdateWorkSubmitTokenNotifIndex(pGpu, arg2, index) kchannelUpdateWorkSubmitTokenNotifIndex_IMPL(pGpu, arg2, index)
1381 #endif //__nvoc_kernel_channel_h_disabled
1382
1383 NV_STATUS kchannelNotifyWorkSubmitToken_IMPL(struct OBJGPU *pGpu, struct KernelChannel *arg2, NvU32 token);
1384
1385 #ifdef __nvoc_kernel_channel_h_disabled
kchannelNotifyWorkSubmitToken(struct OBJGPU * pGpu,struct KernelChannel * arg2,NvU32 token)1386 static inline NV_STATUS kchannelNotifyWorkSubmitToken(struct OBJGPU *pGpu, struct KernelChannel *arg2, NvU32 token) {
1387 NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1388 return NV_ERR_NOT_SUPPORTED;
1389 }
1390 #else //__nvoc_kernel_channel_h_disabled
1391 #define kchannelNotifyWorkSubmitToken(pGpu, arg2, token) kchannelNotifyWorkSubmitToken_IMPL(pGpu, arg2, token)
1392 #endif //__nvoc_kernel_channel_h_disabled
1393
1394 NV_STATUS kchannelMapUserD_IMPL(struct OBJGPU *pGpu, struct KernelChannel *arg2, RS_PRIV_LEVEL arg3, NvU64 arg4, NvU32 arg5, NvP64 *arg6, NvP64 *arg7);
1395
1396 #ifdef __nvoc_kernel_channel_h_disabled
kchannelMapUserD(struct OBJGPU * pGpu,struct KernelChannel * arg2,RS_PRIV_LEVEL arg3,NvU64 arg4,NvU32 arg5,NvP64 * arg6,NvP64 * arg7)1397 static inline NV_STATUS kchannelMapUserD(struct OBJGPU *pGpu, struct KernelChannel *arg2, RS_PRIV_LEVEL arg3, NvU64 arg4, NvU32 arg5, NvP64 *arg6, NvP64 *arg7) {
1398 NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1399 return NV_ERR_NOT_SUPPORTED;
1400 }
1401 #else //__nvoc_kernel_channel_h_disabled
1402 #define kchannelMapUserD(pGpu, arg2, arg3, arg4, arg5, arg6, arg7) kchannelMapUserD_IMPL(pGpu, arg2, arg3, arg4, arg5, arg6, arg7)
1403 #endif //__nvoc_kernel_channel_h_disabled
1404
1405 void kchannelUnmapUserD_IMPL(struct OBJGPU *pGpu, struct KernelChannel *arg2, RS_PRIV_LEVEL arg3, NvP64 *arg4, NvP64 *arg5);
1406
1407 #ifdef __nvoc_kernel_channel_h_disabled
kchannelUnmapUserD(struct OBJGPU * pGpu,struct KernelChannel * arg2,RS_PRIV_LEVEL arg3,NvP64 * arg4,NvP64 * arg5)1408 static inline void kchannelUnmapUserD(struct OBJGPU *pGpu, struct KernelChannel *arg2, RS_PRIV_LEVEL arg3, NvP64 *arg4, NvP64 *arg5) {
1409 NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1410 }
1411 #else //__nvoc_kernel_channel_h_disabled
1412 #define kchannelUnmapUserD(pGpu, arg2, arg3, arg4, arg5) kchannelUnmapUserD_IMPL(pGpu, arg2, arg3, arg4, arg5)
1413 #endif //__nvoc_kernel_channel_h_disabled
1414
1415 void kchannelFillMmuExceptionInfo_IMPL(struct KernelChannel *pKernelChannel, FIFO_MMU_EXCEPTION_DATA *arg2);
1416
1417 #ifdef __nvoc_kernel_channel_h_disabled
kchannelFillMmuExceptionInfo(struct KernelChannel * pKernelChannel,FIFO_MMU_EXCEPTION_DATA * arg2)1418 static inline void kchannelFillMmuExceptionInfo(struct KernelChannel *pKernelChannel, FIFO_MMU_EXCEPTION_DATA *arg2) {
1419 NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1420 }
1421 #else //__nvoc_kernel_channel_h_disabled
1422 #define kchannelFillMmuExceptionInfo(pKernelChannel, arg2) kchannelFillMmuExceptionInfo_IMPL(pKernelChannel, arg2)
1423 #endif //__nvoc_kernel_channel_h_disabled
1424
void kchannelFreeMmuExceptionInfo_IMPL(struct KernelChannel *pKernelChannel);

#ifdef __nvoc_kernel_channel_h_disabled
// Stub used when the KernelChannel class is compiled out: assert only (void return).
static inline void kchannelFreeMmuExceptionInfo(struct KernelChannel *pKernelChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
}
#else //__nvoc_kernel_channel_h_disabled
#define kchannelFreeMmuExceptionInfo(pKernelChannel) kchannelFreeMmuExceptionInfo_IMPL(pKernelChannel)
#endif //__nvoc_kernel_channel_h_disabled

1435 NV_STATUS kchannelGetFromDualHandle_IMPL(struct RsClient *arg1, NvHandle arg2, struct KernelChannel **arg3);
1436
1437 #define kchannelGetFromDualHandle(arg1, arg2, arg3) kchannelGetFromDualHandle_IMPL(arg1, arg2, arg3)
1438 NV_STATUS kchannelGetFromDualHandleRestricted_IMPL(struct RsClient *arg1, NvHandle arg2, struct KernelChannel **arg3);
1439
1440 #define kchannelGetFromDualHandleRestricted(arg1, arg2, arg3) kchannelGetFromDualHandleRestricted_IMPL(arg1, arg2, arg3)
1441 NvU32 kchannelGetGfid_IMPL(struct KernelChannel *pKernelChannel);
1442
1443 #ifdef __nvoc_kernel_channel_h_disabled
kchannelGetGfid(struct KernelChannel * pKernelChannel)1444 static inline NvU32 kchannelGetGfid(struct KernelChannel *pKernelChannel) {
1445 NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1446 return 0;
1447 }
1448 #else //__nvoc_kernel_channel_h_disabled
1449 #define kchannelGetGfid(pKernelChannel) kchannelGetGfid_IMPL(pKernelChannel)
1450 #endif //__nvoc_kernel_channel_h_disabled
1451
1452 #undef PRIVATE_FIELD
1453
1454 #ifndef NVOC_KERNEL_CHANNEL_H_PRIVATE_ACCESS_ALLOWED
1455 #undef kchannelRetrieveKmb
1456 NV_STATUS NVOC_PRIVATE_FUNCTION(kchannelRetrieveKmb)(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, ROTATE_IV_TYPE rotateOperation, NvBool includeSecrets, CC_KMB *keyMaterialBundle);
1457
1458 #undef kchannelRetrieveKmb_HAL
1459 NV_STATUS NVOC_PRIVATE_FUNCTION(kchannelRetrieveKmb_HAL)(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, ROTATE_IV_TYPE rotateOperation, NvBool includeSecrets, CC_KMB *keyMaterialBundle);
1460
1461 #undef kchannelSetKeyRotationNotifier
1462 NV_STATUS NVOC_PRIVATE_FUNCTION(kchannelSetKeyRotationNotifier)(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bSet);
1463
1464 #undef kchannelSetKeyRotationNotifier_HAL
1465 NV_STATUS NVOC_PRIVATE_FUNCTION(kchannelSetKeyRotationNotifier_HAL)(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bSet);
1466
1467 #ifndef __nvoc_kernel_channel_h_disabled
1468 #undef kchannelRotateSecureChannelIv
1469 NV_STATUS NVOC_PRIVATE_FUNCTION(kchannelRotateSecureChannelIv)(struct KernelChannel *pKernelChannel, ROTATE_IV_TYPE rotateOperation, NvU32 *encryptIv, NvU32 *decryptIv);
1470 #endif //__nvoc_kernel_channel_h_disabled
1471
1472 #endif // NVOC_KERNEL_CHANNEL_H_PRIVATE_ACCESS_ALLOWED
1473
1474
1475 RS_ORDERED_ITERATOR kchannelGetIter(
1476 struct RsClient *pClient,
1477 RsResourceRef *pScopeRef);
1478
1479 NV_STATUS kchannelGetNextKernelChannel(
1480 struct OBJGPU *pGpu,
1481 CHANNEL_ITERATOR *pIt,
1482 struct KernelChannel **ppKernelChannel);
1483
1484 NV_STATUS CliGetKernelChannelWithDevice(struct RsClient *pClient,
1485 NvHandle hParent,
1486 NvHandle hKernelChannel,
1487 struct KernelChannel **ppKernelChannel);
1488
1489 NV_STATUS CliGetKernelChannel(struct RsClient *pClient,
1490 NvHandle hKernelChannel,
1491 struct KernelChannel **ppKernelChannel);
1492
1493 /*!
1494 * @brief Helper to get type and memdesc of a channel notifier (memory/ctxdma)
1495 */
1496 NV_STATUS kchannelGetNotifierInfo(struct OBJGPU *pGpu,
1497 Device *pDevice,
1498 NvHandle hErrorContext,
1499 MEMORY_DESCRIPTOR **ppMemDesc,
1500 ErrorNotifierType *pNotifierType,
1501 NvU64 *pOffset);
1502
1503 // Utils to iterate over ChannelDescendants on one Channels
1504 void kchannelGetChildIterator(struct KernelChannel *pKernelChannel,
1505 NvU32 classID,
1506 RM_ENGINE_TYPE engineID,
1507 KernelChannelChildIterator *pIter);
1508 ChannelDescendant *kchannelGetNextChild(KernelChannelChildIterator *pIter);
1509 // Simpler function to call if you just need one result
1510 ChannelDescendant *kchannelGetOneChild(struct KernelChannel *pKernelChannel,
1511 NvU32 classID,
1512 NvU32 engineID);
1513
1514 // Utils to iterate over ChannelDescendants on all Channels in the same ChannelGroup
1515 void kchannelGetChildIterOverGroup(struct KernelChannel *pKernelChannel,
1516 NvU32 classNum,
1517 NvU32 engDesc,
1518 KernelChannelChildIterOverGroup *pIt);
1519 ChannelDescendant *kchannelGetNextChildOverGroup(KernelChannelChildIterOverGroup *pIt);
1520
1521 NV_STATUS kchannelFindChildByHandle(struct KernelChannel *pKernelChannel, NvHandle hResource, ChannelDescendant **ppObject);
1522
1523 // Bitmap for KernelChannel->swState
1524 #define KERNEL_CHANNEL_SW_STATE_CPU_MAP NVBIT(0) //UserD is mapped
1525 #define KERNEL_CHANNEL_SW_STATE_RUNLIST_SET NVBIT(1) // RunlistId is set
1526 #define KERNEL_CHANNEL_SW_STATE_DISABLED_FOR_KEY_ROTATION NVBIT(2) // disabled for key rotation
1527 #define KERNEL_CHANNEL_SW_STATE_ENABLE_AFTER_KEY_ROTATION NVBIT(3) // RM should enable after key rotation
1528
1529 NvBool kchannelIsCpuMapped(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
1530 void kchannelSetCpuMapped(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bCpuMapped);
1531 NvBool kchannelIsRunlistSet(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
1532 void kchannelSetRunlistSet(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bRunlistSet);
1533 NvBool kchannelIsDisabledForKeyRotation(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
1534 void kchannelDisableForKeyRotation(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bDisable);
1535 NvBool kchannelIsEnableAfterKeyRotation(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
1536 void kchannelEnableAfterKeyRotation(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bEnable);
1537
1538 #endif // KERNEL_CHANNEL_H
1539
1540 #ifdef __cplusplus
1541 } // extern "C"
1542 #endif
1543
1544 #endif // _G_KERNEL_CHANNEL_NVOC_H_
1545