#ifndef _G_GPU_RESOURCE_NVOC_H_
#define _G_GPU_RESOURCE_NVOC_H_
#include "nvoc/runtime.h"

#ifdef __cplusplus
extern "C" {
#endif

/*
 * SPDX-FileCopyrightText: Copyright (c) 2016-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include "g_gpu_resource_nvoc.h"

#ifndef _GPURESOURCE_H_
#define _GPURESOURCE_H_

#include "core/core.h"
#include "gpu/mem_mgr/mem_desc.h"

#include "rmapi/resource.h"

struct OBJGPU;

#ifndef __NVOC_CLASS_OBJGPU_TYPEDEF__
#define __NVOC_CLASS_OBJGPU_TYPEDEF__
typedef struct OBJGPU OBJGPU;
#endif /* __NVOC_CLASS_OBJGPU_TYPEDEF__ */

#ifndef __nvoc_class_id_OBJGPU
#define __nvoc_class_id_OBJGPU 0x7ef3cb
#endif /* __nvoc_class_id_OBJGPU */


struct Device;

#ifndef __NVOC_CLASS_Device_TYPEDEF__
#define __NVOC_CLASS_Device_TYPEDEF__
typedef struct Device Device;
#endif /* __NVOC_CLASS_Device_TYPEDEF__ */

#ifndef __nvoc_class_id_Device
#define __nvoc_class_id_Device 0xe0ac20
#endif /* __nvoc_class_id_Device */


struct Subdevice;

#ifndef __NVOC_CLASS_Subdevice_TYPEDEF__
#define __NVOC_CLASS_Subdevice_TYPEDEF__
typedef struct Subdevice Subdevice;
#endif /* __NVOC_CLASS_Subdevice_TYPEDEF__ */

#ifndef __nvoc_class_id_Subdevice
#define __nvoc_class_id_Subdevice 0x4b01b3
#endif /* __nvoc_class_id_Subdevice */



#define GPU_RES_GET_GPU(pRes) staticCastNoPtrCheck((pRes), GpuResource)->pGpu
#define GPU_RES_GET_GPUGRP(pRes) staticCastNoPtrCheck((pRes), GpuResource)->pGpuGrp
#define GPU_RES_GET_DEVICE(pRes) staticCastNoPtrCheck((pRes), GpuResource)->pDevice
#define GPU_RES_GET_SUBDEVICE(pRes) staticCastNoPtrCheck((pRes), GpuResource)->pSubdevice

#define GPU_RES_SET_THREAD_BC_STATE(pRes) PORT_UNREFERENCED_VARIABLE(pRes)
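//
// Convenience accessors: given any object whose class derives from
// GpuResource, the GPU_RES_GET_* macros above resolve the pointers cached on
// its GpuResource base. Illustrative sketch only (pMyResource is a
// hypothetical pointer to a GpuResource-derived object):
//
//     OBJGPU    *pGpu       = GPU_RES_GET_GPU(pMyResource);
//     Device    *pDevice    = GPU_RES_GET_DEVICE(pMyResource);
//     Subdevice *pSubdevice = GPU_RES_GET_SUBDEVICE(pMyResource);
//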

/*!
 * Abstract base class for common CPU mapping operations
 */
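//
// Orientation notes (summary of what this generated header provides):
//  - struct GpuResource embeds RmResource and adds GPU awareness to a resource.
//  - Its virtual methods (Control, Map, Unmap, ShareCallback, ...) are held as
//    per-object function pointers and reached through the gpures*() macros and
//    gpures*_DISPATCH() inlines defined later in this header.
//  - pGpuGrp/pGpu/pDevice/pSubdevice cache the GPU ancestry of the resource,
//    and bBcResource records the broadcast state passed to gpuresSetGpu().
//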

// Private field names are wrapped in PRIVATE_FIELD, which does nothing for
// the matching C source file, but causes diagnostics to be issued if another
// source file references the field.
#ifdef NVOC_GPU_RESOURCE_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
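//
// Illustrative only: a field declared as
//     NvU32 PRIVATE_FIELD(someCounter);
// expands to plain "someCounter" in a translation unit that defines
// NVOC_GPU_RESOURCE_H_PRIVATE_ACCESS_ALLOWED (typically the matching .c file)
// and to NVOC_PRIVATE_FIELD(someCounter) everywhere else, so that outside
// references are flagged. "someCounter" is a made-up name; GpuResource
// declares no PRIVATE_FIELD members in this header.
//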

struct GpuResource {
    const struct NVOC_RTTI *__nvoc_rtti;
    struct RmResource __nvoc_base_RmResource;
    struct Object *__nvoc_pbase_Object;
    struct RsResource *__nvoc_pbase_RsResource;
    struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
    struct RmResource *__nvoc_pbase_RmResource;
    struct GpuResource *__nvoc_pbase_GpuResource;
    NV_STATUS (*__gpuresControl__)(struct GpuResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    NV_STATUS (*__gpuresMap__)(struct GpuResource *, struct CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, struct RsCpuMapping *);
    NV_STATUS (*__gpuresUnmap__)(struct GpuResource *, struct CALL_CONTEXT *, struct RsCpuMapping *);
    NvBool (*__gpuresShareCallback__)(struct GpuResource *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
    NV_STATUS (*__gpuresGetRegBaseOffsetAndSize__)(struct GpuResource *, struct OBJGPU *, NvU32 *, NvU32 *);
    NV_STATUS (*__gpuresGetMapAddrSpace__)(struct GpuResource *, struct CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
    NV_STATUS (*__gpuresInternalControlForward__)(struct GpuResource *, NvU32, void *, NvU32);
    NvHandle (*__gpuresGetInternalObjectHandle__)(struct GpuResource *);
    NV_STATUS (*__gpuresCheckMemInterUnmap__)(struct GpuResource *, NvBool);
    NV_STATUS (*__gpuresGetMemInterMapParams__)(struct GpuResource *, RMRES_MEM_INTER_MAP_PARAMS *);
    NV_STATUS (*__gpuresGetMemoryMappingDescriptor__)(struct GpuResource *, struct MEMORY_DESCRIPTOR **);
    NvU32 (*__gpuresGetRefCount__)(struct GpuResource *);
    NV_STATUS (*__gpuresControlFilter__)(struct GpuResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    void (*__gpuresAddAdditionalDependants__)(struct RsClient *, struct GpuResource *, RsResourceRef *);
    NV_STATUS (*__gpuresControlSerialization_Prologue__)(struct GpuResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    NV_STATUS (*__gpuresControl_Prologue__)(struct GpuResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    NvBool (*__gpuresCanCopy__)(struct GpuResource *);
    NvBool (*__gpuresIsPartialUnmapSupported__)(struct GpuResource *);
    void (*__gpuresPreDestruct__)(struct GpuResource *);
    NV_STATUS (*__gpuresMapTo__)(struct GpuResource *, RS_RES_MAP_TO_PARAMS *);
    NV_STATUS (*__gpuresIsDuplicate__)(struct GpuResource *, NvHandle, NvBool *);
    void (*__gpuresControlSerialization_Epilogue__)(struct GpuResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    void (*__gpuresControl_Epilogue__)(struct GpuResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    NV_STATUS (*__gpuresUnmapFrom__)(struct GpuResource *, RS_RES_UNMAP_FROM_PARAMS *);
    NvBool (*__gpuresAccessCallback__)(struct GpuResource *, struct RsClient *, void *, RsAccessRight);
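    // Per-instance data below; typically populated via gpuresSetGpu() during construction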
    struct OBJGPUGRP *pGpuGrp;
    struct OBJGPU *pGpu;
    struct Device *pDevice;
    struct Subdevice *pSubdevice;
    NvBool bBcResource;
};

#ifndef __NVOC_CLASS_GpuResource_TYPEDEF__
#define __NVOC_CLASS_GpuResource_TYPEDEF__
typedef struct GpuResource GpuResource;
#endif /* __NVOC_CLASS_GpuResource_TYPEDEF__ */

#ifndef __nvoc_class_id_GpuResource
#define __nvoc_class_id_GpuResource 0x5d5d9f
#endif /* __nvoc_class_id_GpuResource */

extern const struct NVOC_CLASS_DEF __nvoc_class_def_GpuResource;

#define __staticCast_GpuResource(pThis) \
    ((pThis)->__nvoc_pbase_GpuResource)

#ifdef __nvoc_gpu_resource_h_disabled
#define __dynamicCast_GpuResource(pThis) ((GpuResource*)NULL)
#else //__nvoc_gpu_resource_h_disabled
#define __dynamicCast_GpuResource(pThis) \
    ((GpuResource*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(GpuResource)))
#endif //__nvoc_gpu_resource_h_disabled
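//
// These per-class casts are normally reached through the generic NVOC
// staticCast()/dynamicCast() macros. Illustrative sketch (pResource is a
// hypothetical RsResource pointer obtained from a resource reference):
//
//     GpuResource *pGpuRes = dynamicCast(pResource, GpuResource);
//     if (pGpuRes == NULL)
//         return NV_ERR_INVALID_OBJECT_HANDLE; // not GpuResource-derived
//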


NV_STATUS __nvoc_objCreateDynamic_GpuResource(GpuResource**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_GpuResource(GpuResource**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_GpuResource(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
    __nvoc_objCreate_GpuResource((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)

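//
// Virtual dispatch pattern: each gpures<Method>() macro below expands to the
// corresponding gpures<Method>_DISPATCH() inline, which invokes the matching
// per-object function pointer (typically the gpures<Method>_IMPL default, or
// a subclass override installed by NVOC). For example,
//     gpuresControl(pGpuResource, pCallContext, pParams)
// ends up calling
//     pGpuResource->__gpuresControl__(pGpuResource, pCallContext, pParams).
//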
#define gpuresControl(pGpuResource, pCallContext, pParams) gpuresControl_DISPATCH(pGpuResource, pCallContext, pParams)
#define gpuresMap(pGpuResource, pCallContext, pParams, pCpuMapping) gpuresMap_DISPATCH(pGpuResource, pCallContext, pParams, pCpuMapping)
#define gpuresUnmap(pGpuResource, pCallContext, pCpuMapping) gpuresUnmap_DISPATCH(pGpuResource, pCallContext, pCpuMapping)
#define gpuresShareCallback(pGpuResource, pInvokingClient, pParentRef, pSharePolicy) gpuresShareCallback_DISPATCH(pGpuResource, pInvokingClient, pParentRef, pSharePolicy)
#define gpuresGetRegBaseOffsetAndSize(pGpuResource, pGpu, pOffset, pSize) gpuresGetRegBaseOffsetAndSize_DISPATCH(pGpuResource, pGpu, pOffset, pSize)
#define gpuresGetMapAddrSpace(pGpuResource, pCallContext, mapFlags, pAddrSpace) gpuresGetMapAddrSpace_DISPATCH(pGpuResource, pCallContext, mapFlags, pAddrSpace)
#define gpuresInternalControlForward(pGpuResource, command, pParams, size) gpuresInternalControlForward_DISPATCH(pGpuResource, command, pParams, size)
#define gpuresGetInternalObjectHandle(pGpuResource) gpuresGetInternalObjectHandle_DISPATCH(pGpuResource)
#define gpuresCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) gpuresCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define gpuresGetMemInterMapParams(pRmResource, pParams) gpuresGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define gpuresGetMemoryMappingDescriptor(pRmResource, ppMemDesc) gpuresGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define gpuresGetRefCount(pResource) gpuresGetRefCount_DISPATCH(pResource)
#define gpuresControlFilter(pResource, pCallContext, pParams) gpuresControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define gpuresAddAdditionalDependants(pClient, pResource, pReference) gpuresAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define gpuresControlSerialization_Prologue(pResource, pCallContext, pParams) gpuresControlSerialization_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define gpuresControl_Prologue(pResource, pCallContext, pParams) gpuresControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define gpuresCanCopy(pResource) gpuresCanCopy_DISPATCH(pResource)
#define gpuresIsPartialUnmapSupported(pResource) gpuresIsPartialUnmapSupported_DISPATCH(pResource)
#define gpuresPreDestruct(pResource) gpuresPreDestruct_DISPATCH(pResource)
#define gpuresMapTo(pResource, pParams) gpuresMapTo_DISPATCH(pResource, pParams)
#define gpuresIsDuplicate(pResource, hMemory, pDuplicate) gpuresIsDuplicate_DISPATCH(pResource, hMemory, pDuplicate)
#define gpuresControlSerialization_Epilogue(pResource, pCallContext, pParams) gpuresControlSerialization_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define gpuresControl_Epilogue(pResource, pCallContext, pParams) gpuresControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define gpuresUnmapFrom(pResource, pParams) gpuresUnmapFrom_DISPATCH(pResource, pParams)
#define gpuresAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) gpuresAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
NV_STATUS gpuresControl_IMPL(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams);

static inline NV_STATUS gpuresControl_DISPATCH(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pGpuResource->__gpuresControl__(pGpuResource, pCallContext, pParams);
}

NV_STATUS gpuresMap_IMPL(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping);

static inline NV_STATUS gpuresMap_DISPATCH(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
    return pGpuResource->__gpuresMap__(pGpuResource, pCallContext, pParams, pCpuMapping);
}

NV_STATUS gpuresUnmap_IMPL(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping);

static inline NV_STATUS gpuresUnmap_DISPATCH(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
    return pGpuResource->__gpuresUnmap__(pGpuResource, pCallContext, pCpuMapping);
}

NvBool gpuresShareCallback_IMPL(struct GpuResource *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy);

static inline NvBool gpuresShareCallback_DISPATCH(struct GpuResource *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
    return pGpuResource->__gpuresShareCallback__(pGpuResource, pInvokingClient, pParentRef, pSharePolicy);
}

NV_STATUS gpuresGetRegBaseOffsetAndSize_IMPL(struct GpuResource *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize);

static inline NV_STATUS gpuresGetRegBaseOffsetAndSize_DISPATCH(struct GpuResource *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
    return pGpuResource->__gpuresGetRegBaseOffsetAndSize__(pGpuResource, pGpu, pOffset, pSize);
}

NV_STATUS gpuresGetMapAddrSpace_IMPL(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace);

static inline NV_STATUS gpuresGetMapAddrSpace_DISPATCH(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
    return pGpuResource->__gpuresGetMapAddrSpace__(pGpuResource, pCallContext, mapFlags, pAddrSpace);
}

NV_STATUS gpuresInternalControlForward_IMPL(struct GpuResource *pGpuResource, NvU32 command, void *pParams, NvU32 size);

static inline NV_STATUS gpuresInternalControlForward_DISPATCH(struct GpuResource *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
    return pGpuResource->__gpuresInternalControlForward__(pGpuResource, command, pParams, size);
}

NvHandle gpuresGetInternalObjectHandle_IMPL(struct GpuResource *pGpuResource);

static inline NvHandle gpuresGetInternalObjectHandle_DISPATCH(struct GpuResource *pGpuResource) {
    return pGpuResource->__gpuresGetInternalObjectHandle__(pGpuResource);
}

static inline NV_STATUS gpuresCheckMemInterUnmap_DISPATCH(struct GpuResource *pRmResource, NvBool bSubdeviceHandleProvided) {
    return pRmResource->__gpuresCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}

static inline NV_STATUS gpuresGetMemInterMapParams_DISPATCH(struct GpuResource *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
    return pRmResource->__gpuresGetMemInterMapParams__(pRmResource, pParams);
}

static inline NV_STATUS gpuresGetMemoryMappingDescriptor_DISPATCH(struct GpuResource *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
    return pRmResource->__gpuresGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}

static inline NvU32 gpuresGetRefCount_DISPATCH(struct GpuResource *pResource) {
    return pResource->__gpuresGetRefCount__(pResource);
}

static inline NV_STATUS gpuresControlFilter_DISPATCH(struct GpuResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pResource->__gpuresControlFilter__(pResource, pCallContext, pParams);
}

static inline void gpuresAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct GpuResource *pResource, RsResourceRef *pReference) {
    pResource->__gpuresAddAdditionalDependants__(pClient, pResource, pReference);
}

static inline NV_STATUS gpuresControlSerialization_Prologue_DISPATCH(struct GpuResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pResource->__gpuresControlSerialization_Prologue__(pResource, pCallContext, pParams);
}

static inline NV_STATUS gpuresControl_Prologue_DISPATCH(struct GpuResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pResource->__gpuresControl_Prologue__(pResource, pCallContext, pParams);
}

static inline NvBool gpuresCanCopy_DISPATCH(struct GpuResource *pResource) {
    return pResource->__gpuresCanCopy__(pResource);
}

static inline NvBool gpuresIsPartialUnmapSupported_DISPATCH(struct GpuResource *pResource) {
    return pResource->__gpuresIsPartialUnmapSupported__(pResource);
}

static inline void gpuresPreDestruct_DISPATCH(struct GpuResource *pResource) {
    pResource->__gpuresPreDestruct__(pResource);
}

static inline NV_STATUS gpuresMapTo_DISPATCH(struct GpuResource *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
    return pResource->__gpuresMapTo__(pResource, pParams);
}

static inline NV_STATUS gpuresIsDuplicate_DISPATCH(struct GpuResource *pResource, NvHandle hMemory, NvBool *pDuplicate) {
    return pResource->__gpuresIsDuplicate__(pResource, hMemory, pDuplicate);
}

static inline void gpuresControlSerialization_Epilogue_DISPATCH(struct GpuResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    pResource->__gpuresControlSerialization_Epilogue__(pResource, pCallContext, pParams);
}

static inline void gpuresControl_Epilogue_DISPATCH(struct GpuResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    pResource->__gpuresControl_Epilogue__(pResource, pCallContext, pParams);
}

static inline NV_STATUS gpuresUnmapFrom_DISPATCH(struct GpuResource *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
    return pResource->__gpuresUnmapFrom__(pResource, pParams);
}

static inline NvBool gpuresAccessCallback_DISPATCH(struct GpuResource *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
    return pResource->__gpuresAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}

NV_STATUS gpuresConstruct_IMPL(struct GpuResource *arg_pGpuResource, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);

#define __nvoc_gpuresConstruct(arg_pGpuResource, arg_pCallContext, arg_pParams) gpuresConstruct_IMPL(arg_pGpuResource, arg_pCallContext, arg_pParams)
NV_STATUS gpuresCopyConstruct_IMPL(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *pParams);

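//
// For the exported helpers below (gpuresCopyConstruct, gpuresSetGpu,
// gpuresControlSetup), builds that define __nvoc_gpu_resource_h_disabled get
// inline stubs that assert via NV_ASSERT_FAILED_PRECOMP (returning
// NV_ERR_NOT_SUPPORTED where a status is expected); otherwise the macros
// forward directly to the *_IMPL functions.
//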
#ifdef __nvoc_gpu_resource_h_disabled
static inline NV_STATUS gpuresCopyConstruct(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *pParams) {
    NV_ASSERT_FAILED_PRECOMP("GpuResource was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_gpu_resource_h_disabled
#define gpuresCopyConstruct(pGpuResource, pCallContext, pParams) gpuresCopyConstruct_IMPL(pGpuResource, pCallContext, pParams)
#endif //__nvoc_gpu_resource_h_disabled

void gpuresSetGpu_IMPL(struct GpuResource *pGpuResource, struct OBJGPU *pGpu, NvBool bBcResource);

#ifdef __nvoc_gpu_resource_h_disabled
static inline void gpuresSetGpu(struct GpuResource *pGpuResource, struct OBJGPU *pGpu, NvBool bBcResource) {
    NV_ASSERT_FAILED_PRECOMP("GpuResource was disabled!");
}
#else //__nvoc_gpu_resource_h_disabled
#define gpuresSetGpu(pGpuResource, pGpu, bBcResource) gpuresSetGpu_IMPL(pGpuResource, pGpu, bBcResource)
#endif //__nvoc_gpu_resource_h_disabled

void gpuresControlSetup_IMPL(struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, struct GpuResource *pGpuResource);

#ifdef __nvoc_gpu_resource_h_disabled
static inline void gpuresControlSetup(struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, struct GpuResource *pGpuResource) {
    NV_ASSERT_FAILED_PRECOMP("GpuResource was disabled!");
}
#else //__nvoc_gpu_resource_h_disabled
#define gpuresControlSetup(pParams, pGpuResource) gpuresControlSetup_IMPL(pParams, pGpuResource)
#endif //__nvoc_gpu_resource_h_disabled

NV_STATUS gpuresGetByHandle_IMPL(struct RsClient *pClient, NvHandle hResource, struct GpuResource **ppGpuResource);

#define gpuresGetByHandle(pClient, hResource, ppGpuResource) gpuresGetByHandle_IMPL(pClient, hResource, ppGpuResource)
NV_STATUS gpuresGetByDeviceOrSubdeviceHandle_IMPL(struct RsClient *pClient, NvHandle hResource, struct GpuResource **ppGpuResource);

#define gpuresGetByDeviceOrSubdeviceHandle(pClient, hResource, ppGpuResource) gpuresGetByDeviceOrSubdeviceHandle_IMPL(pClient, hResource, ppGpuResource)
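//
// Illustrative lookup sketch (pClient and hResource are hypothetical caller
// values): resolve a client handle to a GpuResource, then use the accessor
// macros from the top of this header.
//
//     GpuResource *pGpuRes = NULL;
//     if (gpuresGetByHandle(pClient, hResource, &pGpuRes) == NV_OK)
//     {
//         OBJGPU *pGpu = GPU_RES_GET_GPU(pGpuRes);
//         // ...
//     }
//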
#undef PRIVATE_FIELD


#endif // _GPURESOURCE_H_

#ifdef __cplusplus
} // extern "C"
#endif

#endif // _G_GPU_RESOURCE_NVOC_H_