1 #ifndef _G_MEM_NVOC_H_
2 #define _G_MEM_NVOC_H_
3 #include "nvoc/runtime.h"
4 
5 #ifdef __cplusplus
6 extern "C" {
7 #endif
8 
9 /*
10  * SPDX-FileCopyrightText: Copyright (c) 1993-2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
11  * SPDX-License-Identifier: MIT
12  *
13  * Permission is hereby granted, free of charge, to any person obtaining a
14  * copy of this software and associated documentation files (the "Software"),
15  * to deal in the Software without restriction, including without limitation
16  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
17  * and/or sell copies of the Software, and to permit persons to whom the
18  * Software is furnished to do so, subject to the following conditions:
19  *
20  * The above copyright notice and this permission notice shall be included in
21  * all copies or substantial portions of the Software.
22  *
23  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
26  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
28  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
29  * DEALINGS IN THE SOFTWARE.
30  */
31 
32 #include "g_mem_nvoc.h"
33 
34 #ifndef _MEMORY_API_H_
35 #define _MEMORY_API_H_
36 
37 #include "core/core.h"
38 #include "resserv/rs_resource.h"
39 #include "rmapi/rmapi.h"
40 #include "rmapi/resource.h"
41 
42 #include "containers/btree.h"
43 
44 #include "ctrl/ctrl0041.h"
45 
46 struct Device;
47 
48 #ifndef __NVOC_CLASS_Device_TYPEDEF__
49 #define __NVOC_CLASS_Device_TYPEDEF__
50 typedef struct Device Device;
51 #endif /* __NVOC_CLASS_Device_TYPEDEF__ */
52 
53 #ifndef __nvoc_class_id_Device
54 #define __nvoc_class_id_Device 0xe0ac20
55 #endif /* __nvoc_class_id_Device */
56 
57 
58 struct Subdevice;
59 
60 #ifndef __NVOC_CLASS_Subdevice_TYPEDEF__
61 #define __NVOC_CLASS_Subdevice_TYPEDEF__
62 typedef struct Subdevice Subdevice;
63 #endif /* __NVOC_CLASS_Subdevice_TYPEDEF__ */
64 
65 #ifndef __nvoc_class_id_Subdevice
66 #define __nvoc_class_id_Subdevice 0x4b01b3
67 #endif /* __nvoc_class_id_Subdevice */
68 
69 
70 struct RsClient;
71 
72 #ifndef __NVOC_CLASS_RsClient_TYPEDEF__
73 #define __NVOC_CLASS_RsClient_TYPEDEF__
74 typedef struct RsClient RsClient;
75 #endif /* __NVOC_CLASS_RsClient_TYPEDEF__ */
76 
77 #ifndef __nvoc_class_id_RsClient
78 #define __nvoc_class_id_RsClient 0x8f87e5
79 #endif /* __nvoc_class_id_RsClient */
80 
81 
82 struct Heap;
83 
84 #ifndef __NVOC_CLASS_Heap_TYPEDEF__
85 #define __NVOC_CLASS_Heap_TYPEDEF__
86 typedef struct Heap Heap;
87 #endif /* __NVOC_CLASS_Heap_TYPEDEF__ */
88 
89 #ifndef __nvoc_class_id_Heap
90 #define __nvoc_class_id_Heap 0x556e9a
91 #endif /* __nvoc_class_id_Heap */
92 
93 
94 struct OBJGPU;
95 
96 #ifndef __NVOC_CLASS_OBJGPU_TYPEDEF__
97 #define __NVOC_CLASS_OBJGPU_TYPEDEF__
98 typedef struct OBJGPU OBJGPU;
99 #endif /* __NVOC_CLASS_OBJGPU_TYPEDEF__ */
100 
101 #ifndef __nvoc_class_id_OBJGPU
102 #define __nvoc_class_id_OBJGPU 0x7ef3cb
103 #endif /* __nvoc_class_id_OBJGPU */
104 
105 
106 typedef struct MEMORY_DESCRIPTOR MEMORY_DESCRIPTOR;
107 typedef struct PmuMapping PmuMapping;
108 typedef struct HWRESOURCE_INFO HWRESOURCE_INFO;
109 
110 //
111 // vGPU non-stall interrupt info
112 //
//
// Per-allocation bookkeeping for injecting a non-stall interrupt into a vGPU
// guest: the semaphore value/offset to check and the guest's MSI routing data.
//
typedef struct _def_client_vgpu_ns_intr
{
    NvU32                   nsSemValue;  // Non stall interrupt semaphore value
    NvU32                   nsSemOffset; // Non stall interrupt semaphore offset. Currently it is always 0.
    NvBool                  isSemaMemValidationEnabled; // Enable change in Non stall interrupt sema value check
                                                        // while generating event
    NvU64                   guestDomainId; // guest ID that we need to use to inject interrupt
    NvU64                   guestMSIAddr; // MSI address allocated by guest OS
    NvU32                   guestMSIData; // MSI data value set by guest OS
    void                    *pVgpuVfioRef; // Reference to vgpu device in nvidia-vgpu-vfio module
    void                    *pEventDpc; // DPC event to pass the interrupt
} VGPU_NS_INTR;
125 
//
// Doubly-linked list node (pNext/pPrev point at struct Memory) embedded in
// struct Memory as 'dupListItem'. Per the type name the list is circular;
// it links Memory objects related by duplication (see memIsDuplicate).
//
typedef struct
{
  struct Memory *pNext;
  struct Memory *pPrev;
} memCircularListItem;
131 
132 /*!
133  * RM internal class representing NV01_MEMORY_XXX
134  *
135  * @note Memory cannot be a GpuResource because NoDeviceMemory
136  *       subclass is not allocated under a device.
137  */
138 #ifdef NVOC_MEM_H_PRIVATE_ACCESS_ALLOWED
139 #define PRIVATE_FIELD(x) x
140 #else
141 #define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
142 #endif
struct Memory {
    // NVOC run-time type information and the embedded base class
    // (Memory derives from RmResource; see the __nvoc_pbase_* chain below).
    const struct NVOC_RTTI *__nvoc_rtti;
    struct RmResource __nvoc_base_RmResource;
    // Pre-computed ancestor pointers; the generated staticCast macros
    // (e.g. __staticCast_Memory) read these instead of recomputing offsets.
    struct Object *__nvoc_pbase_Object;
    struct RsResource *__nvoc_pbase_RsResource;
    struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
    struct RmResource *__nvoc_pbase_RmResource;
    struct Memory *__nvoc_pbase_Memory;
    // Per-object virtual method slots. The mem*_DISPATCH inline thunks later
    // in this header call directly through these pointers.
    NV_STATUS (*__memIsDuplicate__)(struct Memory *, NvHandle, NvBool *);
    NV_STATUS (*__memGetMapAddrSpace__)(struct Memory *, CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
    NV_STATUS (*__memControl__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    NV_STATUS (*__memMap__)(struct Memory *, CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, RsCpuMapping *);
    NV_STATUS (*__memUnmap__)(struct Memory *, CALL_CONTEXT *, RsCpuMapping *);
    NV_STATUS (*__memGetMemInterMapParams__)(struct Memory *, RMRES_MEM_INTER_MAP_PARAMS *);
    NV_STATUS (*__memCheckMemInterUnmap__)(struct Memory *, NvBool);
    NV_STATUS (*__memGetMemoryMappingDescriptor__)(struct Memory *, MEMORY_DESCRIPTOR **);
    NV_STATUS (*__memCheckCopyPermissions__)(struct Memory *, struct OBJGPU *, struct Device *);
    NV_STATUS (*__memIsReady__)(struct Memory *, NvBool);
    NvBool (*__memIsGpuMapAllowed__)(struct Memory *, struct OBJGPU *);
    NvBool (*__memIsExportAllowed__)(struct Memory *);
    // NV0041 control-command handlers (parameter structs from ctrl/ctrl0041.h).
    NV_STATUS (*__memCtrlCmdGetSurfaceCompressionCoverageLvm__)(struct Memory *, NV0041_CTRL_GET_SURFACE_COMPRESSION_COVERAGE_PARAMS *);
    NV_STATUS (*__memCtrlCmdGetSurfaceInfoLvm__)(struct Memory *, NV0041_CTRL_GET_SURFACE_INFO_PARAMS *);
    NV_STATUS (*__memCtrlCmdSurfaceFlushGpuCache__)(struct Memory *, NV0041_CTRL_SURFACE_FLUSH_GPU_CACHE_PARAMS *);
    NV_STATUS (*__memCtrlCmdGetMemPageSize__)(struct Memory *, NV0041_CTRL_GET_MEM_PAGE_SIZE_PARAMS *);
    NV_STATUS (*__memCtrlCmdSetTag__)(struct Memory *, NV0041_CTRL_CMD_SET_TAG_PARAMS *);
    NV_STATUS (*__memCtrlCmdGetTag__)(struct Memory *, NV0041_CTRL_CMD_GET_TAG_PARAMS *);
    NV_STATUS (*__memCtrlCmdGetSurfacePhysAttrLvm__)(struct Memory *, NV0041_CTRL_GET_SURFACE_PHYS_ATTR_PARAMS *);
    // Virtuals inherited from the RsResource/RmResource hierarchy.
    NvBool (*__memShareCallback__)(struct Memory *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
    NvU32 (*__memGetRefCount__)(struct Memory *);
    NV_STATUS (*__memControlFilter__)(struct Memory *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    void (*__memAddAdditionalDependants__)(struct RsClient *, struct Memory *, RsResourceRef *);
    NV_STATUS (*__memUnmapFrom__)(struct Memory *, RS_RES_UNMAP_FROM_PARAMS *);
    NV_STATUS (*__memControlSerialization_Prologue__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    NV_STATUS (*__memControl_Prologue__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    NvBool (*__memCanCopy__)(struct Memory *);
    void (*__memPreDestruct__)(struct Memory *);
    NV_STATUS (*__memMapTo__)(struct Memory *, RS_RES_MAP_TO_PARAMS *);
    void (*__memControlSerialization_Epilogue__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    void (*__memControl_Epilogue__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    NV_STATUS (*__memControlLookup__)(struct Memory *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
    NvBool (*__memAccessCallback__)(struct Memory *, struct RsClient *, void *, RsAccessRight);
    // --- Data fields ---
    NvBool bConstructed;
    // Owning device/subdevice/GPU. NOTE(review): presumably NULL for the
    // device-less NoDeviceMemory subclass mentioned in the class note -- confirm.
    struct Device *pDevice;
    struct Subdevice *pSubDevice;
    struct OBJGPU *pGpu;
    NvBool bBcResource;
    NvU32 categoryClassId;
    NvU64 Length;                       // NOTE(review): presumably allocation size in bytes -- confirm
    NvU32 HeapOwner;
    NvU32 RefCount;                     // returned via __memGetRefCount__
    struct Heap *pHeap;
    MEMORY_DESCRIPTOR *pMemDesc;        // backing descriptor, see memGetMemoryMappingDescriptor
    NvBool isMemDescOwner;              // whether this object is the owner of pMemDesc
    memCircularListItem dupListItem;    // links duplicates of this allocation (see memCircularListItem)
    NvP64 KernelVAddr;                  // set up by memCreateKernelMapping (see below)
    NvP64 KernelMapPriv;
    PmuMapping *pPmuMappingList;
    NODE Node;                          // btree node (containers/btree.h)
    NvU32 Attr;
    NvU32 Attr2;
    NvU32 Pitch;
    NvU32 Type;
    NvU32 Flags;
    NvU32 tag;                          // get/set via memCtrlCmdGetTag/memCtrlCmdSetTag
    NvU64 osDeviceHandle;
    HWRESOURCE_INFO *pHwResource;
    NvBool bRpcAlloc;                   // NOTE(review): presumably allocation was RPC'd to host (vGPU/GSP) -- confirm
    VGPU_NS_INTR vgpuNsIntr;            // vGPU non-stall interrupt state (see VGPU_NS_INTR above)
};
212 
// Generated class boilerplate: typedef, stable class id, class definition
// symbol, cast helpers, and object-creation entry points for Memory.
#ifndef __NVOC_CLASS_Memory_TYPEDEF__
#define __NVOC_CLASS_Memory_TYPEDEF__
typedef struct Memory Memory;
#endif /* __NVOC_CLASS_Memory_TYPEDEF__ */

#ifndef __nvoc_class_id_Memory
#define __nvoc_class_id_Memory 0x4789f2
#endif /* __nvoc_class_id_Memory */

extern const struct NVOC_CLASS_DEF __nvoc_class_def_Memory;

// Compile-time upcast: reads the pre-computed ancestor pointer in the object.
#define __staticCast_Memory(pThis) \
    ((pThis)->__nvoc_pbase_Memory)

// Run-time downcast via NVOC RTTI; yields NULL when the class is disabled.
#ifdef __nvoc_mem_h_disabled
#define __dynamicCast_Memory(pThis) ((Memory*)NULL)
#else //__nvoc_mem_h_disabled
#define __dynamicCast_Memory(pThis) \
    ((Memory*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(Memory)))
#endif //__nvoc_mem_h_disabled


NV_STATUS __nvoc_objCreateDynamic_Memory(Memory**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_Memory(Memory**, Dynamic*, NvU32, CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_Memory(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
    __nvoc_objCreate_Memory((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
240 
// Public virtual-call aliases: each memFoo(...) expands to the corresponding
// memFoo_DISPATCH(...) inline defined later in this header, which thunks
// through the per-object function pointer in struct Memory.
#define memIsDuplicate(pMemory, hMemory, pDuplicate) memIsDuplicate_DISPATCH(pMemory, hMemory, pDuplicate)
#define memGetMapAddrSpace(pMemory, pCallContext, mapFlags, pAddrSpace) memGetMapAddrSpace_DISPATCH(pMemory, pCallContext, mapFlags, pAddrSpace)
#define memControl(pMemory, pCallContext, pParams) memControl_DISPATCH(pMemory, pCallContext, pParams)
#define memMap(pMemory, pCallContext, pParams, pCpuMapping) memMap_DISPATCH(pMemory, pCallContext, pParams, pCpuMapping)
#define memUnmap(pMemory, pCallContext, pCpuMapping) memUnmap_DISPATCH(pMemory, pCallContext, pCpuMapping)
#define memGetMemInterMapParams(pMemory, pParams) memGetMemInterMapParams_DISPATCH(pMemory, pParams)
#define memCheckMemInterUnmap(pMemory, bSubdeviceHandleProvided) memCheckMemInterUnmap_DISPATCH(pMemory, bSubdeviceHandleProvided)
#define memGetMemoryMappingDescriptor(pMemory, ppMemDesc) memGetMemoryMappingDescriptor_DISPATCH(pMemory, ppMemDesc)
#define memCheckCopyPermissions(pMemory, pDstGpu, pDstDevice) memCheckCopyPermissions_DISPATCH(pMemory, pDstGpu, pDstDevice)
#define memIsReady(pMemory, bCopyConstructorContext) memIsReady_DISPATCH(pMemory, bCopyConstructorContext)
#define memIsGpuMapAllowed(pMemory, pGpu) memIsGpuMapAllowed_DISPATCH(pMemory, pGpu)
#define memIsExportAllowed(pMemory) memIsExportAllowed_DISPATCH(pMemory)
#define memCtrlCmdGetSurfaceCompressionCoverageLvm(pMemory, pParams) memCtrlCmdGetSurfaceCompressionCoverageLvm_DISPATCH(pMemory, pParams)
#define memCtrlCmdGetSurfaceInfoLvm(pMemory, pSurfaceInfoParams) memCtrlCmdGetSurfaceInfoLvm_DISPATCH(pMemory, pSurfaceInfoParams)
#define memCtrlCmdSurfaceFlushGpuCache(pMemory, pCacheFlushParams) memCtrlCmdSurfaceFlushGpuCache_DISPATCH(pMemory, pCacheFlushParams)
#define memCtrlCmdGetMemPageSize(pMemory, pPageSizeParams) memCtrlCmdGetMemPageSize_DISPATCH(pMemory, pPageSizeParams)
#define memCtrlCmdSetTag(pMemory, pParams) memCtrlCmdSetTag_DISPATCH(pMemory, pParams)
#define memCtrlCmdGetTag(pMemory, pParams) memCtrlCmdGetTag_DISPATCH(pMemory, pParams)
#define memCtrlCmdGetSurfacePhysAttrLvm(pMemory, pGPAP) memCtrlCmdGetSurfacePhysAttrLvm_DISPATCH(pMemory, pGPAP)
#define memShareCallback(pResource, pInvokingClient, pParentRef, pSharePolicy) memShareCallback_DISPATCH(pResource, pInvokingClient, pParentRef, pSharePolicy)
#define memGetRefCount(pResource) memGetRefCount_DISPATCH(pResource)
#define memControlFilter(pResource, pCallContext, pParams) memControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define memAddAdditionalDependants(pClient, pResource, pReference) memAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define memUnmapFrom(pResource, pParams) memUnmapFrom_DISPATCH(pResource, pParams)
#define memControlSerialization_Prologue(pResource, pCallContext, pParams) memControlSerialization_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define memControl_Prologue(pResource, pCallContext, pParams) memControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define memCanCopy(pResource) memCanCopy_DISPATCH(pResource)
#define memPreDestruct(pResource) memPreDestruct_DISPATCH(pResource)
#define memMapTo(pResource, pParams) memMapTo_DISPATCH(pResource, pParams)
#define memControlSerialization_Epilogue(pResource, pCallContext, pParams) memControlSerialization_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define memControl_Epilogue(pResource, pCallContext, pParams) memControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define memControlLookup(pResource, pParams, ppEntry) memControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define memAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) memAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
//
// Core Memory virtuals. Each pair is: the memFoo_IMPL prototype (the concrete
// implementation, defined elsewhere) and the memFoo_DISPATCH inline that calls
// through the object's __memFoo__ vtable slot. NOTE(review): presumably the
// slots are pointed at the _IMPL functions by the generated init code -- the
// wiring is not visible in this header.
//
NV_STATUS memIsDuplicate_IMPL(struct Memory *pMemory, NvHandle hMemory, NvBool *pDuplicate);

static inline NV_STATUS memIsDuplicate_DISPATCH(struct Memory *pMemory, NvHandle hMemory, NvBool *pDuplicate) {
    return pMemory->__memIsDuplicate__(pMemory, hMemory, pDuplicate);
}

NV_STATUS memGetMapAddrSpace_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace);

static inline NV_STATUS memGetMapAddrSpace_DISPATCH(struct Memory *pMemory, CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
    return pMemory->__memGetMapAddrSpace__(pMemory, pCallContext, mapFlags, pAddrSpace);
}

NV_STATUS memControl_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams);

static inline NV_STATUS memControl_DISPATCH(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pMemory->__memControl__(pMemory, pCallContext, pParams);
}

// CPU map/unmap of the memory object.
NV_STATUS memMap_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping);

static inline NV_STATUS memMap_DISPATCH(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
    return pMemory->__memMap__(pMemory, pCallContext, pParams, pCpuMapping);
}

NV_STATUS memUnmap_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping);

static inline NV_STATUS memUnmap_DISPATCH(struct Memory *pMemory, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
    return pMemory->__memUnmap__(pMemory, pCallContext, pCpuMapping);
}

NV_STATUS memGetMemInterMapParams_IMPL(struct Memory *pMemory, RMRES_MEM_INTER_MAP_PARAMS *pParams);

static inline NV_STATUS memGetMemInterMapParams_DISPATCH(struct Memory *pMemory, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
    return pMemory->__memGetMemInterMapParams__(pMemory, pParams);
}
309 
// Generated default body (hash-suffixed by NVOC): unconditionally succeeds,
// ignoring both arguments.
static inline NV_STATUS memCheckMemInterUnmap_ac1694(struct Memory *pMemory, NvBool bSubdeviceHandleProvided) {
    return NV_OK;
}

static inline NV_STATUS memCheckMemInterUnmap_DISPATCH(struct Memory *pMemory, NvBool bSubdeviceHandleProvided) {
    return pMemory->__memCheckMemInterUnmap__(pMemory, bSubdeviceHandleProvided);
}

NV_STATUS memGetMemoryMappingDescriptor_IMPL(struct Memory *pMemory, MEMORY_DESCRIPTOR **ppMemDesc);

static inline NV_STATUS memGetMemoryMappingDescriptor_DISPATCH(struct Memory *pMemory, MEMORY_DESCRIPTOR **ppMemDesc) {
    return pMemory->__memGetMemoryMappingDescriptor__(pMemory, ppMemDesc);
}

// Generated default body: no permission check, always NV_OK.
static inline NV_STATUS memCheckCopyPermissions_ac1694(struct Memory *pMemory, struct OBJGPU *pDstGpu, struct Device *pDstDevice) {
    return NV_OK;
}

static inline NV_STATUS memCheckCopyPermissions_DISPATCH(struct Memory *pMemory, struct OBJGPU *pDstGpu, struct Device *pDstDevice) {
    return pMemory->__memCheckCopyPermissions__(pMemory, pDstGpu, pDstDevice);
}

NV_STATUS memIsReady_IMPL(struct Memory *pMemory, NvBool bCopyConstructorContext);

static inline NV_STATUS memIsReady_DISPATCH(struct Memory *pMemory, NvBool bCopyConstructorContext) {
    return pMemory->__memIsReady__(pMemory, bCopyConstructorContext);
}

// Generated default body: always NV_TRUE ("(0 == 0)" is the generator's
// spelling of a true constant).
static inline NvBool memIsGpuMapAllowed_0c883b(struct Memory *pMemory, struct OBJGPU *pGpu) {
    return ((NvBool)(0 == 0));
}

static inline NvBool memIsGpuMapAllowed_DISPATCH(struct Memory *pMemory, struct OBJGPU *pGpu) {
    return pMemory->__memIsGpuMapAllowed__(pMemory, pGpu);
}

// Generated default body: export always allowed (NV_TRUE).
static inline NvBool memIsExportAllowed_0c883b(struct Memory *pMemory) {
    return ((NvBool)(0 == 0));
}

static inline NvBool memIsExportAllowed_DISPATCH(struct Memory *pMemory) {
    return pMemory->__memIsExportAllowed__(pMemory);
}
353 
//
// NV0041 control-command handlers (parameter structs declared in
// ctrl/ctrl0041.h); same IMPL-prototype + vtable-DISPATCH pattern as above.
//
NV_STATUS memCtrlCmdGetSurfaceCompressionCoverageLvm_IMPL(struct Memory *pMemory, NV0041_CTRL_GET_SURFACE_COMPRESSION_COVERAGE_PARAMS *pParams);

static inline NV_STATUS memCtrlCmdGetSurfaceCompressionCoverageLvm_DISPATCH(struct Memory *pMemory, NV0041_CTRL_GET_SURFACE_COMPRESSION_COVERAGE_PARAMS *pParams) {
    return pMemory->__memCtrlCmdGetSurfaceCompressionCoverageLvm__(pMemory, pParams);
}

NV_STATUS memCtrlCmdGetSurfaceInfoLvm_IMPL(struct Memory *pMemory, NV0041_CTRL_GET_SURFACE_INFO_PARAMS *pSurfaceInfoParams);

static inline NV_STATUS memCtrlCmdGetSurfaceInfoLvm_DISPATCH(struct Memory *pMemory, NV0041_CTRL_GET_SURFACE_INFO_PARAMS *pSurfaceInfoParams) {
    return pMemory->__memCtrlCmdGetSurfaceInfoLvm__(pMemory, pSurfaceInfoParams);
}

NV_STATUS memCtrlCmdSurfaceFlushGpuCache_IMPL(struct Memory *pMemory, NV0041_CTRL_SURFACE_FLUSH_GPU_CACHE_PARAMS *pCacheFlushParams);

static inline NV_STATUS memCtrlCmdSurfaceFlushGpuCache_DISPATCH(struct Memory *pMemory, NV0041_CTRL_SURFACE_FLUSH_GPU_CACHE_PARAMS *pCacheFlushParams) {
    return pMemory->__memCtrlCmdSurfaceFlushGpuCache__(pMemory, pCacheFlushParams);
}

NV_STATUS memCtrlCmdGetMemPageSize_IMPL(struct Memory *pMemory, NV0041_CTRL_GET_MEM_PAGE_SIZE_PARAMS *pPageSizeParams);

static inline NV_STATUS memCtrlCmdGetMemPageSize_DISPATCH(struct Memory *pMemory, NV0041_CTRL_GET_MEM_PAGE_SIZE_PARAMS *pPageSizeParams) {
    return pMemory->__memCtrlCmdGetMemPageSize__(pMemory, pPageSizeParams);
}

// Get/set the 'tag' field of struct Memory via the control interface.
NV_STATUS memCtrlCmdSetTag_IMPL(struct Memory *pMemory, NV0041_CTRL_CMD_SET_TAG_PARAMS *pParams);

static inline NV_STATUS memCtrlCmdSetTag_DISPATCH(struct Memory *pMemory, NV0041_CTRL_CMD_SET_TAG_PARAMS *pParams) {
    return pMemory->__memCtrlCmdSetTag__(pMemory, pParams);
}

NV_STATUS memCtrlCmdGetTag_IMPL(struct Memory *pMemory, NV0041_CTRL_CMD_GET_TAG_PARAMS *pParams);

static inline NV_STATUS memCtrlCmdGetTag_DISPATCH(struct Memory *pMemory, NV0041_CTRL_CMD_GET_TAG_PARAMS *pParams) {
    return pMemory->__memCtrlCmdGetTag__(pMemory, pParams);
}

NV_STATUS memCtrlCmdGetSurfacePhysAttrLvm_IMPL(struct Memory *pMemory, NV0041_CTRL_GET_SURFACE_PHYS_ATTR_PARAMS *pGPAP);

static inline NV_STATUS memCtrlCmdGetSurfacePhysAttrLvm_DISPATCH(struct Memory *pMemory, NV0041_CTRL_GET_SURFACE_PHYS_ATTR_PARAMS *pGPAP) {
    return pMemory->__memCtrlCmdGetSurfacePhysAttrLvm__(pMemory, pGPAP);
}
395 
//
// Dispatchers for virtuals inherited from the RsResource/RmResource base
// classes. No Memory-specific _IMPL prototypes here; NOTE(review): presumably
// the slots hold the base-class implementations unless a subclass overrides
// them -- wiring is in the generated .c, not this header.
//
static inline NvBool memShareCallback_DISPATCH(struct Memory *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
    return pResource->__memShareCallback__(pResource, pInvokingClient, pParentRef, pSharePolicy);
}

static inline NvU32 memGetRefCount_DISPATCH(struct Memory *pResource) {
    return pResource->__memGetRefCount__(pResource);
}

static inline NV_STATUS memControlFilter_DISPATCH(struct Memory *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pResource->__memControlFilter__(pResource, pCallContext, pParams);
}

// Note: pClient comes first here, matching the __memAddAdditionalDependants__
// slot's signature (the resource is the second argument).
static inline void memAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct Memory *pResource, RsResourceRef *pReference) {
    pResource->__memAddAdditionalDependants__(pClient, pResource, pReference);
}

static inline NV_STATUS memUnmapFrom_DISPATCH(struct Memory *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
    return pResource->__memUnmapFrom__(pResource, pParams);
}

static inline NV_STATUS memControlSerialization_Prologue_DISPATCH(struct Memory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pResource->__memControlSerialization_Prologue__(pResource, pCallContext, pParams);
}

static inline NV_STATUS memControl_Prologue_DISPATCH(struct Memory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pResource->__memControl_Prologue__(pResource, pCallContext, pParams);
}

static inline NvBool memCanCopy_DISPATCH(struct Memory *pResource) {
    return pResource->__memCanCopy__(pResource);
}

static inline void memPreDestruct_DISPATCH(struct Memory *pResource) {
    pResource->__memPreDestruct__(pResource);
}

static inline NV_STATUS memMapTo_DISPATCH(struct Memory *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
    return pResource->__memMapTo__(pResource, pParams);
}

static inline void memControlSerialization_Epilogue_DISPATCH(struct Memory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    pResource->__memControlSerialization_Epilogue__(pResource, pCallContext, pParams);
}

static inline void memControl_Epilogue_DISPATCH(struct Memory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    pResource->__memControl_Epilogue__(pResource, pCallContext, pParams);
}

static inline NV_STATUS memControlLookup_DISPATCH(struct Memory *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
    return pResource->__memControlLookup__(pResource, pParams, ppEntry);
}

static inline NvBool memAccessCallback_DISPATCH(struct Memory *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
    return pResource->__memAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
451 
//
// Non-virtual (direct-call) Memory methods. The __nvoc_mem* macros are the
// hooks the generated constructor/destructor machinery invokes; the
// __nvoc_mem_h_disabled stubs assert and fail so callers still link when the
// class is compiled out.
//
NV_STATUS memConstruct_IMPL(struct Memory *arg_pMemory, CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);

#define __nvoc_memConstruct(arg_pMemory, arg_pCallContext, arg_pParams) memConstruct_IMPL(arg_pMemory, arg_pCallContext, arg_pParams)
NV_STATUS memCopyConstruct_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *pParams);

#ifdef __nvoc_mem_h_disabled
static inline NV_STATUS memCopyConstruct(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *pParams) {
    NV_ASSERT_FAILED_PRECOMP("Memory was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_h_disabled
#define memCopyConstruct(pMemory, pCallContext, pParams) memCopyConstruct_IMPL(pMemory, pCallContext, pParams)
#endif //__nvoc_mem_h_disabled

void memDestruct_IMPL(struct Memory *pMemory);

#define __nvoc_memDestruct(pMemory) memDestruct_IMPL(pMemory)
// Shared init of the Memory data fields (categoryClassId, pMemDesc, pHeap,
// Attr/Attr2/Pitch/Type/tag, pHwResource -- see struct Memory above).
NV_STATUS memConstructCommon_IMPL(struct Memory *pMemory, NvU32 categoryClassId, NvU32 flags, MEMORY_DESCRIPTOR *pMemDesc, NvU32 heapOwner, struct Heap *pHeap, NvU32 attr, NvU32 attr2, NvU32 Pitch, NvU32 type, NvU32 tag, HWRESOURCE_INFO *pHwResource);

#ifdef __nvoc_mem_h_disabled
static inline NV_STATUS memConstructCommon(struct Memory *pMemory, NvU32 categoryClassId, NvU32 flags, MEMORY_DESCRIPTOR *pMemDesc, NvU32 heapOwner, struct Heap *pHeap, NvU32 attr, NvU32 attr2, NvU32 Pitch, NvU32 type, NvU32 tag, HWRESOURCE_INFO *pHwResource) {
    NV_ASSERT_FAILED_PRECOMP("Memory was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_h_disabled
#define memConstructCommon(pMemory, categoryClassId, flags, pMemDesc, heapOwner, pHeap, attr, attr2, Pitch, type, tag, pHwResource) memConstructCommon_IMPL(pMemory, categoryClassId, flags, pMemDesc, heapOwner, pHeap, attr, attr2, Pitch, type, tag, pHwResource)
#endif //__nvoc_mem_h_disabled

void memDestructCommon_IMPL(struct Memory *pMemory);

#ifdef __nvoc_mem_h_disabled
static inline void memDestructCommon(struct Memory *pMemory) {
    NV_ASSERT_FAILED_PRECOMP("Memory was disabled!");
}
#else //__nvoc_mem_h_disabled
#define memDestructCommon(pMemory) memDestructCommon_IMPL(pMemory)
#endif //__nvoc_mem_h_disabled

NV_STATUS memCreateMemDesc_IMPL(struct OBJGPU *pGpu, MEMORY_DESCRIPTOR **ppMemDesc, NV_ADDRESS_SPACE addrSpace, NvU64 FBOffset, NvU64 length, NvU32 attr, NvU32 attr2);

#define memCreateMemDesc(pGpu, ppMemDesc, addrSpace, FBOffset, length, attr, attr2) memCreateMemDesc_IMPL(pGpu, ppMemDesc, addrSpace, FBOffset, length, attr, attr2)
NV_STATUS memCreateKernelMapping_IMPL(struct Memory *pMemory, NvU32 Protect, NvBool bClear);

#ifdef __nvoc_mem_h_disabled
static inline NV_STATUS memCreateKernelMapping(struct Memory *pMemory, NvU32 Protect, NvBool bClear) {
    NV_ASSERT_FAILED_PRECOMP("Memory was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_h_disabled
#define memCreateKernelMapping(pMemory, Protect, bClear) memCreateKernelMapping_IMPL(pMemory, Protect, bClear)
#endif //__nvoc_mem_h_disabled

// Handle-to-object lookup helpers: resolve an NvHandle to a struct Memory*,
// optionally constrained to a device or a grouped GPU.
NV_STATUS memGetByHandle_IMPL(struct RsClient *pClient, NvHandle hMemory, struct Memory **ppMemory);

#define memGetByHandle(pClient, hMemory, ppMemory) memGetByHandle_IMPL(pClient, hMemory, ppMemory)
NV_STATUS memGetByHandleAndDevice_IMPL(struct RsClient *pClient, NvHandle hMemory, NvHandle hDevice, struct Memory **ppMemory);

#define memGetByHandleAndDevice(pClient, hMemory, hDevice, ppMemory) memGetByHandleAndDevice_IMPL(pClient, hMemory, hDevice, ppMemory)
NV_STATUS memGetByHandleAndGroupedGpu_IMPL(struct RsClient *pClient, NvHandle hMemory, struct OBJGPU *pGpu, struct Memory **ppMemory);

#define memGetByHandleAndGroupedGpu(pClient, hMemory, pGpu, ppMemory) memGetByHandleAndGroupedGpu_IMPL(pClient, hMemory, pGpu, ppMemory)
513 #undef PRIVATE_FIELD
514 
515 
516 #endif
517 
518 
519 #ifdef __cplusplus
520 } // extern "C"
521 #endif
522 
523 #endif // _G_MEM_NVOC_H_
524