1 #ifndef _G_MEM_NVOC_H_
2 #define _G_MEM_NVOC_H_
3 #include "nvoc/runtime.h"
4
5 #ifdef __cplusplus
6 extern "C" {
7 #endif
8
9 /*
10 * SPDX-FileCopyrightText: Copyright (c) 1993-2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
11 * SPDX-License-Identifier: MIT
12 *
13 * Permission is hereby granted, free of charge, to any person obtaining a
14 * copy of this software and associated documentation files (the "Software"),
15 * to deal in the Software without restriction, including without limitation
16 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
17 * and/or sell copies of the Software, and to permit persons to whom the
18 * Software is furnished to do so, subject to the following conditions:
19 *
20 * The above copyright notice and this permission notice shall be included in
21 * all copies or substantial portions of the Software.
22 *
23 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
26 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
28 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
29 * DEALINGS IN THE SOFTWARE.
30 */
31
32 #include "g_mem_nvoc.h"
33
34 #ifndef _MEMORY_API_H_
35 #define _MEMORY_API_H_
36
37 #include "core/core.h"
38 #include "resserv/rs_resource.h"
39 #include "rmapi/rmapi.h"
40 #include "rmapi/resource.h"
41
42 #include "containers/btree.h"
43
44 #include "ctrl/ctrl0041.h"
45
46 struct Device;
47
48 #ifndef __NVOC_CLASS_Device_TYPEDEF__
49 #define __NVOC_CLASS_Device_TYPEDEF__
50 typedef struct Device Device;
51 #endif /* __NVOC_CLASS_Device_TYPEDEF__ */
52
53 #ifndef __nvoc_class_id_Device
54 #define __nvoc_class_id_Device 0xe0ac20
55 #endif /* __nvoc_class_id_Device */
56
57
58 struct Subdevice;
59
60 #ifndef __NVOC_CLASS_Subdevice_TYPEDEF__
61 #define __NVOC_CLASS_Subdevice_TYPEDEF__
62 typedef struct Subdevice Subdevice;
63 #endif /* __NVOC_CLASS_Subdevice_TYPEDEF__ */
64
65 #ifndef __nvoc_class_id_Subdevice
66 #define __nvoc_class_id_Subdevice 0x4b01b3
67 #endif /* __nvoc_class_id_Subdevice */
68
69
70 struct RsClient;
71
72 #ifndef __NVOC_CLASS_RsClient_TYPEDEF__
73 #define __NVOC_CLASS_RsClient_TYPEDEF__
74 typedef struct RsClient RsClient;
75 #endif /* __NVOC_CLASS_RsClient_TYPEDEF__ */
76
77 #ifndef __nvoc_class_id_RsClient
78 #define __nvoc_class_id_RsClient 0x8f87e5
79 #endif /* __nvoc_class_id_RsClient */
80
81
82 struct Heap;
83
84 #ifndef __NVOC_CLASS_Heap_TYPEDEF__
85 #define __NVOC_CLASS_Heap_TYPEDEF__
86 typedef struct Heap Heap;
87 #endif /* __NVOC_CLASS_Heap_TYPEDEF__ */
88
89 #ifndef __nvoc_class_id_Heap
90 #define __nvoc_class_id_Heap 0x556e9a
91 #endif /* __nvoc_class_id_Heap */
92
93
94 struct OBJGPU;
95
96 #ifndef __NVOC_CLASS_OBJGPU_TYPEDEF__
97 #define __NVOC_CLASS_OBJGPU_TYPEDEF__
98 typedef struct OBJGPU OBJGPU;
99 #endif /* __NVOC_CLASS_OBJGPU_TYPEDEF__ */
100
101 #ifndef __nvoc_class_id_OBJGPU
102 #define __nvoc_class_id_OBJGPU 0x7ef3cb
103 #endif /* __nvoc_class_id_OBJGPU */
104
105
106 typedef struct MEMORY_DESCRIPTOR MEMORY_DESCRIPTOR;
107 typedef struct PmuMapping PmuMapping;
108 typedef struct HWRESOURCE_INFO HWRESOURCE_INFO;
109
110 //
111 // vGPU non-stall interrupt info
112 //
113 typedef struct _def_client_vgpu_ns_intr
114 {
115 NvU32 nsSemValue; // Non stall interrupt semaphore value
116 NvU32 nsSemOffset; // Non stall interrupt semaphore offset. Currently it is always 0.
117 NvBool isSemaMemValidationEnabled; // Enable change in Non stall interrupt sema value check
118 // while generating event
119 NvU64 guestDomainId; // guest ID that we need to use to inject interrupt
120 NvU64 guestMSIAddr; // MSI address allocated by guest OS
121 NvU32 guestMSIData; // MSI data value set by guest OS
122 void *pVgpuVfioRef; // Reference to vgpu device in nvidia-vgpu-vfio module
123 void *pEventDpc; // DPC event to pass the interrupt
124 } VGPU_NS_INTR;
125
// Intrusive circular doubly-linked list node linking Memory objects.
// Used as Memory::dupListItem, which chains duplicates of an allocation.
typedef struct
{
    struct Memory *pNext;
    struct Memory *pPrev;
} memCircularListItem;
131
132 /*!
133 * RM internal class representing NV01_MEMORY_XXX
134 *
135 * @note Memory cannot be a GpuResource because NoDeviceMemory
136 * subclass is not allocated under a device.
137 */
138
// Private field names are wrapped in PRIVATE_FIELD, which does nothing for
// the matching C source file, but causes diagnostics to be issued if another
// source file references the field.
// NVOC_MEM_H_PRIVATE_ACCESS_ALLOWED is defined only by the owning translation
// unit; everyone else gets the diagnostic-emitting NVOC_PRIVATE_FIELD form.
#ifdef NVOC_MEM_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
147
/*!
 * NVOC object layout for the Memory class (NV01_MEMORY_XXX).
 * Generated structure: do not reorder fields by hand.
 */
struct Memory {
    // NVOC metadata: RTTI pointer, embedded base-class instance, and cached
    // "pbase" pointers to every ancestor for constant-time upcasts.
    const struct NVOC_RTTI *__nvoc_rtti;
    struct RmResource __nvoc_base_RmResource;
    struct Object *__nvoc_pbase_Object;
    struct RsResource *__nvoc_pbase_RsResource;
    struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
    struct RmResource *__nvoc_pbase_RmResource;
    struct Memory *__nvoc_pbase_Memory;
    // Per-object vtable. Entries are invoked through the matching
    // *_DISPATCH static inline thunks defined later in this header.
    NV_STATUS (*__memIsDuplicate__)(struct Memory *, NvHandle, NvBool *);
    NV_STATUS (*__memGetMapAddrSpace__)(struct Memory *, CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
    NV_STATUS (*__memControl__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    NV_STATUS (*__memMap__)(struct Memory *, CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, RsCpuMapping *);
    NV_STATUS (*__memUnmap__)(struct Memory *, CALL_CONTEXT *, RsCpuMapping *);
    NV_STATUS (*__memGetMemInterMapParams__)(struct Memory *, RMRES_MEM_INTER_MAP_PARAMS *);
    NV_STATUS (*__memCheckMemInterUnmap__)(struct Memory *, NvBool);
    NV_STATUS (*__memGetMemoryMappingDescriptor__)(struct Memory *, MEMORY_DESCRIPTOR **);
    NV_STATUS (*__memCheckCopyPermissions__)(struct Memory *, struct OBJGPU *, struct Device *);
    NV_STATUS (*__memIsReady__)(struct Memory *, NvBool);
    NvBool (*__memIsGpuMapAllowed__)(struct Memory *, struct OBJGPU *);
    NvBool (*__memIsExportAllowed__)(struct Memory *);
    // NV0041 control-command handlers.
    NV_STATUS (*__memCtrlCmdGetSurfaceCompressionCoverageLvm__)(struct Memory *, NV0041_CTRL_GET_SURFACE_COMPRESSION_COVERAGE_PARAMS *);
    NV_STATUS (*__memCtrlCmdGetSurfaceInfoLvm__)(struct Memory *, NV0041_CTRL_GET_SURFACE_INFO_PARAMS *);
    NV_STATUS (*__memCtrlCmdSurfaceFlushGpuCache__)(struct Memory *, NV0041_CTRL_SURFACE_FLUSH_GPU_CACHE_PARAMS *);
    NV_STATUS (*__memCtrlCmdGetMemPageSize__)(struct Memory *, NV0041_CTRL_GET_MEM_PAGE_SIZE_PARAMS *);
    NV_STATUS (*__memCtrlCmdSetTag__)(struct Memory *, NV0041_CTRL_CMD_SET_TAG_PARAMS *);
    NV_STATUS (*__memCtrlCmdGetTag__)(struct Memory *, NV0041_CTRL_CMD_GET_TAG_PARAMS *);
    NV_STATUS (*__memCtrlCmdGetSurfacePhysAttrLvm__)(struct Memory *, NV0041_CTRL_GET_SURFACE_PHYS_ATTR_PARAMS *);
    // Virtuals inherited from RmResource / RsResource.
    NvBool (*__memShareCallback__)(struct Memory *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
    NvU32 (*__memGetRefCount__)(struct Memory *);
    NV_STATUS (*__memControlFilter__)(struct Memory *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    void (*__memAddAdditionalDependants__)(struct RsClient *, struct Memory *, RsResourceRef *);
    NV_STATUS (*__memControlSerialization_Prologue__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    NV_STATUS (*__memControl_Prologue__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    NvBool (*__memCanCopy__)(struct Memory *);
    NvBool (*__memIsPartialUnmapSupported__)(struct Memory *);
    void (*__memPreDestruct__)(struct Memory *);
    NV_STATUS (*__memMapTo__)(struct Memory *, RS_RES_MAP_TO_PARAMS *);
    void (*__memControlSerialization_Epilogue__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    void (*__memControl_Epilogue__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    NV_STATUS (*__memUnmapFrom__)(struct Memory *, RS_RES_UNMAP_FROM_PARAMS *);
    NvBool (*__memAccessCallback__)(struct Memory *, struct RsClient *, void *, RsAccessRight);
    // Instance state.
    NvBool bConstructed;
    struct Device *pDevice;
    struct Subdevice *pSubDevice;
    struct OBJGPU *pGpu;
    NvBool bBcResource;
    NvU32 categoryClassId;
    NvU64 Length; // allocation length -- presumably bytes; confirm in mem.c
    NvU32 HeapOwner;
    NvU32 RefCount;
    struct Heap *pHeap;
    MEMORY_DESCRIPTOR *pMemDesc;
    NvBool isMemDescOwner; // presumably NV_TRUE when this object owns pMemDesc -- confirm in mem.c
    memCircularListItem dupListItem; // circular-list linkage (see memCircularListItem)
    NvP64 KernelVAddr;
    NvP64 KernelMapPriv;
    PmuMapping *pPmuMappingList;
    NODE Node;
    NvU32 Attr;
    NvU32 Attr2;
    NvU32 Pitch;
    NvU32 Type;
    NvU32 Flags;
    NvU32 tag;
    NvU64 osDeviceHandle;
    HWRESOURCE_INFO *pHwResource;
    NvBool bRpcAlloc;
    VGPU_NS_INTR vgpuNsIntr; // vGPU non-stall interrupt info
};
217
218 #ifndef __NVOC_CLASS_Memory_TYPEDEF__
219 #define __NVOC_CLASS_Memory_TYPEDEF__
220 typedef struct Memory Memory;
221 #endif /* __NVOC_CLASS_Memory_TYPEDEF__ */
222
223 #ifndef __nvoc_class_id_Memory
224 #define __nvoc_class_id_Memory 0x4789f2
225 #endif /* __nvoc_class_id_Memory */
226
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Memory;

// NVOC cast helpers: static cast uses the cached pbase pointer; dynamic cast
// goes through the NVOC RTTI machinery (and yields NULL when the header is
// compiled with __nvoc_mem_h_disabled).
#define __staticCast_Memory(pThis) \
    ((pThis)->__nvoc_pbase_Memory)

#ifdef __nvoc_mem_h_disabled
#define __dynamicCast_Memory(pThis) ((Memory*)NULL)
#else //__nvoc_mem_h_disabled
#define __dynamicCast_Memory(pThis) \
    ((Memory*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(Memory)))
#endif //__nvoc_mem_h_disabled


// NVOC object construction entry points.
NV_STATUS __nvoc_objCreateDynamic_Memory(Memory**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_Memory(Memory**, Dynamic*, NvU32, CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_Memory(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
    __nvoc_objCreate_Memory((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
245
// Public call macros: each mem*() entry point expands to its *_DISPATCH
// inline thunk, which invokes the per-object function pointer.
#define memIsDuplicate(pMemory, hMemory, pDuplicate) memIsDuplicate_DISPATCH(pMemory, hMemory, pDuplicate)
#define memGetMapAddrSpace(pMemory, pCallContext, mapFlags, pAddrSpace) memGetMapAddrSpace_DISPATCH(pMemory, pCallContext, mapFlags, pAddrSpace)
#define memControl(pMemory, pCallContext, pParams) memControl_DISPATCH(pMemory, pCallContext, pParams)
#define memMap(pMemory, pCallContext, pParams, pCpuMapping) memMap_DISPATCH(pMemory, pCallContext, pParams, pCpuMapping)
#define memUnmap(pMemory, pCallContext, pCpuMapping) memUnmap_DISPATCH(pMemory, pCallContext, pCpuMapping)
#define memGetMemInterMapParams(pMemory, pParams) memGetMemInterMapParams_DISPATCH(pMemory, pParams)
#define memCheckMemInterUnmap(pMemory, bSubdeviceHandleProvided) memCheckMemInterUnmap_DISPATCH(pMemory, bSubdeviceHandleProvided)
#define memGetMemoryMappingDescriptor(pMemory, ppMemDesc) memGetMemoryMappingDescriptor_DISPATCH(pMemory, ppMemDesc)
#define memCheckCopyPermissions(pMemory, pDstGpu, pDstDevice) memCheckCopyPermissions_DISPATCH(pMemory, pDstGpu, pDstDevice)
#define memIsReady(pMemory, bCopyConstructorContext) memIsReady_DISPATCH(pMemory, bCopyConstructorContext)
#define memIsGpuMapAllowed(pMemory, pGpu) memIsGpuMapAllowed_DISPATCH(pMemory, pGpu)
#define memIsExportAllowed(pMemory) memIsExportAllowed_DISPATCH(pMemory)
// NV0041 control-command entry points.
#define memCtrlCmdGetSurfaceCompressionCoverageLvm(pMemory, pParams) memCtrlCmdGetSurfaceCompressionCoverageLvm_DISPATCH(pMemory, pParams)
#define memCtrlCmdGetSurfaceInfoLvm(pMemory, pSurfaceInfoParams) memCtrlCmdGetSurfaceInfoLvm_DISPATCH(pMemory, pSurfaceInfoParams)
#define memCtrlCmdSurfaceFlushGpuCache(pMemory, pCacheFlushParams) memCtrlCmdSurfaceFlushGpuCache_DISPATCH(pMemory, pCacheFlushParams)
#define memCtrlCmdGetMemPageSize(pMemory, pPageSizeParams) memCtrlCmdGetMemPageSize_DISPATCH(pMemory, pPageSizeParams)
#define memCtrlCmdSetTag(pMemory, pParams) memCtrlCmdSetTag_DISPATCH(pMemory, pParams)
#define memCtrlCmdGetTag(pMemory, pParams) memCtrlCmdGetTag_DISPATCH(pMemory, pParams)
#define memCtrlCmdGetSurfacePhysAttrLvm(pMemory, pGPAP) memCtrlCmdGetSurfacePhysAttrLvm_DISPATCH(pMemory, pGPAP)
// Entry points for virtuals inherited from RmResource / RsResource.
#define memShareCallback(pResource, pInvokingClient, pParentRef, pSharePolicy) memShareCallback_DISPATCH(pResource, pInvokingClient, pParentRef, pSharePolicy)
#define memGetRefCount(pResource) memGetRefCount_DISPATCH(pResource)
#define memControlFilter(pResource, pCallContext, pParams) memControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define memAddAdditionalDependants(pClient, pResource, pReference) memAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define memControlSerialization_Prologue(pResource, pCallContext, pParams) memControlSerialization_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define memControl_Prologue(pResource, pCallContext, pParams) memControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define memCanCopy(pResource) memCanCopy_DISPATCH(pResource)
#define memIsPartialUnmapSupported(pResource) memIsPartialUnmapSupported_DISPATCH(pResource)
#define memPreDestruct(pResource) memPreDestruct_DISPATCH(pResource)
#define memMapTo(pResource, pParams) memMapTo_DISPATCH(pResource, pParams)
#define memControlSerialization_Epilogue(pResource, pCallContext, pParams) memControlSerialization_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define memControl_Epilogue(pResource, pCallContext, pParams) memControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define memUnmapFrom(pResource, pParams) memUnmapFrom_DISPATCH(pResource, pParams)
#define memAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) memAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
279 NV_STATUS memIsDuplicate_IMPL(struct Memory *pMemory, NvHandle hMemory, NvBool *pDuplicate);
280
memIsDuplicate_DISPATCH(struct Memory * pMemory,NvHandle hMemory,NvBool * pDuplicate)281 static inline NV_STATUS memIsDuplicate_DISPATCH(struct Memory *pMemory, NvHandle hMemory, NvBool *pDuplicate) {
282 return pMemory->__memIsDuplicate__(pMemory, hMemory, pDuplicate);
283 }
284
285 NV_STATUS memGetMapAddrSpace_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace);
286
memGetMapAddrSpace_DISPATCH(struct Memory * pMemory,CALL_CONTEXT * pCallContext,NvU32 mapFlags,NV_ADDRESS_SPACE * pAddrSpace)287 static inline NV_STATUS memGetMapAddrSpace_DISPATCH(struct Memory *pMemory, CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
288 return pMemory->__memGetMapAddrSpace__(pMemory, pCallContext, mapFlags, pAddrSpace);
289 }
290
291 NV_STATUS memControl_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams);
292
memControl_DISPATCH(struct Memory * pMemory,CALL_CONTEXT * pCallContext,struct RS_RES_CONTROL_PARAMS_INTERNAL * pParams)293 static inline NV_STATUS memControl_DISPATCH(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
294 return pMemory->__memControl__(pMemory, pCallContext, pParams);
295 }
296
297 NV_STATUS memMap_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping);
298
memMap_DISPATCH(struct Memory * pMemory,CALL_CONTEXT * pCallContext,struct RS_CPU_MAP_PARAMS * pParams,RsCpuMapping * pCpuMapping)299 static inline NV_STATUS memMap_DISPATCH(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
300 return pMemory->__memMap__(pMemory, pCallContext, pParams, pCpuMapping);
301 }
302
303 NV_STATUS memUnmap_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping);
304
memUnmap_DISPATCH(struct Memory * pMemory,CALL_CONTEXT * pCallContext,RsCpuMapping * pCpuMapping)305 static inline NV_STATUS memUnmap_DISPATCH(struct Memory *pMemory, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
306 return pMemory->__memUnmap__(pMemory, pCallContext, pCpuMapping);
307 }
308
309 NV_STATUS memGetMemInterMapParams_IMPL(struct Memory *pMemory, RMRES_MEM_INTER_MAP_PARAMS *pParams);
310
memGetMemInterMapParams_DISPATCH(struct Memory * pMemory,RMRES_MEM_INTER_MAP_PARAMS * pParams)311 static inline NV_STATUS memGetMemInterMapParams_DISPATCH(struct Memory *pMemory, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
312 return pMemory->__memGetMemInterMapParams__(pMemory, pParams);
313 }
314
memCheckMemInterUnmap_ac1694(struct Memory * pMemory,NvBool bSubdeviceHandleProvided)315 static inline NV_STATUS memCheckMemInterUnmap_ac1694(struct Memory *pMemory, NvBool bSubdeviceHandleProvided) {
316 return NV_OK;
317 }
318
memCheckMemInterUnmap_DISPATCH(struct Memory * pMemory,NvBool bSubdeviceHandleProvided)319 static inline NV_STATUS memCheckMemInterUnmap_DISPATCH(struct Memory *pMemory, NvBool bSubdeviceHandleProvided) {
320 return pMemory->__memCheckMemInterUnmap__(pMemory, bSubdeviceHandleProvided);
321 }
322
323 NV_STATUS memGetMemoryMappingDescriptor_IMPL(struct Memory *pMemory, MEMORY_DESCRIPTOR **ppMemDesc);
324
memGetMemoryMappingDescriptor_DISPATCH(struct Memory * pMemory,MEMORY_DESCRIPTOR ** ppMemDesc)325 static inline NV_STATUS memGetMemoryMappingDescriptor_DISPATCH(struct Memory *pMemory, MEMORY_DESCRIPTOR **ppMemDesc) {
326 return pMemory->__memGetMemoryMappingDescriptor__(pMemory, ppMemDesc);
327 }
328
memCheckCopyPermissions_ac1694(struct Memory * pMemory,struct OBJGPU * pDstGpu,struct Device * pDstDevice)329 static inline NV_STATUS memCheckCopyPermissions_ac1694(struct Memory *pMemory, struct OBJGPU *pDstGpu, struct Device *pDstDevice) {
330 return NV_OK;
331 }
332
memCheckCopyPermissions_DISPATCH(struct Memory * pMemory,struct OBJGPU * pDstGpu,struct Device * pDstDevice)333 static inline NV_STATUS memCheckCopyPermissions_DISPATCH(struct Memory *pMemory, struct OBJGPU *pDstGpu, struct Device *pDstDevice) {
334 return pMemory->__memCheckCopyPermissions__(pMemory, pDstGpu, pDstDevice);
335 }
336
337 NV_STATUS memIsReady_IMPL(struct Memory *pMemory, NvBool bCopyConstructorContext);
338
memIsReady_DISPATCH(struct Memory * pMemory,NvBool bCopyConstructorContext)339 static inline NV_STATUS memIsReady_DISPATCH(struct Memory *pMemory, NvBool bCopyConstructorContext) {
340 return pMemory->__memIsReady__(pMemory, bCopyConstructorContext);
341 }
342
memIsGpuMapAllowed_0c883b(struct Memory * pMemory,struct OBJGPU * pGpu)343 static inline NvBool memIsGpuMapAllowed_0c883b(struct Memory *pMemory, struct OBJGPU *pGpu) {
344 return ((NvBool)(0 == 0));
345 }
346
memIsGpuMapAllowed_DISPATCH(struct Memory * pMemory,struct OBJGPU * pGpu)347 static inline NvBool memIsGpuMapAllowed_DISPATCH(struct Memory *pMemory, struct OBJGPU *pGpu) {
348 return pMemory->__memIsGpuMapAllowed__(pMemory, pGpu);
349 }
350
memIsExportAllowed_0c883b(struct Memory * pMemory)351 static inline NvBool memIsExportAllowed_0c883b(struct Memory *pMemory) {
352 return ((NvBool)(0 == 0));
353 }
354
memIsExportAllowed_DISPATCH(struct Memory * pMemory)355 static inline NvBool memIsExportAllowed_DISPATCH(struct Memory *pMemory) {
356 return pMemory->__memIsExportAllowed__(pMemory);
357 }
358
359 NV_STATUS memCtrlCmdGetSurfaceCompressionCoverageLvm_IMPL(struct Memory *pMemory, NV0041_CTRL_GET_SURFACE_COMPRESSION_COVERAGE_PARAMS *pParams);
360
memCtrlCmdGetSurfaceCompressionCoverageLvm_DISPATCH(struct Memory * pMemory,NV0041_CTRL_GET_SURFACE_COMPRESSION_COVERAGE_PARAMS * pParams)361 static inline NV_STATUS memCtrlCmdGetSurfaceCompressionCoverageLvm_DISPATCH(struct Memory *pMemory, NV0041_CTRL_GET_SURFACE_COMPRESSION_COVERAGE_PARAMS *pParams) {
362 return pMemory->__memCtrlCmdGetSurfaceCompressionCoverageLvm__(pMemory, pParams);
363 }
364
365 NV_STATUS memCtrlCmdGetSurfaceInfoLvm_IMPL(struct Memory *pMemory, NV0041_CTRL_GET_SURFACE_INFO_PARAMS *pSurfaceInfoParams);
366
memCtrlCmdGetSurfaceInfoLvm_DISPATCH(struct Memory * pMemory,NV0041_CTRL_GET_SURFACE_INFO_PARAMS * pSurfaceInfoParams)367 static inline NV_STATUS memCtrlCmdGetSurfaceInfoLvm_DISPATCH(struct Memory *pMemory, NV0041_CTRL_GET_SURFACE_INFO_PARAMS *pSurfaceInfoParams) {
368 return pMemory->__memCtrlCmdGetSurfaceInfoLvm__(pMemory, pSurfaceInfoParams);
369 }
370
371 NV_STATUS memCtrlCmdSurfaceFlushGpuCache_IMPL(struct Memory *pMemory, NV0041_CTRL_SURFACE_FLUSH_GPU_CACHE_PARAMS *pCacheFlushParams);
372
memCtrlCmdSurfaceFlushGpuCache_DISPATCH(struct Memory * pMemory,NV0041_CTRL_SURFACE_FLUSH_GPU_CACHE_PARAMS * pCacheFlushParams)373 static inline NV_STATUS memCtrlCmdSurfaceFlushGpuCache_DISPATCH(struct Memory *pMemory, NV0041_CTRL_SURFACE_FLUSH_GPU_CACHE_PARAMS *pCacheFlushParams) {
374 return pMemory->__memCtrlCmdSurfaceFlushGpuCache__(pMemory, pCacheFlushParams);
375 }
376
377 NV_STATUS memCtrlCmdGetMemPageSize_IMPL(struct Memory *pMemory, NV0041_CTRL_GET_MEM_PAGE_SIZE_PARAMS *pPageSizeParams);
378
memCtrlCmdGetMemPageSize_DISPATCH(struct Memory * pMemory,NV0041_CTRL_GET_MEM_PAGE_SIZE_PARAMS * pPageSizeParams)379 static inline NV_STATUS memCtrlCmdGetMemPageSize_DISPATCH(struct Memory *pMemory, NV0041_CTRL_GET_MEM_PAGE_SIZE_PARAMS *pPageSizeParams) {
380 return pMemory->__memCtrlCmdGetMemPageSize__(pMemory, pPageSizeParams);
381 }
382
383 NV_STATUS memCtrlCmdSetTag_IMPL(struct Memory *pMemory, NV0041_CTRL_CMD_SET_TAG_PARAMS *pParams);
384
memCtrlCmdSetTag_DISPATCH(struct Memory * pMemory,NV0041_CTRL_CMD_SET_TAG_PARAMS * pParams)385 static inline NV_STATUS memCtrlCmdSetTag_DISPATCH(struct Memory *pMemory, NV0041_CTRL_CMD_SET_TAG_PARAMS *pParams) {
386 return pMemory->__memCtrlCmdSetTag__(pMemory, pParams);
387 }
388
389 NV_STATUS memCtrlCmdGetTag_IMPL(struct Memory *pMemory, NV0041_CTRL_CMD_GET_TAG_PARAMS *pParams);
390
memCtrlCmdGetTag_DISPATCH(struct Memory * pMemory,NV0041_CTRL_CMD_GET_TAG_PARAMS * pParams)391 static inline NV_STATUS memCtrlCmdGetTag_DISPATCH(struct Memory *pMemory, NV0041_CTRL_CMD_GET_TAG_PARAMS *pParams) {
392 return pMemory->__memCtrlCmdGetTag__(pMemory, pParams);
393 }
394
395 NV_STATUS memCtrlCmdGetSurfacePhysAttrLvm_IMPL(struct Memory *pMemory, NV0041_CTRL_GET_SURFACE_PHYS_ATTR_PARAMS *pGPAP);
396
memCtrlCmdGetSurfacePhysAttrLvm_DISPATCH(struct Memory * pMemory,NV0041_CTRL_GET_SURFACE_PHYS_ATTR_PARAMS * pGPAP)397 static inline NV_STATUS memCtrlCmdGetSurfacePhysAttrLvm_DISPATCH(struct Memory *pMemory, NV0041_CTRL_GET_SURFACE_PHYS_ATTR_PARAMS *pGPAP) {
398 return pMemory->__memCtrlCmdGetSurfacePhysAttrLvm__(pMemory, pGPAP);
399 }
400
memShareCallback_DISPATCH(struct Memory * pResource,struct RsClient * pInvokingClient,struct RsResourceRef * pParentRef,RS_SHARE_POLICY * pSharePolicy)401 static inline NvBool memShareCallback_DISPATCH(struct Memory *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
402 return pResource->__memShareCallback__(pResource, pInvokingClient, pParentRef, pSharePolicy);
403 }
404
memGetRefCount_DISPATCH(struct Memory * pResource)405 static inline NvU32 memGetRefCount_DISPATCH(struct Memory *pResource) {
406 return pResource->__memGetRefCount__(pResource);
407 }
408
memControlFilter_DISPATCH(struct Memory * pResource,struct CALL_CONTEXT * pCallContext,struct RS_RES_CONTROL_PARAMS_INTERNAL * pParams)409 static inline NV_STATUS memControlFilter_DISPATCH(struct Memory *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
410 return pResource->__memControlFilter__(pResource, pCallContext, pParams);
411 }
412
memAddAdditionalDependants_DISPATCH(struct RsClient * pClient,struct Memory * pResource,RsResourceRef * pReference)413 static inline void memAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct Memory *pResource, RsResourceRef *pReference) {
414 pResource->__memAddAdditionalDependants__(pClient, pResource, pReference);
415 }
416
memControlSerialization_Prologue_DISPATCH(struct Memory * pResource,CALL_CONTEXT * pCallContext,struct RS_RES_CONTROL_PARAMS_INTERNAL * pParams)417 static inline NV_STATUS memControlSerialization_Prologue_DISPATCH(struct Memory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
418 return pResource->__memControlSerialization_Prologue__(pResource, pCallContext, pParams);
419 }
420
memControl_Prologue_DISPATCH(struct Memory * pResource,CALL_CONTEXT * pCallContext,struct RS_RES_CONTROL_PARAMS_INTERNAL * pParams)421 static inline NV_STATUS memControl_Prologue_DISPATCH(struct Memory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
422 return pResource->__memControl_Prologue__(pResource, pCallContext, pParams);
423 }
424
memCanCopy_DISPATCH(struct Memory * pResource)425 static inline NvBool memCanCopy_DISPATCH(struct Memory *pResource) {
426 return pResource->__memCanCopy__(pResource);
427 }
428
memIsPartialUnmapSupported_DISPATCH(struct Memory * pResource)429 static inline NvBool memIsPartialUnmapSupported_DISPATCH(struct Memory *pResource) {
430 return pResource->__memIsPartialUnmapSupported__(pResource);
431 }
432
memPreDestruct_DISPATCH(struct Memory * pResource)433 static inline void memPreDestruct_DISPATCH(struct Memory *pResource) {
434 pResource->__memPreDestruct__(pResource);
435 }
436
memMapTo_DISPATCH(struct Memory * pResource,RS_RES_MAP_TO_PARAMS * pParams)437 static inline NV_STATUS memMapTo_DISPATCH(struct Memory *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
438 return pResource->__memMapTo__(pResource, pParams);
439 }
440
memControlSerialization_Epilogue_DISPATCH(struct Memory * pResource,CALL_CONTEXT * pCallContext,struct RS_RES_CONTROL_PARAMS_INTERNAL * pParams)441 static inline void memControlSerialization_Epilogue_DISPATCH(struct Memory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
442 pResource->__memControlSerialization_Epilogue__(pResource, pCallContext, pParams);
443 }
444
memControl_Epilogue_DISPATCH(struct Memory * pResource,CALL_CONTEXT * pCallContext,struct RS_RES_CONTROL_PARAMS_INTERNAL * pParams)445 static inline void memControl_Epilogue_DISPATCH(struct Memory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
446 pResource->__memControl_Epilogue__(pResource, pCallContext, pParams);
447 }
448
memUnmapFrom_DISPATCH(struct Memory * pResource,RS_RES_UNMAP_FROM_PARAMS * pParams)449 static inline NV_STATUS memUnmapFrom_DISPATCH(struct Memory *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
450 return pResource->__memUnmapFrom__(pResource, pParams);
451 }
452
memAccessCallback_DISPATCH(struct Memory * pResource,struct RsClient * pInvokingClient,void * pAllocParams,RsAccessRight accessRight)453 static inline NvBool memAccessCallback_DISPATCH(struct Memory *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
454 return pResource->__memAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
455 }
456
457 NV_STATUS memConstruct_IMPL(struct Memory *arg_pMemory, CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
458
459 #define __nvoc_memConstruct(arg_pMemory, arg_pCallContext, arg_pParams) memConstruct_IMPL(arg_pMemory, arg_pCallContext, arg_pParams)
460 NV_STATUS memCopyConstruct_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *pParams);
461
462 #ifdef __nvoc_mem_h_disabled
memCopyConstruct(struct Memory * pMemory,CALL_CONTEXT * pCallContext,struct RS_RES_ALLOC_PARAMS_INTERNAL * pParams)463 static inline NV_STATUS memCopyConstruct(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *pParams) {
464 NV_ASSERT_FAILED_PRECOMP("Memory was disabled!");
465 return NV_ERR_NOT_SUPPORTED;
466 }
467 #else //__nvoc_mem_h_disabled
468 #define memCopyConstruct(pMemory, pCallContext, pParams) memCopyConstruct_IMPL(pMemory, pCallContext, pParams)
469 #endif //__nvoc_mem_h_disabled
470
471 void memDestruct_IMPL(struct Memory *pMemory);
472
473 #define __nvoc_memDestruct(pMemory) memDestruct_IMPL(pMemory)
474 NV_STATUS memConstructCommon_IMPL(struct Memory *pMemory, NvU32 categoryClassId, NvU32 flags, MEMORY_DESCRIPTOR *pMemDesc, NvU32 heapOwner, struct Heap *pHeap, NvU32 attr, NvU32 attr2, NvU32 Pitch, NvU32 type, NvU32 tag, HWRESOURCE_INFO *pHwResource);
475
476 #ifdef __nvoc_mem_h_disabled
memConstructCommon(struct Memory * pMemory,NvU32 categoryClassId,NvU32 flags,MEMORY_DESCRIPTOR * pMemDesc,NvU32 heapOwner,struct Heap * pHeap,NvU32 attr,NvU32 attr2,NvU32 Pitch,NvU32 type,NvU32 tag,HWRESOURCE_INFO * pHwResource)477 static inline NV_STATUS memConstructCommon(struct Memory *pMemory, NvU32 categoryClassId, NvU32 flags, MEMORY_DESCRIPTOR *pMemDesc, NvU32 heapOwner, struct Heap *pHeap, NvU32 attr, NvU32 attr2, NvU32 Pitch, NvU32 type, NvU32 tag, HWRESOURCE_INFO *pHwResource) {
478 NV_ASSERT_FAILED_PRECOMP("Memory was disabled!");
479 return NV_ERR_NOT_SUPPORTED;
480 }
481 #else //__nvoc_mem_h_disabled
482 #define memConstructCommon(pMemory, categoryClassId, flags, pMemDesc, heapOwner, pHeap, attr, attr2, Pitch, type, tag, pHwResource) memConstructCommon_IMPL(pMemory, categoryClassId, flags, pMemDesc, heapOwner, pHeap, attr, attr2, Pitch, type, tag, pHwResource)
483 #endif //__nvoc_mem_h_disabled
484
// Common-destruction helper; asserting no-op stub when Memory is disabled.
void memDestructCommon_IMPL(struct Memory *pMemory);

#ifdef __nvoc_mem_h_disabled
static inline void memDestructCommon(struct Memory *pMemory) {
    NV_ASSERT_FAILED_PRECOMP("Memory was disabled!");
}
#else //__nvoc_mem_h_disabled
#define memDestructCommon(pMemory) memDestructCommon_IMPL(pMemory)
#endif //__nvoc_mem_h_disabled
494
495 NV_STATUS memCreateMemDesc_IMPL(struct OBJGPU *pGpu, MEMORY_DESCRIPTOR **ppMemDesc, NV_ADDRESS_SPACE addrSpace, NvU64 FBOffset, NvU64 length, NvU32 attr, NvU32 attr2);
496
497 #define memCreateMemDesc(pGpu, ppMemDesc, addrSpace, FBOffset, length, attr, attr2) memCreateMemDesc_IMPL(pGpu, ppMemDesc, addrSpace, FBOffset, length, attr, attr2)
498 NV_STATUS memCreateKernelMapping_IMPL(struct Memory *pMemory, NvU32 Protect, NvBool bClear);
499
500 #ifdef __nvoc_mem_h_disabled
memCreateKernelMapping(struct Memory * pMemory,NvU32 Protect,NvBool bClear)501 static inline NV_STATUS memCreateKernelMapping(struct Memory *pMemory, NvU32 Protect, NvBool bClear) {
502 NV_ASSERT_FAILED_PRECOMP("Memory was disabled!");
503 return NV_ERR_NOT_SUPPORTED;
504 }
505 #else //__nvoc_mem_h_disabled
506 #define memCreateKernelMapping(pMemory, Protect, bClear) memCreateKernelMapping_IMPL(pMemory, Protect, bClear)
507 #endif //__nvoc_mem_h_disabled
508
// Handle-to-Memory lookup helpers (implementations in the matching C source
// file); each macro forwards straight to its *_IMPL.
NV_STATUS memGetByHandle_IMPL(struct RsClient *pClient, NvHandle hMemory, struct Memory **ppMemory);

#define memGetByHandle(pClient, hMemory, ppMemory) memGetByHandle_IMPL(pClient, hMemory, ppMemory)
NV_STATUS memGetByHandleAndDevice_IMPL(struct RsClient *pClient, NvHandle hMemory, NvHandle hDevice, struct Memory **ppMemory);

#define memGetByHandleAndDevice(pClient, hMemory, hDevice, ppMemory) memGetByHandleAndDevice_IMPL(pClient, hMemory, hDevice, ppMemory)
NV_STATUS memGetByHandleAndGroupedGpu_IMPL(struct RsClient *pClient, NvHandle hMemory, struct OBJGPU *pGpu, struct Memory **ppMemory);

#define memGetByHandleAndGroupedGpu(pClient, hMemory, pGpu, ppMemory) memGetByHandleAndGroupedGpu_IMPL(pClient, hMemory, pGpu, ppMemory)
518 #undef PRIVATE_FIELD
519
520
521 #endif
522
523
524 #ifdef __cplusplus
525 } // extern "C"
526 #endif
527
528 #endif // _G_MEM_NVOC_H_
529