1 #ifndef _G_MEM_NVOC_H_ 2 #define _G_MEM_NVOC_H_ 3 #include "nvoc/runtime.h" 4 5 #ifdef __cplusplus 6 extern "C" { 7 #endif 8 9 /* 10 * SPDX-FileCopyrightText: Copyright (c) 1993-2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved. 11 * SPDX-License-Identifier: MIT 12 * 13 * Permission is hereby granted, free of charge, to any person obtaining a 14 * copy of this software and associated documentation files (the "Software"), 15 * to deal in the Software without restriction, including without limitation 16 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 17 * and/or sell copies of the Software, and to permit persons to whom the 18 * Software is furnished to do so, subject to the following conditions: 19 * 20 * The above copyright notice and this permission notice shall be included in 21 * all copies or substantial portions of the Software. 22 * 23 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 24 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 25 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 26 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 27 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 28 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 29 * DEALINGS IN THE SOFTWARE. 
*/

//
// NOTE(review): this header is NVOC-generated (see the __nvoc_ prefixes and
// the _G_MEM_NVOC_H_ guard). Prefer regenerating over hand-editing.
//

// Self-include of this generated header — standard NVOC pattern; the outer
// _G_MEM_NVOC_H_ guard makes the nested inclusion a no-op.
#include "g_mem_nvoc.h"

#ifndef _MEMORY_API_H_
#define _MEMORY_API_H_

#include "core/core.h"
#include "resserv/rs_resource.h"
#include "rmapi/rmapi.h"
#include "rmapi/resource.h"

#include "containers/btree.h"

#include "ctrl/ctrl0041.h"

// NVOC forward declaration + typedef + class-id triple for each class this
// header refers to by pointer only (avoids pulling in the full headers).
struct Device;

#ifndef __NVOC_CLASS_Device_TYPEDEF__
#define __NVOC_CLASS_Device_TYPEDEF__
typedef struct Device Device;
#endif /* __NVOC_CLASS_Device_TYPEDEF__ */

#ifndef __nvoc_class_id_Device
#define __nvoc_class_id_Device 0xe0ac20
#endif /* __nvoc_class_id_Device */


struct Subdevice;

#ifndef __NVOC_CLASS_Subdevice_TYPEDEF__
#define __NVOC_CLASS_Subdevice_TYPEDEF__
typedef struct Subdevice Subdevice;
#endif /* __NVOC_CLASS_Subdevice_TYPEDEF__ */

#ifndef __nvoc_class_id_Subdevice
#define __nvoc_class_id_Subdevice 0x4b01b3
#endif /* __nvoc_class_id_Subdevice */


struct RsClient;

#ifndef __NVOC_CLASS_RsClient_TYPEDEF__
#define __NVOC_CLASS_RsClient_TYPEDEF__
typedef struct RsClient RsClient;
#endif /* __NVOC_CLASS_RsClient_TYPEDEF__ */

#ifndef __nvoc_class_id_RsClient
#define __nvoc_class_id_RsClient 0x8f87e5
#endif /* __nvoc_class_id_RsClient */


struct Heap;

#ifndef __NVOC_CLASS_Heap_TYPEDEF__
#define __NVOC_CLASS_Heap_TYPEDEF__
typedef struct Heap Heap;
#endif /* __NVOC_CLASS_Heap_TYPEDEF__ */

#ifndef __nvoc_class_id_Heap
#define __nvoc_class_id_Heap 0x556e9a
#endif /* __nvoc_class_id_Heap */


struct OBJGPU;

#ifndef __NVOC_CLASS_OBJGPU_TYPEDEF__
#define __NVOC_CLASS_OBJGPU_TYPEDEF__
typedef struct OBJGPU OBJGPU;
#endif /* __NVOC_CLASS_OBJGPU_TYPEDEF__ */

#ifndef __nvoc_class_id_OBJGPU
#define __nvoc_class_id_OBJGPU 0x7ef3cb
#endif /* __nvoc_class_id_OBJGPU */


// Opaque types defined elsewhere in RM; only pointers are used here.
typedef struct MEMORY_DESCRIPTOR MEMORY_DESCRIPTOR;
typedef struct PmuMapping PmuMapping;
typedef struct HWRESOURCE_INFO HWRESOURCE_INFO;

//
// vGPU non-stall interrupt info
//
typedef struct _def_client_vgpu_ns_intr
{
    NvU32  nsSemValue;    // Non stall interrupt semaphore value
    NvU32  nsSemOffset;   // Non stall interrupt semaphore offset. Currently it is always 0.
    NvBool isSemaMemValidationEnabled; // Enable change in Non stall interrupt sema value check
                                       // while generating event
    NvU64  guestDomainId; // guest ID that we need to use to inject interrupt
    NvU64  guestMSIAddr;  // MSI address allocated by guest OS
    NvU32  guestMSIData;  // MSI data value set by guest OS
    void  *pVgpuVfioRef;  // Reference to vgpu device in nvidia-vgpu-vfio module
    void  *pEventDpc;     // DPC event to pass the interrupt
} VGPU_NS_INTR;

// Intrusive circular-list node linking Memory objects; used by the
// Memory::dupListItem member (presumably chains duplicates of the same
// allocation — confirm against memCopyConstruct_IMPL).
typedef struct
{
    struct Memory *pNext;
    struct Memory *pPrev;
} memCircularListItem;

/*!
 * RM internal class representing NV01_MEMORY_XXX
 *
 * @note Memory cannot be a GpuResource because NoDeviceMemory
 *       subclass is not allocated under a device.
 */
// PRIVATE_FIELD: outside privileged translation units, NVOC mangles private
// member names so stray accesses fail to compile.
#ifdef NVOC_MEM_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct Memory {
    // NVOC metadata: RTTI, embedded base-class instance, and pre-computed
    // ancestor pointers for constant-time static casts.
    const struct NVOC_RTTI *__nvoc_rtti;
    struct RmResource __nvoc_base_RmResource;
    struct Object *__nvoc_pbase_Object;
    struct RsResource *__nvoc_pbase_RsResource;
    struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
    struct RmResource *__nvoc_pbase_RmResource;
    struct Memory *__nvoc_pbase_Memory;
    // Per-object virtual method table; the memXxx_DISPATCH inlines below
    // call through these pointers. Order is generator-determined — do not
    // reorder by hand.
    NV_STATUS (*__memIsDuplicate__)(struct Memory *, NvHandle, NvBool *);
    NV_STATUS (*__memGetMapAddrSpace__)(struct Memory *, CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
    NV_STATUS (*__memControl__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    NV_STATUS (*__memMap__)(struct Memory *, CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, RsCpuMapping *);
    NV_STATUS (*__memUnmap__)(struct Memory *, CALL_CONTEXT *, RsCpuMapping *);
    NV_STATUS (*__memGetMemInterMapParams__)(struct Memory *, RMRES_MEM_INTER_MAP_PARAMS *);
    NV_STATUS (*__memCheckMemInterUnmap__)(struct Memory *, NvBool);
    NV_STATUS (*__memGetMemoryMappingDescriptor__)(struct Memory *, MEMORY_DESCRIPTOR **);
    NV_STATUS (*__memCheckCopyPermissions__)(struct Memory *, struct OBJGPU *, struct Device *);
    NV_STATUS (*__memIsReady__)(struct Memory *, NvBool);
    NvBool (*__memIsGpuMapAllowed__)(struct Memory *, struct OBJGPU *);
    // NV0041 control-command handlers (see ctrl/ctrl0041.h for params).
    NV_STATUS (*__memCtrlCmdGetSurfaceCompressionCoverageLvm__)(struct Memory *, NV0041_CTRL_GET_SURFACE_COMPRESSION_COVERAGE_PARAMS *);
    NV_STATUS (*__memCtrlCmdGetSurfaceInfoLvm__)(struct Memory *, NV0041_CTRL_GET_SURFACE_INFO_PARAMS *);
    NV_STATUS (*__memCtrlCmdSurfaceFlushGpuCache__)(struct Memory *, NV0041_CTRL_SURFACE_FLUSH_GPU_CACHE_PARAMS *);
    NV_STATUS (*__memCtrlCmdGetMemPageSize__)(struct Memory *, NV0041_CTRL_GET_MEM_PAGE_SIZE_PARAMS *);
    NV_STATUS (*__memCtrlCmdSetTag__)(struct Memory *, NV0041_CTRL_CMD_SET_TAG_PARAMS *);
    NV_STATUS (*__memCtrlCmdGetTag__)(struct Memory *, NV0041_CTRL_CMD_GET_TAG_PARAMS *);
    // Virtuals inherited from the RsResource/RmResource hierarchy.
    NvBool (*__memShareCallback__)(struct Memory *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
    NvU32 (*__memGetRefCount__)(struct Memory *);
    NV_STATUS (*__memControlFilter__)(struct Memory *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    void (*__memAddAdditionalDependants__)(struct RsClient *, struct Memory *, RsResourceRef *);
    NV_STATUS (*__memUnmapFrom__)(struct Memory *, RS_RES_UNMAP_FROM_PARAMS *);
    NV_STATUS (*__memControlSerialization_Prologue__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    NV_STATUS (*__memControl_Prologue__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    NvBool (*__memCanCopy__)(struct Memory *);
    void (*__memPreDestruct__)(struct Memory *);
    NV_STATUS (*__memMapTo__)(struct Memory *, RS_RES_MAP_TO_PARAMS *);
    void (*__memControlSerialization_Epilogue__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    void (*__memControl_Epilogue__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
    NV_STATUS (*__memControlLookup__)(struct Memory *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
    NvBool (*__memAccessCallback__)(struct Memory *, struct RsClient *, void *, RsAccessRight);
    // Data members.
    NvBool bConstructed;                 // set once NVOC construction completes
    struct Device *pDevice;
    struct Subdevice *pSubDevice;
    struct OBJGPU *pGpu;
    NvBool bBcResource;
    NvU32 categoryClassId;
    NvU64 Length;
    NvU32 HeapOwner;
    NvU32 RefCount;
    struct Heap *pHeap;
    MEMORY_DESCRIPTOR *pMemDesc;
    NvBool isMemDescOwner;               // whether this object owns pMemDesc
    memCircularListItem dupListItem;     // node in the dup list (see type above)
    NvP64 KernelVAddr;                   // see memCreateKernelMapping
    NvP64 KernelMapPriv;
    PmuMapping *pPmuMappingList;
    NODE Node;                           // btree node (containers/btree.h)
    NvU32 Attr;
    NvU32 Attr2;
    NvU32 Pitch;
    NvU32 Type;
    NvU32 Flags;
    NvU32 tag;                           // see memCtrlCmdSetTag/GetTag
    NvU64 osDeviceHandle;
    HWRESOURCE_INFO *pHwResource;
    NvBool bRpcAlloc;                    // NOTE(review): presumably set when the
                                         // allocation was RPCed to the host — confirm
    VGPU_NS_INTR vgpuNsIntr;
};

#ifndef __NVOC_CLASS_Memory_TYPEDEF__
#define __NVOC_CLASS_Memory_TYPEDEF__
typedef struct Memory Memory;
#endif /* __NVOC_CLASS_Memory_TYPEDEF__ */

#ifndef __nvoc_class_id_Memory
#define __nvoc_class_id_Memory 0x4789f2
#endif /* __nvoc_class_id_Memory */

extern const struct NVOC_CLASS_DEF __nvoc_class_def_Memory;

// Cast helpers: static cast uses the pre-computed base pointer; dynamic cast
// consults NVOC RTTI (and is compiled out to NULL when the class is disabled).
#define __staticCast_Memory(pThis) \
    ((pThis)->__nvoc_pbase_Memory)

#ifdef __nvoc_mem_h_disabled
#define __dynamicCast_Memory(pThis) ((Memory*)NULL)
#else //__nvoc_mem_h_disabled
#define __dynamicCast_Memory(pThis) \
    ((Memory*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(Memory)))
#endif //__nvoc_mem_h_disabled


NV_STATUS __nvoc_objCreateDynamic_Memory(Memory**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_Memory(Memory**, Dynamic*, NvU32, CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_Memory(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
    __nvoc_objCreate_Memory((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)

// Public method macros: memFoo(...) forwards to the corresponding
// memFoo_DISPATCH inline, which calls through the object's vtable pointer.
#define memIsDuplicate(pMemory, hMemory, pDuplicate) memIsDuplicate_DISPATCH(pMemory, hMemory, pDuplicate)
#define memGetMapAddrSpace(pMemory, pCallContext, mapFlags, pAddrSpace) memGetMapAddrSpace_DISPATCH(pMemory, pCallContext, mapFlags, pAddrSpace)
#define memControl(pMemory, pCallContext, pParams) memControl_DISPATCH(pMemory, pCallContext, pParams)
#define memMap(pMemory, pCallContext, pParams, pCpuMapping) memMap_DISPATCH(pMemory, pCallContext, pParams, pCpuMapping)
#define memUnmap(pMemory, pCallContext, pCpuMapping) memUnmap_DISPATCH(pMemory, pCallContext, pCpuMapping)
#define memGetMemInterMapParams(pMemory, pParams) memGetMemInterMapParams_DISPATCH(pMemory, pParams)
// Remaining public method macros (continued from above): each forwards to
// its _DISPATCH inline, which calls through the per-object vtable pointer.
#define memCheckMemInterUnmap(pMemory, bSubdeviceHandleProvided) memCheckMemInterUnmap_DISPATCH(pMemory, bSubdeviceHandleProvided)
#define memGetMemoryMappingDescriptor(pMemory, ppMemDesc) memGetMemoryMappingDescriptor_DISPATCH(pMemory, ppMemDesc)
#define memCheckCopyPermissions(pMemory, pDstGpu, pDstDevice) memCheckCopyPermissions_DISPATCH(pMemory, pDstGpu, pDstDevice)
#define memIsReady(pMemory, bCopyConstructorContext) memIsReady_DISPATCH(pMemory, bCopyConstructorContext)
#define memIsGpuMapAllowed(pMemory, pGpu) memIsGpuMapAllowed_DISPATCH(pMemory, pGpu)
#define memCtrlCmdGetSurfaceCompressionCoverageLvm(pMemory, pParams) memCtrlCmdGetSurfaceCompressionCoverageLvm_DISPATCH(pMemory, pParams)
#define memCtrlCmdGetSurfaceInfoLvm(pMemory, pSurfaceInfoParams) memCtrlCmdGetSurfaceInfoLvm_DISPATCH(pMemory, pSurfaceInfoParams)
#define memCtrlCmdSurfaceFlushGpuCache(pMemory, pCacheFlushParams) memCtrlCmdSurfaceFlushGpuCache_DISPATCH(pMemory, pCacheFlushParams)
#define memCtrlCmdGetMemPageSize(pMemory, pPageSizeParams) memCtrlCmdGetMemPageSize_DISPATCH(pMemory, pPageSizeParams)
#define memCtrlCmdSetTag(pMemory, pParams) memCtrlCmdSetTag_DISPATCH(pMemory, pParams)
#define memCtrlCmdGetTag(pMemory, pParams) memCtrlCmdGetTag_DISPATCH(pMemory, pParams)
#define memShareCallback(pResource, pInvokingClient, pParentRef, pSharePolicy) memShareCallback_DISPATCH(pResource, pInvokingClient, pParentRef, pSharePolicy)
#define memGetRefCount(pResource) memGetRefCount_DISPATCH(pResource)
#define memControlFilter(pResource, pCallContext, pParams) memControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define memAddAdditionalDependants(pClient, pResource, pReference) memAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define memUnmapFrom(pResource, pParams) memUnmapFrom_DISPATCH(pResource, pParams)
#define memControlSerialization_Prologue(pResource, pCallContext, pParams) memControlSerialization_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define memControl_Prologue(pResource, pCallContext, pParams) memControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define memCanCopy(pResource) memCanCopy_DISPATCH(pResource)
#define memPreDestruct(pResource) memPreDestruct_DISPATCH(pResource)
#define memMapTo(pResource, pParams) memMapTo_DISPATCH(pResource, pParams)
#define memControlSerialization_Epilogue(pResource, pCallContext, pParams) memControlSerialization_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define memControl_Epilogue(pResource, pCallContext, pParams) memControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define memControlLookup(pResource, pParams, ppEntry) memControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define memAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) memAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)

// For each virtual: the _IMPL prototype (defined in mem.c) and the _DISPATCH
// inline that calls through the object's function pointer. Implementations
// with a hash-style suffix (e.g. _ac1694) are NVOC-generated defaults.

NV_STATUS memIsDuplicate_IMPL(struct Memory *pMemory, NvHandle hMemory, NvBool *pDuplicate);

static inline NV_STATUS memIsDuplicate_DISPATCH(struct Memory *pMemory, NvHandle hMemory, NvBool *pDuplicate) {
    return pMemory->__memIsDuplicate__(pMemory, hMemory, pDuplicate);
}

NV_STATUS memGetMapAddrSpace_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace);

static inline NV_STATUS memGetMapAddrSpace_DISPATCH(struct Memory *pMemory, CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
    return pMemory->__memGetMapAddrSpace__(pMemory, pCallContext, mapFlags, pAddrSpace);
}

NV_STATUS memControl_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams);

static inline NV_STATUS memControl_DISPATCH(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pMemory->__memControl__(pMemory, pCallContext, pParams);
}

NV_STATUS memMap_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping);

static inline NV_STATUS memMap_DISPATCH(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
    return pMemory->__memMap__(pMemory, pCallContext, pParams, pCpuMapping);
}

NV_STATUS memUnmap_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping);

static inline NV_STATUS memUnmap_DISPATCH(struct Memory *pMemory, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
    return pMemory->__memUnmap__(pMemory, pCallContext, pCpuMapping);
}

NV_STATUS memGetMemInterMapParams_IMPL(struct Memory *pMemory, RMRES_MEM_INTER_MAP_PARAMS *pParams);

static inline NV_STATUS memGetMemInterMapParams_DISPATCH(struct Memory *pMemory, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
    return pMemory->__memGetMemInterMapParams__(pMemory, pParams);
}

// Generated default: inter-unmap check is a no-op that reports success.
static inline NV_STATUS memCheckMemInterUnmap_ac1694(struct Memory *pMemory, NvBool bSubdeviceHandleProvided) {
    return NV_OK;
}

static inline NV_STATUS memCheckMemInterUnmap_DISPATCH(struct Memory *pMemory, NvBool bSubdeviceHandleProvided) {
    return pMemory->__memCheckMemInterUnmap__(pMemory, bSubdeviceHandleProvided);
}

NV_STATUS memGetMemoryMappingDescriptor_IMPL(struct Memory *pMemory, MEMORY_DESCRIPTOR **ppMemDesc);

static inline NV_STATUS memGetMemoryMappingDescriptor_DISPATCH(struct Memory *pMemory, MEMORY_DESCRIPTOR **ppMemDesc) {
    return pMemory->__memGetMemoryMappingDescriptor__(pMemory, ppMemDesc);
}

// Generated default: copy is permitted unconditionally.
static inline NV_STATUS memCheckCopyPermissions_ac1694(struct Memory *pMemory, struct OBJGPU *pDstGpu, struct Device *pDstDevice) {
    return NV_OK;
}

static inline NV_STATUS memCheckCopyPermissions_DISPATCH(struct Memory *pMemory, struct OBJGPU *pDstGpu, struct Device *pDstDevice) {
    return pMemory->__memCheckCopyPermissions__(pMemory, pDstGpu, pDstDevice);
}

NV_STATUS memIsReady_IMPL(struct Memory *pMemory, NvBool bCopyConstructorContext);

static inline NV_STATUS memIsReady_DISPATCH(struct Memory *pMemory, NvBool bCopyConstructorContext) {
    return pMemory->__memIsReady__(pMemory, bCopyConstructorContext);
}

// Generated default: always true ((0 == 0) is the generator's spelling of
// NV_TRUE) — GPU mappings allowed on any GPU.
static inline NvBool memIsGpuMapAllowed_0c883b(struct Memory *pMemory, struct OBJGPU *pGpu) {
    return ((NvBool)(0 == 0));
}

static inline NvBool memIsGpuMapAllowed_DISPATCH(struct Memory *pMemory, struct OBJGPU *pGpu) {
    return pMemory->__memIsGpuMapAllowed__(pMemory, pGpu);
}

NV_STATUS memCtrlCmdGetSurfaceCompressionCoverageLvm_IMPL(struct Memory *pMemory, NV0041_CTRL_GET_SURFACE_COMPRESSION_COVERAGE_PARAMS *pParams);

static inline NV_STATUS memCtrlCmdGetSurfaceCompressionCoverageLvm_DISPATCH(struct Memory *pMemory, NV0041_CTRL_GET_SURFACE_COMPRESSION_COVERAGE_PARAMS *pParams) {
    return pMemory->__memCtrlCmdGetSurfaceCompressionCoverageLvm__(pMemory, pParams);
}

NV_STATUS memCtrlCmdGetSurfaceInfoLvm_IMPL(struct Memory *pMemory, NV0041_CTRL_GET_SURFACE_INFO_PARAMS *pSurfaceInfoParams);

static inline NV_STATUS memCtrlCmdGetSurfaceInfoLvm_DISPATCH(struct Memory *pMemory, NV0041_CTRL_GET_SURFACE_INFO_PARAMS *pSurfaceInfoParams) {
    return pMemory->__memCtrlCmdGetSurfaceInfoLvm__(pMemory, pSurfaceInfoParams);
}

NV_STATUS memCtrlCmdSurfaceFlushGpuCache_IMPL(struct Memory *pMemory, NV0041_CTRL_SURFACE_FLUSH_GPU_CACHE_PARAMS *pCacheFlushParams);

static inline NV_STATUS memCtrlCmdSurfaceFlushGpuCache_DISPATCH(struct Memory *pMemory, NV0041_CTRL_SURFACE_FLUSH_GPU_CACHE_PARAMS *pCacheFlushParams) {
    return pMemory->__memCtrlCmdSurfaceFlushGpuCache__(pMemory, pCacheFlushParams);
}

NV_STATUS memCtrlCmdGetMemPageSize_IMPL(struct Memory *pMemory, NV0041_CTRL_GET_MEM_PAGE_SIZE_PARAMS *pPageSizeParams);

static inline NV_STATUS memCtrlCmdGetMemPageSize_DISPATCH(struct Memory *pMemory, NV0041_CTRL_GET_MEM_PAGE_SIZE_PARAMS *pPageSizeParams) {
    return pMemory->__memCtrlCmdGetMemPageSize__(pMemory, pPageSizeParams);
}

NV_STATUS memCtrlCmdSetTag_IMPL(struct Memory *pMemory, NV0041_CTRL_CMD_SET_TAG_PARAMS *pParams);

static inline NV_STATUS memCtrlCmdSetTag_DISPATCH(struct Memory *pMemory, NV0041_CTRL_CMD_SET_TAG_PARAMS *pParams) {
    return pMemory->__memCtrlCmdSetTag__(pMemory, pParams);
}

NV_STATUS memCtrlCmdGetTag_IMPL(struct Memory *pMemory, NV0041_CTRL_CMD_GET_TAG_PARAMS *pParams);

static inline NV_STATUS memCtrlCmdGetTag_DISPATCH(struct Memory *pMemory, NV0041_CTRL_CMD_GET_TAG_PARAMS *pParams) {
    return pMemory->__memCtrlCmdGetTag__(pMemory, pParams);
}

// Inherited virtuals (from RsResource/RmResource): dispatch-only here; the
// implementations live with the base classes.

static inline NvBool memShareCallback_DISPATCH(struct Memory *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
    return pResource->__memShareCallback__(pResource, pInvokingClient, pParentRef, pSharePolicy);
}

static inline NvU32 memGetRefCount_DISPATCH(struct Memory *pResource) {
    return pResource->__memGetRefCount__(pResource);
}

static inline NV_STATUS memControlFilter_DISPATCH(struct Memory *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pResource->__memControlFilter__(pResource, pCallContext, pParams);
}

static inline void memAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct Memory *pResource, RsResourceRef *pReference) {
    pResource->__memAddAdditionalDependants__(pClient, pResource, pReference);
}

static inline NV_STATUS memUnmapFrom_DISPATCH(struct Memory *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
    return pResource->__memUnmapFrom__(pResource, pParams);
}

static inline NV_STATUS memControlSerialization_Prologue_DISPATCH(struct Memory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pResource->__memControlSerialization_Prologue__(pResource, pCallContext, pParams);
}

static inline NV_STATUS memControl_Prologue_DISPATCH(struct Memory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    return pResource->__memControl_Prologue__(pResource, pCallContext, pParams);
}

static inline NvBool memCanCopy_DISPATCH(struct Memory *pResource) {
    return pResource->__memCanCopy__(pResource);
}

static inline void memPreDestruct_DISPATCH(struct Memory *pResource) {
    pResource->__memPreDestruct__(pResource);
}

static inline NV_STATUS memMapTo_DISPATCH(struct Memory *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
    return pResource->__memMapTo__(pResource, pParams);
}

static inline void memControlSerialization_Epilogue_DISPATCH(struct Memory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    pResource->__memControlSerialization_Epilogue__(pResource, pCallContext, pParams);
}

static inline void memControl_Epilogue_DISPATCH(struct Memory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
    pResource->__memControl_Epilogue__(pResource, pCallContext, pParams);
}

static inline NV_STATUS memControlLookup_DISPATCH(struct Memory *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
    return pResource->__memControlLookup__(pResource, pParams, ppEntry);
}

static inline NvBool memAccessCallback_DISPATCH(struct Memory *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
    return pResource->__memAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}

// Non-virtual methods: _IMPL prototypes plus either a forwarding macro or,
// when the class is compiled out (__nvoc_mem_h_disabled), an asserting stub
// that returns NV_ERR_NOT_SUPPORTED.

NV_STATUS memConstruct_IMPL(struct Memory *arg_pMemory, CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);

#define __nvoc_memConstruct(arg_pMemory, arg_pCallContext, arg_pParams) memConstruct_IMPL(arg_pMemory, arg_pCallContext, arg_pParams)
NV_STATUS memCopyConstruct_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *pParams);

#ifdef __nvoc_mem_h_disabled
static inline NV_STATUS memCopyConstruct(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *pParams) {
    NV_ASSERT_FAILED_PRECOMP("Memory was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_h_disabled
#define memCopyConstruct(pMemory, pCallContext, pParams) memCopyConstruct_IMPL(pMemory, pCallContext, pParams)
#endif //__nvoc_mem_h_disabled

void memDestruct_IMPL(struct Memory *pMemory);

#define __nvoc_memDestruct(pMemory) memDestruct_IMPL(pMemory)
NV_STATUS memConstructCommon_IMPL(struct Memory *pMemory, NvU32 categoryClassId, NvU32 flags, MEMORY_DESCRIPTOR *pMemDesc, NvU32 heapOwner, struct Heap *pHeap, NvU32 attr, NvU32 attr2, NvU32 Pitch, NvU32 type, NvU32 tag, HWRESOURCE_INFO *pHwResource);

#ifdef __nvoc_mem_h_disabled
static inline NV_STATUS memConstructCommon(struct Memory *pMemory, NvU32 categoryClassId, NvU32 flags, MEMORY_DESCRIPTOR *pMemDesc, NvU32 heapOwner, struct Heap *pHeap, NvU32 attr, NvU32 attr2, NvU32 Pitch, NvU32 type, NvU32 tag, HWRESOURCE_INFO *pHwResource) {
    NV_ASSERT_FAILED_PRECOMP("Memory was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_h_disabled
#define memConstructCommon(pMemory, categoryClassId, flags, pMemDesc, heapOwner, pHeap, attr, attr2, Pitch, type, tag, pHwResource) memConstructCommon_IMPL(pMemory, categoryClassId, flags, pMemDesc, heapOwner, pHeap, attr, attr2, Pitch, type, tag, pHwResource)
#endif //__nvoc_mem_h_disabled

void memDestructCommon_IMPL(struct Memory *pMemory);

#ifdef __nvoc_mem_h_disabled
static inline void memDestructCommon(struct Memory *pMemory) {
    NV_ASSERT_FAILED_PRECOMP("Memory was disabled!");
}
#else //__nvoc_mem_h_disabled
#define memDestructCommon(pMemory) memDestructCommon_IMPL(pMemory)
#endif //__nvoc_mem_h_disabled

NV_STATUS memCreateMemDesc_IMPL(struct OBJGPU *pGpu, MEMORY_DESCRIPTOR **ppMemDesc, NV_ADDRESS_SPACE addrSpace, NvU64 FBOffset, NvU64 length, NvU32 attr, NvU32 attr2);

#define memCreateMemDesc(pGpu, ppMemDesc, addrSpace, FBOffset, length, attr, attr2) memCreateMemDesc_IMPL(pGpu, ppMemDesc, addrSpace, FBOffset, length, attr, attr2)
NV_STATUS memCreateKernelMapping_IMPL(struct Memory *pMemory, NvU32 Protect, NvBool bClear);

#ifdef __nvoc_mem_h_disabled
static inline NV_STATUS memCreateKernelMapping(struct Memory *pMemory, NvU32 Protect, NvBool bClear) {
    NV_ASSERT_FAILED_PRECOMP("Memory was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_h_disabled
#define memCreateKernelMapping(pMemory, Protect, bClear) memCreateKernelMapping_IMPL(pMemory, Protect, bClear)
#endif //__nvoc_mem_h_disabled

// Handle-to-object lookup helpers (always available; no disabled-class stub).
NV_STATUS memGetByHandle_IMPL(struct RsClient *pClient, NvHandle hMemory, struct Memory **ppMemory);

#define memGetByHandle(pClient, hMemory, ppMemory) memGetByHandle_IMPL(pClient, hMemory, ppMemory)
NV_STATUS memGetByHandleAndDevice_IMPL(struct RsClient *pClient, NvHandle hMemory, NvHandle hDevice, struct Memory **ppMemory);

#define memGetByHandleAndDevice(pClient, hMemory, hDevice, ppMemory) memGetByHandleAndDevice_IMPL(pClient, hMemory, hDevice, ppMemory)
NV_STATUS memGetByHandleAndGroupedGpu_IMPL(struct RsClient *pClient, NvHandle hMemory, struct OBJGPU *pGpu, struct Memory **ppMemory);

#define memGetByHandleAndGroupedGpu(pClient, hMemory, pGpu, ppMemory) memGetByHandleAndGroupedGpu_IMPL(pClient, hMemory, pGpu, ppMemory)
#undef PRIVATE_FIELD 496 497 498 #endif 499 500 501 #ifdef __cplusplus 502 } // extern "C" 503 #endif 504 #endif // _G_MEM_NVOC_H_ 505