1
2 #ifndef _G_MEM_MGR_NVOC_H_
3 #define _G_MEM_MGR_NVOC_H_
4 #include "nvoc/runtime.h"
5
6 // Version of generated metadata structures
7 #ifdef NVOC_METADATA_VERSION
8 #undef NVOC_METADATA_VERSION
9 #endif
10 #define NVOC_METADATA_VERSION 0
11
12 #ifdef __cplusplus
13 extern "C" {
14 #endif
15
16 /*
17 * SPDX-FileCopyrightText: Copyright (c) 1993-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
18 * SPDX-License-Identifier: MIT
19 *
20 * Permission is hereby granted, free of charge, to any person obtaining a
21 * copy of this software and associated documentation files (the "Software"),
22 * to deal in the Software without restriction, including without limitation
23 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
24 * and/or sell copies of the Software, and to permit persons to whom the
25 * Software is furnished to do so, subject to the following conditions:
26 *
27 * The above copyright notice and this permission notice shall be included in
28 * all copies or substantial portions of the Software.
29 *
30 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
31 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
32 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
33 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
34 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
35 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
36 * DEALINGS IN THE SOFTWARE.
37 */
38
39 #pragma once
40 #include "g_mem_mgr_nvoc.h"
41
42 #ifndef MEM_MGR_H
43 #define MEM_MGR_H
44
45 #include "core/core.h"
46 #include "gpu/eng_state.h"
47
48 #include "gpu/mem_mgr/fbsr.h"
49 #include "gpu/gpu.h"
50
51 #include "mem_mgr/mem.h"
52
53 #include "mem_mgr/mem_list.h"
54
55 #include "gpu/mem_mgr/virt_mem_allocator_common.h"
56 #include "containers/map.h"
57 #include "gpu/mem_mgr/heap_base.h"
58 #include "mem_mgr/vaspace.h"
59
60 #include "gpu/mem_mgr/phys_mem_allocator/phys_mem_allocator.h"
61
62
// CE (Copy Engine) utility class — referenced only by pointer in this
// header, so a forward declaration suffices.
struct CeUtils;

#ifndef __NVOC_CLASS_CeUtils_TYPEDEF__
#define __NVOC_CLASS_CeUtils_TYPEDEF__
typedef struct CeUtils CeUtils;
#endif /* __NVOC_CLASS_CeUtils_TYPEDEF__ */

#ifndef __nvoc_class_id_CeUtils
#define __nvoc_class_id_CeUtils 0x8b8bae
#endif /* __nvoc_class_id_CeUtils */



// Volatile view of the class-906F channel (GPFIFO) control structure;
// layout defined elsewhere (_cl906f_tag1).
typedef volatile struct _cl906f_tag1 Nv906fControl;
typedef struct KERNEL_MIG_GPU_INSTANCE KERNEL_MIG_GPU_INSTANCE;
78
// A (memory descriptor, byte offset) pair naming the source or destination
// of a MemUtils transfer operation.
typedef struct
{
    MEMORY_DESCRIPTOR *pMemDesc;
    NvU64 offset;

    //
    // Private, should only be used by MemUtils layer
    // Expected to be 0 when map is called
    // Becomes 0 again when unmapped
    //
    void *pMapping;
    void *pMappingPriv;
} TRANSFER_SURFACE;
92
93 // Memory transfer engine types.
// Engine used to carry out a memory transfer.
typedef enum
{
    TRANSFER_TYPE_PROCESSOR = 0, // CPU/GSP/DPU depending on execution context
    TRANSFER_TYPE_GSP_DMA, // Dma engine internal to GSP
    TRANSFER_TYPE_CE, // Copy Engine using CeUtils channel
    TRANSFER_TYPE_CE_PRI, // Copy Engine using PRIs
    TRANSFER_TYPE_BAR0, // Copy using BAR0 PRAMIN
} TRANSFER_TYPE;
102
// Flags controlling MemUtils transfer operations; OR them together.
#define TRANSFER_FLAGS_NONE 0
#define TRANSFER_FLAGS_DEFER_FLUSH NVBIT32(0) // Applicable only for write operations
#define TRANSFER_FLAGS_SHADOW_ALLOC NVBIT32(1) // Applicable only for non-PROCESSOR transfers
#define TRANSFER_FLAGS_SHADOW_INIT_MEM NVBIT32(2) // Applicable only for non-PROCESSOR transfers
#define TRANSFER_FLAGS_PERSISTENT_CPU_MAPPING NVBIT32(3) // Require long lived PROCESSOR mapping
#define TRANSFER_FLAGS_DESTROY_MAPPING NVBIT32(4) // Destroy any cached mappings when complete
#define TRANSFER_FLAGS_USE_BAR1 NVBIT32(5) // Use only BAR1 for PROCESSOR transfers
#define TRANSFER_FLAGS_PREFER_CE NVBIT32(6) // Use CE if possible (BAR0 on simulation for perf)
#define TRANSFER_FLAGS_CE_PRI_DEFER_FLUSH NVBIT32(7) // Defer CE flush; only affects PRI CE operations

// Protection flags: at most 1 may be set, none means READ_WRITE by default
#define TRANSFER_FLAGS_MAP_PROTECT_READABLE NVBIT32(8) // Transfer is only reading data
#define TRANSFER_FLAGS_MAP_PROTECT_WRITEABLE NVBIT32(9) // Transfer is only writing data

#define TRANSFER_FLAGS_PREFER_PROCESSOR NVBIT32(10) // Use BAR1/2 if possible
#define TRANSFER_FLAGS_ALLOW_MAPPING_REUSE NVBIT32(11) // Prefer existing full-allocation mapping
                                                       // (see memdescGetKernelMapping())
                                                       // Only affects BeginTransfer/EndTransfer
                                                       // Mapping lifetime controlled by original mapper
                                                       // Intended for short uses,
                                                       // where it can't be unmapped by the owner
124
// BAR1 aperture usage information.
// NOTE(review): units of the size fields are defined by the control call
// that fills this struct in — not specified here; confirm against caller.
typedef struct
{
    NvU32 bar1Size;
    NvU32 bar1AvailSize;
    NvU32 bankSwizzleAlignment;
    NvU32 bar1MaxContigAvailSize;
} GETBAR1INFO, *PGETBAR1INFO;
132
//
// RM Default PTE kind
// Bug #2242255, introducing the RM Default kind to allow sharing memory between
// different architectures especially between Turing+ and Pre Turing chips
//
// (Architecture-neutral value; presumably translated to a chip-specific
// kind by HAL code — TODO confirm.)
#define RM_DEFAULT_PTE_KIND 0x100
139
// Predicates that can be queried about a PTE/surface kind.
typedef enum
{
    FB_IS_KIND_Z, // Kind is a Z buffer
    FB_IS_KIND_ZBC, // Zero bandwidth clears
    FB_IS_KIND_ZBC_ALLOWS_1, // ZBC with 1 bit of tag
    FB_IS_KIND_ZBC_ALLOWS_2, // ZBC with 2 bits of tag
    FB_IS_KIND_ZBC_ALLOWS_4, // ZBC with 4 bits of tag
    FB_IS_KIND_COMPRESSIBLE, // Any compressible kind
    FB_IS_KIND_COMPRESSIBLE_1, // Compressible with 1 comp tag bit
    FB_IS_KIND_COMPRESSIBLE_2, // Compressible with 2 comp tag bits
    FB_IS_KIND_COMPRESSIBLE_4, // Compressible with 4 comp tag bits
    FB_IS_KIND_SUPPORTED, // Kind is supported
    FB_IS_KIND_DISALLOW_PLC, // Kind Disallows PLC
} FB_IS_KIND_OP;
154
155 // Surface compression parameters
// Surface compression parameters
typedef struct COMPR_INFO
{
    // Surface kind; if not compressed, following parameters are ignored
    NvU32 kind;

    // Compression page shift; 0 if kind is uncompressed
    NvU32 compPageShift;

    //
    // Are comptags determined per-page by PA?
    // If set, following parameters are ignored
    //
    NvBool bPhysBasedComptags;

    // see GMMU_COMPR_INFO
    NvU32 compPageIndexLo;
    NvU32 compPageIndexHi;
    NvU32 compTagLineMin;
    NvU32 compTagLineMultiplier;
} COMPR_INFO;
176
//
// Individual entry for logging Fb reserved use-cases
//
typedef struct NV_FB_RSVD_BLOCK_LOG_ENTRY
{
    // Owner tag associated with reservation block
    NvU32 ownerId;

    // Size of the memory reserved
    NvU64 rsvdSize;
} NV_FB_RSVD_BLOCK_LOG_ENTRY;

// Total number of FB internal reservation entries
#define NV_FB_RSVD_BLOCK_LOG_ENTRY_MAX 10U
191
//
// Structure for logging Fb reserved use-cases
//
typedef struct NV_FB_RSVD_BLOCK_LOG_INFO
{
    // Counter for logging entries (number of valid slots in rsvdBlockList)
    NvU32 counter;

    // List of all reserved entries
    NV_FB_RSVD_BLOCK_LOG_ENTRY rsvdBlockList[NV_FB_RSVD_BLOCK_LOG_ENTRY_MAX];
} NV_FB_RSVD_BLOCK_LOG_INFO;
203
204 //
205 // Macro for initializing reserved block log data
206 //
// Resets the entry counter and zeroes every entry of
// (pMem)->rsvdBlockInfo.rsvdBlockList.
//
// Wrapped in do { ... } while (0) so the macro expands to exactly one
// statement: the previous bare-brace form followed by ';' produced an
// extra empty statement and broke unbraced if/else call sites.
#define NV_FB_RSVD_BLOCK_LOG_INIT(pMem)                                   \
    do                                                                    \
    {                                                                     \
        ((pMem)->rsvdBlockInfo).counter = 0;                              \
        for (NvU32 i = 0; i < NV_FB_RSVD_BLOCK_LOG_ENTRY_MAX; i++)        \
        {                                                                 \
            ((pMem)->rsvdBlockInfo).rsvdBlockList[i].ownerId = 0;         \
            ((pMem)->rsvdBlockInfo).rsvdBlockList[i].rsvdSize = 0;        \
        }                                                                 \
    } while (0)
216
217 //
218 // Macro for adding new reserved block entry to the list
219 // If unable to log, marks the status as NV_ERR_NO_MEMORY otherwise keeps it unchanged
220 //
// Appends a (tag, size) reservation entry at the current counter position
// and advances the counter. If the list is full, sets 'status' to
// NV_ERR_NO_MEMORY and leaves the list unchanged; on success 'status' is
// not written.
//
// Wrapped in do { ... } while (0) so the macro is a single statement and
// composes safely with unbraced if/else; 'status' is parenthesized like
// the other arguments.
#define NV_FB_RSVD_BLOCK_LOG_ENTRY_ADD(status, pMem, tag, size)                                       \
    do                                                                                                \
    {                                                                                                 \
        if (((pMem)->rsvdBlockInfo).counter < NV_FB_RSVD_BLOCK_LOG_ENTRY_MAX)                         \
        {                                                                                             \
            ((pMem)->rsvdBlockInfo).rsvdBlockList[((pMem)->rsvdBlockInfo).counter].ownerId = (tag);   \
            ((pMem)->rsvdBlockInfo).rsvdBlockList[((pMem)->rsvdBlockInfo).counter].rsvdSize = (size); \
            (((pMem)->rsvdBlockInfo).counter)++;                                                      \
        }                                                                                             \
        else                                                                                          \
        {                                                                                             \
            (status) = NV_ERR_NO_MEMORY;                                                              \
        }                                                                                             \
    } while (0)
234
//
// Fixed Channel Properties for Memutils Object
//

// Function type (not a pointer type) for an FB-scrub callback taking the
// GPU object; declare pointers as 'FbScrubCallback *'.
typedef NV_STATUS FbScrubCallback(OBJGPU *);
240
//
// Address <-> block-index conversion. 'size' is a shift count (log2 of the
// block size in bytes), not a byte count. All arguments are parenthesized
// in the expansion so compound expressions (e.g. a ternary passed as
// 'size') bind as intended. Defined once: the identical duplicate
// definitions that followed have been collapsed into this pair.
//
#define BLOCK_INDEX_FROM_ADDR(addr,size) ((NvU32)((addr) >> (size)))
#define BLOCK_ADDR_FROM_INDEX(idx,size) (((NvU64)(idx)) << (size))

#define MEMUTILS_SIZE_PER_BLOCK_INBYTES (0x68)
#define MEMUTILS_TOTAL_SIZE_PER_BLOCK_INBYTES (0x60) //(COPY + PB SEMA)
#define MEMUTILS_TD_BLOCKS_PER_CHUNK 0x40
250
#define MEMUTILS_NUM_PAYLOAD_SEMAPHORES (2)
// Number of GPFIFO entries ("GPFIFIO" spelling kept for source compatibility)
#define MEMUTILS_NUM_GPFIFIO_ENTRIES (32)
// PB size should be a multiple of chunk size
#define MEMUTILS_CHANNEL_PB_SIZE (0x10 * MEMUTILS_SIZE_PER_BLOCK_INBYTES * \
                                  MEMUTILS_TD_BLOCKS_PER_CHUNK)
#define MEMUTILS_CHANNEL_SEMAPHORE_SIZE (4 * MEMUTILS_NUM_PAYLOAD_SEMAPHORES)
#define MEMUTILS_CHANNEL_NOTIFIER_SIZE (sizeof(NvNotification) * 1)

// offset and line length should be a multiple of 4KB
#define MEMUTIL_SCRUB_OFFSET_ALIGNMENT (4 * 1024)
#define MEMUTIL_SCRUB_LINE_LENGTH_ALIGNMENT (4 * 1024)
262
// Role of an RM-internal channel (stored in OBJCHANNEL::type).
typedef enum {
    CE_SCRUBBER_CHANNEL,
    FAST_SCRUBBER_CHANNEL,
    COPY_CHANNEL,
    SWL_SCRUBBER_CHANNEL,
    MAX_CHANNEL_TYPE // count of kinds; not a valid channel kind itself
} CHANNEL_KIND;
270
// This will be moved to a channel object next
//
// Bookkeeping for an RM-internal channel: RM object handles, push-buffer /
// notifier / semaphore state, and scrubber block-tracking state.
typedef struct OBJCHANNEL
{
    NvHandle deviceId; // Device Handle
    NvHandle physMemId; // Memory Handle
    NvHandle channelId; // Channel Handle
    NvHandle subdeviceId; // Subdevice Handle
    NvHandle errNotifierIdVirt;
    NvHandle errNotifierIdPhys;
    NvHandle engineObjectId;
    NvHandle eventId;
    NvHandle pushBufferId;
    NvHandle bitMapSemPhysId;
    NvHandle bitMapSemVirtId;
    NvHandle hVASpaceId; // VASpace handle, when scrubber in virtual mode
    NvHandle hFbAlias; // Used only for virtual channels
    NvHandle hFbAliasVA;
    // to be moved later

    NvU32 channelSize;
    NvU32 channelNumGpFifioEntries;
    NvU32 channelPbSize;
    NvU32 channelNotifierSize;
    NvU32 methodSizePerBlock;
    NvU32 semaOffset;
    NvU32 finishPayloadOffset;
    NvU32 authTagBufSemaOffset;
    NvU32 finishPayload;
    NvBool isChannelSynchronized;
    NvBool isProgressChecked;
    //
    // RM internal channels are created as privileged channels (physical address access) by default
    // For MMU Bug: 2739505, we need to switch to use channels in non-privileged mode.
    //
    NvBool bUseVasForCeCopy; // set to NV_TRUE, when scrubber operates in virtual address
    struct RsClient *pRsClient;
    struct OBJVASPACE *pVAS;
    NvU32 engineType;
    NvU64 startFbOffset;
    NvU64 fbSize;
    NvU64 fbAliasVA;
    NvU64 vaStartOffset;
    // to be moved to a separate object later

    NvU32 *pBlockPendingState;
    NvU32 *pBlockDoneState;
    NvU32 blockCount;
    NvHandle hClient;
    NvBool bClientAllocated;
    NvU64 pbGpuVA;
    NvU64 pbGpuBitMapVA;
    NvU64 pbGpuNotifierVA;
    MEMORY_DESCRIPTOR *pUserdMemdesc;
    MEMORY_DESCRIPTOR *pChannelBufferMemdesc;
    MEMORY_DESCRIPTOR *pErrNotifierMemdesc;
    NvU8 *pbCpuVA;
    NvU8 *pbBitMapVA;
    Nv906fControl *pControlGPFifo;
    NvU32 classEngineID;
    NVOS10_EVENT_KERNEL_CALLBACK_EX callback;
    NvU32 state;
    NvU32 hTdCopyClass;
    NvU32 sec2Class;
    NvU32 minBlockSize;
    NvU32 maxBlockSize;
    NvU32 channelPutOffset;
    NvU8 blockShift;
    NvU32 lastPayloadPushed;
    NvBool isChannelActive;
    NvU32 workSubmitToken;
    //
    // Work submit token read from notifier memory.
    //
    NvNotification *pTokenFromNotifier;
    NvU32 lastSubmittedEntry;
    NvHandle lastAllocatedHandle;
    CHANNEL_KIND type;

    // Used for Volta+
    NvHandle doorbellRegionHandle;
    NvU8 *pDoorbellRegion;
    NvU32 *pDoorbellRegisterOffset;
    NvBool bUseDoorbellRegister;
    NvHandle hUserD;
    NvBool bClientUserd;

    OBJGPU *pGpu;
    NvU32 ceId;

    // Used by Partition Scrubber
    KERNEL_MIG_GPU_INSTANCE *pKernelMIGGpuInstance;
    NvHandle hPartitionRef;

    NvBool bUseBar1;

    NvBool bSecure;

} OBJCHANNEL, *POBJCHANNEL;
369
// Build a class-906F DMA_INCR method header: Num data dwords follow,
// written starting at register 'Method' (dword-aligned, encoded as
// Method >> 2) on subchannel SubCh.
// NOTE(review): SubCh and Num are not parenthesized inside the expansion;
// pass simple expressions only — confirm before changing call sites.
#define NV_METHOD(SubCh, Method, Num)                                      \
    (DRF_DEF(906F, _DMA_INCR, _OPCODE,     _VALUE)  |                      \
     DRF_NUM(906F, _DMA_INCR, _COUNT,      Num)     |                      \
     DRF_NUM(906F, _DMA_INCR, _SUBCHANNEL, SubCh)   |                      \
     DRF_NUM(906F, _DMA_INCR, _ADDRESS,    (Method) >> 2))

// Write one dword into the push buffer. Relies on a local variable named
// 'ptr' being in scope at the expansion site and post-increments it.
#define PUSH_DATA(Data) MEM_WR32(ptr++, (Data))

// Emit a single method/data pair: the method header dword, then the payload.
#define PUSH_PAIR(SubCh, Method, Data)            \
    do                                            \
    {                                             \
        PUSH_DATA(NV_METHOD(SubCh, (Method), 1)); \
        PUSH_DATA((Data));                        \
    } while (0)
384
385 //-----------------------------------------------------------------------------
386
// State of the FB scrubber: block bitmaps tracking pending/completed work
// plus the two OBJCHANNELs used to issue the scrub copies.
typedef struct
{
    NvU32 lastSubmittedBlock;
    NvBool isTopDownScrubber;
    NvBool isActive;
    NvU32 scrubberState;
    NvU32 currentFbRegion;
    NvU32 startBlock;
    NvU32 endBlock;
    NvU32 *pPendingBitMap; // blocks queued for scrubbing
    NvU32 *pDoneBitMap;    // blocks whose scrub has completed
    NvU32 blockCount;
    struct OBJCE *pCe;
    NvBool bCeInUse;
    OBJCHANNEL tdHeapState;
    OBJCHANNEL allocationScrubberState;
} OBJSCRUB;
404
// Describes one FB region and the allocation policies that apply to it.
typedef struct
{
    NvU64 base; // Base/start address of the region
    NvU64 limit; // Last/end address of region
    NvU64 rsvdSize; // Memory RM may be required to allocate in this region
    NvBool bRsvdRegion; // Reserved region -- not publicly usable
    NvU32 performance; // Relative performance. Higher is faster
    NvBool bSupportCompressed; // Support compressed kinds
    NvBool bSupportISO; // Support ISO (display, cursor, video) surfaces
    NvBool bProtected; // Represents a protected region of memory.
    NvBool bInternalHeap; // PMA:Used for internal RM allocations
    NvBool bLostOnSuspend; // Not required to be Saved during S/R.
    NvBool bPreserveOnSuspend; // Required to be Saved during S/R.
} FB_REGION_DESCRIPTOR, *PFB_REGION_DESCRIPTOR;
419
// Maximum number of FB region descriptors (the fbRegion[]/fbRegionPriority[]
// arrays below are sized 16 to match).
#define MAX_FB_REGIONS 16

// Maximum number of contexts created for WHQL test WDDM Max Contexts
#define WHQL_TEST_MAX_CONTEXTS 100

// Object 'get' macros for FB relative object retrievals.
#define MEMORY_MANAGER_GET_HEAP(p) ((p)->pHeap)
427
// Singly-linked list node tracking a memory descriptor (list head/tail
// live in MemoryManager::pMemHeadNode / pMemTailNode).
typedef struct _def_fb_mem_node
{
    struct _def_fb_mem_node *pNext;

    NvBool bFreeDescriptor; // presumably: free pMemDesc when the node is released — TODO confirm
    PMEMORY_DESCRIPTOR pMemDesc;

} FB_MEM_NODE, *PFB_MEM_NODE;
436
// defines for MemoryManager::fbsrReservedRanges
// (the values below index the fbsrReservedRanges[] array of size
// MAX_FBSR_RESERVED_REGIONS)
#define MAX_FBSR_RESERVED_REGIONS 5 // Max. Memory descriptors for RM Instance memory
#define FBSR_RESERVED_INST_MEMORY_BEFORE_BAR2PTE 0 // Described on Kernel-RM and Physical-RM (Monolithic / GSP offload)
#define FBSR_RESERVED_INST_MEMORY_AFTER_BAR2PTE 1 // Described on Kernel-RM and Physical-RM (Monolithic / GSP offload)
#define FBSR_RESERVED_INST_MEMORY_GSP_HEAP 2 // Allocated on Kernel-RM and sent to Physical-RM (GSP offload)
#define FBSR_RESERVED_INST_MEMORY_GSP_NON_WPR 3 // Described on Physical-RM (GSP offload)
#define FBSR_RESERVED_INST_MEMORY_VGA_WORKSPACE 4 // Described on Physical-RM (GSP offload)
444
445 /*!
446 * MemoryManager provides the root memory management of GPU video memory.
447 * External entities might provide suballocators on top of MemoryManager.
448 *
449 * MemoryManager can have static information on the memory system (e.g.: list of
450 * kinds, etc), however MemoryManager does not have direct access to the GPU
451 * memory system (e.g.: BAR0 registers). It relies on KernelMemorySystem for
452 * operations on the memory system.
453 *
454 * MemoryManager is instantiated in VGPU guest/GSP Client as well as the VGPU
455 * host/GSP-RM.
456 */
457
// Variadic stub that expands to nothing (name suggests it compiles out
// Orin-specific code in this configuration — TODO confirm).
#define MEM_MGR_STUB_ORIN(...)


// Private field names are wrapped in PRIVATE_FIELD, which does nothing for
// the matching C source file, but causes diagnostics to be issued if another
// source file references the field.
#ifdef NVOC_MEM_MGR_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
469
// Forward declaration; full definition lives elsewhere.
struct RM_POOL_ALLOC_MEM_RESERVE_INFO;

// FB ("Ram") accounting info embedded in MemoryManager (NVOC-generated
// inner struct; accessed as pMemoryManager->Ram).
struct __nvoc_inner_struc_MemoryManager_1__ {
    NvU64 fbUsableMemSize;
    NvU64 fbTotalMemSizeMb;
    NvU64 fbAddrSpaceSizeMb;
    NvU64 mapRamSizeMb;
    NvU64 fbOverrideSizeMb;
    NvU64 reservedMemSize;
    NvU32 numFBRegions;
    FB_REGION_DESCRIPTOR fbRegion[16]; // sized to MAX_FB_REGIONS (16)
    NvU32 numFBRegionPriority;
    NvU64 maxFBPSize;
    NvU64 minFBPSize;
    NvU32 fbRegionPriority[16]; // sized to MAX_FB_REGIONS (16)
    NvU64 ReservedConsoleDispMemSize;
    PMEMORY_DESCRIPTOR pReservedConsoleMemDesc;
    NvU32 lowerRangeMag;
    NvU32 lowerRangeScale;
    NvU32 middleRangeMag;
    NvU32 middleRangeScale;
    NvU32 upperRangeMag;
    NvU32 upperRangeScale;
};
494
// Handles and ranges tracking MIG (Multi-Instance GPU) memory partitioning.
struct MIG_MEMORY_PARTITIONING_INFO {
    struct NV_RANGE partitionableMemoryRange; // FB range available for partitioning
    struct NV_RANGE partitionableBar1Range;   // BAR1 range available for partitioning
    NvHandle hClient;
    NvHandle hDevice;
    NvHandle hSubdevice;
};
502
503
504
// NVOC-generated class layout for MemoryManager (see the doc comment
// above). Field order and the __nvoc_* members are produced by the NVOC
// code generator — do not reorder or edit by hand.
struct MemoryManager {

    // Metadata
    const struct NVOC_RTTI *__nvoc_rtti;

    // Parent (i.e. superclass or base class) object pointers
    struct OBJENGSTATE __nvoc_base_OBJENGSTATE;

    // Ancestor object pointers for `staticCast` feature
    struct Object *__nvoc_pbase_Object; // obj super^2
    struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE; // engstate super
    struct MemoryManager *__nvoc_pbase_MemoryManager; // memmgr

    // Vtable with 42 per-object function pointers
    NV_STATUS (*__memmgrConstructEngine__)(OBJGPU *, struct MemoryManager * /*this*/, ENGDESCRIPTOR); // virtual override (engstate) base (engstate)
    NV_STATUS (*__memmgrStatePreInitLocked__)(OBJGPU *, struct MemoryManager * /*this*/); // virtual override (engstate) base (engstate)
    NV_STATUS (*__memmgrStateInitLocked__)(OBJGPU *, struct MemoryManager * /*this*/); // virtual override (engstate) base (engstate)
    NV_STATUS (*__memmgrStateLoad__)(OBJGPU *, struct MemoryManager * /*this*/, NvU32); // virtual override (engstate) base (engstate)
    NV_STATUS (*__memmgrStatePostLoad__)(OBJGPU *, struct MemoryManager * /*this*/, NvU32); // virtual override (engstate) base (engstate)
    NV_STATUS (*__memmgrStatePreUnload__)(OBJGPU *, struct MemoryManager * /*this*/, NvU32); // virtual override (engstate) base (engstate)
    NV_STATUS (*__memmgrStateUnload__)(OBJGPU *, struct MemoryManager * /*this*/, NvU32); // virtual override (engstate) base (engstate)
    void (*__memmgrStateDestroy__)(OBJGPU *, struct MemoryManager * /*this*/); // virtual override (engstate) base (engstate)
    NV_STATUS (*__memmgrAllocateConsoleRegion__)(OBJGPU *, struct MemoryManager * /*this*/, FB_REGION_DESCRIPTOR *); // halified (2 hals) body
    NV_STATUS (*__memmgrMemUtilsSec2CtxInit__)(OBJGPU *, struct MemoryManager * /*this*/, OBJCHANNEL *); // halified (2 hals) body
    NvBool (*__memmgrMemUtilsCheckMemoryFastScrubEnable__)(OBJGPU *, struct MemoryManager * /*this*/, NvU32, NvBool, RmPhysAddr, NvU32, NV_ADDRESS_SPACE); // halified (2 hals) body
    NV_STATUS (*__memmgrAllocDetermineAlignment__)(OBJGPU *, struct MemoryManager * /*this*/, NvU64 *, NvU64 *, NvU64, NvU32, NvU32, NvU32, NvU64); // halified (2 hals) body
    NvU64 (*__memmgrGetMaxContextSize__)(OBJGPU *, struct MemoryManager * /*this*/); // halified (3 hals) body
    NvU64 (*__memmgrGetFbTaxSize__)(OBJGPU *, struct MemoryManager * /*this*/); // halified (2 hals) body
    void (*__memmgrScrubRegistryOverrides__)(OBJGPU *, struct MemoryManager * /*this*/); // halified (2 hals) body
    NvU32 (*__memmgrGetPteKindBl__)(OBJGPU *, struct MemoryManager * /*this*/); // halified (2 hals) body
    NvU32 (*__memmgrGetPteKindPitch__)(OBJGPU *, struct MemoryManager * /*this*/); // halified (2 hals) body
    NvU32 (*__memmgrChooseKindCompressC__)(OBJGPU *, struct MemoryManager * /*this*/, FB_ALLOC_PAGE_FORMAT *); // halified (2 hals) body
    NV_STATUS (*__memmgrGetFlaKind__)(OBJGPU *, struct MemoryManager * /*this*/, NvU32 *); // halified (2 hals) body
    NvBool (*__memmgrIsMemDescSupportedByFla__)(OBJGPU *, struct MemoryManager * /*this*/, MEMORY_DESCRIPTOR *); // halified (3 hals) body
    NvU32 (*__memmgrDetermineComptag__)(OBJGPU *, struct MemoryManager * /*this*/, RmPhysAddr); // halified (2 hals) body
    NvU32 (*__memmgrGetGrHeapReservationSize__)(OBJGPU *, struct MemoryManager * /*this*/); // halified (2 hals) body
    NvU32 (*__memmgrGetRunlistEntriesReservedFbSpace__)(OBJGPU *, struct MemoryManager * /*this*/); // halified (2 hals) body
    NvU32 (*__memmgrGetUserdReservedFbSpace__)(OBJGPU *, struct MemoryManager * /*this*/); // halified (2 hals) body
    NV_STATUS (*__memmgrCheckReservedMemorySize__)(OBJGPU *, struct MemoryManager * /*this*/); // halified (2 hals) body
    NV_STATUS (*__memmgrReadMmuLock__)(OBJGPU *, struct MemoryManager * /*this*/, NvBool *, NvU64 *, NvU64 *); // halified (2 hals) body
    NV_STATUS (*__memmgrBlockMemLockedMemory__)(OBJGPU *, struct MemoryManager * /*this*/); // halified (2 hals) body
    NV_STATUS (*__memmgrInsertUnprotectedRegionAtBottomOfFb__)(OBJGPU *, struct MemoryManager * /*this*/, NvU64 *); // halified (2 hals) body
    NV_STATUS (*__memmgrInitBaseFbRegions__)(OBJGPU *, struct MemoryManager * /*this*/); // halified (2 hals) body
    void (*__memmgrGetDisablePlcKind__)(struct MemoryManager * /*this*/, NvU32 *); // halified (2 hals) body
    void (*__memmgrEnableDynamicPageOfflining__)(OBJGPU *, struct MemoryManager * /*this*/); // halified (3 hals) body
    NV_STATUS (*__memmgrSetPartitionableMem__)(OBJGPU *, struct MemoryManager * /*this*/); // halified (2 hals) body
    NV_STATUS (*__memmgrAllocMIGGPUInstanceMemory__)(OBJGPU *, struct MemoryManager * /*this*/, NvU32, NvHandle *, struct NV_RANGE *, struct Heap **); // halified (2 hals)
    NV_STATUS (*__memmgrGetBlackListPages__)(OBJGPU *, struct MemoryManager * /*this*/, BLACKLIST_ADDRESS *, NvU32 *); // halified (2 hals) body
    NV_STATUS (*__memmgrDiscoverMIGPartitionableMemoryRange__)(OBJGPU *, struct MemoryManager * /*this*/, struct NV_RANGE *); // halified (2 hals) body
    NvU32 (*__memmgrGetFBEndReserveSizeEstimate__)(OBJGPU *, struct MemoryManager * /*this*/); // halified (2 hals)
    void (*__memmgrInitMissing__)(struct OBJGPU *, struct MemoryManager * /*this*/); // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__memmgrStatePreInitUnlocked__)(struct OBJGPU *, struct MemoryManager * /*this*/); // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__memmgrStateInitUnlocked__)(struct OBJGPU *, struct MemoryManager * /*this*/); // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__memmgrStatePreLoad__)(struct OBJGPU *, struct MemoryManager * /*this*/, NvU32); // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__memmgrStatePostUnload__)(struct OBJGPU *, struct MemoryManager * /*this*/, NvU32); // virtual inherited (engstate) base (engstate)
    NvBool (*__memmgrIsPresent__)(struct OBJGPU *, struct MemoryManager * /*this*/); // virtual inherited (engstate) base (engstate)

    // Data members
    NvBool bFbsrWddmModeEnabled;
    NvBool bFbRegionsSupported;
    NvBool bPmaSupportedOnPlatform;
    NvBool bPmaEnabled;
    NvBool bPmaInitialized;
    NvBool bPmaForcePersistence;
    NvBool bClientPageTablesPmaManaged;
    NvBool bScanoutSysmem;
    NvBool bMixedDensityFbp;
    NvBool bPreferSlowRegion;
    NvBool bPersistentStandbyBuffer;
    NvBool bEnableFbsrPagedDma;
    NvBool bDisallowSplitLowerMemory;
    NvBool bIgnoreUpperMemory;
    NvBool bSmallPageCompression;
    NvBool bSysmemCompressionSupportDef;
    NvBool bBug1698088IncreaseRmReserveMemoryWar;
    NvBool bBug2301372IncreaseRmReserveMemoryWar;
    NvBool bEnableFbsrFileMode;
    NvBool bEnableDynamicPageOfflining;
    NvBool bVgpuPmaSupport;
    NvBool bScrubChannelSetupInProgress;
    NvBool bBug3922001DisableCtxBufOnSim;
    NvBool bEnableDynamicGranularityPageArrays;
    NvBool bAllowNoncontiguousAllocation;
    NvBool bLocalEgmSupported;
    NvBool bLocalEgmEnabled;
    NvU32 localEgmPeerId;
    NvS32 localEgmNodeId;
    NvU64 localEgmBasePhysAddr;
    NvU64 localEgmSize;
    NvBool bEccInterleavedVidmemScrub;
    NvBool bScrubberInitialized;
    NvBool bAllowSysmemHugePages;
    NvBool bEccScrubOverride;
    NvU64 sysmemPageSize;
    struct Heap *pHeap;
    NvBool bScrubOnFreeEnabled;
    NvBool bFastScrubberEnabled;
    NvBool bDisableAsyncScrubforMods;
    NvBool bUseVasForCeMemoryOps;
    NvBool bCePhysicalVidmemAccessNotSupported;
    NvBool bRmExecutingEccScrub;
    NvBool bBug1441072EccScrubWar;
    NvU64 heapStartOffset;
    NvU64 rsvdMemoryBase;
    NvU64 rsvdMemorySize;
    struct CeUtils *pCeUtils;
    NvBool bDisableGlobalCeUtils;
    OBJSCRUB eccScrubberState;
    struct __nvoc_inner_struc_MemoryManager_1__ Ram;
    NvU32 PteKindOverride;
    NvU32 zbcSurfaces;
    NvU64 overrideInitHeapMin;
    NvU64 overrideHeapMax;
    NvU64 rsvdMemorySizeIncrement;
    struct OBJFBSR *pFbsr[8];
    struct OBJFBSR *pActiveFbsr;
    NvU32 fbsrStartMode;
    NvU32 fixedFbsrModesMask;
    MEMORY_DESCRIPTOR *fbsrReservedRanges[5]; // indexed by FBSR_RESERVED_INST_MEMORY_* (size MAX_FBSR_RESERVED_REGIONS)
    PFB_MEM_NODE pMemHeadNode;
    PFB_MEM_NODE pMemTailNode;
    struct RM_POOL_ALLOC_MEM_RESERVE_INFO *pPageLevelReserve;
    struct MIG_MEMORY_PARTITIONING_INFO MIGMemoryPartitioningInfo;
    NV_FB_RSVD_BLOCK_LOG_INFO rsvdBlockInfo;
    NvHandle hClient;
    NvHandle hDevice;
    NvHandle hSubdevice;
    NvBool bReservedMemAtBottom;
    NvU64 bug4146226ReserveOffset;
    NvBool bBug4146226ReserveWar;
    NvHandle hThirdPartyP2P;
    NvBool bMonitoredFenceSupported;
    NvBool b64BitSemaphoresSupported;
    NvBool bGenericKindSupport;
};
640
#ifndef __NVOC_CLASS_MemoryManager_TYPEDEF__
#define __NVOC_CLASS_MemoryManager_TYPEDEF__
typedef struct MemoryManager MemoryManager;
#endif /* __NVOC_CLASS_MemoryManager_TYPEDEF__ */

#ifndef __nvoc_class_id_MemoryManager
#define __nvoc_class_id_MemoryManager 0x22ad47
#endif /* __nvoc_class_id_MemoryManager */

// Casting support
extern const struct NVOC_CLASS_DEF __nvoc_class_def_MemoryManager;

// Compile-time upcast via the NVOC ancestor pointer.
#define __staticCast_MemoryManager(pThis) \
    ((pThis)->__nvoc_pbase_MemoryManager)

// Runtime downcast; yields NULL when this header's class is disabled.
#ifdef __nvoc_mem_mgr_h_disabled
#define __dynamicCast_MemoryManager(pThis) ((MemoryManager*)NULL)
#else //__nvoc_mem_mgr_h_disabled
#define __dynamicCast_MemoryManager(pThis) \
    ((MemoryManager*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(MemoryManager)))
#endif //__nvoc_mem_mgr_h_disabled

// Property macros
#define PDB_PROP_MEMMGR_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_MEMMGR_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING

NV_STATUS __nvoc_objCreateDynamic_MemoryManager(MemoryManager**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_MemoryManager(MemoryManager**, Dynamic*, NvU32);
#define __objCreate_MemoryManager(ppNewObj, pParent, createFlags) \
    __nvoc_objCreate_MemoryManager((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
672
673
// Wrapper macros
//
// For each virtual/halified method, NVOC emits three macros:
//   <name>_FNPTR(obj) - the per-object function pointer member
//   <name>(...)       - call through the _DISPATCH thunk
//   <name>_HAL(...)   - HAL variant; here it routes to the same _DISPATCH
#define memmgrConstructEngine_FNPTR(pMemoryManager) pMemoryManager->__memmgrConstructEngine__
#define memmgrConstructEngine(pGpu, pMemoryManager, arg3) memmgrConstructEngine_DISPATCH(pGpu, pMemoryManager, arg3)
#define memmgrStatePreInitLocked_FNPTR(pMemoryManager) pMemoryManager->__memmgrStatePreInitLocked__
#define memmgrStatePreInitLocked(pGpu, pMemoryManager) memmgrStatePreInitLocked_DISPATCH(pGpu, pMemoryManager)
#define memmgrStateInitLocked_FNPTR(pMemoryManager) pMemoryManager->__memmgrStateInitLocked__
#define memmgrStateInitLocked(pGpu, pMemoryManager) memmgrStateInitLocked_DISPATCH(pGpu, pMemoryManager)
#define memmgrStateLoad_FNPTR(pMemoryManager) pMemoryManager->__memmgrStateLoad__
#define memmgrStateLoad(pGpu, pMemoryManager, arg3) memmgrStateLoad_DISPATCH(pGpu, pMemoryManager, arg3)
#define memmgrStatePostLoad_FNPTR(pMemoryManager) pMemoryManager->__memmgrStatePostLoad__
#define memmgrStatePostLoad(pGpu, pMemoryManager, arg3) memmgrStatePostLoad_DISPATCH(pGpu, pMemoryManager, arg3)
#define memmgrStatePreUnload_FNPTR(pMemoryManager) pMemoryManager->__memmgrStatePreUnload__
#define memmgrStatePreUnload(pGpu, pMemoryManager, arg3) memmgrStatePreUnload_DISPATCH(pGpu, pMemoryManager, arg3)
#define memmgrStateUnload_FNPTR(pMemoryManager) pMemoryManager->__memmgrStateUnload__
#define memmgrStateUnload(pGpu, pMemoryManager, arg3) memmgrStateUnload_DISPATCH(pGpu, pMemoryManager, arg3)
#define memmgrStateDestroy_FNPTR(pMemoryManager) pMemoryManager->__memmgrStateDestroy__
#define memmgrStateDestroy(pGpu, pMemoryManager) memmgrStateDestroy_DISPATCH(pGpu, pMemoryManager)
#define memmgrAllocateConsoleRegion_FNPTR(pMemoryManager) pMemoryManager->__memmgrAllocateConsoleRegion__
#define memmgrAllocateConsoleRegion(pGpu, pMemoryManager, arg3) memmgrAllocateConsoleRegion_DISPATCH(pGpu, pMemoryManager, arg3)
#define memmgrAllocateConsoleRegion_HAL(pGpu, pMemoryManager, arg3) memmgrAllocateConsoleRegion_DISPATCH(pGpu, pMemoryManager, arg3)
#define memmgrMemUtilsSec2CtxInit_FNPTR(pMemoryManager) pMemoryManager->__memmgrMemUtilsSec2CtxInit__
#define memmgrMemUtilsSec2CtxInit(pGpu, pMemoryManager, arg3) memmgrMemUtilsSec2CtxInit_DISPATCH(pGpu, pMemoryManager, arg3)
#define memmgrMemUtilsSec2CtxInit_HAL(pGpu, pMemoryManager, arg3) memmgrMemUtilsSec2CtxInit_DISPATCH(pGpu, pMemoryManager, arg3)
#define memmgrMemUtilsCheckMemoryFastScrubEnable_FNPTR(pMemoryManager) pMemoryManager->__memmgrMemUtilsCheckMemoryFastScrubEnable__
#define memmgrMemUtilsCheckMemoryFastScrubEnable(pGpu, pMemoryManager, arg3, arg4, arg5, arg6, arg7) memmgrMemUtilsCheckMemoryFastScrubEnable_DISPATCH(pGpu, pMemoryManager, arg3, arg4, arg5, arg6, arg7)
#define memmgrMemUtilsCheckMemoryFastScrubEnable_HAL(pGpu, pMemoryManager, arg3, arg4, arg5, arg6, arg7) memmgrMemUtilsCheckMemoryFastScrubEnable_DISPATCH(pGpu, pMemoryManager, arg3, arg4, arg5, arg6, arg7)
#define memmgrAllocDetermineAlignment_FNPTR(pMemoryManager) pMemoryManager->__memmgrAllocDetermineAlignment__
#define memmgrAllocDetermineAlignment(pGpu, pMemoryManager, pMemSize, pAlign, alignPad, allocFlags, retAttr, retAttr2, hwAlignment) memmgrAllocDetermineAlignment_DISPATCH(pGpu, pMemoryManager, pMemSize, pAlign, alignPad, allocFlags, retAttr, retAttr2, hwAlignment)
#define memmgrAllocDetermineAlignment_HAL(pGpu, pMemoryManager, pMemSize, pAlign, alignPad, allocFlags, retAttr, retAttr2, hwAlignment) memmgrAllocDetermineAlignment_DISPATCH(pGpu, pMemoryManager, pMemSize, pAlign, alignPad, allocFlags, retAttr, retAttr2, hwAlignment)
#define memmgrGetMaxContextSize_FNPTR(pMemoryManager) pMemoryManager->__memmgrGetMaxContextSize__
#define memmgrGetMaxContextSize(pGpu, pMemoryManager) memmgrGetMaxContextSize_DISPATCH(pGpu, pMemoryManager)
#define memmgrGetMaxContextSize_HAL(pGpu, pMemoryManager) memmgrGetMaxContextSize_DISPATCH(pGpu, pMemoryManager)
#define memmgrGetFbTaxSize_FNPTR(pMemoryManager) pMemoryManager->__memmgrGetFbTaxSize__
#define memmgrGetFbTaxSize(pGpu, pMemoryManager) memmgrGetFbTaxSize_DISPATCH(pGpu, pMemoryManager)
#define memmgrGetFbTaxSize_HAL(pGpu, pMemoryManager) memmgrGetFbTaxSize_DISPATCH(pGpu, pMemoryManager)
#define memmgrScrubRegistryOverrides_FNPTR(pMemoryManager) pMemoryManager->__memmgrScrubRegistryOverrides__
#define memmgrScrubRegistryOverrides(pGpu, pMemoryManager) memmgrScrubRegistryOverrides_DISPATCH(pGpu, pMemoryManager)
#define memmgrScrubRegistryOverrides_HAL(pGpu, pMemoryManager) memmgrScrubRegistryOverrides_DISPATCH(pGpu, pMemoryManager)
#define memmgrGetPteKindBl_FNPTR(pMemoryManager) pMemoryManager->__memmgrGetPteKindBl__
#define memmgrGetPteKindBl(pGpu, pMemoryManager) memmgrGetPteKindBl_DISPATCH(pGpu, pMemoryManager)
#define memmgrGetPteKindBl_HAL(pGpu, pMemoryManager) memmgrGetPteKindBl_DISPATCH(pGpu, pMemoryManager)
#define memmgrGetPteKindPitch_FNPTR(pMemoryManager) pMemoryManager->__memmgrGetPteKindPitch__
#define memmgrGetPteKindPitch(pGpu, pMemoryManager) memmgrGetPteKindPitch_DISPATCH(pGpu, pMemoryManager)
#define memmgrGetPteKindPitch_HAL(pGpu, pMemoryManager) memmgrGetPteKindPitch_DISPATCH(pGpu, pMemoryManager)
#define memmgrChooseKindCompressC_FNPTR(pMemoryManager) pMemoryManager->__memmgrChooseKindCompressC__
#define memmgrChooseKindCompressC(pGpu, pMemoryManager, arg3) memmgrChooseKindCompressC_DISPATCH(pGpu, pMemoryManager, arg3)
#define memmgrChooseKindCompressC_HAL(pGpu, pMemoryManager, arg3) memmgrChooseKindCompressC_DISPATCH(pGpu, pMemoryManager, arg3)
#define memmgrGetFlaKind_FNPTR(pMemoryManager) pMemoryManager->__memmgrGetFlaKind__
#define memmgrGetFlaKind(pGpu, pMemoryManager, arg3) memmgrGetFlaKind_DISPATCH(pGpu, pMemoryManager, arg3)
#define memmgrGetFlaKind_HAL(pGpu, pMemoryManager, arg3) memmgrGetFlaKind_DISPATCH(pGpu, pMemoryManager, arg3)
#define memmgrIsMemDescSupportedByFla_FNPTR(pMemoryManager) pMemoryManager->__memmgrIsMemDescSupportedByFla__
#define memmgrIsMemDescSupportedByFla(pGpu, pMemoryManager, pMemDesc) memmgrIsMemDescSupportedByFla_DISPATCH(pGpu, pMemoryManager, pMemDesc)
#define memmgrIsMemDescSupportedByFla_HAL(pGpu, pMemoryManager, pMemDesc) memmgrIsMemDescSupportedByFla_DISPATCH(pGpu, pMemoryManager, pMemDesc)
#define memmgrDetermineComptag_FNPTR(pMemoryManager) pMemoryManager->__memmgrDetermineComptag__
#define memmgrDetermineComptag(pGpu, pMemoryManager, arg3) memmgrDetermineComptag_DISPATCH(pGpu, pMemoryManager, arg3)
#define memmgrDetermineComptag_HAL(pGpu, pMemoryManager, arg3) memmgrDetermineComptag_DISPATCH(pGpu, pMemoryManager, arg3)
#define memmgrGetGrHeapReservationSize_FNPTR(pMemoryManager) pMemoryManager->__memmgrGetGrHeapReservationSize__
#define memmgrGetGrHeapReservationSize(pGpu, pMemoryManager) memmgrGetGrHeapReservationSize_DISPATCH(pGpu, pMemoryManager)
732 #define memmgrGetGrHeapReservationSize_HAL(pGpu, pMemoryManager) memmgrGetGrHeapReservationSize_DISPATCH(pGpu, pMemoryManager)
733 #define memmgrGetRunlistEntriesReservedFbSpace_FNPTR(pMemoryManager) pMemoryManager->__memmgrGetRunlistEntriesReservedFbSpace__
734 #define memmgrGetRunlistEntriesReservedFbSpace(pGpu, pMemoryManager) memmgrGetRunlistEntriesReservedFbSpace_DISPATCH(pGpu, pMemoryManager)
735 #define memmgrGetRunlistEntriesReservedFbSpace_HAL(pGpu, pMemoryManager) memmgrGetRunlistEntriesReservedFbSpace_DISPATCH(pGpu, pMemoryManager)
736 #define memmgrGetUserdReservedFbSpace_FNPTR(pMemoryManager) pMemoryManager->__memmgrGetUserdReservedFbSpace__
737 #define memmgrGetUserdReservedFbSpace(pGpu, pMemoryManager) memmgrGetUserdReservedFbSpace_DISPATCH(pGpu, pMemoryManager)
738 #define memmgrGetUserdReservedFbSpace_HAL(pGpu, pMemoryManager) memmgrGetUserdReservedFbSpace_DISPATCH(pGpu, pMemoryManager)
739 #define memmgrCheckReservedMemorySize_FNPTR(pMemoryManager) pMemoryManager->__memmgrCheckReservedMemorySize__
740 #define memmgrCheckReservedMemorySize(pGpu, pMemoryManager) memmgrCheckReservedMemorySize_DISPATCH(pGpu, pMemoryManager)
741 #define memmgrCheckReservedMemorySize_HAL(pGpu, pMemoryManager) memmgrCheckReservedMemorySize_DISPATCH(pGpu, pMemoryManager)
742 #define memmgrReadMmuLock_FNPTR(pMemoryManager) pMemoryManager->__memmgrReadMmuLock__
743 #define memmgrReadMmuLock(pGpu, pMemoryManager, pbIsValid, pMmuLockLo, pMmuLockHi) memmgrReadMmuLock_DISPATCH(pGpu, pMemoryManager, pbIsValid, pMmuLockLo, pMmuLockHi)
744 #define memmgrReadMmuLock_HAL(pGpu, pMemoryManager, pbIsValid, pMmuLockLo, pMmuLockHi) memmgrReadMmuLock_DISPATCH(pGpu, pMemoryManager, pbIsValid, pMmuLockLo, pMmuLockHi)
745 #define memmgrBlockMemLockedMemory_FNPTR(pMemoryManager) pMemoryManager->__memmgrBlockMemLockedMemory__
746 #define memmgrBlockMemLockedMemory(pGpu, pMemoryManager) memmgrBlockMemLockedMemory_DISPATCH(pGpu, pMemoryManager)
747 #define memmgrBlockMemLockedMemory_HAL(pGpu, pMemoryManager) memmgrBlockMemLockedMemory_DISPATCH(pGpu, pMemoryManager)
748 #define memmgrInsertUnprotectedRegionAtBottomOfFb_FNPTR(pMemoryManager) pMemoryManager->__memmgrInsertUnprotectedRegionAtBottomOfFb__
749 #define memmgrInsertUnprotectedRegionAtBottomOfFb(pGpu, pMemoryManager, pSize) memmgrInsertUnprotectedRegionAtBottomOfFb_DISPATCH(pGpu, pMemoryManager, pSize)
750 #define memmgrInsertUnprotectedRegionAtBottomOfFb_HAL(pGpu, pMemoryManager, pSize) memmgrInsertUnprotectedRegionAtBottomOfFb_DISPATCH(pGpu, pMemoryManager, pSize)
751 #define memmgrInitBaseFbRegions_FNPTR(pMemoryManager) pMemoryManager->__memmgrInitBaseFbRegions__
752 #define memmgrInitBaseFbRegions(pGpu, pMemoryManager) memmgrInitBaseFbRegions_DISPATCH(pGpu, pMemoryManager)
753 #define memmgrInitBaseFbRegions_HAL(pGpu, pMemoryManager) memmgrInitBaseFbRegions_DISPATCH(pGpu, pMemoryManager)
754 #define memmgrGetDisablePlcKind_FNPTR(pMemoryManager) pMemoryManager->__memmgrGetDisablePlcKind__
755 #define memmgrGetDisablePlcKind(pMemoryManager, pteKind) memmgrGetDisablePlcKind_DISPATCH(pMemoryManager, pteKind)
756 #define memmgrGetDisablePlcKind_HAL(pMemoryManager, pteKind) memmgrGetDisablePlcKind_DISPATCH(pMemoryManager, pteKind)
757 #define memmgrEnableDynamicPageOfflining_FNPTR(pMemoryManager) pMemoryManager->__memmgrEnableDynamicPageOfflining__
758 #define memmgrEnableDynamicPageOfflining(pGpu, pMemoryManager) memmgrEnableDynamicPageOfflining_DISPATCH(pGpu, pMemoryManager)
759 #define memmgrEnableDynamicPageOfflining_HAL(pGpu, pMemoryManager) memmgrEnableDynamicPageOfflining_DISPATCH(pGpu, pMemoryManager)
760 #define memmgrSetPartitionableMem_FNPTR(pMemoryManager) pMemoryManager->__memmgrSetPartitionableMem__
761 #define memmgrSetPartitionableMem(pGpu, pMemoryManager) memmgrSetPartitionableMem_DISPATCH(pGpu, pMemoryManager)
762 #define memmgrSetPartitionableMem_HAL(pGpu, pMemoryManager) memmgrSetPartitionableMem_DISPATCH(pGpu, pMemoryManager)
763 #define memmgrAllocMIGGPUInstanceMemory_FNPTR(pMemoryManager) pMemoryManager->__memmgrAllocMIGGPUInstanceMemory__
764 #define memmgrAllocMIGGPUInstanceMemory(pGpu, pMemoryManager, swizzId, phMemory, pAddrRange, ppMemoryPartitionHeap) memmgrAllocMIGGPUInstanceMemory_DISPATCH(pGpu, pMemoryManager, swizzId, phMemory, pAddrRange, ppMemoryPartitionHeap)
765 #define memmgrAllocMIGGPUInstanceMemory_HAL(pGpu, pMemoryManager, swizzId, phMemory, pAddrRange, ppMemoryPartitionHeap) memmgrAllocMIGGPUInstanceMemory_DISPATCH(pGpu, pMemoryManager, swizzId, phMemory, pAddrRange, ppMemoryPartitionHeap)
766 #define memmgrGetBlackListPages_FNPTR(pMemoryManager) pMemoryManager->__memmgrGetBlackListPages__
767 #define memmgrGetBlackListPages(pGpu, pMemoryManager, pBlAddrs, pCount) memmgrGetBlackListPages_DISPATCH(pGpu, pMemoryManager, pBlAddrs, pCount)
768 #define memmgrGetBlackListPages_HAL(pGpu, pMemoryManager, pBlAddrs, pCount) memmgrGetBlackListPages_DISPATCH(pGpu, pMemoryManager, pBlAddrs, pCount)
769 #define memmgrDiscoverMIGPartitionableMemoryRange_FNPTR(pMemoryManager) pMemoryManager->__memmgrDiscoverMIGPartitionableMemoryRange__
770 #define memmgrDiscoverMIGPartitionableMemoryRange(pGpu, pMemoryManager, pMemoryRange) memmgrDiscoverMIGPartitionableMemoryRange_DISPATCH(pGpu, pMemoryManager, pMemoryRange)
771 #define memmgrDiscoverMIGPartitionableMemoryRange_HAL(pGpu, pMemoryManager, pMemoryRange) memmgrDiscoverMIGPartitionableMemoryRange_DISPATCH(pGpu, pMemoryManager, pMemoryRange)
772 #define memmgrGetFBEndReserveSizeEstimate_FNPTR(pMemoryManager) pMemoryManager->__memmgrGetFBEndReserveSizeEstimate__
773 #define memmgrGetFBEndReserveSizeEstimate(pGpu, pMemoryManager) memmgrGetFBEndReserveSizeEstimate_DISPATCH(pGpu, pMemoryManager)
774 #define memmgrGetFBEndReserveSizeEstimate_HAL(pGpu, pMemoryManager) memmgrGetFBEndReserveSizeEstimate_DISPATCH(pGpu, pMemoryManager)
775 #define memmgrInitMissing_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateInitMissing__
776 #define memmgrInitMissing(pGpu, pEngstate) memmgrInitMissing_DISPATCH(pGpu, pEngstate)
777 #define memmgrStatePreInitUnlocked_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreInitUnlocked__
778 #define memmgrStatePreInitUnlocked(pGpu, pEngstate) memmgrStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
779 #define memmgrStateInitUnlocked_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStateInitUnlocked__
780 #define memmgrStateInitUnlocked(pGpu, pEngstate) memmgrStateInitUnlocked_DISPATCH(pGpu, pEngstate)
781 #define memmgrStatePreLoad_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreLoad__
782 #define memmgrStatePreLoad(pGpu, pEngstate, arg3) memmgrStatePreLoad_DISPATCH(pGpu, pEngstate, arg3)
783 #define memmgrStatePostUnload_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePostUnload__
784 #define memmgrStatePostUnload(pGpu, pEngstate, arg3) memmgrStatePostUnload_DISPATCH(pGpu, pEngstate, arg3)
785 #define memmgrIsPresent_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateIsPresent__
786 #define memmgrIsPresent(pGpu, pEngstate) memmgrIsPresent_DISPATCH(pGpu, pEngstate)
787
788 // Dispatch functions
memmgrConstructEngine_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,ENGDESCRIPTOR arg3)789 static inline NV_STATUS memmgrConstructEngine_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, ENGDESCRIPTOR arg3) {
790 return pMemoryManager->__memmgrConstructEngine__(pGpu, pMemoryManager, arg3);
791 }
792
memmgrStatePreInitLocked_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)793 static inline NV_STATUS memmgrStatePreInitLocked_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
794 return pMemoryManager->__memmgrStatePreInitLocked__(pGpu, pMemoryManager);
795 }
796
memmgrStateInitLocked_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)797 static inline NV_STATUS memmgrStateInitLocked_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
798 return pMemoryManager->__memmgrStateInitLocked__(pGpu, pMemoryManager);
799 }
800
memmgrStateLoad_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,NvU32 arg3)801 static inline NV_STATUS memmgrStateLoad_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3) {
802 return pMemoryManager->__memmgrStateLoad__(pGpu, pMemoryManager, arg3);
803 }
804
memmgrStatePostLoad_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,NvU32 arg3)805 static inline NV_STATUS memmgrStatePostLoad_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3) {
806 return pMemoryManager->__memmgrStatePostLoad__(pGpu, pMemoryManager, arg3);
807 }
808
memmgrStatePreUnload_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,NvU32 arg3)809 static inline NV_STATUS memmgrStatePreUnload_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3) {
810 return pMemoryManager->__memmgrStatePreUnload__(pGpu, pMemoryManager, arg3);
811 }
812
memmgrStateUnload_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,NvU32 arg3)813 static inline NV_STATUS memmgrStateUnload_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3) {
814 return pMemoryManager->__memmgrStateUnload__(pGpu, pMemoryManager, arg3);
815 }
816
memmgrStateDestroy_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)817 static inline void memmgrStateDestroy_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
818 pMemoryManager->__memmgrStateDestroy__(pGpu, pMemoryManager);
819 }
820
memmgrAllocateConsoleRegion_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,FB_REGION_DESCRIPTOR * arg3)821 static inline NV_STATUS memmgrAllocateConsoleRegion_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_REGION_DESCRIPTOR *arg3) {
822 return pMemoryManager->__memmgrAllocateConsoleRegion__(pGpu, pMemoryManager, arg3);
823 }
824
memmgrMemUtilsSec2CtxInit_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,OBJCHANNEL * arg3)825 static inline NV_STATUS memmgrMemUtilsSec2CtxInit_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3) {
826 return pMemoryManager->__memmgrMemUtilsSec2CtxInit__(pGpu, pMemoryManager, arg3);
827 }
828
memmgrMemUtilsCheckMemoryFastScrubEnable_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,NvU32 arg3,NvBool arg4,RmPhysAddr arg5,NvU32 arg6,NV_ADDRESS_SPACE arg7)829 static inline NvBool memmgrMemUtilsCheckMemoryFastScrubEnable_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3, NvBool arg4, RmPhysAddr arg5, NvU32 arg6, NV_ADDRESS_SPACE arg7) {
830 return pMemoryManager->__memmgrMemUtilsCheckMemoryFastScrubEnable__(pGpu, pMemoryManager, arg3, arg4, arg5, arg6, arg7);
831 }
832
memmgrAllocDetermineAlignment_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,NvU64 * pMemSize,NvU64 * pAlign,NvU64 alignPad,NvU32 allocFlags,NvU32 retAttr,NvU32 retAttr2,NvU64 hwAlignment)833 static inline NV_STATUS memmgrAllocDetermineAlignment_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *pMemSize, NvU64 *pAlign, NvU64 alignPad, NvU32 allocFlags, NvU32 retAttr, NvU32 retAttr2, NvU64 hwAlignment) {
834 return pMemoryManager->__memmgrAllocDetermineAlignment__(pGpu, pMemoryManager, pMemSize, pAlign, alignPad, allocFlags, retAttr, retAttr2, hwAlignment);
835 }
836
memmgrGetMaxContextSize_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)837 static inline NvU64 memmgrGetMaxContextSize_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
838 return pMemoryManager->__memmgrGetMaxContextSize__(pGpu, pMemoryManager);
839 }
840
memmgrGetFbTaxSize_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)841 static inline NvU64 memmgrGetFbTaxSize_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
842 return pMemoryManager->__memmgrGetFbTaxSize__(pGpu, pMemoryManager);
843 }
844
memmgrScrubRegistryOverrides_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)845 static inline void memmgrScrubRegistryOverrides_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
846 pMemoryManager->__memmgrScrubRegistryOverrides__(pGpu, pMemoryManager);
847 }
848
memmgrGetPteKindBl_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)849 static inline NvU32 memmgrGetPteKindBl_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
850 return pMemoryManager->__memmgrGetPteKindBl__(pGpu, pMemoryManager);
851 }
852
memmgrGetPteKindPitch_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)853 static inline NvU32 memmgrGetPteKindPitch_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
854 return pMemoryManager->__memmgrGetPteKindPitch__(pGpu, pMemoryManager);
855 }
856
memmgrChooseKindCompressC_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,FB_ALLOC_PAGE_FORMAT * arg3)857 static inline NvU32 memmgrChooseKindCompressC_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_PAGE_FORMAT *arg3) {
858 return pMemoryManager->__memmgrChooseKindCompressC__(pGpu, pMemoryManager, arg3);
859 }
860
memmgrGetFlaKind_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,NvU32 * arg3)861 static inline NV_STATUS memmgrGetFlaKind_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 *arg3) {
862 return pMemoryManager->__memmgrGetFlaKind__(pGpu, pMemoryManager, arg3);
863 }
864
memmgrIsMemDescSupportedByFla_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,MEMORY_DESCRIPTOR * pMemDesc)865 static inline NvBool memmgrIsMemDescSupportedByFla_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *pMemDesc) {
866 return pMemoryManager->__memmgrIsMemDescSupportedByFla__(pGpu, pMemoryManager, pMemDesc);
867 }
868
memmgrDetermineComptag_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,RmPhysAddr arg3)869 static inline NvU32 memmgrDetermineComptag_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, RmPhysAddr arg3) {
870 return pMemoryManager->__memmgrDetermineComptag__(pGpu, pMemoryManager, arg3);
871 }
872
memmgrGetGrHeapReservationSize_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)873 static inline NvU32 memmgrGetGrHeapReservationSize_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
874 return pMemoryManager->__memmgrGetGrHeapReservationSize__(pGpu, pMemoryManager);
875 }
876
memmgrGetRunlistEntriesReservedFbSpace_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)877 static inline NvU32 memmgrGetRunlistEntriesReservedFbSpace_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
878 return pMemoryManager->__memmgrGetRunlistEntriesReservedFbSpace__(pGpu, pMemoryManager);
879 }
880
memmgrGetUserdReservedFbSpace_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)881 static inline NvU32 memmgrGetUserdReservedFbSpace_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
882 return pMemoryManager->__memmgrGetUserdReservedFbSpace__(pGpu, pMemoryManager);
883 }
884
memmgrCheckReservedMemorySize_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)885 static inline NV_STATUS memmgrCheckReservedMemorySize_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
886 return pMemoryManager->__memmgrCheckReservedMemorySize__(pGpu, pMemoryManager);
887 }
888
memmgrReadMmuLock_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,NvBool * pbIsValid,NvU64 * pMmuLockLo,NvU64 * pMmuLockHi)889 static inline NV_STATUS memmgrReadMmuLock_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvBool *pbIsValid, NvU64 *pMmuLockLo, NvU64 *pMmuLockHi) {
890 return pMemoryManager->__memmgrReadMmuLock__(pGpu, pMemoryManager, pbIsValid, pMmuLockLo, pMmuLockHi);
891 }
892
memmgrBlockMemLockedMemory_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)893 static inline NV_STATUS memmgrBlockMemLockedMemory_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
894 return pMemoryManager->__memmgrBlockMemLockedMemory__(pGpu, pMemoryManager);
895 }
896
memmgrInsertUnprotectedRegionAtBottomOfFb_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,NvU64 * pSize)897 static inline NV_STATUS memmgrInsertUnprotectedRegionAtBottomOfFb_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *pSize) {
898 return pMemoryManager->__memmgrInsertUnprotectedRegionAtBottomOfFb__(pGpu, pMemoryManager, pSize);
899 }
900
memmgrInitBaseFbRegions_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)901 static inline NV_STATUS memmgrInitBaseFbRegions_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
902 return pMemoryManager->__memmgrInitBaseFbRegions__(pGpu, pMemoryManager);
903 }
904
memmgrGetDisablePlcKind_DISPATCH(struct MemoryManager * pMemoryManager,NvU32 * pteKind)905 static inline void memmgrGetDisablePlcKind_DISPATCH(struct MemoryManager *pMemoryManager, NvU32 *pteKind) {
906 pMemoryManager->__memmgrGetDisablePlcKind__(pMemoryManager, pteKind);
907 }
908
memmgrEnableDynamicPageOfflining_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)909 static inline void memmgrEnableDynamicPageOfflining_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
910 pMemoryManager->__memmgrEnableDynamicPageOfflining__(pGpu, pMemoryManager);
911 }
912
memmgrSetPartitionableMem_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)913 static inline NV_STATUS memmgrSetPartitionableMem_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
914 return pMemoryManager->__memmgrSetPartitionableMem__(pGpu, pMemoryManager);
915 }
916
memmgrAllocMIGGPUInstanceMemory_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,NvU32 swizzId,NvHandle * phMemory,struct NV_RANGE * pAddrRange,struct Heap ** ppMemoryPartitionHeap)917 static inline NV_STATUS memmgrAllocMIGGPUInstanceMemory_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 swizzId, NvHandle *phMemory, struct NV_RANGE *pAddrRange, struct Heap **ppMemoryPartitionHeap) {
918 return pMemoryManager->__memmgrAllocMIGGPUInstanceMemory__(pGpu, pMemoryManager, swizzId, phMemory, pAddrRange, ppMemoryPartitionHeap);
919 }
920
memmgrGetBlackListPages_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,BLACKLIST_ADDRESS * pBlAddrs,NvU32 * pCount)921 static inline NV_STATUS memmgrGetBlackListPages_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, BLACKLIST_ADDRESS *pBlAddrs, NvU32 *pCount) {
922 return pMemoryManager->__memmgrGetBlackListPages__(pGpu, pMemoryManager, pBlAddrs, pCount);
923 }
924
memmgrDiscoverMIGPartitionableMemoryRange_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,struct NV_RANGE * pMemoryRange)925 static inline NV_STATUS memmgrDiscoverMIGPartitionableMemoryRange_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, struct NV_RANGE *pMemoryRange) {
926 return pMemoryManager->__memmgrDiscoverMIGPartitionableMemoryRange__(pGpu, pMemoryManager, pMemoryRange);
927 }
928
memmgrGetFBEndReserveSizeEstimate_DISPATCH(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)929 static inline NvU32 memmgrGetFBEndReserveSizeEstimate_DISPATCH(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
930 return pMemoryManager->__memmgrGetFBEndReserveSizeEstimate__(pGpu, pMemoryManager);
931 }
932
memmgrInitMissing_DISPATCH(struct OBJGPU * pGpu,struct MemoryManager * pEngstate)933 static inline void memmgrInitMissing_DISPATCH(struct OBJGPU *pGpu, struct MemoryManager *pEngstate) {
934 pEngstate->__memmgrInitMissing__(pGpu, pEngstate);
935 }
936
memmgrStatePreInitUnlocked_DISPATCH(struct OBJGPU * pGpu,struct MemoryManager * pEngstate)937 static inline NV_STATUS memmgrStatePreInitUnlocked_DISPATCH(struct OBJGPU *pGpu, struct MemoryManager *pEngstate) {
938 return pEngstate->__memmgrStatePreInitUnlocked__(pGpu, pEngstate);
939 }
940
memmgrStateInitUnlocked_DISPATCH(struct OBJGPU * pGpu,struct MemoryManager * pEngstate)941 static inline NV_STATUS memmgrStateInitUnlocked_DISPATCH(struct OBJGPU *pGpu, struct MemoryManager *pEngstate) {
942 return pEngstate->__memmgrStateInitUnlocked__(pGpu, pEngstate);
943 }
944
memmgrStatePreLoad_DISPATCH(struct OBJGPU * pGpu,struct MemoryManager * pEngstate,NvU32 arg3)945 static inline NV_STATUS memmgrStatePreLoad_DISPATCH(struct OBJGPU *pGpu, struct MemoryManager *pEngstate, NvU32 arg3) {
946 return pEngstate->__memmgrStatePreLoad__(pGpu, pEngstate, arg3);
947 }
948
memmgrStatePostUnload_DISPATCH(struct OBJGPU * pGpu,struct MemoryManager * pEngstate,NvU32 arg3)949 static inline NV_STATUS memmgrStatePostUnload_DISPATCH(struct OBJGPU *pGpu, struct MemoryManager *pEngstate, NvU32 arg3) {
950 return pEngstate->__memmgrStatePostUnload__(pGpu, pEngstate, arg3);
951 }
952
memmgrIsPresent_DISPATCH(struct OBJGPU * pGpu,struct MemoryManager * pEngstate)953 static inline NvBool memmgrIsPresent_DISPATCH(struct OBJGPU *pGpu, struct MemoryManager *pEngstate) {
954 return pEngstate->__memmgrIsPresent__(pGpu, pEngstate);
955 }
956
957 NV_STATUS memmgrSavePowerMgmtState_KERNEL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
958
959
960 #ifdef __nvoc_mem_mgr_h_disabled
memmgrSavePowerMgmtState(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)961 static inline NV_STATUS memmgrSavePowerMgmtState(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
962 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
963 return NV_ERR_NOT_SUPPORTED;
964 }
965 #else //__nvoc_mem_mgr_h_disabled
966 #define memmgrSavePowerMgmtState(pGpu, pMemoryManager) memmgrSavePowerMgmtState_KERNEL(pGpu, pMemoryManager)
967 #endif //__nvoc_mem_mgr_h_disabled
968
969 #define memmgrSavePowerMgmtState_HAL(pGpu, pMemoryManager) memmgrSavePowerMgmtState(pGpu, pMemoryManager)
970
971 NV_STATUS memmgrRestorePowerMgmtState_KERNEL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
972
973
974 #ifdef __nvoc_mem_mgr_h_disabled
memmgrRestorePowerMgmtState(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)975 static inline NV_STATUS memmgrRestorePowerMgmtState(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
976 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
977 return NV_ERR_NOT_SUPPORTED;
978 }
979 #else //__nvoc_mem_mgr_h_disabled
980 #define memmgrRestorePowerMgmtState(pGpu, pMemoryManager) memmgrRestorePowerMgmtState_KERNEL(pGpu, pMemoryManager)
981 #endif //__nvoc_mem_mgr_h_disabled
982
983 #define memmgrRestorePowerMgmtState_HAL(pGpu, pMemoryManager) memmgrRestorePowerMgmtState(pGpu, pMemoryManager)
984
985 NvU64 memmgrDeterminePageSize_IMPL(struct MemoryManager *pMemoryManager, NvHandle hClient, NvU64 memSize, NvU32 memFormat, NvU32 pageFormatFlags, NvU32 *pRetAttr, NvU32 *pRetAttr2);
986
987
988 #ifdef __nvoc_mem_mgr_h_disabled
memmgrDeterminePageSize(struct MemoryManager * pMemoryManager,NvHandle hClient,NvU64 memSize,NvU32 memFormat,NvU32 pageFormatFlags,NvU32 * pRetAttr,NvU32 * pRetAttr2)989 static inline NvU64 memmgrDeterminePageSize(struct MemoryManager *pMemoryManager, NvHandle hClient, NvU64 memSize, NvU32 memFormat, NvU32 pageFormatFlags, NvU32 *pRetAttr, NvU32 *pRetAttr2) {
990 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
991 return 0;
992 }
993 #else //__nvoc_mem_mgr_h_disabled
994 #define memmgrDeterminePageSize(pMemoryManager, hClient, memSize, memFormat, pageFormatFlags, pRetAttr, pRetAttr2) memmgrDeterminePageSize_IMPL(pMemoryManager, hClient, memSize, memFormat, pageFormatFlags, pRetAttr, pRetAttr2)
995 #endif //__nvoc_mem_mgr_h_disabled
996
997 #define memmgrDeterminePageSize_HAL(pMemoryManager, hClient, memSize, memFormat, pageFormatFlags, pRetAttr, pRetAttr2) memmgrDeterminePageSize(pMemoryManager, hClient, memSize, memFormat, pageFormatFlags, pRetAttr, pRetAttr2)
998
memmgrReserveConsoleRegion_56cd7a(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,FB_REGION_DESCRIPTOR * arg3)999 static inline NV_STATUS memmgrReserveConsoleRegion_56cd7a(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_REGION_DESCRIPTOR *arg3) {
1000 return NV_OK;
1001 }
1002
1003
1004 #ifdef __nvoc_mem_mgr_h_disabled
memmgrReserveConsoleRegion(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,FB_REGION_DESCRIPTOR * arg3)1005 static inline NV_STATUS memmgrReserveConsoleRegion(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_REGION_DESCRIPTOR *arg3) {
1006 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
1007 return NV_ERR_NOT_SUPPORTED;
1008 }
1009 #else //__nvoc_mem_mgr_h_disabled
1010 #define memmgrReserveConsoleRegion(pGpu, pMemoryManager, arg3) memmgrReserveConsoleRegion_56cd7a(pGpu, pMemoryManager, arg3)
1011 #endif //__nvoc_mem_mgr_h_disabled
1012
1013 #define memmgrReserveConsoleRegion_HAL(pGpu, pMemoryManager, arg3) memmgrReserveConsoleRegion(pGpu, pMemoryManager, arg3)
1014
1015 NV_STATUS memmgrGetKindComprForGpu_KERNEL(struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *arg2, OBJGPU *pGpu, NvU64 offset, NvU32 *kind, COMPR_INFO *pComprInfo);
1016
1017
1018 #ifdef __nvoc_mem_mgr_h_disabled
memmgrGetKindComprForGpu(struct MemoryManager * pMemoryManager,MEMORY_DESCRIPTOR * arg2,OBJGPU * pGpu,NvU64 offset,NvU32 * kind,COMPR_INFO * pComprInfo)1019 static inline NV_STATUS memmgrGetKindComprForGpu(struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *arg2, OBJGPU *pGpu, NvU64 offset, NvU32 *kind, COMPR_INFO *pComprInfo) {
1020 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
1021 return NV_ERR_NOT_SUPPORTED;
1022 }
1023 #else //__nvoc_mem_mgr_h_disabled
1024 #define memmgrGetKindComprForGpu(pMemoryManager, arg2, pGpu, offset, kind, pComprInfo) memmgrGetKindComprForGpu_KERNEL(pMemoryManager, arg2, pGpu, offset, kind, pComprInfo)
1025 #endif //__nvoc_mem_mgr_h_disabled
1026
1027 #define memmgrGetKindComprForGpu_HAL(pMemoryManager, arg2, pGpu, offset, kind, pComprInfo) memmgrGetKindComprForGpu(pMemoryManager, arg2, pGpu, offset, kind, pComprInfo)
1028
memmgrScrubInit_56cd7a(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)1029 static inline NV_STATUS memmgrScrubInit_56cd7a(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
1030 return NV_OK;
1031 }
1032
1033
1034 #ifdef __nvoc_mem_mgr_h_disabled
memmgrScrubInit(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)1035 static inline NV_STATUS memmgrScrubInit(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
1036 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
1037 return NV_ERR_NOT_SUPPORTED;
1038 }
1039 #else //__nvoc_mem_mgr_h_disabled
1040 #define memmgrScrubInit(pGpu, pMemoryManager) memmgrScrubInit_56cd7a(pGpu, pMemoryManager)
1041 #endif //__nvoc_mem_mgr_h_disabled
1042
1043 #define memmgrScrubInit_HAL(pGpu, pMemoryManager) memmgrScrubInit(pGpu, pMemoryManager)
1044
1045 NV_STATUS memmgrScrubHandlePostSchedulingEnable_GP100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
1046
1047
1048 #ifdef __nvoc_mem_mgr_h_disabled
memmgrScrubHandlePostSchedulingEnable(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)1049 static inline NV_STATUS memmgrScrubHandlePostSchedulingEnable(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
1050 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
1051 return NV_ERR_NOT_SUPPORTED;
1052 }
1053 #else //__nvoc_mem_mgr_h_disabled
1054 #define memmgrScrubHandlePostSchedulingEnable(pGpu, pMemoryManager) memmgrScrubHandlePostSchedulingEnable_GP100(pGpu, pMemoryManager)
1055 #endif //__nvoc_mem_mgr_h_disabled
1056
1057 #define memmgrScrubHandlePostSchedulingEnable_HAL(pGpu, pMemoryManager) memmgrScrubHandlePostSchedulingEnable(pGpu, pMemoryManager)
1058
// HAL variant selected here asserts: calling it on this configuration is a
// precomp-time error (no scrub state is available).
static inline void memmgrGetScrubState_f2d351(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *arg3, NvU64 *arg4, NvBool *arg5) {
    NV_ASSERT_PRECOMP(0);
}


#ifdef __nvoc_mem_mgr_h_disabled
// Stub: asserts when MemoryManager is compiled out.
static inline void memmgrGetScrubState(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *arg3, NvU64 *arg4, NvBool *arg5) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetScrubState(pGpu, pMemoryManager, arg3, arg4, arg5) memmgrGetScrubState_f2d351(pGpu, pMemoryManager, arg3, arg4, arg5)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrGetScrubState_HAL(pGpu, pMemoryManager, arg3, arg4, arg5) memmgrGetScrubState(pGpu, pMemoryManager, arg3, arg4, arg5)
1073
// HAL variant selected here is a no-op.
static inline void memmgrScrubInternalRegions_b3696a(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return;
}


#ifdef __nvoc_mem_mgr_h_disabled
// Stub: asserts when MemoryManager is compiled out.
static inline void memmgrScrubInternalRegions(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrScrubInternalRegions(pGpu, pMemoryManager) memmgrScrubInternalRegions_b3696a(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrScrubInternalRegions_HAL(pGpu, pMemoryManager) memmgrScrubInternalRegions(pGpu, pMemoryManager)
1088
// memmgrEccScrubInProgress: default stub always reports NV_FALSE (ECC scrub
// presumably never in progress on chips without the GP100+ implementation).
static inline NvBool memmgrEccScrubInProgress_491d52(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return ((NvBool)(0 != 0));
}

// Per-chip implementation (GP100+); defined in the matching HAL source file.
NvBool memmgrEccScrubInProgress_GP100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and report "no scrub in progress".
static inline NvBool memmgrEccScrubInProgress(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrEccScrubInProgress(pGpu, pMemoryManager) memmgrEccScrubInProgress_491d52(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrEccScrubInProgress_HAL(pGpu, pMemoryManager) memmgrEccScrubInProgress(pGpu, pMemoryManager)
1106
// memmgrAsyncScrubRegion: default stub asserts — unsupported on this build's
// chip set (arg3/arg4 presumably a region base/size; generated names only).
static inline void memmgrAsyncScrubRegion_f2d351(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 arg3, NvU64 arg4) {
    NV_ASSERT_PRECOMP(0);
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: any call is a programming error.
static inline void memmgrAsyncScrubRegion(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 arg3, NvU64 arg4) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrAsyncScrubRegion(pGpu, pMemoryManager, arg3, arg4) memmgrAsyncScrubRegion_f2d351(pGpu, pMemoryManager, arg3, arg4)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrAsyncScrubRegion_HAL(pGpu, pMemoryManager, arg3, arg4) memmgrAsyncScrubRegion(pGpu, pMemoryManager, arg3, arg4)
1121
// memmgrScrubHandlePreSchedulingDisable: GP100+ implementation only; bound
// directly when the engine is enabled.
NV_STATUS memmgrScrubHandlePreSchedulingDisable_GP100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call.
static inline NV_STATUS memmgrScrubHandlePreSchedulingDisable(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrScrubHandlePreSchedulingDisable(pGpu, pMemoryManager) memmgrScrubHandlePreSchedulingDisable_GP100(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrScrubHandlePreSchedulingDisable_HAL(pGpu, pMemoryManager) memmgrScrubHandlePreSchedulingDisable(pGpu, pMemoryManager)
1135
// memmgrScrubDestroy: no-op default — nothing to tear down when the scrubber
// was never created on this configuration.
static inline void memmgrScrubDestroy_b3696a(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return;
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: any call is a programming error.
static inline void memmgrScrubDestroy(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrScrubDestroy(pGpu, pMemoryManager) memmgrScrubDestroy_b3696a(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrScrubDestroy_HAL(pGpu, pMemoryManager) memmgrScrubDestroy(pGpu, pMemoryManager)
1150
// memmgrScrubMemory: no-op default; a GP100+ implementation exists but this
// build binds the stub (arg3/arg4 presumably physical base/size — generated
// argument names carry no semantics).
static inline void memmgrScrubMemory_b3696a(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, RmPhysAddr arg3, NvU64 arg4) {
    return;
}

// Per-chip implementation (GP100+); defined in the matching HAL source file.
void memmgrScrubMemory_GP100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, RmPhysAddr arg3, NvU64 arg4);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: any call is a programming error.
static inline void memmgrScrubMemory(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, RmPhysAddr arg3, NvU64 arg4) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrScrubMemory(pGpu, pMemoryManager, arg3, arg4) memmgrScrubMemory_b3696a(pGpu, pMemoryManager, arg3, arg4)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrScrubMemory_HAL(pGpu, pMemoryManager, arg3, arg4) memmgrScrubMemory(pGpu, pMemoryManager, arg3, arg4)
1167
// memmgrMemUtilsMemSetBlocking: unsupported on this configuration — assert
// and return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrMemUtilsMemSetBlocking_92bfc3(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3, RmPhysAddr arg4, NvU64 arg5) {
    NV_ASSERT_PRECOMP(0);
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call.
static inline NV_STATUS memmgrMemUtilsMemSetBlocking(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3, RmPhysAddr arg4, NvU64 arg5) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemUtilsMemSetBlocking(pGpu, pMemoryManager, arg3, arg4, arg5) memmgrMemUtilsMemSetBlocking_92bfc3(pGpu, pMemoryManager, arg3, arg4, arg5)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrMemUtilsMemSetBlocking_HAL(pGpu, pMemoryManager, arg3, arg4, arg5) memmgrMemUtilsMemSetBlocking(pGpu, pMemoryManager, arg3, arg4, arg5)
1184
// memmgrMemUtilsMemSet: unsupported on this configuration — assert and
// return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrMemUtilsMemSet_92bfc3(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3, RmPhysAddr arg4, NvU64 arg5, NvU32 arg6, NvU32 *arg7) {
    NV_ASSERT_PRECOMP(0);
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call.
static inline NV_STATUS memmgrMemUtilsMemSet(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3, RmPhysAddr arg4, NvU64 arg5, NvU32 arg6, NvU32 *arg7) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemUtilsMemSet(pGpu, pMemoryManager, arg3, arg4, arg5, arg6, arg7) memmgrMemUtilsMemSet_92bfc3(pGpu, pMemoryManager, arg3, arg4, arg5, arg6, arg7)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrMemUtilsMemSet_HAL(pGpu, pMemoryManager, arg3, arg4, arg5, arg6, arg7) memmgrMemUtilsMemSet(pGpu, pMemoryManager, arg3, arg4, arg5, arg6, arg7)
1201
// memmgrMemUtilsAllocateEccScrubber: unsupported on this configuration —
// assert and return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrMemUtilsAllocateEccScrubber_92bfc3(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3) {
    NV_ASSERT_PRECOMP(0);
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call.
static inline NV_STATUS memmgrMemUtilsAllocateEccScrubber(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemUtilsAllocateEccScrubber(pGpu, pMemoryManager, arg3) memmgrMemUtilsAllocateEccScrubber_92bfc3(pGpu, pMemoryManager, arg3)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrMemUtilsAllocateEccScrubber_HAL(pGpu, pMemoryManager, arg3) memmgrMemUtilsAllocateEccScrubber(pGpu, pMemoryManager, arg3)
1218
// memmgrMemUtilsAllocateEccAllocScrubber: unsupported on this configuration —
// assert and return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrMemUtilsAllocateEccAllocScrubber_92bfc3(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3) {
    NV_ASSERT_PRECOMP(0);
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call.
static inline NV_STATUS memmgrMemUtilsAllocateEccAllocScrubber(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemUtilsAllocateEccAllocScrubber(pGpu, pMemoryManager, arg3) memmgrMemUtilsAllocateEccAllocScrubber_92bfc3(pGpu, pMemoryManager, arg3)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrMemUtilsAllocateEccAllocScrubber_HAL(pGpu, pMemoryManager, arg3) memmgrMemUtilsAllocateEccAllocScrubber(pGpu, pMemoryManager, arg3)
1235
// memmgrMemUtilsChannelInitialize: GM107+ implementation; bound directly when
// the engine is enabled.
NV_STATUS memmgrMemUtilsChannelInitialize_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call.
static inline NV_STATUS memmgrMemUtilsChannelInitialize(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemUtilsChannelInitialize(pGpu, pMemoryManager, arg3) memmgrMemUtilsChannelInitialize_GM107(pGpu, pMemoryManager, arg3)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrMemUtilsChannelInitialize_HAL(pGpu, pMemoryManager, arg3) memmgrMemUtilsChannelInitialize(pGpu, pMemoryManager, arg3)
1249
// memmgrMemUtilsCopyEngineInitialize: GM107+ implementation; bound directly
// when the engine is enabled.
NV_STATUS memmgrMemUtilsCopyEngineInitialize_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call.
static inline NV_STATUS memmgrMemUtilsCopyEngineInitialize(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemUtilsCopyEngineInitialize(pGpu, pMemoryManager, arg3) memmgrMemUtilsCopyEngineInitialize_GM107(pGpu, pMemoryManager, arg3)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrMemUtilsCopyEngineInitialize_HAL(pGpu, pMemoryManager, arg3) memmgrMemUtilsCopyEngineInitialize(pGpu, pMemoryManager, arg3)
1263
// memmgrMemUtilsGetCopyEngineClass: GM107+ implementation; writes the CE
// class id through pClass.
NV_STATUS memmgrMemUtilsGetCopyEngineClass_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 *pClass);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call (pClass untouched).
static inline NV_STATUS memmgrMemUtilsGetCopyEngineClass(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 *pClass) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemUtilsGetCopyEngineClass(pGpu, pMemoryManager, pClass) memmgrMemUtilsGetCopyEngineClass_GM107(pGpu, pMemoryManager, pClass)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrMemUtilsGetCopyEngineClass_HAL(pGpu, pMemoryManager, pClass) memmgrMemUtilsGetCopyEngineClass(pGpu, pMemoryManager, pClass)
1277
// memmgrMemUtilsCreateMemoryAlias: GM107+ implementation; bound directly when
// the engine is enabled.
NV_STATUS memmgrMemUtilsCreateMemoryAlias_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call.
static inline NV_STATUS memmgrMemUtilsCreateMemoryAlias(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemUtilsCreateMemoryAlias(pGpu, pMemoryManager, arg3) memmgrMemUtilsCreateMemoryAlias_GM107(pGpu, pMemoryManager, arg3)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrMemUtilsCreateMemoryAlias_HAL(pGpu, pMemoryManager, arg3) memmgrMemUtilsCreateMemoryAlias(pGpu, pMemoryManager, arg3)
1291
// memmgrAllocHal: GM107+ implementation operating on an FB_ALLOC_INFO
// request; bound directly when the engine is enabled.
NV_STATUS memmgrAllocHal_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_INFO *pFbAllocInfo);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call.
static inline NV_STATUS memmgrAllocHal(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_INFO *pFbAllocInfo) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrAllocHal(pGpu, pMemoryManager, pFbAllocInfo) memmgrAllocHal_GM107(pGpu, pMemoryManager, pFbAllocInfo)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrAllocHal_HAL(pGpu, pMemoryManager, pFbAllocInfo) memmgrAllocHal(pGpu, pMemoryManager, pFbAllocInfo)
1305
// memmgrFreeHal: GM107+ counterpart to memmgrAllocHal; takes a timeout object
// for the free operation.
NV_STATUS memmgrFreeHal_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_INFO *pFbAllocInfo, PRMTIMEOUT pTimeout);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call.
static inline NV_STATUS memmgrFreeHal(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_INFO *pFbAllocInfo, PRMTIMEOUT pTimeout) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrFreeHal(pGpu, pMemoryManager, pFbAllocInfo, pTimeout) memmgrFreeHal_GM107(pGpu, pMemoryManager, pFbAllocInfo, pTimeout)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrFreeHal_HAL(pGpu, pMemoryManager, pFbAllocInfo, pTimeout) memmgrFreeHal(pGpu, pMemoryManager, pFbAllocInfo, pTimeout)
1319
// memmgrUpdateSurfaceCompression: unsupported default. The assert-or-return
// macro with a constant-false condition always returns NV_ERR_NOT_SUPPORTED,
// so control never reaches the end of the function despite the missing
// explicit return (generated _5baef9 pattern).
static inline NV_STATUS memmgrUpdateSurfaceCompression_5baef9(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, Memory *arg3, NvBool arg4) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call.
static inline NV_STATUS memmgrUpdateSurfaceCompression(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, Memory *arg3, NvBool arg4) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrUpdateSurfaceCompression(pGpu, pMemoryManager, arg3, arg4) memmgrUpdateSurfaceCompression_5baef9(pGpu, pMemoryManager, arg3, arg4)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrUpdateSurfaceCompression_HAL(pGpu, pMemoryManager, arg3, arg4) memmgrUpdateSurfaceCompression(pGpu, pMemoryManager, arg3, arg4)
1335
// memmgrGetBankPlacementData: GM107+ implementation; fills
// pBankPlacementLowData.
NV_STATUS memmgrGetBankPlacementData_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 *pBankPlacementLowData);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call.
static inline NV_STATUS memmgrGetBankPlacementData(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 *pBankPlacementLowData) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetBankPlacementData(pGpu, pMemoryManager, pBankPlacementLowData) memmgrGetBankPlacementData_GM107(pGpu, pMemoryManager, pBankPlacementLowData)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrGetBankPlacementData_HAL(pGpu, pMemoryManager, pBankPlacementLowData) memmgrGetBankPlacementData(pGpu, pMemoryManager, pBankPlacementLowData)
1349
// memmgrDirtyForPmTest: GM107+ implementation; partialDirty selects a partial
// vs. full dirty operation (PM-test helper; exact semantics in the HAL source).
void memmgrDirtyForPmTest_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvBool partialDirty);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: any call is a programming error.
static inline void memmgrDirtyForPmTest(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvBool partialDirty) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrDirtyForPmTest(pGpu, pMemoryManager, partialDirty) memmgrDirtyForPmTest_GM107(pGpu, pMemoryManager, partialDirty)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrDirtyForPmTest_HAL(pGpu, pMemoryManager, partialDirty) memmgrDirtyForPmTest(pGpu, pMemoryManager, partialDirty)
1362
// memmgrGetReservedHeapSizeMb: GM107+ implementation; returns a size in MB
// (per the name — defined in the HAL source).
NvU64 memmgrGetReservedHeapSizeMb_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and report zero reserved heap.
static inline NvU64 memmgrGetReservedHeapSizeMb(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return 0;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetReservedHeapSizeMb(pGpu, pMemoryManager) memmgrGetReservedHeapSizeMb_GM107(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrGetReservedHeapSizeMb_HAL(pGpu, pMemoryManager) memmgrGetReservedHeapSizeMb(pGpu, pMemoryManager)
1376
// memmgrInitFbRegionsHal: TU102+ implementation exists, but this build binds
// the trivial success stub below.
NV_STATUS memmgrInitFbRegionsHal_TU102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

// Default: nothing to initialize — succeed unconditionally.
static inline NV_STATUS memmgrInitFbRegionsHal_56cd7a(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return NV_OK;
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call.
static inline NV_STATUS memmgrInitFbRegionsHal(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrInitFbRegionsHal(pGpu, pMemoryManager) memmgrInitFbRegionsHal_56cd7a(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrInitFbRegionsHal_HAL(pGpu, pMemoryManager) memmgrInitFbRegionsHal(pGpu, pMemoryManager)
1394
// memmgrHandleSizeOverrides: GP100+ implementation; bound directly when the
// engine is enabled.
void memmgrHandleSizeOverrides_GP100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: any call is a programming error.
static inline void memmgrHandleSizeOverrides(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrHandleSizeOverrides(pGpu, pMemoryManager) memmgrHandleSizeOverrides_GP100(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrHandleSizeOverrides_HAL(pGpu, pMemoryManager) memmgrHandleSizeOverrides(pGpu, pMemoryManager)
1407
// memmgrFinishHandleSizeOverrides: GP100+ second phase of the size-override
// handling above.
NV_STATUS memmgrFinishHandleSizeOverrides_GP100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call.
static inline NV_STATUS memmgrFinishHandleSizeOverrides(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrFinishHandleSizeOverrides(pGpu, pMemoryManager) memmgrFinishHandleSizeOverrides_GP100(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrFinishHandleSizeOverrides_HAL(pGpu, pMemoryManager) memmgrFinishHandleSizeOverrides(pGpu, pMemoryManager)
1421
// memmgrGetBAR1InfoForDevice: GM107+ implementation; fills bar1Info for the
// given Device.
NV_STATUS memmgrGetBAR1InfoForDevice_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, struct Device *pDevice, PGETBAR1INFO bar1Info);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call.
static inline NV_STATUS memmgrGetBAR1InfoForDevice(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, struct Device *pDevice, PGETBAR1INFO bar1Info) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetBAR1InfoForDevice(pGpu, pMemoryManager, pDevice, bar1Info) memmgrGetBAR1InfoForDevice_GM107(pGpu, pMemoryManager, pDevice, bar1Info)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrGetBAR1InfoForDevice_HAL(pGpu, pMemoryManager, pDevice, bar1Info) memmgrGetBAR1InfoForDevice(pGpu, pMemoryManager, pDevice, bar1Info)
1435
// memmgrGetVgpuHostRmReservedFb: several chip/flavor implementations exist
// (KERNEL, TU102, GA100); this build binds the _KERNEL variant.
NvU64 memmgrGetVgpuHostRmReservedFb_KERNEL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 vgpuTypeId);

NvU64 memmgrGetVgpuHostRmReservedFb_TU102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 vgpuTypeId);

NvU64 memmgrGetVgpuHostRmReservedFb_GA100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 vgpuTypeId);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and report zero reserved FB.
static inline NvU64 memmgrGetVgpuHostRmReservedFb(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 vgpuTypeId) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return 0;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetVgpuHostRmReservedFb(pGpu, pMemoryManager, vgpuTypeId) memmgrGetVgpuHostRmReservedFb_KERNEL(pGpu, pMemoryManager, vgpuTypeId)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrGetVgpuHostRmReservedFb_HAL(pGpu, pMemoryManager, vgpuTypeId) memmgrGetVgpuHostRmReservedFb(pGpu, pMemoryManager, vgpuTypeId)
1453
// memmgrGetRsvdSizeForSr: GM107+ implementation (Sr presumably
// suspend/resume — confirm in the HAL source).
NvU64 memmgrGetRsvdSizeForSr_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and report zero.
static inline NvU64 memmgrGetRsvdSizeForSr(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return 0;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetRsvdSizeForSr(pGpu, pMemoryManager) memmgrGetRsvdSizeForSr_GM107(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrGetRsvdSizeForSr_HAL(pGpu, pMemoryManager) memmgrGetRsvdSizeForSr(pGpu, pMemoryManager)
1467
// memmgrVerifyDepthSurfaceAttrs: default stub accepts any attributes
// (always NV_TRUE — no per-chip restriction on this configuration).
static inline NvBool memmgrVerifyDepthSurfaceAttrs_cbe027(struct MemoryManager *pMemoryManager, NvU32 arg2, NvU32 arg3) {
    return ((NvBool)(0 == 0));
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and reject.
static inline NvBool memmgrVerifyDepthSurfaceAttrs(struct MemoryManager *pMemoryManager, NvU32 arg2, NvU32 arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrVerifyDepthSurfaceAttrs(pMemoryManager, arg2, arg3) memmgrVerifyDepthSurfaceAttrs_cbe027(pMemoryManager, arg2, arg3)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrVerifyDepthSurfaceAttrs_HAL(pMemoryManager, arg2, arg3) memmgrVerifyDepthSurfaceAttrs(pMemoryManager, arg2, arg3)
1483
// memmgrAllocMemToSaveVgaWorkspace: GM107+ real implementation plus two
// generated "unsupported" stubs; this build binds the assert-or-return
// (_5baef9) variant.
NV_STATUS memmgrAllocMemToSaveVgaWorkspace_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR **arg3, MEMORY_DESCRIPTOR **arg4);

// Quiet "not supported" stub (no assert).
static inline NV_STATUS memmgrAllocMemToSaveVgaWorkspace_46f6a7(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR **arg3, MEMORY_DESCRIPTOR **arg4) {
    return NV_ERR_NOT_SUPPORTED;
}

// Asserting "not supported" stub; the constant-false assert-or-return macro
// always returns NV_ERR_NOT_SUPPORTED, so the missing trailing return is
// unreachable (generated _5baef9 pattern).
static inline NV_STATUS memmgrAllocMemToSaveVgaWorkspace_5baef9(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR **arg3, MEMORY_DESCRIPTOR **arg4) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail the call.
static inline NV_STATUS memmgrAllocMemToSaveVgaWorkspace(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR **arg3, MEMORY_DESCRIPTOR **arg4) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrAllocMemToSaveVgaWorkspace(pGpu, pMemoryManager, arg3, arg4) memmgrAllocMemToSaveVgaWorkspace_5baef9(pGpu, pMemoryManager, arg3, arg4)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrAllocMemToSaveVgaWorkspace_HAL(pGpu, pMemoryManager, arg3, arg4) memmgrAllocMemToSaveVgaWorkspace(pGpu, pMemoryManager, arg3, arg4)
1505
// memmgrComparePhysicalAddresses: GM107+ implementation; compares two
// (aperture, address) pairs per the generated (NvU32, NvU64) argument pairs —
// exact semantics live in the HAL source.
NvBool memmgrComparePhysicalAddresses_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3, NvU64 arg4, NvU32 arg5, NvU64 arg6);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and report "not equal".
static inline NvBool memmgrComparePhysicalAddresses(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3, NvU64 arg4, NvU32 arg5, NvU64 arg6) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrComparePhysicalAddresses(pGpu, pMemoryManager, arg3, arg4, arg5, arg6) memmgrComparePhysicalAddresses_GM107(pGpu, pMemoryManager, arg3, arg4, arg5, arg6)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrComparePhysicalAddresses_HAL(pGpu, pMemoryManager, arg3, arg4, arg5, arg6) memmgrComparePhysicalAddresses(pGpu, pMemoryManager, arg3, arg4, arg5, arg6)
1519
// memmgrGetInvalidOffset: GM107+ implementation returning the sentinel
// "invalid" physical address.
RmPhysAddr memmgrGetInvalidOffset_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and return a zeroed RmPhysAddr
// (portMemSet used because the generator does not assume the type is scalar).
static inline RmPhysAddr memmgrGetInvalidOffset(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    RmPhysAddr ret;
    portMemSet(&ret, 0, sizeof(RmPhysAddr));
    return ret;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetInvalidOffset(pGpu, pMemoryManager) memmgrGetInvalidOffset_GM107(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrGetInvalidOffset_HAL(pGpu, pMemoryManager) memmgrGetInvalidOffset(pGpu, pMemoryManager)
1535
// memmgrGetAddrSpaceSizeMB: GM107+ implementation; returns a size in MB per
// the name.
NvU64 memmgrGetAddrSpaceSizeMB_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and report zero.
static inline NvU64 memmgrGetAddrSpaceSizeMB(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return 0;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetAddrSpaceSizeMB(pGpu, pMemoryManager) memmgrGetAddrSpaceSizeMB_GM107(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrGetAddrSpaceSizeMB_HAL(pGpu, pMemoryManager) memmgrGetAddrSpaceSizeMB(pGpu, pMemoryManager)
1549
// memmgrGetUsableMemSizeMB: GM107+ implementation; returns a size in MB per
// the name.
NvU64 memmgrGetUsableMemSizeMB_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and report zero.
static inline NvU64 memmgrGetUsableMemSizeMB(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return 0;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetUsableMemSizeMB(pGpu, pMemoryManager) memmgrGetUsableMemSizeMB_GM107(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrGetUsableMemSizeMB_HAL(pGpu, pMemoryManager) memmgrGetUsableMemSizeMB(pGpu, pMemoryManager)
1563
// memmgrGetSurfacePhysAttr: GM107+ implementation; queries a surface's
// physical attributes through the many out-parameters (offset, aperture,
// kind, compression/zcull/cache info, contiguous-segment size).
NV_STATUS memmgrGetSurfacePhysAttr_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, Memory *pMemory, NvU64 *pOffset, NvU32 *pMemAperture, NvU32 *pMemKind, NvU32 *pComprOffset, NvU32 *pComprKind, NvU32 *pLineMin, NvU32 *pLineMax, NvU32 *pZCullId, NvU32 *pGpuCacheAttr, NvU32 *pGpuP2PCacheAttr, NvU64 *contigSegmentSize);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and fail; out-parameters untouched.
static inline NV_STATUS memmgrGetSurfacePhysAttr(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, Memory *pMemory, NvU64 *pOffset, NvU32 *pMemAperture, NvU32 *pMemKind, NvU32 *pComprOffset, NvU32 *pComprKind, NvU32 *pLineMin, NvU32 *pLineMax, NvU32 *pZCullId, NvU32 *pGpuCacheAttr, NvU32 *pGpuP2PCacheAttr, NvU64 *contigSegmentSize) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetSurfacePhysAttr(pGpu, pMemoryManager, pMemory, pOffset, pMemAperture, pMemKind, pComprOffset, pComprKind, pLineMin, pLineMax, pZCullId, pGpuCacheAttr, pGpuP2PCacheAttr, contigSegmentSize) memmgrGetSurfacePhysAttr_GM107(pGpu, pMemoryManager, pMemory, pOffset, pMemAperture, pMemKind, pComprOffset, pComprKind, pLineMin, pLineMax, pZCullId, pGpuCacheAttr, pGpuP2PCacheAttr, contigSegmentSize)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrGetSurfacePhysAttr_HAL(pGpu, pMemoryManager, pMemory, pOffset, pMemAperture, pMemKind, pComprOffset, pComprKind, pLineMin, pLineMax, pZCullId, pGpuCacheAttr, pGpuP2PCacheAttr, contigSegmentSize) memmgrGetSurfacePhysAttr(pGpu, pMemoryManager, pMemory, pOffset, pMemAperture, pMemKind, pComprOffset, pComprKind, pLineMin, pLineMax, pZCullId, pGpuCacheAttr, pGpuP2PCacheAttr, contigSegmentSize)
1577
// memmgrVerifyComprAttrs: default stub accepts any compression attributes
// (always NV_TRUE — no per-chip restriction on this configuration).
static inline NvBool memmgrVerifyComprAttrs_cbe027(struct MemoryManager *pMemoryManager, NvU32 arg2, NvU32 arg3, NvU32 arg4) {
    return ((NvBool)(0 == 0));
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and reject.
static inline NvBool memmgrVerifyComprAttrs(struct MemoryManager *pMemoryManager, NvU32 arg2, NvU32 arg3, NvU32 arg4) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrVerifyComprAttrs(pMemoryManager, arg2, arg3, arg4) memmgrVerifyComprAttrs_cbe027(pMemoryManager, arg2, arg3, arg4)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrVerifyComprAttrs_HAL(pMemoryManager, arg2, arg3, arg4) memmgrVerifyComprAttrs(pMemoryManager, arg2, arg3, arg4)
1593
// memmgrIsKindCompressible: TU102+ implementation; arg2 is a memory kind
// value (see FB kind definitions in the HAL source).
NvBool memmgrIsKindCompressible_TU102(struct MemoryManager *pMemoryManager, NvU32 arg2);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager compiled out: assert and report "not compressible".
static inline NvBool memmgrIsKindCompressible(struct MemoryManager *pMemoryManager, NvU32 arg2) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrIsKindCompressible(pMemoryManager, arg2) memmgrIsKindCompressible_TU102(pMemoryManager, arg2)
#endif //__nvoc_mem_mgr_h_disabled

#define memmgrIsKindCompressible_HAL(pMemoryManager, arg2) memmgrIsKindCompressible(pMemoryManager, arg2)
1607
// memmgrIsKindBlocklinear — default generated stub: always reports false
// ((NvBool)(0 != 0)), i.e. no kind is treated as block-linear here.
static inline NvBool memmgrIsKindBlocklinear_491d52(struct MemoryManager *pMemoryManager, NvU32 arg2) {
    return ((NvBool)(0 != 0));
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and fail safe with NV_FALSE.
static inline NvBool memmgrIsKindBlocklinear(struct MemoryManager *pMemoryManager, NvU32 arg2) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrIsKindBlocklinear(pMemoryManager, arg2) memmgrIsKindBlocklinear_491d52(pMemoryManager, arg2)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrIsKindBlocklinear_HAL(pMemoryManager, arg2) memmgrIsKindBlocklinear(pMemoryManager, arg2)
1623
// memmgrChooseKindZ — real implementation lives in the TU102 HAL source.
NvU32 memmgrChooseKindZ_TU102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_PAGE_FORMAT *arg3);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return 0.
static inline NvU32 memmgrChooseKindZ(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_PAGE_FORMAT *arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return 0;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrChooseKindZ(pGpu, pMemoryManager, arg3) memmgrChooseKindZ_TU102(pGpu, pMemoryManager, arg3)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrChooseKindZ_HAL(pGpu, pMemoryManager, arg3) memmgrChooseKindZ(pGpu, pMemoryManager, arg3)
1637
// memmgrChooseKindCompressZ — real implementation lives in the TU102 HAL source.
NvU32 memmgrChooseKindCompressZ_TU102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_PAGE_FORMAT *arg3);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return 0.
static inline NvU32 memmgrChooseKindCompressZ(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_PAGE_FORMAT *arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return 0;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrChooseKindCompressZ(pGpu, pMemoryManager, arg3) memmgrChooseKindCompressZ_TU102(pGpu, pMemoryManager, arg3)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrChooseKindCompressZ_HAL(pGpu, pMemoryManager, arg3) memmgrChooseKindCompressZ(pGpu, pMemoryManager, arg3)
1651
// memmgrChooseKindCompressCForMS2 — default generated stub: always yields kind 0.
static inline NvU32 memmgrChooseKindCompressCForMS2_4a4dee(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3) {
    return 0;
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return 0.
static inline NvU32 memmgrChooseKindCompressCForMS2(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return 0;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrChooseKindCompressCForMS2(pGpu, pMemoryManager, arg3) memmgrChooseKindCompressCForMS2_4a4dee(pGpu, pMemoryManager, arg3)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrChooseKindCompressCForMS2_HAL(pGpu, pMemoryManager, arg3) memmgrChooseKindCompressCForMS2(pGpu, pMemoryManager, arg3)
1667
// memmgrGetUncompressedKind — real implementation lives in the TU102 HAL source.
NvU32 memmgrGetUncompressedKind_TU102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 kind, NvBool releaseReacquire);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return 0.
static inline NvU32 memmgrGetUncompressedKind(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 kind, NvBool releaseReacquire) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return 0;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetUncompressedKind(pGpu, pMemoryManager, kind, releaseReacquire) memmgrGetUncompressedKind_TU102(pGpu, pMemoryManager, kind, releaseReacquire)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrGetUncompressedKind_HAL(pGpu, pMemoryManager, kind, releaseReacquire) memmgrGetUncompressedKind(pGpu, pMemoryManager, kind, releaseReacquire)
1681
// memmgrGetUncompressedKindForMS2 — default generated stub: the assert macro's
// condition is hard-coded 0, so this always asserts and returns NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrGetUncompressedKindForMS2_5baef9(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3, NvU32 *arg4) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrGetUncompressedKindForMS2(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3, NvU32 *arg4) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetUncompressedKindForMS2(pGpu, pMemoryManager, arg3, arg4) memmgrGetUncompressedKindForMS2_5baef9(pGpu, pMemoryManager, arg3, arg4)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrGetUncompressedKindForMS2_HAL(pGpu, pMemoryManager, arg3, arg4) memmgrGetUncompressedKindForMS2(pGpu, pMemoryManager, arg3, arg4)
1697
// memmgrChooseKind — real implementation lives in the TU102 HAL source.
NV_STATUS memmgrChooseKind_TU102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_PAGE_FORMAT *arg3, NvU32 arg4, NvU32 *arg5);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrChooseKind(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_PAGE_FORMAT *arg3, NvU32 arg4, NvU32 *arg5) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrChooseKind(pGpu, pMemoryManager, arg3, arg4, arg5) memmgrChooseKind_TU102(pGpu, pMemoryManager, arg3, arg4, arg5)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrChooseKind_HAL(pGpu, pMemoryManager, arg3, arg4, arg5) memmgrChooseKind(pGpu, pMemoryManager, arg3, arg4, arg5)
1711
// memmgrIsKind — real implementation lives in the TU102 HAL source.
NvBool memmgrIsKind_TU102(struct MemoryManager *pMemoryManager, FB_IS_KIND_OP arg2, NvU32 arg3);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and fail safe with NV_FALSE.
static inline NvBool memmgrIsKind(struct MemoryManager *pMemoryManager, FB_IS_KIND_OP arg2, NvU32 arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrIsKind(pMemoryManager, arg2, arg3) memmgrIsKind_TU102(pMemoryManager, arg2, arg3)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrIsKind_HAL(pMemoryManager, arg2, arg3) memmgrIsKind(pMemoryManager, arg2, arg3)
1725
// memmgrGetMessageKind — real implementation lives in the TU102 HAL source.
NvU32 memmgrGetMessageKind_TU102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return 0.
static inline NvU32 memmgrGetMessageKind(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return 0;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetMessageKind(pGpu, pMemoryManager) memmgrGetMessageKind_TU102(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrGetMessageKind_HAL(pGpu, pMemoryManager) memmgrGetMessageKind(pGpu, pMemoryManager)
1739
// memmgrGetDefaultPteKindForNoHandle — real implementation lives in the TU102 HAL source.
NvU32 memmgrGetDefaultPteKindForNoHandle_TU102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return 0.
static inline NvU32 memmgrGetDefaultPteKindForNoHandle(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return 0;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetDefaultPteKindForNoHandle(pGpu, pMemoryManager) memmgrGetDefaultPteKindForNoHandle_TU102(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrGetDefaultPteKindForNoHandle_HAL(pGpu, pMemoryManager) memmgrGetDefaultPteKindForNoHandle(pGpu, pMemoryManager)
1753
// memmgrIsSurfaceBlockLinear — real implementation lives in the TU102 HAL source.
NvBool memmgrIsSurfaceBlockLinear_TU102(struct MemoryManager *pMemoryManager, Memory *arg2, NvU32 arg3, NvU32 arg4);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and fail safe with NV_FALSE.
static inline NvBool memmgrIsSurfaceBlockLinear(struct MemoryManager *pMemoryManager, Memory *arg2, NvU32 arg3, NvU32 arg4) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrIsSurfaceBlockLinear(pMemoryManager, arg2, arg3, arg4) memmgrIsSurfaceBlockLinear_TU102(pMemoryManager, arg2, arg3, arg4)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrIsSurfaceBlockLinear_HAL(pMemoryManager, arg2, arg3, arg4) memmgrIsSurfaceBlockLinear(pMemoryManager, arg2, arg3, arg4)
1767
// memmgrGetHwPteKindFromSwPteKind — real implementation lives in the TU102 HAL source.
NvU32 memmgrGetHwPteKindFromSwPteKind_TU102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 pteKind);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return 0.
static inline NvU32 memmgrGetHwPteKindFromSwPteKind(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 pteKind) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return 0;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetHwPteKindFromSwPteKind(pGpu, pMemoryManager, pteKind) memmgrGetHwPteKindFromSwPteKind_TU102(pGpu, pMemoryManager, pteKind)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrGetHwPteKindFromSwPteKind_HAL(pGpu, pMemoryManager, pteKind) memmgrGetHwPteKindFromSwPteKind(pGpu, pMemoryManager, pteKind)
1781
// memmgrGetSwPteKindFromHwPteKind — inverse mapping; real implementation in the TU102 HAL source.
NvU32 memmgrGetSwPteKindFromHwPteKind_TU102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 pteKind);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return 0.
static inline NvU32 memmgrGetSwPteKindFromHwPteKind(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 pteKind) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return 0;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetSwPteKindFromHwPteKind(pGpu, pMemoryManager, pteKind) memmgrGetSwPteKindFromHwPteKind_TU102(pGpu, pMemoryManager, pteKind)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrGetSwPteKindFromHwPteKind_HAL(pGpu, pMemoryManager, pteKind) memmgrGetSwPteKindFromHwPteKind(pGpu, pMemoryManager, pteKind)
1795
// memmgrGetPteKindForScrubber — result returned via *arg2; implementation in the TU102 HAL source.
void memmgrGetPteKindForScrubber_TU102(struct MemoryManager *pMemoryManager, NvU32 *arg2);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert only (void return, *arg2 untouched).
static inline void memmgrGetPteKindForScrubber(struct MemoryManager *pMemoryManager, NvU32 *arg2) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetPteKindForScrubber(pMemoryManager, arg2) memmgrGetPteKindForScrubber_TU102(pMemoryManager, arg2)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrGetPteKindForScrubber_HAL(pMemoryManager, arg2) memmgrGetPteKindForScrubber(pMemoryManager, arg2)
1808
// memmgrGetCtagOffsetFromParams — real implementation lives in the TU102 HAL source.
NvU32 memmgrGetCtagOffsetFromParams_TU102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_INFO *arg3);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return 0.
static inline NvU32 memmgrGetCtagOffsetFromParams(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_INFO *arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return 0;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetCtagOffsetFromParams(pGpu, pMemoryManager, arg3) memmgrGetCtagOffsetFromParams_TU102(pGpu, pMemoryManager, arg3)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrGetCtagOffsetFromParams_HAL(pGpu, pMemoryManager, arg3) memmgrGetCtagOffsetFromParams(pGpu, pMemoryManager, arg3)
1822
// memmgrSetCtagOffsetInParams — setter counterpart of memmgrGetCtagOffsetFromParams;
// real implementation lives in the TU102 HAL source.
void memmgrSetCtagOffsetInParams_TU102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_INFO *arg3, NvU32 arg4);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert only (void return, params untouched).
static inline void memmgrSetCtagOffsetInParams(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_INFO *arg3, NvU32 arg4) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrSetCtagOffsetInParams(pGpu, pMemoryManager, arg3, arg4) memmgrSetCtagOffsetInParams_TU102(pGpu, pMemoryManager, arg3, arg4)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrSetCtagOffsetInParams_HAL(pGpu, pMemoryManager, arg3, arg4) memmgrSetCtagOffsetInParams(pGpu, pMemoryManager, arg3, arg4)
1835
// memmgrChannelPushSemaphoreMethodsBlock — default generated stub: unconditionally
// asserts (condition hard-coded 0); otherwise a no-op (void, *arg5 untouched).
static inline void memmgrChannelPushSemaphoreMethodsBlock_f2d351(struct MemoryManager *pMemoryManager, NvU32 arg2, NvU64 arg3, NvU32 arg4, NvU32 **arg5) {
    NV_ASSERT_PRECOMP(0);
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert only.
static inline void memmgrChannelPushSemaphoreMethodsBlock(struct MemoryManager *pMemoryManager, NvU32 arg2, NvU64 arg3, NvU32 arg4, NvU32 **arg5) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrChannelPushSemaphoreMethodsBlock(pMemoryManager, arg2, arg3, arg4, arg5) memmgrChannelPushSemaphoreMethodsBlock_f2d351(pMemoryManager, arg2, arg3, arg4, arg5)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrChannelPushSemaphoreMethodsBlock_HAL(pMemoryManager, arg2, arg3, arg4, arg5) memmgrChannelPushSemaphoreMethodsBlock(pMemoryManager, arg2, arg3, arg4, arg5)
1850
// memmgrChannelPushAddressMethodsBlock — default generated stub: unconditionally
// asserts (condition hard-coded 0); otherwise a no-op (void, *arg5 untouched).
static inline void memmgrChannelPushAddressMethodsBlock_f2d351(struct MemoryManager *pMemoryManager, NvBool arg2, NvU32 arg3, RmPhysAddr arg4, NvU32 **arg5) {
    NV_ASSERT_PRECOMP(0);
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert only.
static inline void memmgrChannelPushAddressMethodsBlock(struct MemoryManager *pMemoryManager, NvBool arg2, NvU32 arg3, RmPhysAddr arg4, NvU32 **arg5) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrChannelPushAddressMethodsBlock(pMemoryManager, arg2, arg3, arg4, arg5) memmgrChannelPushAddressMethodsBlock_f2d351(pMemoryManager, arg2, arg3, arg4, arg5)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrChannelPushAddressMethodsBlock_HAL(pMemoryManager, arg2, arg3, arg4, arg5) memmgrChannelPushAddressMethodsBlock(pMemoryManager, arg2, arg3, arg4, arg5)
1865
// memmgrScrubMapDoorbellRegion — real implementation lives in the GV100 HAL source.
NV_STATUS memmgrScrubMapDoorbellRegion_GV100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrScrubMapDoorbellRegion(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrScrubMapDoorbellRegion(pGpu, pMemoryManager, arg3) memmgrScrubMapDoorbellRegion_GV100(pGpu, pMemoryManager, arg3)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrScrubMapDoorbellRegion_HAL(pGpu, pMemoryManager, arg3) memmgrScrubMapDoorbellRegion(pGpu, pMemoryManager, arg3)
1879
// memmgrSetAllocParameters — real implementation lives in the GM107 HAL source.
NV_STATUS memmgrSetAllocParameters_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_INFO *pFbAllocInfo);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrSetAllocParameters(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_INFO *pFbAllocInfo) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrSetAllocParameters(pGpu, pMemoryManager, pFbAllocInfo) memmgrSetAllocParameters_GM107(pGpu, pMemoryManager, pFbAllocInfo)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrSetAllocParameters_HAL(pGpu, pMemoryManager, pFbAllocInfo) memmgrSetAllocParameters(pGpu, pMemoryManager, pFbAllocInfo)
1893
// memmgrCalcReservedFbSpaceForUVM — result returned via *arg3; implementation in the GM107 HAL source.
void memmgrCalcReservedFbSpaceForUVM_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *arg3);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert only (void return, *arg3 untouched).
static inline void memmgrCalcReservedFbSpaceForUVM(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrCalcReservedFbSpaceForUVM(pGpu, pMemoryManager, arg3) memmgrCalcReservedFbSpaceForUVM_GM107(pGpu, pMemoryManager, arg3)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrCalcReservedFbSpaceForUVM_HAL(pGpu, pMemoryManager, arg3) memmgrCalcReservedFbSpaceForUVM(pGpu, pMemoryManager, arg3)
1906
// memmgrCalcReservedFbSpaceHal — results returned via the three NvU64 out-pointers;
// implementation in the GM107 HAL source.
void memmgrCalcReservedFbSpaceHal_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *arg3, NvU64 *arg4, NvU64 *arg5);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert only (void return, out-params untouched).
static inline void memmgrCalcReservedFbSpaceHal(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *arg3, NvU64 *arg4, NvU64 *arg5) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrCalcReservedFbSpaceHal(pGpu, pMemoryManager, arg3, arg4, arg5) memmgrCalcReservedFbSpaceHal_GM107(pGpu, pMemoryManager, arg3, arg4, arg5)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrCalcReservedFbSpaceHal_HAL(pGpu, pMemoryManager, arg3, arg4, arg5) memmgrCalcReservedFbSpaceHal(pGpu, pMemoryManager, arg3, arg4, arg5)
1919
// memmgrInitReservedMemory — real implementation lives in the GM107 HAL source.
NV_STATUS memmgrInitReservedMemory_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 arg3);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrInitReservedMemory(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrInitReservedMemory(pGpu, pMemoryManager, arg3) memmgrInitReservedMemory_GM107(pGpu, pMemoryManager, arg3)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrInitReservedMemory_HAL(pGpu, pMemoryManager, arg3) memmgrInitReservedMemory(pGpu, pMemoryManager, arg3)
1933
// memmgrPreInitReservedMemory — real implementation lives in the GM107 HAL source.
NV_STATUS memmgrPreInitReservedMemory_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrPreInitReservedMemory(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrPreInitReservedMemory(pGpu, pMemoryManager) memmgrPreInitReservedMemory_GM107(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrPreInitReservedMemory_HAL(pGpu, pMemoryManager) memmgrPreInitReservedMemory(pGpu, pMemoryManager)
1947
// memmgrSetMemDescPageSize — real implementation lives in the GM107 HAL source.
NV_STATUS memmgrSetMemDescPageSize_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, PMEMORY_DESCRIPTOR arg3, ADDRESS_TRANSLATION arg4, RM_ATTR_PAGE_SIZE arg5);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrSetMemDescPageSize(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, PMEMORY_DESCRIPTOR arg3, ADDRESS_TRANSLATION arg4, RM_ATTR_PAGE_SIZE arg5) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrSetMemDescPageSize(pGpu, pMemoryManager, arg3, arg4, arg5) memmgrSetMemDescPageSize_GM107(pGpu, pMemoryManager, arg3, arg4, arg5)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrSetMemDescPageSize_HAL(pGpu, pMemoryManager, arg3, arg4, arg5) memmgrSetMemDescPageSize(pGpu, pMemoryManager, arg3, arg4, arg5)
1961
// memmgrGetBlackListPagesForHeap — real implementation lives in the GM107 HAL source.
NV_STATUS memmgrGetBlackListPagesForHeap_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, struct Heap *pHeap);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrGetBlackListPagesForHeap(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, struct Heap *pHeap) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetBlackListPagesForHeap(pGpu, pMemoryManager, pHeap) memmgrGetBlackListPagesForHeap_GM107(pGpu, pMemoryManager, pHeap)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrGetBlackListPagesForHeap_HAL(pGpu, pMemoryManager, pHeap) memmgrGetBlackListPagesForHeap(pGpu, pMemoryManager, pHeap)
1975
// memmgrValidateFBEndReservation — default generated stub: validation is a no-op,
// always NV_OK.
static inline NV_STATUS memmgrValidateFBEndReservation_56cd7a(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return NV_OK;
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrValidateFBEndReservation(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrValidateFBEndReservation(pGpu, pMemoryManager) memmgrValidateFBEndReservation_56cd7a(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrValidateFBEndReservation_HAL(pGpu, pMemoryManager) memmgrValidateFBEndReservation(pGpu, pMemoryManager)
1991
// memmgrReserveMemoryForFakeWPR — default generated stub: nothing to reserve,
// always NV_OK.
static inline NV_STATUS memmgrReserveMemoryForFakeWPR_56cd7a(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return NV_OK;
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrReserveMemoryForFakeWPR(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrReserveMemoryForFakeWPR(pGpu, pMemoryManager) memmgrReserveMemoryForFakeWPR_56cd7a(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrReserveMemoryForFakeWPR_HAL(pGpu, pMemoryManager) memmgrReserveMemoryForFakeWPR(pGpu, pMemoryManager)
2007
// memmgrReserveMemoryForPmu — default generated stub: nothing to reserve,
// always NV_OK.
static inline NV_STATUS memmgrReserveMemoryForPmu_56cd7a(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return NV_OK;
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrReserveMemoryForPmu(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrReserveMemoryForPmu(pGpu, pMemoryManager) memmgrReserveMemoryForPmu_56cd7a(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrReserveMemoryForPmu_HAL(pGpu, pMemoryManager) memmgrReserveMemoryForPmu(pGpu, pMemoryManager)
2023
// memmgrFreeFbsrMemory — real implementation lives in the KERNEL (CPU-RM) source.
void memmgrFreeFbsrMemory_KERNEL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert only (void return).
static inline void memmgrFreeFbsrMemory(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrFreeFbsrMemory(pGpu, pMemoryManager) memmgrFreeFbsrMemory_KERNEL(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrFreeFbsrMemory_HAL(pGpu, pMemoryManager) memmgrFreeFbsrMemory(pGpu, pMemoryManager)
2036
// memmgrReserveVgaWorkspaceMemDescForFbsr — generated stub variant: unsupported.
static inline NV_STATUS memmgrReserveVgaWorkspaceMemDescForFbsr_46f6a7(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return NV_ERR_NOT_SUPPORTED;
}

// TU102 HAL variant (declared here, selected by other NVOC configurations).
NV_STATUS memmgrReserveVgaWorkspaceMemDescForFbsr_TU102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrReserveVgaWorkspaceMemDescForFbsr(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
// NOTE: in this build the default dispatch selects the _46f6a7 NOT_SUPPORTED stub,
// not the TU102 implementation declared above.
#define memmgrReserveVgaWorkspaceMemDescForFbsr(pGpu, pMemoryManager) memmgrReserveVgaWorkspaceMemDescForFbsr_46f6a7(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrReserveVgaWorkspaceMemDescForFbsr_HAL(pGpu, pMemoryManager) memmgrReserveVgaWorkspaceMemDescForFbsr(pGpu, pMemoryManager)
2054
// memmgrCalculateHeapOffsetWithGSP — default generated stub: unsupported in this
// configuration (*offset is left untouched).
static inline NV_STATUS memmgrCalculateHeapOffsetWithGSP_46f6a7(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 *offset) {
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_mem_mgr_h_disabled
// MemoryManager engine compiled out: assert and return NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS memmgrCalculateHeapOffsetWithGSP(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 *offset) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrCalculateHeapOffsetWithGSP(pGpu, pMemoryManager, offset) memmgrCalculateHeapOffsetWithGSP_46f6a7(pGpu, pMemoryManager, offset)
#endif //__nvoc_mem_mgr_h_disabled

// Per-chip HAL alias; routes to the default dispatch above.
#define memmgrCalculateHeapOffsetWithGSP_HAL(pGpu, pMemoryManager, offset) memmgrCalculateHeapOffsetWithGSP(pGpu, pMemoryManager, offset)
2070
2071 NV_STATUS memmgrConstructEngine_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, ENGDESCRIPTOR arg3);
2072
2073 NV_STATUS memmgrStatePreInitLocked_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2074
2075 NV_STATUS memmgrStateInitLocked_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2076
2077 NV_STATUS memmgrStateLoad_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3);
2078
2079 NV_STATUS memmgrStatePostLoad_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3);
2080
2081 NV_STATUS memmgrStatePreUnload_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3);
2082
2083 NV_STATUS memmgrStateUnload_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3);
2084
2085 void memmgrStateDestroy_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2086
// memmgrAllocateConsoleRegion — generated stub variant: no console region to
// allocate, always NV_OK.
static inline NV_STATUS memmgrAllocateConsoleRegion_56cd7a(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_REGION_DESCRIPTOR *arg3) {
    return NV_OK;
}

// Full implementation (selected by other NVOC configurations).
NV_STATUS memmgrAllocateConsoleRegion_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_REGION_DESCRIPTOR *arg3);
2092
// memmgrMemUtilsSec2CtxInit — GH100 HAL implementation lives in its HAL source;
// the generated stub below reports the feature unsupported on other chips.
NV_STATUS memmgrMemUtilsSec2CtxInit_GH100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3);

static inline NV_STATUS memmgrMemUtilsSec2CtxInit_46f6a7(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3) {
    return NV_ERR_NOT_SUPPORTED;
}
2098
// memmgrMemUtilsCheckMemoryFastScrubEnable — GH100 HAL implementation lives in its
// HAL source; the generated stub below always reports false ((NvBool)(0 != 0)).
NvBool memmgrMemUtilsCheckMemoryFastScrubEnable_GH100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3, NvBool arg4, RmPhysAddr arg5, NvU32 arg6, NV_ADDRESS_SPACE arg7);

static inline NvBool memmgrMemUtilsCheckMemoryFastScrubEnable_491d52(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 arg3, NvBool arg4, RmPhysAddr arg5, NvU32 arg6, NV_ADDRESS_SPACE arg7) {
    return ((NvBool)(0 != 0));
}
2104
2105 NV_STATUS memmgrAllocDetermineAlignment_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *pMemSize, NvU64 *pAlign, NvU64 alignPad, NvU32 allocFlags, NvU32 retAttr, NvU32 retAttr2, NvU64 hwAlignment);
2106
2107 NV_STATUS memmgrAllocDetermineAlignment_GA100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *pMemSize, NvU64 *pAlign, NvU64 alignPad, NvU32 allocFlags, NvU32 retAttr, NvU32 retAttr2, NvU64 hwAlignment);
2108
2109 NvU64 memmgrGetMaxContextSize_TU102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2110
2111 NvU64 memmgrGetMaxContextSize_GA100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2112
2113 NvU64 memmgrGetMaxContextSize_AD102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2114
// memmgrGetFbTaxSize — VF (virtual function / vGPU guest) implementation lives in
// its source; the generated stub below reports a zero tax size.
NvU64 memmgrGetFbTaxSize_VF(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

static inline NvU64 memmgrGetFbTaxSize_4a4dee(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return 0;
}
2120
2121 void memmgrScrubRegistryOverrides_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2122
2123 void memmgrScrubRegistryOverrides_GA100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2124
2125 NvU32 memmgrGetPteKindBl_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2126
memmgrGetPteKindBl_474d46(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)2127 static inline NvU32 memmgrGetPteKindBl_474d46(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
2128 NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
2129 }
2130
2131 NvU32 memmgrGetPteKindPitch_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2132
memmgrGetPteKindPitch_474d46(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)2133 static inline NvU32 memmgrGetPteKindPitch_474d46(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
2134 NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
2135 }
2136
NvU32 memmgrChooseKindCompressC_GP100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_PAGE_FORMAT *arg3);

// Default stub: compressed-kind selection is invalid here; asserts, returns 0.
static inline NvU32 memmgrChooseKindCompressC_474d46(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_PAGE_FORMAT *arg3) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
}

NV_STATUS memmgrGetFlaKind_GA100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 *arg3);

// Default stub: FLA PTE kind query is unsupported; *arg3 is left unwritten.
static inline NV_STATUS memmgrGetFlaKind_46f6a7(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 *arg3) {
    return NV_ERR_NOT_SUPPORTED;
}
2148
2149 NvBool memmgrIsMemDescSupportedByFla_GB100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *pMemDesc);
2150
2151 NvBool memmgrIsMemDescSupportedByFla_GA100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *pMemDesc);
2152
memmgrIsMemDescSupportedByFla_46f6a7(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,MEMORY_DESCRIPTOR * pMemDesc)2153 static inline NvBool memmgrIsMemDescSupportedByFla_46f6a7(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *pMemDesc) {
2154 return NV_ERR_NOT_SUPPORTED;
2155 }
2156
NvU32 memmgrDetermineComptag_TU102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, RmPhysAddr arg3);

// Default stub: comptag determination should never be reached on this
// configuration; asserts in debug builds and returns 0.
static inline NvU32 memmgrDetermineComptag_13cd8d(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, RmPhysAddr arg3) {
    NV_ASSERT_PRECOMP(0);
    return 0;
}
2163
NvU32 memmgrGetGrHeapReservationSize_VF(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

// Default stub: no graphics heap reservation.
static inline NvU32 memmgrGetGrHeapReservationSize_4a4dee(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return 0;
}

NvU32 memmgrGetGrHeapReservationSize_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

// Default stub: no FB space reserved for runlist entries.
static inline NvU32 memmgrGetRunlistEntriesReservedFbSpace_4a4dee(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return 0;
}

NvU32 memmgrGetRunlistEntriesReservedFbSpace_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

// Default stub: no FB space reserved for USERD.
static inline NvU32 memmgrGetUserdReservedFbSpace_4a4dee(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return 0;
}

NvU32 memmgrGetUserdReservedFbSpace_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

NV_STATUS memmgrCheckReservedMemorySize_GK104(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

// Default stub: reserved-memory size check trivially succeeds.
static inline NV_STATUS memmgrCheckReservedMemorySize_56cd7a(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return NV_OK;
}
2189
NV_STATUS memmgrReadMmuLock_GA100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvBool *pbIsValid, NvU64 *pMmuLockLo, NvU64 *pMmuLockHi);

// Default stub: reports the MMU lock range as invalid and succeeds; the lo/hi
// outputs are left unwritten, so callers must check *pbIsValid first.
static inline NV_STATUS memmgrReadMmuLock_e133c0(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvBool *pbIsValid, NvU64 *pMmuLockLo, NvU64 *pMmuLockHi) {
    *pbIsValid = ((NvBool)(0 != 0));
    return NV_OK;
}

NV_STATUS memmgrBlockMemLockedMemory_GA100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

// Default stub: nothing to block; no-op success.
static inline NV_STATUS memmgrBlockMemLockedMemory_56cd7a(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return NV_OK;
}

NV_STATUS memmgrInsertUnprotectedRegionAtBottomOfFb_GA100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *pSize);

// Default stub: no unprotected region to insert; *pSize is left unchanged.
static inline NV_STATUS memmgrInsertUnprotectedRegionAtBottomOfFb_56cd7a(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *pSize) {
    return NV_OK;
}

// Base FB region initialization, per platform (firmware client / VF / GP102+).
NV_STATUS memmgrInitBaseFbRegions_FWCLIENT(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

NV_STATUS memmgrInitBaseFbRegions_VF(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

NV_STATUS memmgrInitBaseFbRegions_GP102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

void memmgrGetDisablePlcKind_GA100(struct MemoryManager *pMemoryManager, NvU32 *pteKind);

// Default stub: no PLC-disable kind to report; *pteKind is left unchanged.
static inline void memmgrGetDisablePlcKind_b3696a(struct MemoryManager *pMemoryManager, NvU32 *pteKind) {
    return;
}

void memmgrEnableDynamicPageOfflining_GA100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

void memmgrEnableDynamicPageOfflining_GA102(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

// Default stub: dynamic page offlining not available; no-op.
static inline void memmgrEnableDynamicPageOfflining_b3696a(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return;
}
2228
// Default stub: partitionable-memory setup is a no-op success.
static inline NV_STATUS memmgrSetPartitionableMem_56cd7a(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return NV_OK;
}

NV_STATUS memmgrSetPartitionableMem_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

// MIG GPU-instance memory allocation: virtual-function vs physical-function variants.
NV_STATUS memmgrAllocMIGGPUInstanceMemory_VF(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 swizzId, NvHandle *phMemory, struct NV_RANGE *pAddrRange, struct Heap **ppMemoryPartitionHeap);

NV_STATUS memmgrAllocMIGGPUInstanceMemory_PF(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 swizzId, NvHandle *phMemory, struct NV_RANGE *pAddrRange, struct Heap **ppMemoryPartitionHeap);

// Retrieve blacklisted (retired) page addresses into pBlAddrs; *pCount is in/out.
// (in/out semantics presumed from the pointer parameter -- TODO confirm in _IMPL.)
NV_STATUS memmgrGetBlackListPages_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, BLACKLIST_ADDRESS *pBlAddrs, NvU32 *pCount);

NV_STATUS memmgrGetBlackListPages_GA100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, BLACKLIST_ADDRESS *pBlAddrs, NvU32 *pCount);

NV_STATUS memmgrDiscoverMIGPartitionableMemoryRange_VF(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, struct NV_RANGE *pMemoryRange);

// Default stub: MIG partitionable-range discovery is unsupported.
static inline NV_STATUS memmgrDiscoverMIGPartitionableMemoryRange_46f6a7(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, struct NV_RANGE *pMemoryRange) {
    return NV_ERR_NOT_SUPPORTED;
}

NvU32 memmgrGetFBEndReserveSizeEstimate_GB100(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

NvU32 memmgrGetFBEndReserveSizeEstimate_GM107(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2252
//
// Inline accessors for MemoryManager state. Each simply reads or writes one
// member of struct MemoryManager; no locking is performed here.
//

// Whether local EGM is supported on this GPU.
static inline NvBool memmgrIsLocalEgmSupported(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->bLocalEgmSupported;
}

// Whether local EGM is currently enabled.
static inline NvBool memmgrIsLocalEgmEnabled(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->bLocalEgmEnabled;
}

// Peer ID used for local EGM access.
static inline NvU32 memmgrLocalEgmPeerId(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->localEgmPeerId;
}

// Base physical address of the local EGM carveout.
static inline NvU64 memmgrLocalEgmBaseAddress(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->localEgmBasePhysAddr;
}

// Whether scrub-on-free is enabled.
static inline NvBool memmgrIsScrubOnFreeEnabled(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->bScrubOnFreeEnabled;
}

// Whether the fast scrubber is enabled.
static inline NvBool memmgrIsFastScrubberEnabled(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->bFastScrubberEnabled;
}

// Whether CE memory operations go through a virtual address space.
static inline NvBool memmgrUseVasForCeMemoryOps(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->bUseVasForCeMemoryOps;
}

// Whether RM is currently executing an ECC scrub.
static inline NvBool memmgrRmExecutingEccScrub(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->bRmExecutingEccScrub;
}

// Whether the bug 1441072 ECC scrub workaround is active.
static inline NvBool memmgrBug1441072EccScrubWar(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->bBug1441072EccScrubWar;
}

// PMA initialization state flag (getter/setter pair).
static inline NvBool memmgrIsPmaInitialized(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->bPmaInitialized;
}

static inline void memmgrSetPmaInitialized(struct MemoryManager *pMemoryManager, NvBool val) {
    pMemoryManager->bPmaInitialized = val;
}

// Whether FB regions are supported.
static inline NvBool memmgrAreFbRegionsSupported(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->bFbRegionsSupported;
}

// Whether PMA is supported on this platform.
static inline NvBool memmgrIsPmaSupportedOnPlatform(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->bPmaSupportedOnPlatform;
}

// Whether PMA is enabled.
static inline NvBool memmgrIsPmaEnabled(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->bPmaEnabled;
}

// PMA force-persistence flag (getter/setter pair).
static inline NvBool memmgrIsPmaForcePersistence(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->bPmaForcePersistence;
}

static inline void memmgrSetPmaForcePersistence(struct MemoryManager *pMemoryManager, NvBool val) {
    pMemoryManager->bPmaForcePersistence = val;
}

// Whether client page tables are PMA-managed (getter/setter pair).
static inline NvBool memmgrAreClientPageTablesPmaManaged(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->bClientPageTablesPmaManaged;
}

static inline void memmgrSetClientPageTablesPmaManaged(struct MemoryManager *pMemoryManager, NvBool val) {
    pMemoryManager->bClientPageTablesPmaManaged = val;
}

// Base address of RM's reserved memory region.
static inline NvU64 memmgrGetRsvdMemoryBase(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->rsvdMemoryBase;
}

// Size of RM's reserved memory region.
static inline NvU64 memmgrGetRsvdMemorySize(struct MemoryManager *pMemoryManager) {
    return pMemoryManager->rsvdMemorySize;
}

// Whether the bug 3922001 context-buffer-on-simulation workaround is active.
static inline NvBool memmgrBug3922001DisableCtxBufOnSim(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    return pMemoryManager->bBug3922001DisableCtxBufOnSim;
}
2336
// MemoryManager destructor, invoked through the NVOC __nvoc_memmgrDestruct hook.
void memmgrDestruct_IMPL(struct MemoryManager *pMemoryManager);

#define __nvoc_memmgrDestruct(pMemoryManager) memmgrDestruct_IMPL(pMemoryManager)
NV_STATUS memmgrAllocResources_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, MEMORY_ALLOCATION_REQUEST *pAllocRequest, FB_ALLOC_INFO *pFbAllocInfo);

// Pattern used for the rest of this header: when MemoryManager is compiled out
// (__nvoc_mem_mgr_h_disabled) each public entry point becomes an asserting stub;
// otherwise it is a macro forwarding to the _IMPL function.
#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_STATUS memmgrAllocResources(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, MEMORY_ALLOCATION_REQUEST *pAllocRequest, FB_ALLOC_INFO *pFbAllocInfo) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrAllocResources(pGpu, pMemoryManager, pAllocRequest, pFbAllocInfo) memmgrAllocResources_IMPL(pGpu, pMemoryManager, pAllocRequest, pFbAllocInfo)
#endif //__nvoc_mem_mgr_h_disabled
2350
// Frees a memory allocation previously made through MemoryManager.
// (Exact handle/heap argument semantics not visible here -- see _IMPL.)
NV_STATUS memmgrFree_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, struct Heap *arg3, NvHandle arg4, NvHandle arg5, NvHandle arg6, NvU32 arg7, MEMORY_DESCRIPTOR *arg8);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_STATUS memmgrFree(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, struct Heap *arg3, NvHandle arg4, NvHandle arg5, NvHandle arg6, NvU32 arg7, MEMORY_DESCRIPTOR *arg8) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrFree(pGpu, pMemoryManager, arg3, arg4, arg5, arg6, arg7, arg8) memmgrFree_IMPL(pGpu, pMemoryManager, arg3, arg4, arg5, arg6, arg7, arg8)
#endif //__nvoc_mem_mgr_h_disabled
2361
// Adds a single memory node for the given descriptor; bFreeDescriptor controls
// whether the descriptor is released with the node.
NV_STATUS memmgrAddMemNode_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *pMemDesc, NvBool bFreeDescriptor);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_STATUS memmgrAddMemNode(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *pMemDesc, NvBool bFreeDescriptor) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrAddMemNode(pGpu, pMemoryManager, pMemDesc, bFreeDescriptor) memmgrAddMemNode_IMPL(pGpu, pMemoryManager, pMemDesc, bFreeDescriptor)
#endif //__nvoc_mem_mgr_h_disabled

// Adds memory nodes in bulk; bSaveAllRmAllocations selects whether all RM
// allocations are recorded.
NV_STATUS memmgrAddMemNodes_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvBool bSaveAllRmAllocations);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_STATUS memmgrAddMemNodes(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvBool bSaveAllRmAllocations) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrAddMemNodes(pGpu, pMemoryManager, bSaveAllRmAllocations) memmgrAddMemNodes_IMPL(pGpu, pMemoryManager, bSaveAllRmAllocations)
#endif //__nvoc_mem_mgr_h_disabled

// Removes all tracked memory nodes.
void memmgrRemoveMemNodes_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

#ifdef __nvoc_mem_mgr_h_disabled
static inline void memmgrRemoveMemNodes(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrRemoveMemNodes(pGpu, pMemoryManager) memmgrRemoveMemNodes_IMPL(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

// Returns the device suballocator heap; bForceSubheap forces use of the subheap.
// NULL is the disabled-build failure value.
struct Heap *memmgrGetDeviceSuballocator_IMPL(struct MemoryManager *pMemoryManager, NvBool bForceSubheap);

#ifdef __nvoc_mem_mgr_h_disabled
static inline struct Heap *memmgrGetDeviceSuballocator(struct MemoryManager *pMemoryManager, NvBool bForceSubheap) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NULL;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetDeviceSuballocator(pMemoryManager, bForceSubheap) memmgrGetDeviceSuballocator_IMPL(pMemoryManager, bForceSubheap)
#endif //__nvoc_mem_mgr_h_disabled
2404
// Copies `size` bytes from pSrc to pDst transfer surface.
NV_STATUS memmgrMemCopy_IMPL(struct MemoryManager *pMemoryManager, TRANSFER_SURFACE *pDst, TRANSFER_SURFACE *pSrc, NvU32 size, NvU32 flags);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_STATUS memmgrMemCopy(struct MemoryManager *pMemoryManager, TRANSFER_SURFACE *pDst, TRANSFER_SURFACE *pSrc, NvU32 size, NvU32 flags) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemCopy(pMemoryManager, pDst, pSrc, size, flags) memmgrMemCopy_IMPL(pMemoryManager, pDst, pSrc, size, flags)
#endif //__nvoc_mem_mgr_h_disabled

// Memsets [baseOffset, baseOffset+size) of pMemDesc to `value` in blockSize chunks.
NV_STATUS memmgrMemsetInBlocks_IMPL(struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *pMemDesc, NvU32 value, NvU64 baseOffset, NvU64 size, NvU32 flags, NvU32 blockSize);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_STATUS memmgrMemsetInBlocks(struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *pMemDesc, NvU32 value, NvU64 baseOffset, NvU64 size, NvU32 flags, NvU32 blockSize) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemsetInBlocks(pMemoryManager, pMemDesc, value, baseOffset, size, flags, blockSize) memmgrMemsetInBlocks_IMPL(pMemoryManager, pMemDesc, value, baseOffset, size, flags, blockSize)
#endif //__nvoc_mem_mgr_h_disabled

// Sets `size` bytes of the destination transfer surface to `value`.
NV_STATUS memmgrMemSet_IMPL(struct MemoryManager *pMemoryManager, TRANSFER_SURFACE *pDst, NvU32 value, NvU32 size, NvU32 flags);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_STATUS memmgrMemSet(struct MemoryManager *pMemoryManager, TRANSFER_SURFACE *pDst, NvU32 value, NvU32 size, NvU32 flags) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemSet(pMemoryManager, pDst, value, size, flags) memmgrMemSet_IMPL(pMemoryManager, pDst, value, size, flags)
#endif //__nvoc_mem_mgr_h_disabled

// Writes `size` bytes from the CPU buffer pBuf into the destination surface.
NV_STATUS memmgrMemWrite_IMPL(struct MemoryManager *pMemoryManager, TRANSFER_SURFACE *pDst, void *pBuf, NvU64 size, NvU32 flags);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_STATUS memmgrMemWrite(struct MemoryManager *pMemoryManager, TRANSFER_SURFACE *pDst, void *pBuf, NvU64 size, NvU32 flags) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemWrite(pMemoryManager, pDst, pBuf, size, flags) memmgrMemWrite_IMPL(pMemoryManager, pDst, pBuf, size, flags)
#endif //__nvoc_mem_mgr_h_disabled

// Reads `size` bytes from the source surface into the CPU buffer pBuf.
NV_STATUS memmgrMemRead_IMPL(struct MemoryManager *pMemoryManager, TRANSFER_SURFACE *pSrc, void *pBuf, NvU64 size, NvU32 flags);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_STATUS memmgrMemRead(struct MemoryManager *pMemoryManager, TRANSFER_SURFACE *pSrc, void *pBuf, NvU64 size, NvU32 flags) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemRead(pMemoryManager, pSrc, pBuf, size, flags) memmgrMemRead_IMPL(pMemoryManager, pSrc, pBuf, size, flags)
#endif //__nvoc_mem_mgr_h_disabled
2459
// Begins a CPU-visible transfer mapping for the surface; returns a pointer to
// the mapped (or shadow) buffer, NULL on the disabled-build path. Must be paired
// with memmgrMemEndTransfer with the same shadowBufSize/flags.
// (Pairing requirement presumed from the Begin/End naming -- TODO confirm.)
NvU8 *memmgrMemBeginTransfer_IMPL(struct MemoryManager *pMemoryManager, TRANSFER_SURFACE *pTransferInfo, NvU64 shadowBufSize, NvU32 flags);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NvU8 *memmgrMemBeginTransfer(struct MemoryManager *pMemoryManager, TRANSFER_SURFACE *pTransferInfo, NvU64 shadowBufSize, NvU32 flags) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NULL;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemBeginTransfer(pMemoryManager, pTransferInfo, shadowBufSize, flags) memmgrMemBeginTransfer_IMPL(pMemoryManager, pTransferInfo, shadowBufSize, flags)
#endif //__nvoc_mem_mgr_h_disabled

// Ends a transfer started by memmgrMemBeginTransfer.
void memmgrMemEndTransfer_IMPL(struct MemoryManager *pMemoryManager, TRANSFER_SURFACE *pTransferInfo, NvU64 shadowBufSize, NvU32 flags);

#ifdef __nvoc_mem_mgr_h_disabled
static inline void memmgrMemEndTransfer(struct MemoryManager *pMemoryManager, TRANSFER_SURFACE *pTransferInfo, NvU64 shadowBufSize, NvU32 flags) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemEndTransfer(pMemoryManager, pTransferInfo, shadowBufSize, flags) memmgrMemEndTransfer_IMPL(pMemoryManager, pTransferInfo, shadowBufSize, flags)
#endif //__nvoc_mem_mgr_h_disabled

// Memory-descriptor variant of Begin/End transfer.
NvU8 *memmgrMemDescBeginTransfer_IMPL(struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *pMemDesc, NvU32 flags);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NvU8 *memmgrMemDescBeginTransfer(struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *pMemDesc, NvU32 flags) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NULL;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemDescBeginTransfer(pMemoryManager, pMemDesc, flags) memmgrMemDescBeginTransfer_IMPL(pMemoryManager, pMemDesc, flags)
#endif //__nvoc_mem_mgr_h_disabled

void memmgrMemDescEndTransfer_IMPL(struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *pMemDesc, NvU32 flags);

#ifdef __nvoc_mem_mgr_h_disabled
static inline void memmgrMemDescEndTransfer(struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *pMemDesc, NvU32 flags) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemDescEndTransfer(pMemoryManager, pMemDesc, flags) memmgrMemDescEndTransfer_IMPL(pMemoryManager, pMemDesc, flags)
#endif //__nvoc_mem_mgr_h_disabled

// Memsets the entire memory descriptor to `value`.
NV_STATUS memmgrMemDescMemSet_IMPL(struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *pMemDesc, NvU32 value, NvU32 flags);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_STATUS memmgrMemDescMemSet(struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *pMemDesc, NvU32 value, NvU32 flags) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrMemDescMemSet(pMemoryManager, pMemDesc, value, flags) memmgrMemDescMemSet_IMPL(pMemoryManager, pMemDesc, value, flags)
#endif //__nvoc_mem_mgr_h_disabled
2512
// Derives the target address space for an allocation from its flags/attr bits.
NV_ADDRESS_SPACE memmgrAllocGetAddrSpace_IMPL(struct MemoryManager *pMemoryManager, NvU32 flags, NvU32 attr);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_ADDRESS_SPACE memmgrAllocGetAddrSpace(struct MemoryManager *pMemoryManager, NvU32 flags, NvU32 attr) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    // Zero-initialized enum is the generated "no meaningful value" fallback.
    NV_ADDRESS_SPACE ret;
    portMemSet(&ret, 0, sizeof(NV_ADDRESS_SPACE));
    return ret;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrAllocGetAddrSpace(pMemoryManager, flags, attr) memmgrAllocGetAddrSpace_IMPL(pMemoryManager, flags, attr)
#endif //__nvoc_mem_mgr_h_disabled

// Creates the FB heap owned by this MemoryManager.
NV_STATUS memmgrCreateHeap_IMPL(struct MemoryManager *pMemoryManager);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_STATUS memmgrCreateHeap(struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrCreateHeap(pMemoryManager) memmgrCreateHeap_IMPL(pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

// Reports used RAM size through *arg3.
NV_STATUS memmgrGetUsedRamSize_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *arg3);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_STATUS memmgrGetUsedRamSize(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetUsedRamSize(pGpu, pMemoryManager, arg3) memmgrGetUsedRamSize_IMPL(pGpu, pMemoryManager, arg3)
#endif //__nvoc_mem_mgr_h_disabled

// Allocates hardware resources described by the FB_ALLOC_INFO.
NV_STATUS memmgrAllocHwResources_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_INFO *arg3);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_STATUS memmgrAllocHwResources(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_INFO *arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrAllocHwResources(pGpu, pMemoryManager, arg3) memmgrAllocHwResources_IMPL(pGpu, pMemoryManager, arg3)
#endif //__nvoc_mem_mgr_h_disabled

// Frees hardware resources previously acquired via memmgrAllocHwResources.
NV_STATUS memmgrFreeHwResources_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_INFO *arg3);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_STATUS memmgrFreeHwResources(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, FB_ALLOC_INFO *arg3) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrFreeHwResources(pGpu, pMemoryManager, arg3) memmgrFreeHwResources_IMPL(pGpu, pMemoryManager, arg3)
#endif //__nvoc_mem_mgr_h_disabled
2569
// Whether large pages are supported in the given address space.
NvBool memmgrLargePageSupported_IMPL(struct MemoryManager *pMemoryManager, NV_ADDRESS_SPACE arg2);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NvBool memmgrLargePageSupported(struct MemoryManager *pMemoryManager, NV_ADDRESS_SPACE arg2) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrLargePageSupported(pMemoryManager, arg2) memmgrLargePageSupported_IMPL(pMemoryManager, arg2)
#endif //__nvoc_mem_mgr_h_disabled

// Whether compression is supported in the given address space.
NvBool memmgrComprSupported_IMPL(struct MemoryManager *pMemoryManager, NV_ADDRESS_SPACE arg2);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NvBool memmgrComprSupported(struct MemoryManager *pMemoryManager, NV_ADDRESS_SPACE arg2) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrComprSupported(pMemoryManager, arg2) memmgrComprSupported_IMPL(pMemoryManager, arg2)
#endif //__nvoc_mem_mgr_h_disabled

// Mappable RAM size in megabytes.
NvU64 memmgrGetMappableRamSizeMb_IMPL(struct MemoryManager *pMemoryManager);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NvU64 memmgrGetMappableRamSizeMb(struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return 0;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrGetMappableRamSizeMb(pMemoryManager) memmgrGetMappableRamSizeMb_IMPL(pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

// Looks up the FB region descriptor covering [fbOffset, fbLimit]; NULL on the
// disabled-build path.
PFB_REGION_DESCRIPTOR memmgrLookupFbRegionByOffset_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, RmPhysAddr fbOffset, RmPhysAddr fbLimit);

#ifdef __nvoc_mem_mgr_h_disabled
static inline PFB_REGION_DESCRIPTOR memmgrLookupFbRegionByOffset(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, RmPhysAddr fbOffset, RmPhysAddr fbLimit) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NULL;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrLookupFbRegionByOffset(pGpu, pMemoryManager, fbOffset, fbLimit) memmgrLookupFbRegionByOffset_IMPL(pGpu, pMemoryManager, fbOffset, fbLimit)
#endif //__nvoc_mem_mgr_h_disabled
2613
// Fills physical-attribute outputs (arg5..arg11) for the given memory descriptor
// under the supplied address translation.
NV_STATUS memmgrFillMemdescForPhysAttr_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, PMEMORY_DESCRIPTOR arg3, ADDRESS_TRANSLATION arg4, NvU64 *arg5, NvU32 *arg6, NvU32 *arg7, NvU32 *arg8, NvU32 *arg9, NvU32 *arg10, NvU64 *arg11);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_STATUS memmgrFillMemdescForPhysAttr(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, PMEMORY_DESCRIPTOR arg3, ADDRESS_TRANSLATION arg4, NvU64 *arg5, NvU32 *arg6, NvU32 *arg7, NvU32 *arg8, NvU32 *arg9, NvU32 *arg10, NvU64 *arg11) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrFillMemdescForPhysAttr(pGpu, pMemoryManager, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11) memmgrFillMemdescForPhysAttr_IMPL(pGpu, pMemoryManager, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11)
#endif //__nvoc_mem_mgr_h_disabled

// Configures platform-level PMA support state.
NV_STATUS memmgrSetPlatformPmaSupport_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

#ifdef __nvoc_mem_mgr_h_disabled
static inline NV_STATUS memmgrSetPlatformPmaSupport(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrSetPlatformPmaSupport(pGpu, pMemoryManager) memmgrSetPlatformPmaSupport_IMPL(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled

// Sets up FB regions for PMA management.
void memmgrRegionSetupForPma_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);

#ifdef __nvoc_mem_mgr_h_disabled
static inline void memmgrRegionSetupForPma(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
    NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
}
#else //__nvoc_mem_mgr_h_disabled
#define memmgrRegionSetupForPma(pGpu, pMemoryManager) memmgrRegionSetupForPma_IMPL(pGpu, pMemoryManager)
#endif //__nvoc_mem_mgr_h_disabled
2645
2646 NV_STATUS memmgrInitFbRegions_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2647
2648 #ifdef __nvoc_mem_mgr_h_disabled
memmgrInitFbRegions(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)2649 static inline NV_STATUS memmgrInitFbRegions(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
2650 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2651 return NV_ERR_NOT_SUPPORTED;
2652 }
2653 #else //__nvoc_mem_mgr_h_disabled
2654 #define memmgrInitFbRegions(pGpu, pMemoryManager) memmgrInitFbRegions_IMPL(pGpu, pMemoryManager)
2655 #endif //__nvoc_mem_mgr_h_disabled
2656
2657 void memmgrRegionSetupCommon_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2658
2659 #ifdef __nvoc_mem_mgr_h_disabled
memmgrRegionSetupCommon(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)2660 static inline void memmgrRegionSetupCommon(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
2661 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2662 }
2663 #else //__nvoc_mem_mgr_h_disabled
2664 #define memmgrRegionSetupCommon(pGpu, pMemoryManager) memmgrRegionSetupCommon_IMPL(pGpu, pMemoryManager)
2665 #endif //__nvoc_mem_mgr_h_disabled
2666
2667 void memmgrRegenerateFbRegionPriority_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2668
2669 #ifdef __nvoc_mem_mgr_h_disabled
memmgrRegenerateFbRegionPriority(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)2670 static inline void memmgrRegenerateFbRegionPriority(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
2671 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2672 }
2673 #else //__nvoc_mem_mgr_h_disabled
2674 #define memmgrRegenerateFbRegionPriority(pGpu, pMemoryManager) memmgrRegenerateFbRegionPriority_IMPL(pGpu, pMemoryManager)
2675 #endif //__nvoc_mem_mgr_h_disabled
2676
2677 NvU32 memmgrInsertFbRegion_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, PFB_REGION_DESCRIPTOR arg3);
2678
2679 #ifdef __nvoc_mem_mgr_h_disabled
memmgrInsertFbRegion(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,PFB_REGION_DESCRIPTOR arg3)2680 static inline NvU32 memmgrInsertFbRegion(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, PFB_REGION_DESCRIPTOR arg3) {
2681 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2682 return 0;
2683 }
2684 #else //__nvoc_mem_mgr_h_disabled
2685 #define memmgrInsertFbRegion(pGpu, pMemoryManager, arg3) memmgrInsertFbRegion_IMPL(pGpu, pMemoryManager, arg3)
2686 #endif //__nvoc_mem_mgr_h_disabled
2687
2688 void memmgrDumpFbRegions_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2689
2690 #ifdef __nvoc_mem_mgr_h_disabled
memmgrDumpFbRegions(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)2691 static inline void memmgrDumpFbRegions(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
2692 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2693 }
2694 #else //__nvoc_mem_mgr_h_disabled
2695 #define memmgrDumpFbRegions(pGpu, pMemoryManager) memmgrDumpFbRegions_IMPL(pGpu, pMemoryManager)
2696 #endif //__nvoc_mem_mgr_h_disabled
2697
2698 void memmgrClearFbRegions_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2699
2700 #ifdef __nvoc_mem_mgr_h_disabled
memmgrClearFbRegions(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)2701 static inline void memmgrClearFbRegions(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
2702 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2703 }
2704 #else //__nvoc_mem_mgr_h_disabled
2705 #define memmgrClearFbRegions(pGpu, pMemoryManager) memmgrClearFbRegions_IMPL(pGpu, pMemoryManager)
2706 #endif //__nvoc_mem_mgr_h_disabled
2707
2708 void memmgrReleaseConsoleRegion_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2709
2710 #ifdef __nvoc_mem_mgr_h_disabled
memmgrReleaseConsoleRegion(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)2711 static inline void memmgrReleaseConsoleRegion(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
2712 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2713 }
2714 #else //__nvoc_mem_mgr_h_disabled
2715 #define memmgrReleaseConsoleRegion(pGpu, pMemoryManager) memmgrReleaseConsoleRegion_IMPL(pGpu, pMemoryManager)
2716 #endif //__nvoc_mem_mgr_h_disabled
2717
2718 PMEMORY_DESCRIPTOR memmgrGetReservedConsoleMemDesc_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2719
2720 #ifdef __nvoc_mem_mgr_h_disabled
memmgrGetReservedConsoleMemDesc(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)2721 static inline PMEMORY_DESCRIPTOR memmgrGetReservedConsoleMemDesc(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
2722 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2723 return NULL;
2724 }
2725 #else //__nvoc_mem_mgr_h_disabled
2726 #define memmgrGetReservedConsoleMemDesc(pGpu, pMemoryManager) memmgrGetReservedConsoleMemDesc_IMPL(pGpu, pMemoryManager)
2727 #endif //__nvoc_mem_mgr_h_disabled
2728
2729 void memmgrReserveBar2BackingStore_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *arg3);
2730
2731 #ifdef __nvoc_mem_mgr_h_disabled
memmgrReserveBar2BackingStore(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,NvU64 * arg3)2732 static inline void memmgrReserveBar2BackingStore(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *arg3) {
2733 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2734 }
2735 #else //__nvoc_mem_mgr_h_disabled
2736 #define memmgrReserveBar2BackingStore(pGpu, pMemoryManager, arg3) memmgrReserveBar2BackingStore_IMPL(pGpu, pMemoryManager, arg3)
2737 #endif //__nvoc_mem_mgr_h_disabled
2738
2739 void memmgrCalcReservedFbSpace_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2740
2741 #ifdef __nvoc_mem_mgr_h_disabled
memmgrCalcReservedFbSpace(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)2742 static inline void memmgrCalcReservedFbSpace(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
2743 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2744 }
2745 #else //__nvoc_mem_mgr_h_disabled
2746 #define memmgrCalcReservedFbSpace(pGpu, pMemoryManager) memmgrCalcReservedFbSpace_IMPL(pGpu, pMemoryManager)
2747 #endif //__nvoc_mem_mgr_h_disabled
2748
2749 void memmgrMemUtilsSetupChannelBufferSizes_IMPL(struct MemoryManager *pMemoryManager, OBJCHANNEL *arg2, NvU32 arg3);
2750
2751 #ifdef __nvoc_mem_mgr_h_disabled
memmgrMemUtilsSetupChannelBufferSizes(struct MemoryManager * pMemoryManager,OBJCHANNEL * arg2,NvU32 arg3)2752 static inline void memmgrMemUtilsSetupChannelBufferSizes(struct MemoryManager *pMemoryManager, OBJCHANNEL *arg2, NvU32 arg3) {
2753 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2754 }
2755 #else //__nvoc_mem_mgr_h_disabled
2756 #define memmgrMemUtilsSetupChannelBufferSizes(pMemoryManager, arg2, arg3) memmgrMemUtilsSetupChannelBufferSizes_IMPL(pMemoryManager, arg2, arg3)
2757 #endif //__nvoc_mem_mgr_h_disabled
2758
2759 NV_STATUS memmgrMemUtilsChannelSchedulingSetup_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3);
2760
2761 #ifdef __nvoc_mem_mgr_h_disabled
memmgrMemUtilsChannelSchedulingSetup(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,OBJCHANNEL * arg3)2762 static inline NV_STATUS memmgrMemUtilsChannelSchedulingSetup(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, OBJCHANNEL *arg3) {
2763 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2764 return NV_ERR_NOT_SUPPORTED;
2765 }
2766 #else //__nvoc_mem_mgr_h_disabled
2767 #define memmgrMemUtilsChannelSchedulingSetup(pGpu, pMemoryManager, arg3) memmgrMemUtilsChannelSchedulingSetup_IMPL(pGpu, pMemoryManager, arg3)
2768 #endif //__nvoc_mem_mgr_h_disabled
2769
2770 NV_STATUS memmgrGetKindComprFromMemDesc_IMPL(struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *arg2, NvU64 offset, NvU32 *kind, COMPR_INFO *pComprInfo);
2771
2772 #ifdef __nvoc_mem_mgr_h_disabled
memmgrGetKindComprFromMemDesc(struct MemoryManager * pMemoryManager,MEMORY_DESCRIPTOR * arg2,NvU64 offset,NvU32 * kind,COMPR_INFO * pComprInfo)2773 static inline NV_STATUS memmgrGetKindComprFromMemDesc(struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR *arg2, NvU64 offset, NvU32 *kind, COMPR_INFO *pComprInfo) {
2774 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2775 return NV_ERR_NOT_SUPPORTED;
2776 }
2777 #else //__nvoc_mem_mgr_h_disabled
2778 #define memmgrGetKindComprFromMemDesc(pMemoryManager, arg2, offset, kind, pComprInfo) memmgrGetKindComprFromMemDesc_IMPL(pMemoryManager, arg2, offset, kind, pComprInfo)
2779 #endif //__nvoc_mem_mgr_h_disabled
2780
2781 NV_STATUS memmgrFillComprInfo_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 arg3, NvU32 arg4, NvU32 arg5, NvU64 arg6, NvU32 arg7, COMPR_INFO *arg8);
2782
2783 #ifdef __nvoc_mem_mgr_h_disabled
memmgrFillComprInfo(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,NvU64 arg3,NvU32 arg4,NvU32 arg5,NvU64 arg6,NvU32 arg7,COMPR_INFO * arg8)2784 static inline NV_STATUS memmgrFillComprInfo(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 arg3, NvU32 arg4, NvU32 arg5, NvU64 arg6, NvU32 arg7, COMPR_INFO *arg8) {
2785 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2786 return NV_ERR_NOT_SUPPORTED;
2787 }
2788 #else //__nvoc_mem_mgr_h_disabled
2789 #define memmgrFillComprInfo(pGpu, pMemoryManager, arg3, arg4, arg5, arg6, arg7, arg8) memmgrFillComprInfo_IMPL(pGpu, pMemoryManager, arg3, arg4, arg5, arg6, arg7, arg8)
2790 #endif //__nvoc_mem_mgr_h_disabled
2791
2792 void memmgrComprInfoDisableCompression_IMPL(struct MemoryManager *pMemoryManager, COMPR_INFO *pComprInfo);
2793
2794 #ifdef __nvoc_mem_mgr_h_disabled
memmgrComprInfoDisableCompression(struct MemoryManager * pMemoryManager,COMPR_INFO * pComprInfo)2795 static inline void memmgrComprInfoDisableCompression(struct MemoryManager *pMemoryManager, COMPR_INFO *pComprInfo) {
2796 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2797 }
2798 #else //__nvoc_mem_mgr_h_disabled
2799 #define memmgrComprInfoDisableCompression(pMemoryManager, pComprInfo) memmgrComprInfoDisableCompression_IMPL(pMemoryManager, pComprInfo)
2800 #endif //__nvoc_mem_mgr_h_disabled
2801
2802 void memmgrFillComprInfoUncompressed_IMPL(struct MemoryManager *pMemoryManager, NvU32 kind, COMPR_INFO *pComprInfo);
2803
2804 #ifdef __nvoc_mem_mgr_h_disabled
memmgrFillComprInfoUncompressed(struct MemoryManager * pMemoryManager,NvU32 kind,COMPR_INFO * pComprInfo)2805 static inline void memmgrFillComprInfoUncompressed(struct MemoryManager *pMemoryManager, NvU32 kind, COMPR_INFO *pComprInfo) {
2806 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2807 }
2808 #else //__nvoc_mem_mgr_h_disabled
2809 #define memmgrFillComprInfoUncompressed(pMemoryManager, kind, pComprInfo) memmgrFillComprInfoUncompressed_IMPL(pMemoryManager, kind, pComprInfo)
2810 #endif //__nvoc_mem_mgr_h_disabled
2811
2812 NV_STATUS memmgrPmaInitialize_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, PMA *pPma);
2813
2814 #ifdef __nvoc_mem_mgr_h_disabled
memmgrPmaInitialize(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,PMA * pPma)2815 static inline NV_STATUS memmgrPmaInitialize(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, PMA *pPma) {
2816 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2817 return NV_ERR_NOT_SUPPORTED;
2818 }
2819 #else //__nvoc_mem_mgr_h_disabled
2820 #define memmgrPmaInitialize(pGpu, pMemoryManager, pPma) memmgrPmaInitialize_IMPL(pGpu, pMemoryManager, pPma)
2821 #endif //__nvoc_mem_mgr_h_disabled
2822
2823 NV_STATUS memmgrPmaRegisterRegions_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, struct Heap *pHeap, PMA *pPma);
2824
2825 #ifdef __nvoc_mem_mgr_h_disabled
memmgrPmaRegisterRegions(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,struct Heap * pHeap,PMA * pPma)2826 static inline NV_STATUS memmgrPmaRegisterRegions(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, struct Heap *pHeap, PMA *pPma) {
2827 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2828 return NV_ERR_NOT_SUPPORTED;
2829 }
2830 #else //__nvoc_mem_mgr_h_disabled
2831 #define memmgrPmaRegisterRegions(pGpu, pMemoryManager, pHeap, pPma) memmgrPmaRegisterRegions_IMPL(pGpu, pMemoryManager, pHeap, pPma)
2832 #endif //__nvoc_mem_mgr_h_disabled
2833
2834 NV_STATUS memmgrInitInternalChannels_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2835
2836 #ifdef __nvoc_mem_mgr_h_disabled
memmgrInitInternalChannels(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)2837 static inline NV_STATUS memmgrInitInternalChannels(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
2838 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2839 return NV_ERR_NOT_SUPPORTED;
2840 }
2841 #else //__nvoc_mem_mgr_h_disabled
2842 #define memmgrInitInternalChannels(pGpu, pMemoryManager) memmgrInitInternalChannels_IMPL(pGpu, pMemoryManager)
2843 #endif //__nvoc_mem_mgr_h_disabled
2844
2845 NV_STATUS memmgrDestroyInternalChannels_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2846
2847 #ifdef __nvoc_mem_mgr_h_disabled
memmgrDestroyInternalChannels(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)2848 static inline NV_STATUS memmgrDestroyInternalChannels(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
2849 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2850 return NV_ERR_NOT_SUPPORTED;
2851 }
2852 #else //__nvoc_mem_mgr_h_disabled
2853 #define memmgrDestroyInternalChannels(pGpu, pMemoryManager) memmgrDestroyInternalChannels_IMPL(pGpu, pMemoryManager)
2854 #endif //__nvoc_mem_mgr_h_disabled
2855
2856 NV_STATUS memmgrInitCeUtils_IMPL(struct MemoryManager *pMemoryManager, NvBool bFifoLite);
2857
2858 #ifdef __nvoc_mem_mgr_h_disabled
memmgrInitCeUtils(struct MemoryManager * pMemoryManager,NvBool bFifoLite)2859 static inline NV_STATUS memmgrInitCeUtils(struct MemoryManager *pMemoryManager, NvBool bFifoLite) {
2860 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2861 return NV_ERR_NOT_SUPPORTED;
2862 }
2863 #else //__nvoc_mem_mgr_h_disabled
2864 #define memmgrInitCeUtils(pMemoryManager, bFifoLite) memmgrInitCeUtils_IMPL(pMemoryManager, bFifoLite)
2865 #endif //__nvoc_mem_mgr_h_disabled
2866
2867 void memmgrDestroyCeUtils_IMPL(struct MemoryManager *pMemoryManager);
2868
2869 #ifdef __nvoc_mem_mgr_h_disabled
memmgrDestroyCeUtils(struct MemoryManager * pMemoryManager)2870 static inline void memmgrDestroyCeUtils(struct MemoryManager *pMemoryManager) {
2871 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2872 }
2873 #else //__nvoc_mem_mgr_h_disabled
2874 #define memmgrDestroyCeUtils(pMemoryManager) memmgrDestroyCeUtils_IMPL(pMemoryManager)
2875 #endif //__nvoc_mem_mgr_h_disabled
2876
2877 NV_STATUS memmgrSetMIGPartitionableBAR1Range_IMPL(OBJGPU *arg1, struct MemoryManager *arg2);
2878
2879 #ifdef __nvoc_mem_mgr_h_disabled
memmgrSetMIGPartitionableBAR1Range(OBJGPU * arg1,struct MemoryManager * arg2)2880 static inline NV_STATUS memmgrSetMIGPartitionableBAR1Range(OBJGPU *arg1, struct MemoryManager *arg2) {
2881 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2882 return NV_ERR_NOT_SUPPORTED;
2883 }
2884 #else //__nvoc_mem_mgr_h_disabled
2885 #define memmgrSetMIGPartitionableBAR1Range(arg1, arg2) memmgrSetMIGPartitionableBAR1Range_IMPL(arg1, arg2)
2886 #endif //__nvoc_mem_mgr_h_disabled
2887
2888 struct NV_RANGE memmgrGetMIGPartitionableBAR1Range_IMPL(OBJGPU *arg1, struct MemoryManager *arg2);
2889
2890 #ifdef __nvoc_mem_mgr_h_disabled
memmgrGetMIGPartitionableBAR1Range(OBJGPU * arg1,struct MemoryManager * arg2)2891 static inline struct NV_RANGE memmgrGetMIGPartitionableBAR1Range(OBJGPU *arg1, struct MemoryManager *arg2) {
2892 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2893 struct NV_RANGE ret;
2894 portMemSet(&ret, 0, sizeof(struct NV_RANGE));
2895 return ret;
2896 }
2897 #else //__nvoc_mem_mgr_h_disabled
2898 #define memmgrGetMIGPartitionableBAR1Range(arg1, arg2) memmgrGetMIGPartitionableBAR1Range_IMPL(arg1, arg2)
2899 #endif //__nvoc_mem_mgr_h_disabled
2900
2901 void memmgrSetMIGPartitionableMemoryRange_IMPL(OBJGPU *arg1, struct MemoryManager *arg2, struct NV_RANGE arg3);
2902
2903 #ifdef __nvoc_mem_mgr_h_disabled
memmgrSetMIGPartitionableMemoryRange(OBJGPU * arg1,struct MemoryManager * arg2,struct NV_RANGE arg3)2904 static inline void memmgrSetMIGPartitionableMemoryRange(OBJGPU *arg1, struct MemoryManager *arg2, struct NV_RANGE arg3) {
2905 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2906 }
2907 #else //__nvoc_mem_mgr_h_disabled
2908 #define memmgrSetMIGPartitionableMemoryRange(arg1, arg2, arg3) memmgrSetMIGPartitionableMemoryRange_IMPL(arg1, arg2, arg3)
2909 #endif //__nvoc_mem_mgr_h_disabled
2910
2911 struct NV_RANGE memmgrGetMIGPartitionableMemoryRange_IMPL(OBJGPU *arg1, struct MemoryManager *arg2);
2912
2913 #ifdef __nvoc_mem_mgr_h_disabled
memmgrGetMIGPartitionableMemoryRange(OBJGPU * arg1,struct MemoryManager * arg2)2914 static inline struct NV_RANGE memmgrGetMIGPartitionableMemoryRange(OBJGPU *arg1, struct MemoryManager *arg2) {
2915 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2916 struct NV_RANGE ret;
2917 portMemSet(&ret, 0, sizeof(struct NV_RANGE));
2918 return ret;
2919 }
2920 #else //__nvoc_mem_mgr_h_disabled
2921 #define memmgrGetMIGPartitionableMemoryRange(arg1, arg2) memmgrGetMIGPartitionableMemoryRange_IMPL(arg1, arg2)
2922 #endif //__nvoc_mem_mgr_h_disabled
2923
2924 NV_STATUS memmgrFreeMIGGPUInstanceMemory_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 swizzId, NvHandle hMemory, struct Heap **ppMemoryPartitionHeap);
2925
2926 #ifdef __nvoc_mem_mgr_h_disabled
memmgrFreeMIGGPUInstanceMemory(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,NvU32 swizzId,NvHandle hMemory,struct Heap ** ppMemoryPartitionHeap)2927 static inline NV_STATUS memmgrFreeMIGGPUInstanceMemory(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU32 swizzId, NvHandle hMemory, struct Heap **ppMemoryPartitionHeap) {
2928 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2929 return NV_ERR_NOT_SUPPORTED;
2930 }
2931 #else //__nvoc_mem_mgr_h_disabled
2932 #define memmgrFreeMIGGPUInstanceMemory(pGpu, pMemoryManager, swizzId, hMemory, ppMemoryPartitionHeap) memmgrFreeMIGGPUInstanceMemory_IMPL(pGpu, pMemoryManager, swizzId, hMemory, ppMemoryPartitionHeap)
2933 #endif //__nvoc_mem_mgr_h_disabled
2934
2935 NV_STATUS memmgrPageLevelPoolsCreate_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2936
2937 #ifdef __nvoc_mem_mgr_h_disabled
memmgrPageLevelPoolsCreate(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)2938 static inline NV_STATUS memmgrPageLevelPoolsCreate(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
2939 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2940 return NV_ERR_NOT_SUPPORTED;
2941 }
2942 #else //__nvoc_mem_mgr_h_disabled
2943 #define memmgrPageLevelPoolsCreate(pGpu, pMemoryManager) memmgrPageLevelPoolsCreate_IMPL(pGpu, pMemoryManager)
2944 #endif //__nvoc_mem_mgr_h_disabled
2945
2946 void memmgrPageLevelPoolsDestroy_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2947
2948 #ifdef __nvoc_mem_mgr_h_disabled
memmgrPageLevelPoolsDestroy(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)2949 static inline void memmgrPageLevelPoolsDestroy(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
2950 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2951 }
2952 #else //__nvoc_mem_mgr_h_disabled
2953 #define memmgrPageLevelPoolsDestroy(pGpu, pMemoryManager) memmgrPageLevelPoolsDestroy_IMPL(pGpu, pMemoryManager)
2954 #endif //__nvoc_mem_mgr_h_disabled
2955
2956 NV_STATUS memmgrPageLevelPoolsGetInfo_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, struct Device *pDevice, struct RM_POOL_ALLOC_MEM_RESERVE_INFO **arg4);
2957
2958 #ifdef __nvoc_mem_mgr_h_disabled
memmgrPageLevelPoolsGetInfo(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,struct Device * pDevice,struct RM_POOL_ALLOC_MEM_RESERVE_INFO ** arg4)2959 static inline NV_STATUS memmgrPageLevelPoolsGetInfo(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, struct Device *pDevice, struct RM_POOL_ALLOC_MEM_RESERVE_INFO **arg4) {
2960 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2961 return NV_ERR_NOT_SUPPORTED;
2962 }
2963 #else //__nvoc_mem_mgr_h_disabled
2964 #define memmgrPageLevelPoolsGetInfo(pGpu, pMemoryManager, pDevice, arg4) memmgrPageLevelPoolsGetInfo_IMPL(pGpu, pMemoryManager, pDevice, arg4)
2965 #endif //__nvoc_mem_mgr_h_disabled
2966
2967 NV_STATUS memmgrAllocMIGMemoryAllocationInternalHandles_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2968
2969 #ifdef __nvoc_mem_mgr_h_disabled
memmgrAllocMIGMemoryAllocationInternalHandles(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)2970 static inline NV_STATUS memmgrAllocMIGMemoryAllocationInternalHandles(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
2971 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2972 return NV_ERR_NOT_SUPPORTED;
2973 }
2974 #else //__nvoc_mem_mgr_h_disabled
2975 #define memmgrAllocMIGMemoryAllocationInternalHandles(pGpu, pMemoryManager) memmgrAllocMIGMemoryAllocationInternalHandles_IMPL(pGpu, pMemoryManager)
2976 #endif //__nvoc_mem_mgr_h_disabled
2977
2978 void memmgrFreeMIGMemoryAllocationInternalHandles_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
2979
2980 #ifdef __nvoc_mem_mgr_h_disabled
memmgrFreeMIGMemoryAllocationInternalHandles(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)2981 static inline void memmgrFreeMIGMemoryAllocationInternalHandles(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
2982 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2983 }
2984 #else //__nvoc_mem_mgr_h_disabled
2985 #define memmgrFreeMIGMemoryAllocationInternalHandles(pGpu, pMemoryManager) memmgrFreeMIGMemoryAllocationInternalHandles_IMPL(pGpu, pMemoryManager)
2986 #endif //__nvoc_mem_mgr_h_disabled
2987
2988 void memmgrGetFreeMemoryForAllMIGGPUInstances_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *pBytes);
2989
2990 #ifdef __nvoc_mem_mgr_h_disabled
memmgrGetFreeMemoryForAllMIGGPUInstances(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,NvU64 * pBytes)2991 static inline void memmgrGetFreeMemoryForAllMIGGPUInstances(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *pBytes) {
2992 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
2993 }
2994 #else //__nvoc_mem_mgr_h_disabled
2995 #define memmgrGetFreeMemoryForAllMIGGPUInstances(pGpu, pMemoryManager, pBytes) memmgrGetFreeMemoryForAllMIGGPUInstances_IMPL(pGpu, pMemoryManager, pBytes)
2996 #endif //__nvoc_mem_mgr_h_disabled
2997
2998 void memmgrGetTotalMemoryForAllMIGGPUInstances_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *pBytes);
2999
3000 #ifdef __nvoc_mem_mgr_h_disabled
memmgrGetTotalMemoryForAllMIGGPUInstances(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,NvU64 * pBytes)3001 static inline void memmgrGetTotalMemoryForAllMIGGPUInstances(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, NvU64 *pBytes) {
3002 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
3003 }
3004 #else //__nvoc_mem_mgr_h_disabled
3005 #define memmgrGetTotalMemoryForAllMIGGPUInstances(pGpu, pMemoryManager, pBytes) memmgrGetTotalMemoryForAllMIGGPUInstances_IMPL(pGpu, pMemoryManager, pBytes)
3006 #endif //__nvoc_mem_mgr_h_disabled
3007
3008 void memmgrGetTopLevelScrubberStatus_IMPL(OBJGPU *arg1, struct MemoryManager *arg2, NvBool *pbTopLevelScrubberEnabled, NvBool *pbTopLevelScrubberConstructed);
3009
3010 #ifdef __nvoc_mem_mgr_h_disabled
memmgrGetTopLevelScrubberStatus(OBJGPU * arg1,struct MemoryManager * arg2,NvBool * pbTopLevelScrubberEnabled,NvBool * pbTopLevelScrubberConstructed)3011 static inline void memmgrGetTopLevelScrubberStatus(OBJGPU *arg1, struct MemoryManager *arg2, NvBool *pbTopLevelScrubberEnabled, NvBool *pbTopLevelScrubberConstructed) {
3012 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
3013 }
3014 #else //__nvoc_mem_mgr_h_disabled
3015 #define memmgrGetTopLevelScrubberStatus(arg1, arg2, pbTopLevelScrubberEnabled, pbTopLevelScrubberConstructed) memmgrGetTopLevelScrubberStatus_IMPL(arg1, arg2, pbTopLevelScrubberEnabled, pbTopLevelScrubberConstructed)
3016 #endif //__nvoc_mem_mgr_h_disabled
3017
3018 MEMORY_DESCRIPTOR *memmgrMemUtilsGetMemDescFromHandle_IMPL(struct MemoryManager *pMemoryManager, NvHandle hClient, NvHandle hMemory);
3019
3020 #ifdef __nvoc_mem_mgr_h_disabled
memmgrMemUtilsGetMemDescFromHandle(struct MemoryManager * pMemoryManager,NvHandle hClient,NvHandle hMemory)3021 static inline MEMORY_DESCRIPTOR *memmgrMemUtilsGetMemDescFromHandle(struct MemoryManager *pMemoryManager, NvHandle hClient, NvHandle hMemory) {
3022 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
3023 return NULL;
3024 }
3025 #else //__nvoc_mem_mgr_h_disabled
3026 #define memmgrMemUtilsGetMemDescFromHandle(pMemoryManager, hClient, hMemory) memmgrMemUtilsGetMemDescFromHandle_IMPL(pMemoryManager, hClient, hMemory)
3027 #endif //__nvoc_mem_mgr_h_disabled
3028
3029 NV_STATUS memmgrVerifyGspDmaOps_IMPL(OBJGPU *arg1, struct MemoryManager *arg2);
3030
3031 #ifdef __nvoc_mem_mgr_h_disabled
memmgrVerifyGspDmaOps(OBJGPU * arg1,struct MemoryManager * arg2)3032 static inline NV_STATUS memmgrVerifyGspDmaOps(OBJGPU *arg1, struct MemoryManager *arg2) {
3033 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
3034 return NV_ERR_NOT_SUPPORTED;
3035 }
3036 #else //__nvoc_mem_mgr_h_disabled
3037 #define memmgrVerifyGspDmaOps(arg1, arg2) memmgrVerifyGspDmaOps_IMPL(arg1, arg2)
3038 #endif //__nvoc_mem_mgr_h_disabled
3039
3040 NV_STATUS memmgrAllocReservedFBRegionMemdesc_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR **ppMemdesc, NvU64 rangeStart, NvU64 allocSize, NvU64 memdescFlags, NV_FB_ALLOC_RM_INTERNAL_OWNER allocTag);
3041
3042 #ifdef __nvoc_mem_mgr_h_disabled
memmgrAllocReservedFBRegionMemdesc(OBJGPU * pGpu,struct MemoryManager * pMemoryManager,MEMORY_DESCRIPTOR ** ppMemdesc,NvU64 rangeStart,NvU64 allocSize,NvU64 memdescFlags,NV_FB_ALLOC_RM_INTERNAL_OWNER allocTag)3043 static inline NV_STATUS memmgrAllocReservedFBRegionMemdesc(OBJGPU *pGpu, struct MemoryManager *pMemoryManager, MEMORY_DESCRIPTOR **ppMemdesc, NvU64 rangeStart, NvU64 allocSize, NvU64 memdescFlags, NV_FB_ALLOC_RM_INTERNAL_OWNER allocTag) {
3044 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
3045 return NV_ERR_NOT_SUPPORTED;
3046 }
3047 #else //__nvoc_mem_mgr_h_disabled
3048 #define memmgrAllocReservedFBRegionMemdesc(pGpu, pMemoryManager, ppMemdesc, rangeStart, allocSize, memdescFlags, allocTag) memmgrAllocReservedFBRegionMemdesc_IMPL(pGpu, pMemoryManager, ppMemdesc, rangeStart, allocSize, memdescFlags, allocTag)
3049 #endif //__nvoc_mem_mgr_h_disabled
3050
3051 NV_STATUS memmgrReserveMemoryForFsp_IMPL(OBJGPU *pGpu, struct MemoryManager *pMemoryManager);
3052
3053 #ifdef __nvoc_mem_mgr_h_disabled
memmgrReserveMemoryForFsp(OBJGPU * pGpu,struct MemoryManager * pMemoryManager)3054 static inline NV_STATUS memmgrReserveMemoryForFsp(OBJGPU *pGpu, struct MemoryManager *pMemoryManager) {
3055 NV_ASSERT_FAILED_PRECOMP("MemoryManager was disabled!");
3056 return NV_ERR_NOT_SUPPORTED;
3057 }
3058 #else //__nvoc_mem_mgr_h_disabled
3059 #define memmgrReserveMemoryForFsp(pGpu, pMemoryManager) memmgrReserveMemoryForFsp_IMPL(pGpu, pMemoryManager)
3060 #endif //__nvoc_mem_mgr_h_disabled
3061
3062 #undef PRIVATE_FIELD
3063
3064
3065 #endif // MEM_MGR_H
3066
3067 #ifdef __cplusplus
3068 } // extern "C"
3069 #endif
3070
3071 #endif // _G_MEM_MGR_NVOC_H_
3072