1
2 #ifndef _G_KERN_MEM_SYS_NVOC_H_
3 #define _G_KERN_MEM_SYS_NVOC_H_
4 #include "nvoc/runtime.h"
5
6 // Version of generated metadata structures
7 #ifdef NVOC_METADATA_VERSION
8 #undef NVOC_METADATA_VERSION
9 #endif
10 #define NVOC_METADATA_VERSION 0
11
12 #ifdef __cplusplus
13 extern "C" {
14 #endif
15
16 /*
17 * SPDX-FileCopyrightText: Copyright (c) 1993-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
18 * SPDX-License-Identifier: MIT
19 *
20 * Permission is hereby granted, free of charge, to any person obtaining a
21 * copy of this software and associated documentation files (the "Software"),
22 * to deal in the Software without restriction, including without limitation
23 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
24 * and/or sell copies of the Software, and to permit persons to whom the
25 * Software is furnished to do so, subject to the following conditions:
26 *
27 * The above copyright notice and this permission notice shall be included in
28 * all copies or substantial portions of the Software.
29 *
30 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
31 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
32 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
33 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
34 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
35 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
36 * DEALINGS IN THE SOFTWARE.
37 */
38
39 #pragma once
40 #include "g_kern_mem_sys_nvoc.h"
41
42 #ifndef KERN_MEM_SYS_H
43 #define KERN_MEM_SYS_H
44
45 #include "core/core.h"
46 #include "gpu/eng_state.h"
47 #include "gpu/gpu.h"
48 #include "containers/map.h"
49 #include "gpu/mem_mgr/heap_base.h"
50 #include "kernel/gpu/mig_mgr/kernel_mig_manager.h"
51 #include "ctrl/ctrl2080/ctrl2080fb.h"
52 #include "ctrl/ctrl2080/ctrl2080internal.h"
53 #include "ctrl/ctrl2080/ctrl2080fb.h"
54 //
55 // FB Cache (opcode, mem target) defines used by kmemsysCacheOp hal API
56 //
typedef enum
{
    FB_CACHE_OP_UNDEFINED = 0, // no operation selected
    FB_CACHE_INVALIDATE   = 1, // drop cache lines; dirty data is NOT written back
    FB_CACHE_WRITEBACK    = 2, // flush dirty lines to memory, keep lines valid
    FB_CACHE_EVICT        = 3, // flush dirty lines to memory, then invalidate them
} FB_CACHE_OP;
67
68 // target memory types for cache operations
typedef enum
{
    FB_CACHE_MEM_UNDEFINED  = 0, // no target selected
    FB_CACHE_SYSTEM_MEMORY  = 1, // lines backed by system memory
    FB_CACHE_VIDEO_MEMORY   = 2, // lines backed by video memory
    FB_CACHE_PEER_MEMORY    = 3, // lines backed by peer-GPU memory
    FB_CACHE_DIRTY          = 4, // dirty lines
    FB_CACHE_COMPTAG_MEMORY = 5, // comptag backing-store lines
    FB_CACHE_DIRTY_ALL      = 6, // all dirty lines
} FB_CACHE_MEMTYPE;
79
// FB cache enable state.
typedef enum
{
    FB_CACHE_STATE_ENABLED  = 0,
    FB_CACHE_STATE_DISABLED = 1,
} FB_CACHE_STATE;
85
// FB cache write policy.
typedef enum
{
    FB_CACHE_WRITE_MODE_WRITETHROUGH = 0,
    FB_CACHE_WRITE_MODE_WRITEBACK    = 1,
} FB_CACHE_WRITE_MODE;
91
// FB cache bypass mode. FERMI (TEST) ONLY.
typedef enum
{
    FB_CACHE_BYPASS_MODE_ENABLED  = 0,
    FB_CACHE_BYPASS_MODE_DISABLED = 1,
} FB_CACHE_BYPASS_MODE;
97
// Reduced-cache-mode (RCM) states for the FB cache.
typedef enum
{
    FB_CACHE_RCM_STATE_FULL          = 0, // full cache capacity in use
    FB_CACHE_RCM_STATE_TRANSITIONING = 1, // transition in progress
    FB_CACHE_RCM_STATE_REDUCED       = 2, // reduced cache capacity
    FB_CACHE_RCM_STATE_ZERO_CACHE    = 3, // cache fully disabled
} FB_CACHE_RCM_STATE;
105
/*!
 * Tracks NUMA information of GPU memory partitions.
 * NOTE: generated/ABI-sensitive layout — do not reorder fields.
 */
typedef struct
{
    NvBool bInUse;     // Is the partition in use?
    NvU64 offset;      // FB offset of the partition
    NvU64 size;        // FB size of the partition
    NvU32 numaNodeId;  // OS NUMA Node Id of the partition.
} MEM_PARTITION_NUMA_INFO;
114
/*!
 * MIG memory boundary configuration values.
 * NOTE(review): the precise hardware meaning of cfg A/B/C is not visible in
 * this header — see the MIG manager implementation for how they are consumed.
 */
typedef struct MIG_MEM_BOUNDARY_CONFIG_TABLE
{
    /*!
     * Memory boundary config A (4KB aligned)
     */
    NvU64 memBoundaryCfgA;

    /*!
     * Memory boundary config B (4KB aligned)
     */
    NvU64 memBoundaryCfgB;

    /*!
     * Memory boundary config C (64KB aligned)
     */
    NvU32 memBoundaryCfgC;
} MIG_MEM_BOUNDARY_CONFIG_TABLE;
132
/*!
 * @brief Structure carrying memory configuration information for specific GPU instance
 *        The information will be used to allocate memory when a GPU instance is
 *        created or queried. The structure will be indexed with swizzIDs
 */
typedef struct MIG_GPU_INSTANCE_MEMORY_CONFIG
{
    /*!
     * First VMMU segment from where the GPU instance memory starts
     */
    NvU64 startingVmmuSegment;

    /*!
     * Size of the GPU instance memory in the form of number of vmmu segments
     */
    NvU64 memSizeInVmmuSegment;

    /*!
     * GPU Instance memory config initialization state
     * (NV_FALSE until the entry has been populated)
     */
    NvBool bInitialized;
} MIG_GPU_INSTANCE_MEMORY_CONFIG;
155
/*
 * Static memory-system configuration, fetched once from GSP/physical RM.
 * @ref NV2080_CTRL_INTERNAL_MEMSYS_GET_STATIC_CONFIG_PARAMS
 */
typedef NV2080_CTRL_INTERNAL_MEMSYS_GET_STATIC_CONFIG_PARAMS MEMORY_SYSTEM_STATIC_CONFIG;
158
//
// HW resource ID bit-field layout, DRF-style "high:low" ranges.
// CTAGID occupies bits 15:0, the ZCULL mask occupies bits 30:16.
//
#define FB_HWRESID_CTAGID_FERMI              15:0
#define FB_HWRESID_ZCULL_FERMI               30:16

// Bit for ZCULL region i (argument parenthesized per macro hygiene).
#define FB_HWRESID_ZCULL_SHIFT_FERMI(i)      (1 << (i))

// Extract the comptag ID field from HW resource ID n.
#define FB_HWRESID_CTAGID_VAL_FERMI(n)        \
    (((n) >> DRF_SHIFT(FB_HWRESID_CTAGID_FERMI)) & DRF_MASK(FB_HWRESID_CTAGID_FERMI))

// Encode comptag ID i into its field position.
#define FB_HWRESID_CTAGID_NUM_FERMI(i)        \
    (((i) & DRF_MASK(FB_HWRESID_CTAGID_FERMI)) << DRF_SHIFT(FB_HWRESID_CTAGID_FERMI))

// Replace the comptag ID field of h with i (h is written back).
#define FB_SET_HWRESID_CTAGID_FERMI(h, i)     \
    h = ( ((h) & ~(DRF_MASK(FB_HWRESID_CTAGID_FERMI) << DRF_SHIFT(FB_HWRESID_CTAGID_FERMI))) | \
      FB_HWRESID_CTAGID_NUM_FERMI(i) )

// Encode ZCULL region index i as a one-hot bit in the ZCULL field.
// Fix: the original expanded `1<<i` with an unparenthesized argument, which
// mis-parses for expression arguments (e.g. ternaries); use `1 << (i)`.
#define FB_HWRESID_ZCULL_NUM_FERMI(i)         \
    (((1 << (i)) & DRF_MASK(FB_HWRESID_ZCULL_FERMI)) << DRF_SHIFT(FB_HWRESID_ZCULL_FERMI))

// Extract the ZCULL field (one-hot mask) from HW resource ID n.
#define FB_HWRESID_ZCULL_VAL_FERMI(n)         \
    (((n) >> DRF_SHIFT(FB_HWRESID_ZCULL_FERMI)) & DRF_MASK(FB_HWRESID_ZCULL_FERMI))

// Base FB offset of the NUMA-onlined region.
#define KMEMSYS_FB_NUMA_ONLINE_BASE                         0

// Onlineable size: memorySize rounded down to the OS memblock granularity.
#define KMEMSYS_FB_NUMA_ONLINE_SIZE(memorySize, memblockSize) NV_ALIGN_DOWN64(memorySize, memblockSize)
183
184 /*!
185 * KernelMemorySystem is a logical abstraction of the GPU memory system. This
186 * type is instantiated in VGPU guest/GSP Client as well as the VGPU
187 * host/GSP-RM.
188 *
189 * When KernelMemorySystem wants to read or write hardware state, it does not
190 * have access to the registers on the GPU, it can however perform operations
191 * using the following mechanisms:
192 *
193 * 1.) access registers are virtualized across VFs, e.g.: registers within
194 * NV_VIRTUAL_FUNCTION_PRIV_XYZ.
195 *
196 * 2.) send a RPC to the VGPU Host/GSP-RM to perform the operation.
197 *
198 * Operations such as "get memory system bus width" are appropriate for this
199 * interface. Anything related to managing of the memory page
200 * tables/allocations should live in MemoryManager.
201 */
202
203
// Private field names are wrapped in PRIVATE_FIELD, which does nothing for
// the matching C source file, but causes diagnostics to be issued if another
// source file references the field.
#ifdef NVOC_KERN_MEM_SYS_H_PRIVATE_ACCESS_ALLOWED
// Owning translation unit: expose the raw field name.
#define PRIVATE_FIELD(x) x
#else
// Everyone else: route through NVOC so access is flagged.
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
212
213
// NVOC-generated class layout for KernelMemorySystem (derives from
// OBJENGSTATE). Field order is ABI — do not reorder or hand-edit.
struct KernelMemorySystem {

    // Metadata
    const struct NVOC_RTTI *__nvoc_rtti;

    // Parent (i.e. superclass or base class) object pointers
    struct OBJENGSTATE __nvoc_base_OBJENGSTATE;

    // Ancestor object pointers for `staticCast` feature
    struct Object *__nvoc_pbase_Object;    // obj super^2
    struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;    // engstate super
    struct KernelMemorySystem *__nvoc_pbase_KernelMemorySystem;    // kmemsys

    // Vtable with 42 per-object function pointers
    NV_STATUS (*__kmemsysConstructEngine__)(OBJGPU *, struct KernelMemorySystem * /*this*/, ENGDESCRIPTOR);  // virtual override (engstate) base (engstate)
    NV_STATUS (*__kmemsysStatePreInitLocked__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // virtual override (engstate) base (engstate)
    NV_STATUS (*__kmemsysStateInitLocked__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // virtual override (engstate) base (engstate)
    NV_STATUS (*__kmemsysStatePreLoad__)(OBJGPU *, struct KernelMemorySystem * /*this*/, NvU32);  // virtual override (engstate) base (engstate)
    NV_STATUS (*__kmemsysStatePostLoad__)(OBJGPU *, struct KernelMemorySystem * /*this*/, NvU32);  // virtual override (engstate) base (engstate)
    NV_STATUS (*__kmemsysStateLoad__)(OBJGPU *, struct KernelMemorySystem * /*this*/, NvU32);  // virtual override (engstate) base (engstate)
    NV_STATUS (*__kmemsysStatePreUnload__)(OBJGPU *, struct KernelMemorySystem * /*this*/, NvU32);  // virtual override (engstate) base (engstate)
    NV_STATUS (*__kmemsysStateUnload__)(OBJGPU *, struct KernelMemorySystem * /*this*/, NvU32);  // virtual override (engstate) base (engstate)
    void (*__kmemsysStateDestroy__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // virtual override (engstate) base (engstate)
    NV_STATUS (*__kmemsysGetFbNumaInfo__)(OBJGPU *, struct KernelMemorySystem * /*this*/, NvU64 *, NvU64 *, NvS32 *);  // halified (2 hals) body
    NV_STATUS (*__kmemsysReadUsableFbSize__)(OBJGPU *, struct KernelMemorySystem * /*this*/, NvU64 *);  // halified (2 hals) body
    NV_STATUS (*__kmemsysGetUsableFbSize__)(OBJGPU *, struct KernelMemorySystem * /*this*/, NvU64 *);  // halified (2 hals) body
    NV_STATUS (*__kmemsysCacheOp__)(OBJGPU *, struct KernelMemorySystem * /*this*/, PMEMORY_DESCRIPTOR, FB_CACHE_MEMTYPE, FB_CACHE_OP);  // halified (2 hals) body
    NV_STATUS (*__kmemsysDoCacheOp__)(OBJGPU *, struct KernelMemorySystem * /*this*/, NvU32, NvU32, NvU32, PRMTIMEOUT);  // halified (2 hals) body
    NvU32 (*__kmemsysReadL2SysmemInvalidateReg__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // halified (2 hals) body
    void (*__kmemsysWriteL2SysmemInvalidateReg__)(OBJGPU *, struct KernelMemorySystem * /*this*/, NvU32);  // halified (2 hals) body
    NvU32 (*__kmemsysReadL2PeermemInvalidateReg__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // halified (2 hals) body
    void (*__kmemsysWriteL2PeermemInvalidateReg__)(OBJGPU *, struct KernelMemorySystem * /*this*/, NvU32);  // halified (2 hals) body
    NV_STATUS (*__kmemsysInitFlushSysmemBuffer__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // halified (3 hals) body
    void (*__kmemsysProgramSysmemFlushBuffer__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // halified (5 hals) body
    NvU32 (*__kmemsysGetFlushSysmemBufferAddrShift__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // halified (3 hals) body
    NvBool (*__kmemsysIsPagePLCable__)(OBJGPU *, struct KernelMemorySystem * /*this*/, NvU64, NvU64);  // halified (4 hals) body
    NV_STATUS (*__kmemsysReadMIGMemoryCfg__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // halified (3 hals) body
    NV_STATUS (*__kmemsysInitMIGMemoryPartitionTable__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // halified (3 hals) body
    NV_STATUS (*__kmemsysSwizzIdToVmmuSegmentsRange__)(OBJGPU *, struct KernelMemorySystem * /*this*/, NvU32, NvU32, NvU32);  // halified (2 hals)
    NV_STATUS (*__kmemsysNumaAddMemory__)(OBJGPU *, struct KernelMemorySystem * /*this*/, NvU32, NvU64, NvU64, NvS32 *);  // halified (2 hals) body
    void (*__kmemsysNumaRemoveMemory__)(OBJGPU *, struct KernelMemorySystem * /*this*/, NvU32);  // halified (2 hals) body
    void (*__kmemsysNumaRemoveAllMemory__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // halified (2 hals) body
    NV_STATUS (*__kmemsysPopulateMIGGPUInstanceMemConfig__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // halified (2 hals) body
    NV_STATUS (*__kmemsysSetupAllAtsPeers__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // halified (2 hals) body
    void (*__kmemsysRemoveAllAtsPeers__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // halified (2 hals) body
    NvBool (*__kmemsysAssertFbAckTimeoutPending__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // halified (3 hals) body
    NvU32 (*__kmemsysGetMaxFbpas__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // halified (3 hals) body
    NvU32 (*__kmemsysGetEccDedCountSize__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // halified (2 hals) body
    NvU32 (*__kmemsysGetEccDedCountRegAddr__)(OBJGPU *, struct KernelMemorySystem * /*this*/, NvU32, NvU32);  // halified (2 hals) body
    NvU16 (*__kmemsysGetMaximumBlacklistPages__)(OBJGPU *, struct KernelMemorySystem * /*this*/);  // halified (2 hals) body
    NV_STATUS (*__kmemsysGetFbInfos__)(OBJGPU *, struct KernelMemorySystem * /*this*/, struct RsClient *, Device *, NvHandle, NV2080_CTRL_FB_GET_INFO_V2_PARAMS *, NvU64 *);  // halified (2 hals)
    void (*__kmemsysInitMissing__)(struct OBJGPU *, struct KernelMemorySystem * /*this*/);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kmemsysStatePreInitUnlocked__)(struct OBJGPU *, struct KernelMemorySystem * /*this*/);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kmemsysStateInitUnlocked__)(struct OBJGPU *, struct KernelMemorySystem * /*this*/);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kmemsysStatePostUnload__)(struct OBJGPU *, struct KernelMemorySystem * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
    NvBool (*__kmemsysIsPresent__)(struct OBJGPU *, struct KernelMemorySystem * /*this*/);  // virtual inherited (engstate) base (engstate)

    // Data members
    NvBool bDisableTiledCachingInvalidatesWithEccBug1521641;
    NvBool bGpuCacheEnable;
    NvBool bNumaNodesAdded;
    NvBool bL2CleanFbPull;
    NvU32 l2WriteMode;  // NOTE(review): presumably holds an FB_CACHE_WRITE_MODE value — confirm in kern_mem_sys.c
    NvBool bPreserveComptagBackingStoreOnSuspend;
    NvBool bBug3656943WAR;
    NvU32 overrideToGMK;
    NvBool bDisablePlcForCertainOffsetsBug3046774;
    const MEMORY_SYSTEM_STATIC_CONFIG *pStaticConfig;
    MEM_PARTITION_NUMA_INFO *memPartitionNumaInfo;
    MIG_MEM_BOUNDARY_CONFIG_TABLE memBoundaryCfgTable;
    MIG_GPU_INSTANCE_MEMORY_CONFIG gpuInstanceMemConfig[15];  // indexed by swizzId (see MIG_GPU_INSTANCE_MEMORY_CONFIG)
    NV2080_CTRL_INTERNAL_MEMSYS_GET_MIG_MEMORY_PARTITION_TABLE_PARAMS migMemoryPartitionTable;
    PMEMORY_DESCRIPTOR pSysmemFlushBufferMemDesc;
    NvU64 sysmemFlushBuffer;
    NvU64 fbOverrideStartKb;
    NvU64 coherentCpuFbBase;
    NvU64 coherentCpuFbEnd;
    NvU64 coherentRsvdFbBase;
    NvU64 numaOnlineBase;
    NvU64 numaOnlineSize;
    NvU64 numaMigPartitionSize[15];
    NvBool bNumaMigPartitionSizeEnumerated;
};
297
#ifndef __NVOC_CLASS_KernelMemorySystem_TYPEDEF__
#define __NVOC_CLASS_KernelMemorySystem_TYPEDEF__
typedef struct KernelMemorySystem KernelMemorySystem;
#endif /* __NVOC_CLASS_KernelMemorySystem_TYPEDEF__ */

// NVOC class id for KernelMemorySystem (generated constant).
#ifndef __nvoc_class_id_KernelMemorySystem
#define __nvoc_class_id_KernelMemorySystem 0x7faff1
#endif /* __nvoc_class_id_KernelMemorySystem */

// Casting support
extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelMemorySystem;

// staticCast: compile-time upcast via the generated ancestor pointer.
#define __staticCast_KernelMemorySystem(pThis) \
    ((pThis)->__nvoc_pbase_KernelMemorySystem)

#ifdef __nvoc_kern_mem_sys_h_disabled
// Class disabled in this build: dynamicCast always yields NULL.
#define __dynamicCast_KernelMemorySystem(pThis) ((KernelMemorySystem*)NULL)
#else //__nvoc_kern_mem_sys_h_disabled
// dynamicCast: runtime-checked downcast through NVOC RTTI.
#define __dynamicCast_KernelMemorySystem(pThis) \
    ((KernelMemorySystem*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelMemorySystem)))
#endif //__nvoc_kern_mem_sys_h_disabled

// Property macros
#define PDB_PROP_KMEMSYS_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_KMEMSYS_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING

NV_STATUS __nvoc_objCreateDynamic_KernelMemorySystem(KernelMemorySystem**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_KernelMemorySystem(KernelMemorySystem**, Dynamic*, NvU32);
#define __objCreate_KernelMemorySystem(ppNewObj, pParent, createFlags) \
    __nvoc_objCreate_KernelMemorySystem((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
329
330
// Wrapper macros
//
// Three generated flavors per method:
//   <name>_FNPTR(obj)  — the raw per-object function pointer (or the
//                        inherited engstate pointer for base-class methods).
//   <name>(...)        — calls through the _DISPATCH inline below.
//   <name>_HAL(...)    — same dispatch; present for HAL-ified methods.
#define kmemsysConstructEngine_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysConstructEngine__
#define kmemsysConstructEngine(pGpu, pKernelMemorySystem, arg3) kmemsysConstructEngine_DISPATCH(pGpu, pKernelMemorySystem, arg3)
#define kmemsysStatePreInitLocked_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysStatePreInitLocked__
#define kmemsysStatePreInitLocked(pGpu, pKernelMemorySystem) kmemsysStatePreInitLocked_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysStateInitLocked_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysStateInitLocked__
#define kmemsysStateInitLocked(pGpu, pKernelMemorySystem) kmemsysStateInitLocked_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysStatePreLoad_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysStatePreLoad__
#define kmemsysStatePreLoad(pGpu, pKernelMemorySystem, flags) kmemsysStatePreLoad_DISPATCH(pGpu, pKernelMemorySystem, flags)
#define kmemsysStatePostLoad_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysStatePostLoad__
#define kmemsysStatePostLoad(pGpu, pKernelMemorySystem, flags) kmemsysStatePostLoad_DISPATCH(pGpu, pKernelMemorySystem, flags)
#define kmemsysStateLoad_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysStateLoad__
#define kmemsysStateLoad(pGpu, pKernelMemorySystem, flags) kmemsysStateLoad_DISPATCH(pGpu, pKernelMemorySystem, flags)
#define kmemsysStatePreUnload_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysStatePreUnload__
#define kmemsysStatePreUnload(pGpu, pKernelMemorySystem, flags) kmemsysStatePreUnload_DISPATCH(pGpu, pKernelMemorySystem, flags)
#define kmemsysStateUnload_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysStateUnload__
#define kmemsysStateUnload(pGpu, pKernelMemorySystem, flags) kmemsysStateUnload_DISPATCH(pGpu, pKernelMemorySystem, flags)
#define kmemsysStateDestroy_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysStateDestroy__
#define kmemsysStateDestroy(pGpu, pKernelMemorySystem) kmemsysStateDestroy_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysGetFbNumaInfo_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysGetFbNumaInfo__
#define kmemsysGetFbNumaInfo(pGpu, pKernelMemorySystem, physAddr, rsvdPhysAddr, numaNodeId) kmemsysGetFbNumaInfo_DISPATCH(pGpu, pKernelMemorySystem, physAddr, rsvdPhysAddr, numaNodeId)
#define kmemsysGetFbNumaInfo_HAL(pGpu, pKernelMemorySystem, physAddr, rsvdPhysAddr, numaNodeId) kmemsysGetFbNumaInfo_DISPATCH(pGpu, pKernelMemorySystem, physAddr, rsvdPhysAddr, numaNodeId)
#define kmemsysReadUsableFbSize_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysReadUsableFbSize__
#define kmemsysReadUsableFbSize(pGpu, pKernelMemorySystem, pFbSize) kmemsysReadUsableFbSize_DISPATCH(pGpu, pKernelMemorySystem, pFbSize)
#define kmemsysReadUsableFbSize_HAL(pGpu, pKernelMemorySystem, pFbSize) kmemsysReadUsableFbSize_DISPATCH(pGpu, pKernelMemorySystem, pFbSize)
#define kmemsysGetUsableFbSize_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysGetUsableFbSize__
#define kmemsysGetUsableFbSize(pGpu, pKernelMemorySystem, pFbSize) kmemsysGetUsableFbSize_DISPATCH(pGpu, pKernelMemorySystem, pFbSize)
#define kmemsysGetUsableFbSize_HAL(pGpu, pKernelMemorySystem, pFbSize) kmemsysGetUsableFbSize_DISPATCH(pGpu, pKernelMemorySystem, pFbSize)
#define kmemsysCacheOp_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysCacheOp__
#define kmemsysCacheOp(pGpu, pKernelMemorySystem, arg3, arg4, operation) kmemsysCacheOp_DISPATCH(pGpu, pKernelMemorySystem, arg3, arg4, operation)
#define kmemsysCacheOp_HAL(pGpu, pKernelMemorySystem, arg3, arg4, operation) kmemsysCacheOp_DISPATCH(pGpu, pKernelMemorySystem, arg3, arg4, operation)
#define kmemsysDoCacheOp_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysDoCacheOp__
#define kmemsysDoCacheOp(pGpu, pKernelMemorySystem, arg3, arg4, arg5, arg6) kmemsysDoCacheOp_DISPATCH(pGpu, pKernelMemorySystem, arg3, arg4, arg5, arg6)
#define kmemsysDoCacheOp_HAL(pGpu, pKernelMemorySystem, arg3, arg4, arg5, arg6) kmemsysDoCacheOp_DISPATCH(pGpu, pKernelMemorySystem, arg3, arg4, arg5, arg6)
#define kmemsysReadL2SysmemInvalidateReg_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysReadL2SysmemInvalidateReg__
#define kmemsysReadL2SysmemInvalidateReg(pGpu, pKernelMemorySystem) kmemsysReadL2SysmemInvalidateReg_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysReadL2SysmemInvalidateReg_HAL(pGpu, pKernelMemorySystem) kmemsysReadL2SysmemInvalidateReg_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysWriteL2SysmemInvalidateReg_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysWriteL2SysmemInvalidateReg__
#define kmemsysWriteL2SysmemInvalidateReg(pGpu, pKernelMemorySystem, arg3) kmemsysWriteL2SysmemInvalidateReg_DISPATCH(pGpu, pKernelMemorySystem, arg3)
#define kmemsysWriteL2SysmemInvalidateReg_HAL(pGpu, pKernelMemorySystem, arg3) kmemsysWriteL2SysmemInvalidateReg_DISPATCH(pGpu, pKernelMemorySystem, arg3)
#define kmemsysReadL2PeermemInvalidateReg_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysReadL2PeermemInvalidateReg__
#define kmemsysReadL2PeermemInvalidateReg(pGpu, pKernelMemorySystem) kmemsysReadL2PeermemInvalidateReg_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysReadL2PeermemInvalidateReg_HAL(pGpu, pKernelMemorySystem) kmemsysReadL2PeermemInvalidateReg_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysWriteL2PeermemInvalidateReg_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysWriteL2PeermemInvalidateReg__
#define kmemsysWriteL2PeermemInvalidateReg(pGpu, pKernelMemorySystem, arg3) kmemsysWriteL2PeermemInvalidateReg_DISPATCH(pGpu, pKernelMemorySystem, arg3)
#define kmemsysWriteL2PeermemInvalidateReg_HAL(pGpu, pKernelMemorySystem, arg3) kmemsysWriteL2PeermemInvalidateReg_DISPATCH(pGpu, pKernelMemorySystem, arg3)
#define kmemsysInitFlushSysmemBuffer_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysInitFlushSysmemBuffer__
#define kmemsysInitFlushSysmemBuffer(pGpu, pKernelMemorySystem) kmemsysInitFlushSysmemBuffer_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysInitFlushSysmemBuffer_HAL(pGpu, pKernelMemorySystem) kmemsysInitFlushSysmemBuffer_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysProgramSysmemFlushBuffer_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysProgramSysmemFlushBuffer__
#define kmemsysProgramSysmemFlushBuffer(pGpu, pKernelMemorySystem) kmemsysProgramSysmemFlushBuffer_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysProgramSysmemFlushBuffer_HAL(pGpu, pKernelMemorySystem) kmemsysProgramSysmemFlushBuffer_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysGetFlushSysmemBufferAddrShift_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysGetFlushSysmemBufferAddrShift__
#define kmemsysGetFlushSysmemBufferAddrShift(pGpu, pKernelMemorySystem) kmemsysGetFlushSysmemBufferAddrShift_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysGetFlushSysmemBufferAddrShift_HAL(pGpu, pKernelMemorySystem) kmemsysGetFlushSysmemBufferAddrShift_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysIsPagePLCable_FNPTR(KernelMemorySystem) KernelMemorySystem->__kmemsysIsPagePLCable__
#define kmemsysIsPagePLCable(pGpu, KernelMemorySystem, physAddr, pageSize) kmemsysIsPagePLCable_DISPATCH(pGpu, KernelMemorySystem, physAddr, pageSize)
#define kmemsysIsPagePLCable_HAL(pGpu, KernelMemorySystem, physAddr, pageSize) kmemsysIsPagePLCable_DISPATCH(pGpu, KernelMemorySystem, physAddr, pageSize)
#define kmemsysReadMIGMemoryCfg_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysReadMIGMemoryCfg__
#define kmemsysReadMIGMemoryCfg(pGpu, pKernelMemorySystem) kmemsysReadMIGMemoryCfg_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysReadMIGMemoryCfg_HAL(pGpu, pKernelMemorySystem) kmemsysReadMIGMemoryCfg_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysInitMIGMemoryPartitionTable_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysInitMIGMemoryPartitionTable__
#define kmemsysInitMIGMemoryPartitionTable(pGpu, pKernelMemorySystem) kmemsysInitMIGMemoryPartitionTable_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysInitMIGMemoryPartitionTable_HAL(pGpu, pKernelMemorySystem) kmemsysInitMIGMemoryPartitionTable_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysSwizzIdToVmmuSegmentsRange_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysSwizzIdToVmmuSegmentsRange__
#define kmemsysSwizzIdToVmmuSegmentsRange(pGpu, pKernelMemorySystem, swizzId, vmmuSegmentSize, totalVmmuSegments) kmemsysSwizzIdToVmmuSegmentsRange_DISPATCH(pGpu, pKernelMemorySystem, swizzId, vmmuSegmentSize, totalVmmuSegments)
#define kmemsysSwizzIdToVmmuSegmentsRange_HAL(pGpu, pKernelMemorySystem, swizzId, vmmuSegmentSize, totalVmmuSegments) kmemsysSwizzIdToVmmuSegmentsRange_DISPATCH(pGpu, pKernelMemorySystem, swizzId, vmmuSegmentSize, totalVmmuSegments)
#define kmemsysNumaAddMemory_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysNumaAddMemory__
#define kmemsysNumaAddMemory(pGpu, pKernelMemorySystem, swizzId, offset, size, numaNodeId) kmemsysNumaAddMemory_DISPATCH(pGpu, pKernelMemorySystem, swizzId, offset, size, numaNodeId)
#define kmemsysNumaAddMemory_HAL(pGpu, pKernelMemorySystem, swizzId, offset, size, numaNodeId) kmemsysNumaAddMemory_DISPATCH(pGpu, pKernelMemorySystem, swizzId, offset, size, numaNodeId)
#define kmemsysNumaRemoveMemory_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysNumaRemoveMemory__
#define kmemsysNumaRemoveMemory(pGpu, pKernelMemorySystem, swizzId) kmemsysNumaRemoveMemory_DISPATCH(pGpu, pKernelMemorySystem, swizzId)
#define kmemsysNumaRemoveMemory_HAL(pGpu, pKernelMemorySystem, swizzId) kmemsysNumaRemoveMemory_DISPATCH(pGpu, pKernelMemorySystem, swizzId)
#define kmemsysNumaRemoveAllMemory_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysNumaRemoveAllMemory__
#define kmemsysNumaRemoveAllMemory(pGpu, pKernelMemorySystem) kmemsysNumaRemoveAllMemory_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysNumaRemoveAllMemory_HAL(pGpu, pKernelMemorySystem) kmemsysNumaRemoveAllMemory_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysPopulateMIGGPUInstanceMemConfig_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysPopulateMIGGPUInstanceMemConfig__
#define kmemsysPopulateMIGGPUInstanceMemConfig(pGpu, pKernelMemorySystem) kmemsysPopulateMIGGPUInstanceMemConfig_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysPopulateMIGGPUInstanceMemConfig_HAL(pGpu, pKernelMemorySystem) kmemsysPopulateMIGGPUInstanceMemConfig_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysSetupAllAtsPeers_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysSetupAllAtsPeers__
#define kmemsysSetupAllAtsPeers(pGpu, pKernelMemorySystem) kmemsysSetupAllAtsPeers_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysSetupAllAtsPeers_HAL(pGpu, pKernelMemorySystem) kmemsysSetupAllAtsPeers_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysRemoveAllAtsPeers_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysRemoveAllAtsPeers__
#define kmemsysRemoveAllAtsPeers(pGpu, pKernelMemorySystem) kmemsysRemoveAllAtsPeers_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysRemoveAllAtsPeers_HAL(pGpu, pKernelMemorySystem) kmemsysRemoveAllAtsPeers_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysAssertFbAckTimeoutPending_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysAssertFbAckTimeoutPending__
#define kmemsysAssertFbAckTimeoutPending(pGpu, pKernelMemorySystem) kmemsysAssertFbAckTimeoutPending_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysAssertFbAckTimeoutPending_HAL(pGpu, pKernelMemorySystem) kmemsysAssertFbAckTimeoutPending_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysGetMaxFbpas_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysGetMaxFbpas__
#define kmemsysGetMaxFbpas(pGpu, pKernelMemorySystem) kmemsysGetMaxFbpas_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysGetMaxFbpas_HAL(pGpu, pKernelMemorySystem) kmemsysGetMaxFbpas_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysGetEccDedCountSize_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysGetEccDedCountSize__
#define kmemsysGetEccDedCountSize(pGpu, pKernelMemorySystem) kmemsysGetEccDedCountSize_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysGetEccDedCountSize_HAL(pGpu, pKernelMemorySystem) kmemsysGetEccDedCountSize_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysGetEccDedCountRegAddr_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysGetEccDedCountRegAddr__
#define kmemsysGetEccDedCountRegAddr(pGpu, pKernelMemorySystem, fbpa, subp) kmemsysGetEccDedCountRegAddr_DISPATCH(pGpu, pKernelMemorySystem, fbpa, subp)
#define kmemsysGetEccDedCountRegAddr_HAL(pGpu, pKernelMemorySystem, fbpa, subp) kmemsysGetEccDedCountRegAddr_DISPATCH(pGpu, pKernelMemorySystem, fbpa, subp)
#define kmemsysGetMaximumBlacklistPages_FNPTR(pKernelMemorySystem) pKernelMemorySystem->__kmemsysGetMaximumBlacklistPages__
#define kmemsysGetMaximumBlacklistPages(pGpu, pKernelMemorySystem) kmemsysGetMaximumBlacklistPages_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysGetMaximumBlacklistPages_HAL(pGpu, pKernelMemorySystem) kmemsysGetMaximumBlacklistPages_DISPATCH(pGpu, pKernelMemorySystem)
#define kmemsysGetFbInfos_FNPTR(arg_this) arg_this->__kmemsysGetFbInfos__
#define kmemsysGetFbInfos(arg1, arg_this, arg3, arg4, hSubdevice, pParams, pFbInfoListIndicesUnset) kmemsysGetFbInfos_DISPATCH(arg1, arg_this, arg3, arg4, hSubdevice, pParams, pFbInfoListIndicesUnset)
#define kmemsysGetFbInfos_HAL(arg1, arg_this, arg3, arg4, hSubdevice, pParams, pFbInfoListIndicesUnset) kmemsysGetFbInfos_DISPATCH(arg1, arg_this, arg3, arg4, hSubdevice, pParams, pFbInfoListIndicesUnset)
// Methods below are inherited from OBJENGSTATE; their _FNPTR resolves to the
// base-class vtable slot.
#define kmemsysInitMissing_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateInitMissing__
#define kmemsysInitMissing(pGpu, pEngstate) kmemsysInitMissing_DISPATCH(pGpu, pEngstate)
#define kmemsysStatePreInitUnlocked_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreInitUnlocked__
#define kmemsysStatePreInitUnlocked(pGpu, pEngstate) kmemsysStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kmemsysStateInitUnlocked_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStateInitUnlocked__
#define kmemsysStateInitUnlocked(pGpu, pEngstate) kmemsysStateInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kmemsysStatePostUnload_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePostUnload__
#define kmemsysStatePostUnload(pGpu, pEngstate, arg3) kmemsysStatePostUnload_DISPATCH(pGpu, pEngstate, arg3)
#define kmemsysIsPresent_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateIsPresent__
#define kmemsysIsPresent(pGpu, pEngstate) kmemsysIsPresent_DISPATCH(pGpu, pEngstate)
444
// Dispatch functions
// Each _DISPATCH helper forwards its arguments through the per-object NVOC
// function pointer (the __kmemsys*__ slot), so the concrete implementation
// bound at object construction time is invoked.

// Engine construction / state-lifecycle dispatchers (OBJENGSTATE overrides).
static inline NV_STATUS kmemsysConstructEngine_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, ENGDESCRIPTOR arg3) {
    return pKernelMemorySystem->__kmemsysConstructEngine__(pGpu, pKernelMemorySystem, arg3);
}

static inline NV_STATUS kmemsysStatePreInitLocked_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return pKernelMemorySystem->__kmemsysStatePreInitLocked__(pGpu, pKernelMemorySystem);
}

static inline NV_STATUS kmemsysStateInitLocked_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return pKernelMemorySystem->__kmemsysStateInitLocked__(pGpu, pKernelMemorySystem);
}

static inline NV_STATUS kmemsysStatePreLoad_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 flags) {
    return pKernelMemorySystem->__kmemsysStatePreLoad__(pGpu, pKernelMemorySystem, flags);
}

static inline NV_STATUS kmemsysStatePostLoad_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 flags) {
    return pKernelMemorySystem->__kmemsysStatePostLoad__(pGpu, pKernelMemorySystem, flags);
}

static inline NV_STATUS kmemsysStateLoad_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 flags) {
    return pKernelMemorySystem->__kmemsysStateLoad__(pGpu, pKernelMemorySystem, flags);
}

static inline NV_STATUS kmemsysStatePreUnload_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 flags) {
    return pKernelMemorySystem->__kmemsysStatePreUnload__(pGpu, pKernelMemorySystem, flags);
}

static inline NV_STATUS kmemsysStateUnload_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 flags) {
    return pKernelMemorySystem->__kmemsysStateUnload__(pGpu, pKernelMemorySystem, flags);
}

static inline void kmemsysStateDestroy_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    pKernelMemorySystem->__kmemsysStateDestroy__(pGpu, pKernelMemorySystem);
}
481
// FB size / NUMA / cache-maintenance dispatchers. Same pattern as above:
// call through the object's bound function pointer.
static inline NV_STATUS kmemsysGetFbNumaInfo_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU64 *physAddr, NvU64 *rsvdPhysAddr, NvS32 *numaNodeId) {
    return pKernelMemorySystem->__kmemsysGetFbNumaInfo__(pGpu, pKernelMemorySystem, physAddr, rsvdPhysAddr, numaNodeId);
}

static inline NV_STATUS kmemsysReadUsableFbSize_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU64 *pFbSize) {
    return pKernelMemorySystem->__kmemsysReadUsableFbSize__(pGpu, pKernelMemorySystem, pFbSize);
}

static inline NV_STATUS kmemsysGetUsableFbSize_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU64 *pFbSize) {
    return pKernelMemorySystem->__kmemsysGetUsableFbSize__(pGpu, pKernelMemorySystem, pFbSize);
}

static inline NV_STATUS kmemsysCacheOp_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, PMEMORY_DESCRIPTOR arg3, FB_CACHE_MEMTYPE arg4, FB_CACHE_OP operation) {
    return pKernelMemorySystem->__kmemsysCacheOp__(pGpu, pKernelMemorySystem, arg3, arg4, operation);
}

static inline NV_STATUS kmemsysDoCacheOp_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 arg3, NvU32 arg4, NvU32 arg5, PRMTIMEOUT arg6) {
    return pKernelMemorySystem->__kmemsysDoCacheOp__(pGpu, pKernelMemorySystem, arg3, arg4, arg5, arg6);
}

// L2 sysmem/peermem invalidate register accessors (read returns the raw
// register value; write takes the value to program).
static inline NvU32 kmemsysReadL2SysmemInvalidateReg_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return pKernelMemorySystem->__kmemsysReadL2SysmemInvalidateReg__(pGpu, pKernelMemorySystem);
}

static inline void kmemsysWriteL2SysmemInvalidateReg_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 arg3) {
    pKernelMemorySystem->__kmemsysWriteL2SysmemInvalidateReg__(pGpu, pKernelMemorySystem, arg3);
}

static inline NvU32 kmemsysReadL2PeermemInvalidateReg_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return pKernelMemorySystem->__kmemsysReadL2PeermemInvalidateReg__(pGpu, pKernelMemorySystem);
}

static inline void kmemsysWriteL2PeermemInvalidateReg_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 arg3) {
    pKernelMemorySystem->__kmemsysWriteL2PeermemInvalidateReg__(pGpu, pKernelMemorySystem, arg3);
}

// Sysmem flush buffer setup/programming dispatchers.
static inline NV_STATUS kmemsysInitFlushSysmemBuffer_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return pKernelMemorySystem->__kmemsysInitFlushSysmemBuffer__(pGpu, pKernelMemorySystem);
}

static inline void kmemsysProgramSysmemFlushBuffer_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    pKernelMemorySystem->__kmemsysProgramSysmemFlushBuffer__(pGpu, pKernelMemorySystem);
}

static inline NvU32 kmemsysGetFlushSysmemBufferAddrShift_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return pKernelMemorySystem->__kmemsysGetFlushSysmemBufferAddrShift__(pGpu, pKernelMemorySystem);
}

// NOTE: this dispatcher's object parameter is named "KernelMemorySystem"
// (no 'p' prefix) in the generated code; kept as-is.
static inline NvBool kmemsysIsPagePLCable_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *KernelMemorySystem, NvU64 physAddr, NvU64 pageSize) {
    return KernelMemorySystem->__kmemsysIsPagePLCable__(pGpu, KernelMemorySystem, physAddr, pageSize);
}
533
// MIG memory-partitioning, NUMA onlining, and ATS-peer dispatchers.
static inline NV_STATUS kmemsysReadMIGMemoryCfg_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return pKernelMemorySystem->__kmemsysReadMIGMemoryCfg__(pGpu, pKernelMemorySystem);
}

static inline NV_STATUS kmemsysInitMIGMemoryPartitionTable_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return pKernelMemorySystem->__kmemsysInitMIGMemoryPartitionTable__(pGpu, pKernelMemorySystem);
}

static inline NV_STATUS kmemsysSwizzIdToVmmuSegmentsRange_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 swizzId, NvU32 vmmuSegmentSize, NvU32 totalVmmuSegments) {
    return pKernelMemorySystem->__kmemsysSwizzIdToVmmuSegmentsRange__(pGpu, pKernelMemorySystem, swizzId, vmmuSegmentSize, totalVmmuSegments);
}

static inline NV_STATUS kmemsysNumaAddMemory_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 swizzId, NvU64 offset, NvU64 size, NvS32 *numaNodeId) {
    return pKernelMemorySystem->__kmemsysNumaAddMemory__(pGpu, pKernelMemorySystem, swizzId, offset, size, numaNodeId);
}

static inline void kmemsysNumaRemoveMemory_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 swizzId) {
    pKernelMemorySystem->__kmemsysNumaRemoveMemory__(pGpu, pKernelMemorySystem, swizzId);
}

static inline void kmemsysNumaRemoveAllMemory_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    pKernelMemorySystem->__kmemsysNumaRemoveAllMemory__(pGpu, pKernelMemorySystem);
}

static inline NV_STATUS kmemsysPopulateMIGGPUInstanceMemConfig_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return pKernelMemorySystem->__kmemsysPopulateMIGGPUInstanceMemConfig__(pGpu, pKernelMemorySystem);
}

static inline NV_STATUS kmemsysSetupAllAtsPeers_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return pKernelMemorySystem->__kmemsysSetupAllAtsPeers__(pGpu, pKernelMemorySystem);
}

static inline void kmemsysRemoveAllAtsPeers_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    pKernelMemorySystem->__kmemsysRemoveAllAtsPeers__(pGpu, pKernelMemorySystem);
}
569
// FB-ack timeout, FBPA/ECC query, and FB-info dispatchers.
static inline NvBool kmemsysAssertFbAckTimeoutPending_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return pKernelMemorySystem->__kmemsysAssertFbAckTimeoutPending__(pGpu, pKernelMemorySystem);
}

static inline NvU32 kmemsysGetMaxFbpas_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return pKernelMemorySystem->__kmemsysGetMaxFbpas__(pGpu, pKernelMemorySystem);
}

static inline NvU32 kmemsysGetEccDedCountSize_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return pKernelMemorySystem->__kmemsysGetEccDedCountSize__(pGpu, pKernelMemorySystem);
}

static inline NvU32 kmemsysGetEccDedCountRegAddr_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 fbpa, NvU32 subp) {
    return pKernelMemorySystem->__kmemsysGetEccDedCountRegAddr__(pGpu, pKernelMemorySystem, fbpa, subp);
}

static inline NvU16 kmemsysGetMaximumBlacklistPages_DISPATCH(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return pKernelMemorySystem->__kmemsysGetMaximumBlacklistPages__(pGpu, pKernelMemorySystem);
}

// Dispatcher for servicing NV2080_CTRL_FB_GET_INFO_V2; the generator names
// the object parameter "arg_this" here.
static inline NV_STATUS kmemsysGetFbInfos_DISPATCH(OBJGPU *arg1, struct KernelMemorySystem *arg_this, struct RsClient *arg3, Device *arg4, NvHandle hSubdevice, NV2080_CTRL_FB_GET_INFO_V2_PARAMS *pParams, NvU64 *pFbInfoListIndicesUnset) {
    return arg_this->__kmemsysGetFbInfos__(arg1, arg_this, arg3, arg4, hSubdevice, pParams, pFbInfoListIndicesUnset);
}
593
// Dispatchers for methods inherited unchanged from OBJENGSTATE (these call
// through the __kmemsys*__ pointers that the _FNPTR macros above bind to the
// base-class vtable slots). Note the "struct OBJGPU *" spelling and the
// pEngstate parameter name follow the base-class signatures.
static inline void kmemsysInitMissing_DISPATCH(struct OBJGPU *pGpu, struct KernelMemorySystem *pEngstate) {
    pEngstate->__kmemsysInitMissing__(pGpu, pEngstate);
}

static inline NV_STATUS kmemsysStatePreInitUnlocked_DISPATCH(struct OBJGPU *pGpu, struct KernelMemorySystem *pEngstate) {
    return pEngstate->__kmemsysStatePreInitUnlocked__(pGpu, pEngstate);
}

static inline NV_STATUS kmemsysStateInitUnlocked_DISPATCH(struct OBJGPU *pGpu, struct KernelMemorySystem *pEngstate) {
    return pEngstate->__kmemsysStateInitUnlocked__(pGpu, pEngstate);
}

static inline NV_STATUS kmemsysStatePostUnload_DISPATCH(struct OBJGPU *pGpu, struct KernelMemorySystem *pEngstate, NvU32 arg3) {
    return pEngstate->__kmemsysStatePostUnload__(pGpu, pEngstate, arg3);
}

static inline NvBool kmemsysIsPresent_DISPATCH(struct OBJGPU *pGpu, struct KernelMemorySystem *pEngstate) {
    return pEngstate->__kmemsysIsPresent__(pGpu, pEngstate);
}
613
// No-op variant of the sysmem-flush-buffer validity assert (generated stub;
// presumably selected for configurations with nothing to validate).
static inline void kmemsysAssertSysmemFlushBufferValid_b3696a(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return;
}

// Chip-specific implementations, defined in the corresponding HAL sources.
void kmemsysAssertSysmemFlushBufferValid_GM107(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);

void kmemsysAssertSysmemFlushBufferValid_GA100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);

void kmemsysAssertSysmemFlushBufferValid_GH100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);

void kmemsysAssertSysmemFlushBufferValid_GB100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);


#ifdef __nvoc_kern_mem_sys_h_disabled
// Build with KernelMemorySystem compiled out: any call is a programming
// error and triggers a precomp assert.
static inline void kmemsysAssertSysmemFlushBufferValid(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
}
#else //__nvoc_kern_mem_sys_h_disabled
// Default binding for this header's configuration: the no-op stub.
#define kmemsysAssertSysmemFlushBufferValid(pGpu, pKernelMemorySystem) kmemsysAssertSysmemFlushBufferValid_b3696a(pGpu, pKernelMemorySystem)
#endif //__nvoc_kern_mem_sys_h_disabled

#define kmemsysAssertSysmemFlushBufferValid_HAL(pGpu, pKernelMemorySystem) kmemsysAssertSysmemFlushBufferValid(pGpu, pKernelMemorySystem)
636
// Populates the static memory-system configuration; kernel-RM implementation
// lives in the non-generated source.
NV_STATUS kmemsysInitStaticConfig_KERNEL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, MEMORY_SYSTEM_STATIC_CONFIG *pConfig);


#ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub: assert and report unsupported.
static inline NV_STATUS kmemsysInitStaticConfig(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, MEMORY_SYSTEM_STATIC_CONFIG *pConfig) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_mem_sys_h_disabled
#define kmemsysInitStaticConfig(pGpu, pKernelMemorySystem, pConfig) kmemsysInitStaticConfig_KERNEL(pGpu, pKernelMemorySystem, pConfig)
#endif //__nvoc_kern_mem_sys_h_disabled

#define kmemsysInitStaticConfig_HAL(pGpu, pKernelMemorySystem, pConfig) kmemsysInitStaticConfig(pGpu, pKernelMemorySystem, pConfig)
650
// Success-no-op variant of the cache-only-memory prefill (generated stub).
static inline NV_STATUS kmemsysPreFillCacheOnlyMemory_56cd7a(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU64 arg3, NvU64 arg4) {
    return NV_OK;
}

NV_STATUS kmemsysPreFillCacheOnlyMemory_GM107(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU64 arg3, NvU64 arg4);


#ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub: assert and report unsupported.
static inline NV_STATUS kmemsysPreFillCacheOnlyMemory(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU64 arg3, NvU64 arg4) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_mem_sys_h_disabled
// Default binding: the NV_OK no-op variant.
#define kmemsysPreFillCacheOnlyMemory(pGpu, pKernelMemorySystem, arg3, arg4) kmemsysPreFillCacheOnlyMemory_56cd7a(pGpu, pKernelMemorySystem, arg3, arg4)
#endif //__nvoc_kern_mem_sys_h_disabled

#define kmemsysPreFillCacheOnlyMemory_HAL(pGpu, pKernelMemorySystem, arg3, arg4) kmemsysPreFillCacheOnlyMemory(pGpu, pKernelMemorySystem, arg3, arg4)
668
// Unsupported-variant: always fails the assert; NV_ASSERT_OR_RETURN_PRECOMP
// performs the return, so control never falls off the end.
static inline NV_STATUS kmemsysCheckDisplayRemapperRange_14278f(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU64 arg3, NvU64 arg4) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_INVALID_STATE);
}


#ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub: assert and report unsupported.
static inline NV_STATUS kmemsysCheckDisplayRemapperRange(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU64 arg3, NvU64 arg4) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_mem_sys_h_disabled
#define kmemsysCheckDisplayRemapperRange(pGpu, pKernelMemorySystem, arg3, arg4) kmemsysCheckDisplayRemapperRange_14278f(pGpu, pKernelMemorySystem, arg3, arg4)
#endif //__nvoc_kern_mem_sys_h_disabled

#define kmemsysCheckDisplayRemapperRange_HAL(pGpu, pKernelMemorySystem, arg3, arg4) kmemsysCheckDisplayRemapperRange(pGpu, pKernelMemorySystem, arg3, arg4)
684
// No-op post-heap-create hook (generated stub).
static inline void kmemsysPostHeapCreate_b3696a(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return;
}


#ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub: assert if called.
static inline void kmemsysPostHeapCreate(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
}
#else //__nvoc_kern_mem_sys_h_disabled
#define kmemsysPostHeapCreate(pGpu, pKernelMemorySystem) kmemsysPostHeapCreate_b3696a(pGpu, pKernelMemorySystem)
#endif //__nvoc_kern_mem_sys_h_disabled

#define kmemsysPostHeapCreate_HAL(pGpu, pKernelMemorySystem) kmemsysPostHeapCreate(pGpu, pKernelMemorySystem)
699
// No-op pre-heap-destruct hook (generated stub).
static inline void kmemsysPreHeapDestruct_b3696a(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return;
}


#ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub: assert if called.
static inline void kmemsysPreHeapDestruct(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
}
#else //__nvoc_kern_mem_sys_h_disabled
#define kmemsysPreHeapDestruct(pGpu, pKernelMemorySystem) kmemsysPreHeapDestruct_b3696a(pGpu, pKernelMemorySystem)
#endif //__nvoc_kern_mem_sys_h_disabled

#define kmemsysPreHeapDestruct_HAL(pGpu, pKernelMemorySystem) kmemsysPreHeapDestruct(pGpu, pKernelMemorySystem)
714
// Allocates compression resources for an FB allocation; kernel-RM
// implementation lives in the non-generated source.
NV_STATUS kmemsysAllocComprResources_KERNEL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, FB_ALLOC_INFO *arg3, NvU64 arg4, NvU32 arg5, NvU32 *arg6, NvU32 arg7);


#ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub: assert and report unsupported.
static inline NV_STATUS kmemsysAllocComprResources(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, FB_ALLOC_INFO *arg3, NvU64 arg4, NvU32 arg5, NvU32 *arg6, NvU32 arg7) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_mem_sys_h_disabled
#define kmemsysAllocComprResources(pGpu, pKernelMemorySystem, arg3, arg4, arg5, arg6, arg7) kmemsysAllocComprResources_KERNEL(pGpu, pKernelMemorySystem, arg3, arg4, arg5, arg6, arg7)
#endif //__nvoc_kern_mem_sys_h_disabled

#define kmemsysAllocComprResources_HAL(pGpu, pKernelMemorySystem, arg3, arg4, arg5, arg6, arg7) kmemsysAllocComprResources(pGpu, pKernelMemorySystem, arg3, arg4, arg5, arg6, arg7)
728
// No-op release of compression resources (generated stub).
static inline void kmemsysFreeComprResources_b3696a(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 hwResId) {
    return;
}


#ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub: assert if called.
static inline void kmemsysFreeComprResources(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 hwResId) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
}
#else //__nvoc_kern_mem_sys_h_disabled
#define kmemsysFreeComprResources(pGpu, pKernelMemorySystem, hwResId) kmemsysFreeComprResources_b3696a(pGpu, pKernelMemorySystem, hwResId)
#endif //__nvoc_kern_mem_sys_h_disabled

#define kmemsysFreeComprResources_HAL(pGpu, pKernelMemorySystem, hwResId) kmemsysFreeComprResources(pGpu, pKernelMemorySystem, hwResId)
743
// Returns whether a GPU-cache invalidate is needed when mapping; GV100 HAL
// implementation defined elsewhere.
NvBool kmemsysNeedInvalidateGpuCacheOnMap_GV100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvBool bIsVolatile, NvU32 aperture);


#ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub: assert and report "no invalidate needed".
static inline NvBool kmemsysNeedInvalidateGpuCacheOnMap(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvBool bIsVolatile, NvU32 aperture) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kern_mem_sys_h_disabled
#define kmemsysNeedInvalidateGpuCacheOnMap(pGpu, pKernelMemorySystem, bIsVolatile, aperture) kmemsysNeedInvalidateGpuCacheOnMap_GV100(pGpu, pKernelMemorySystem, bIsVolatile, aperture)
#endif //__nvoc_kern_mem_sys_h_disabled

#define kmemsysNeedInvalidateGpuCacheOnMap_HAL(pGpu, pKernelMemorySystem, bIsVolatile, aperture) kmemsysNeedInvalidateGpuCacheOnMap(pGpu, pKernelMemorySystem, bIsVolatile, aperture)
757
// Always-true variant: (0 == 0) is the generator's spelling of NV_TRUE.
static inline NvBool kmemsysCbcIsSafe_cbe027(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return ((NvBool)(0 == 0));
}


#ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub: assert and report not safe.
static inline NvBool kmemsysCbcIsSafe(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kern_mem_sys_h_disabled
#define kmemsysCbcIsSafe(pGpu, pKernelMemorySystem) kmemsysCbcIsSafe_cbe027(pGpu, pKernelMemorySystem)
#endif //__nvoc_kern_mem_sys_h_disabled

#define kmemsysCbcIsSafe_HAL(pGpu, pKernelMemorySystem) kmemsysCbcIsSafe(pGpu, pKernelMemorySystem)
773
// Reads ECC error counts into the two out-parameters; TU102 HAL
// implementation defined elsewhere.
void kmemsysGetEccCounts_TU102(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 *arg3, NvU32 *arg4);


#ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub: assert if called (out-params left untouched).
static inline void kmemsysGetEccCounts(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 *arg3, NvU32 *arg4) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
}
#else //__nvoc_kern_mem_sys_h_disabled
#define kmemsysGetEccCounts(pGpu, pKernelMemorySystem, arg3, arg4) kmemsysGetEccCounts_TU102(pGpu, pKernelMemorySystem, arg3, arg4)
#endif //__nvoc_kern_mem_sys_h_disabled

#define kmemsysGetEccCounts_HAL(pGpu, pKernelMemorySystem, arg3, arg4) kmemsysGetEccCounts(pGpu, pKernelMemorySystem, arg3, arg4)
786
// Returns the L2 ECC DED count register address for (fbpa, subp); TU102 HAL
// implementation defined elsewhere.
NvU32 kmemsysGetL2EccDedCountRegAddr_TU102(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 fbpa, NvU32 subp);


#ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub: assert and return 0.
static inline NvU32 kmemsysGetL2EccDedCountRegAddr(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 fbpa, NvU32 subp) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return 0;
}
#else //__nvoc_kern_mem_sys_h_disabled
#define kmemsysGetL2EccDedCountRegAddr(pGpu, pKernelMemorySystem, fbpa, subp) kmemsysGetL2EccDedCountRegAddr_TU102(pGpu, pKernelMemorySystem, fbpa, subp)
#endif //__nvoc_kern_mem_sys_h_disabled

#define kmemsysGetL2EccDedCountRegAddr_HAL(pGpu, pKernelMemorySystem, fbpa, subp) kmemsysGetL2EccDedCountRegAddr(pGpu, pKernelMemorySystem, fbpa, subp)
800
// Success-no-op variant of the XVE-reset preparation (generated stub).
static inline NV_STATUS kmemsysPrepareForXVEReset_56cd7a(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    return NV_OK;
}


#ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub: assert and report unsupported.
static inline NV_STATUS kmemsysPrepareForXVEReset(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_mem_sys_h_disabled
#define kmemsysPrepareForXVEReset(pGpu, pKernelMemorySystem) kmemsysPrepareForXVEReset_56cd7a(pGpu, pKernelMemorySystem)
#endif //__nvoc_kern_mem_sys_h_disabled

#define kmemsysPrepareForXVEReset_HAL(pGpu, pKernelMemorySystem) kmemsysPrepareForXVEReset(pGpu, pKernelMemorySystem)
816
// _IMPL prototypes: the concrete implementations the vtable slots are bound
// to for this class; defined in the non-generated kern_mem_sys source.
NV_STATUS kmemsysConstructEngine_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, ENGDESCRIPTOR arg3);

NV_STATUS kmemsysStatePreInitLocked_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);

NV_STATUS kmemsysStateInitLocked_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);

NV_STATUS kmemsysStatePreLoad_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 flags);

NV_STATUS kmemsysStatePostLoad_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 flags);

NV_STATUS kmemsysStateLoad_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 flags);

NV_STATUS kmemsysStatePreUnload_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 flags);

NV_STATUS kmemsysStateUnload_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 flags);

void kmemsysStateDestroy_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);

// FB NUMA info: GV100 HAL implementation, plus a success-no-op variant that
// leaves the out-parameters untouched.
NV_STATUS kmemsysGetFbNumaInfo_GV100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU64 *physAddr, NvU64 *rsvdPhysAddr, NvS32 *numaNodeId);

static inline NV_STATUS kmemsysGetFbNumaInfo_56cd7a(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU64 *physAddr, NvU64 *rsvdPhysAddr, NvS32 *numaNodeId) {
    return NV_OK;
}
840
// Usable FB size: chip-specific readers plus the kernel-RM getter.
NV_STATUS kmemsysReadUsableFbSize_GP102(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU64 *pFbSize);

NV_STATUS kmemsysReadUsableFbSize_GA102(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU64 *pFbSize);

NV_STATUS kmemsysGetUsableFbSize_KERNEL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU64 *pFbSize);

// Unsupported variant: NV_ASSERT_OR_RETURN_PRECOMP performs the return of
// NV_ERR_NOT_SUPPORTED, so control never falls off the end.
static inline NV_STATUS kmemsysGetUsableFbSize_5baef9(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU64 *pFbSize) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}
850
// Cache maintenance: chip-specific implementations.
NV_STATUS kmemsysCacheOp_GM200(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, PMEMORY_DESCRIPTOR arg3, FB_CACHE_MEMTYPE arg4, FB_CACHE_OP operation);

NV_STATUS kmemsysCacheOp_GH100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, PMEMORY_DESCRIPTOR arg3, FB_CACHE_MEMTYPE arg4, FB_CACHE_OP operation);

NV_STATUS kmemsysDoCacheOp_GM107(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 arg3, NvU32 arg4, NvU32 arg5, PRMTIMEOUT arg6);

NV_STATUS kmemsysDoCacheOp_GH100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 arg3, NvU32 arg4, NvU32 arg5, PRMTIMEOUT arg6);

// L2 sysmem-invalidate register: TU102 implementation plus an unsupported
// variant. NOTE(review): the stub returns -1 through an NvU32 return type,
// i.e. 0xFFFFFFFF, as the generated error sentinel.
NvU32 kmemsysReadL2SysmemInvalidateReg_TU102(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);

static inline NvU32 kmemsysReadL2SysmemInvalidateReg_68b109(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, -1);
}

void kmemsysWriteL2SysmemInvalidateReg_TU102(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 arg3);

// Unsupported variant: asserts; write is dropped.
static inline void kmemsysWriteL2SysmemInvalidateReg_f2d351(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 arg3) {
    NV_ASSERT_PRECOMP(0);
}

// L2 peermem-invalidate register: same pattern as the sysmem pair above.
NvU32 kmemsysReadL2PeermemInvalidateReg_TU102(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);

static inline NvU32 kmemsysReadL2PeermemInvalidateReg_68b109(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, -1);
}

void kmemsysWriteL2PeermemInvalidateReg_TU102(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 arg3);

static inline void kmemsysWriteL2PeermemInvalidateReg_f2d351(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 arg3) {
    NV_ASSERT_PRECOMP(0);
}
882
kmemsysInitFlushSysmemBuffer_56cd7a(OBJGPU * pGpu,struct KernelMemorySystem * pKernelMemorySystem)883 static inline NV_STATUS kmemsysInitFlushSysmemBuffer_56cd7a(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
884 return NV_OK;
885 }
886
887 NV_STATUS kmemsysInitFlushSysmemBuffer_GM107(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
888
889 NV_STATUS kmemsysInitFlushSysmemBuffer_GA100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
890
/*!
 * HAL no-op variant: nothing to program for the sysmem flush buffer in this
 * configuration.
 */
static inline void kmemsysProgramSysmemFlushBuffer_b3696a(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    (void)pGpu;                 // unused in the no-op variant
    (void)pKernelMemorySystem;  // unused in the no-op variant
}
894
895 void kmemsysProgramSysmemFlushBuffer_GM107(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
896
897 void kmemsysProgramSysmemFlushBuffer_GA100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
898
899 void kmemsysProgramSysmemFlushBuffer_GH100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
900
901 void kmemsysProgramSysmemFlushBuffer_GB100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
902
kmemsysGetFlushSysmemBufferAddrShift_4a4dee(OBJGPU * pGpu,struct KernelMemorySystem * pKernelMemorySystem)903 static inline NvU32 kmemsysGetFlushSysmemBufferAddrShift_4a4dee(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
904 return 0;
905 }
906
907 NvU32 kmemsysGetFlushSysmemBufferAddrShift_GM107(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
908
909 NvU32 kmemsysGetFlushSysmemBufferAddrShift_GB100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
910
kmemsysIsPagePLCable_cbe027(OBJGPU * pGpu,struct KernelMemorySystem * KernelMemorySystem,NvU64 physAddr,NvU64 pageSize)911 static inline NvBool kmemsysIsPagePLCable_cbe027(OBJGPU *pGpu, struct KernelMemorySystem *KernelMemorySystem, NvU64 physAddr, NvU64 pageSize) {
912 return ((NvBool)(0 == 0));
913 }
914
915 NvBool kmemsysIsPagePLCable_GA100(OBJGPU *pGpu, struct KernelMemorySystem *KernelMemorySystem, NvU64 physAddr, NvU64 pageSize);
916
917 NvBool kmemsysIsPagePLCable_GA102(OBJGPU *pGpu, struct KernelMemorySystem *KernelMemorySystem, NvU64 physAddr, NvU64 pageSize);
918
// HAL stub variant: not expected to be reached in this configuration;
// asserts and returns TRUE-equivalent ((NvBool)(0 == 0)) as the fallback.
static inline NvBool kmemsysIsPagePLCable_510167(OBJGPU *pGpu, struct KernelMemorySystem *KernelMemorySystem, NvU64 physAddr, NvU64 pageSize) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((NvBool)(0 == 0)));
}
922
kmemsysReadMIGMemoryCfg_46f6a7(OBJGPU * pGpu,struct KernelMemorySystem * pKernelMemorySystem)923 static inline NV_STATUS kmemsysReadMIGMemoryCfg_46f6a7(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
924 return NV_ERR_NOT_SUPPORTED;
925 }
926
927 NV_STATUS kmemsysReadMIGMemoryCfg_GA100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
928
kmemsysInitMIGMemoryPartitionTable_56cd7a(OBJGPU * pGpu,struct KernelMemorySystem * pKernelMemorySystem)929 static inline NV_STATUS kmemsysInitMIGMemoryPartitionTable_56cd7a(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
930 return NV_OK;
931 }
932
933 NV_STATUS kmemsysInitMIGMemoryPartitionTable_GA100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
934
935 NV_STATUS kmemsysSwizzIdToVmmuSegmentsRange_GA100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 swizzId, NvU32 vmmuSegmentSize, NvU32 totalVmmuSegments);
936
937 NV_STATUS kmemsysSwizzIdToVmmuSegmentsRange_GH100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 swizzId, NvU32 vmmuSegmentSize, NvU32 totalVmmuSegments);
938
939 NV_STATUS kmemsysNumaAddMemory_GH100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 swizzId, NvU64 offset, NvU64 size, NvS32 *numaNodeId);
940
kmemsysNumaAddMemory_56cd7a(OBJGPU * pGpu,struct KernelMemorySystem * pKernelMemorySystem,NvU32 swizzId,NvU64 offset,NvU64 size,NvS32 * numaNodeId)941 static inline NV_STATUS kmemsysNumaAddMemory_56cd7a(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 swizzId, NvU64 offset, NvU64 size, NvS32 *numaNodeId) {
942 return NV_OK;
943 }
944
945 void kmemsysNumaRemoveMemory_GH100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 swizzId);
946
/*!
 * HAL no-op variant: there is no NUMA memory to remove for this
 * configuration.
 */
static inline void kmemsysNumaRemoveMemory_b3696a(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 swizzId) {
    (void)pGpu;
    (void)pKernelMemorySystem;
    (void)swizzId;
}
950
951 void kmemsysNumaRemoveAllMemory_GH100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
952
/*!
 * HAL no-op variant: there is no NUMA memory to tear down for this
 * configuration.
 */
static inline void kmemsysNumaRemoveAllMemory_b3696a(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    (void)pGpu;
    (void)pKernelMemorySystem;
}
956
kmemsysPopulateMIGGPUInstanceMemConfig_56cd7a(OBJGPU * pGpu,struct KernelMemorySystem * pKernelMemorySystem)957 static inline NV_STATUS kmemsysPopulateMIGGPUInstanceMemConfig_56cd7a(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
958 return NV_OK;
959 }
960
961 NV_STATUS kmemsysPopulateMIGGPUInstanceMemConfig_KERNEL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
962
kmemsysSetupAllAtsPeers_46f6a7(OBJGPU * pGpu,struct KernelMemorySystem * pKernelMemorySystem)963 static inline NV_STATUS kmemsysSetupAllAtsPeers_46f6a7(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
964 return NV_ERR_NOT_SUPPORTED;
965 }
966
967 NV_STATUS kmemsysSetupAllAtsPeers_GV100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
968
/*!
 * HAL no-op variant: there are no ATS peers to remove for this
 * configuration.
 */
static inline void kmemsysRemoveAllAtsPeers_b3696a(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    (void)pGpu;
    (void)pKernelMemorySystem;
}
972
973 void kmemsysRemoveAllAtsPeers_GV100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
974
975 NvBool kmemsysAssertFbAckTimeoutPending_GH100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
976
977 NvBool kmemsysAssertFbAckTimeoutPending_GB100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
978
kmemsysAssertFbAckTimeoutPending_491d52(OBJGPU * pGpu,struct KernelMemorySystem * pKernelMemorySystem)979 static inline NvBool kmemsysAssertFbAckTimeoutPending_491d52(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
980 return ((NvBool)(0 != 0));
981 }
982
983 NvU32 kmemsysGetMaxFbpas_TU102(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
984
985 NvU32 kmemsysGetMaxFbpas_GA100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
986
kmemsysGetMaxFbpas_4a4dee(OBJGPU * pGpu,struct KernelMemorySystem * pKernelMemorySystem)987 static inline NvU32 kmemsysGetMaxFbpas_4a4dee(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
988 return 0;
989 }
990
991 NvU32 kmemsysGetEccDedCountSize_TU102(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
992
993 NvU32 kmemsysGetEccDedCountSize_GH100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
994
995 NvU32 kmemsysGetEccDedCountRegAddr_TU102(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 fbpa, NvU32 subp);
996
997 NvU32 kmemsysGetEccDedCountRegAddr_GH100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 fbpa, NvU32 subp);
998
999 NvU16 kmemsysGetMaximumBlacklistPages_GM107(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
1000
1001 NvU16 kmemsysGetMaximumBlacklistPages_GA100(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
1002
kmemsysGetFbInfos_ac1694(OBJGPU * arg1,struct KernelMemorySystem * arg2,struct RsClient * arg3,Device * arg4,NvHandle hSubdevice,NV2080_CTRL_FB_GET_INFO_V2_PARAMS * pParams,NvU64 * pFbInfoListIndicesUnset)1003 static inline NV_STATUS kmemsysGetFbInfos_ac1694(OBJGPU *arg1, struct KernelMemorySystem *arg2, struct RsClient *arg3, Device *arg4, NvHandle hSubdevice, NV2080_CTRL_FB_GET_INFO_V2_PARAMS *pParams, NvU64 *pFbInfoListIndicesUnset) {
1004 return NV_OK;
1005 }
1006
1007 NV_STATUS kmemsysGetFbInfos_VF(OBJGPU *arg1, struct KernelMemorySystem *arg2, struct RsClient *arg3, Device *arg4, NvHandle hSubdevice, NV2080_CTRL_FB_GET_INFO_V2_PARAMS *pParams, NvU64 *pFbInfoListIndicesUnset);
1008
kmemsysIsL2CleanFbPull(struct KernelMemorySystem * pKernelMemorySystem)1009 static inline NvBool kmemsysIsL2CleanFbPull(struct KernelMemorySystem *pKernelMemorySystem) {
1010 return pKernelMemorySystem->bL2CleanFbPull;
1011 }
1012
1013 void kmemsysDestruct_IMPL(struct KernelMemorySystem *pKernelMemorySystem);
1014
1015 #define __nvoc_kmemsysDestruct(pKernelMemorySystem) kmemsysDestruct_IMPL(pKernelMemorySystem)
1016 NV_STATUS kmemsysEnsureSysmemFlushBufferInitialized_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
1017
1018 #ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub (__nvoc_kern_mem_sys_h_disabled): KernelMemorySystem
// is compiled out, so this entry asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kmemsysEnsureSysmemFlushBufferInitialized(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
1023 #else //__nvoc_kern_mem_sys_h_disabled
1024 #define kmemsysEnsureSysmemFlushBufferInitialized(pGpu, pKernelMemorySystem) kmemsysEnsureSysmemFlushBufferInitialized_IMPL(pGpu, pKernelMemorySystem)
1025 #endif //__nvoc_kern_mem_sys_h_disabled
1026
1027 const MEMORY_SYSTEM_STATIC_CONFIG *kmemsysGetStaticConfig_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
1028
1029 #ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub (__nvoc_kern_mem_sys_h_disabled): asserts and returns
// NULL since no static memory-system config exists in a disabled build.
static inline const MEMORY_SYSTEM_STATIC_CONFIG *kmemsysGetStaticConfig(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NULL;
}
1034 #else //__nvoc_kern_mem_sys_h_disabled
1035 #define kmemsysGetStaticConfig(pGpu, pKernelMemorySystem) kmemsysGetStaticConfig_IMPL(pGpu, pKernelMemorySystem)
1036 #endif //__nvoc_kern_mem_sys_h_disabled
1037
1038 NV_STATUS kmemsysSetupCoherentCpuLink_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvBool bFlush);
1039
1040 #ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub (__nvoc_kern_mem_sys_h_disabled): asserts and reports
// NV_ERR_NOT_SUPPORTED; bFlush is ignored.
static inline NV_STATUS kmemsysSetupCoherentCpuLink(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvBool bFlush) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
1045 #else //__nvoc_kern_mem_sys_h_disabled
1046 #define kmemsysSetupCoherentCpuLink(pGpu, pKernelMemorySystem, bFlush) kmemsysSetupCoherentCpuLink_IMPL(pGpu, pKernelMemorySystem, bFlush)
1047 #endif //__nvoc_kern_mem_sys_h_disabled
1048
1049 void kmemsysTeardownCoherentCpuLink_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvBool bFlush);
1050
1051 #ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub (__nvoc_kern_mem_sys_h_disabled): asserts only; no
// value to return for this void entry.
static inline void kmemsysTeardownCoherentCpuLink(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvBool bFlush) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
}
1055 #else //__nvoc_kern_mem_sys_h_disabled
1056 #define kmemsysTeardownCoherentCpuLink(pGpu, pKernelMemorySystem, bFlush) kmemsysTeardownCoherentCpuLink_IMPL(pGpu, pKernelMemorySystem, bFlush)
1057 #endif //__nvoc_kern_mem_sys_h_disabled
1058
1059 NV_STATUS kmemsysSendL2InvalidateEvict_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 flags);
1060
1061 #ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub (__nvoc_kern_mem_sys_h_disabled): asserts and reports
// NV_ERR_NOT_SUPPORTED; flags are ignored.
static inline NV_STATUS kmemsysSendL2InvalidateEvict(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 flags) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
1066 #else //__nvoc_kern_mem_sys_h_disabled
1067 #define kmemsysSendL2InvalidateEvict(pGpu, pKernelMemorySystem, flags) kmemsysSendL2InvalidateEvict_IMPL(pGpu, pKernelMemorySystem, flags)
1068 #endif //__nvoc_kern_mem_sys_h_disabled
1069
1070 NV_STATUS kmemsysSendFlushL2AllRamsAndCaches_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem);
1071
1072 #ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub (__nvoc_kern_mem_sys_h_disabled): asserts and reports
// NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kmemsysSendFlushL2AllRamsAndCaches(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
1077 #else //__nvoc_kern_mem_sys_h_disabled
1078 #define kmemsysSendFlushL2AllRamsAndCaches(pGpu, pKernelMemorySystem) kmemsysSendFlushL2AllRamsAndCaches_IMPL(pGpu, pKernelMemorySystem)
1079 #endif //__nvoc_kern_mem_sys_h_disabled
1080
1081 NV_STATUS kmemsysSwizzIdToMIGMemSize_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 swizzId, struct NV_RANGE totalRange, NvU32 *pPartitionSizeFlag, NvU64 *pSizeInBytes);
1082
1083 #ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub (__nvoc_kern_mem_sys_h_disabled): asserts and reports
// NV_ERR_NOT_SUPPORTED; the out-params are left untouched.
static inline NV_STATUS kmemsysSwizzIdToMIGMemSize(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 swizzId, struct NV_RANGE totalRange, NvU32 *pPartitionSizeFlag, NvU64 *pSizeInBytes) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
1088 #else //__nvoc_kern_mem_sys_h_disabled
1089 #define kmemsysSwizzIdToMIGMemSize(pGpu, pKernelMemorySystem, swizzId, totalRange, pPartitionSizeFlag, pSizeInBytes) kmemsysSwizzIdToMIGMemSize_IMPL(pGpu, pKernelMemorySystem, swizzId, totalRange, pPartitionSizeFlag, pSizeInBytes)
1090 #endif //__nvoc_kern_mem_sys_h_disabled
1091
1092 NV_STATUS kmemsysSwizzIdToMIGMemRange_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 swizzId, struct NV_RANGE totalRange, struct NV_RANGE *pAddrRange);
1093
1094 #ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub (__nvoc_kern_mem_sys_h_disabled): asserts and reports
// NV_ERR_NOT_SUPPORTED; *pAddrRange is left untouched.
static inline NV_STATUS kmemsysSwizzIdToMIGMemRange(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 swizzId, struct NV_RANGE totalRange, struct NV_RANGE *pAddrRange) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
1099 #else //__nvoc_kern_mem_sys_h_disabled
1100 #define kmemsysSwizzIdToMIGMemRange(pGpu, pKernelMemorySystem, swizzId, totalRange, pAddrRange) kmemsysSwizzIdToMIGMemRange_IMPL(pGpu, pKernelMemorySystem, swizzId, totalRange, pAddrRange)
1101 #endif //__nvoc_kern_mem_sys_h_disabled
1102
1103 NV_STATUS kmemsysGetMIGGPUInstanceMemInfo_IMPL(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 swizzId, struct NV_RANGE *pAddrRange);
1104
1105 #ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub (__nvoc_kern_mem_sys_h_disabled): asserts and reports
// NV_ERR_NOT_SUPPORTED; *pAddrRange is left untouched.
static inline NV_STATUS kmemsysGetMIGGPUInstanceMemInfo(OBJGPU *pGpu, struct KernelMemorySystem *pKernelMemorySystem, NvU32 swizzId, struct NV_RANGE *pAddrRange) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
1110 #else //__nvoc_kern_mem_sys_h_disabled
1111 #define kmemsysGetMIGGPUInstanceMemInfo(pGpu, pKernelMemorySystem, swizzId, pAddrRange) kmemsysGetMIGGPUInstanceMemInfo_IMPL(pGpu, pKernelMemorySystem, swizzId, pAddrRange)
1112 #endif //__nvoc_kern_mem_sys_h_disabled
1113
1114 NV_STATUS kmemsysGetMIGGPUInstanceMemConfigFromSwizzId_IMPL(OBJGPU *arg1, struct KernelMemorySystem *arg2, NvU32 swizzId, const MIG_GPU_INSTANCE_MEMORY_CONFIG **arg4);
1115
1116 #ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub (__nvoc_kern_mem_sys_h_disabled): asserts and reports
// NV_ERR_NOT_SUPPORTED; the config out-pointer (arg4) is left untouched.
static inline NV_STATUS kmemsysGetMIGGPUInstanceMemConfigFromSwizzId(OBJGPU *arg1, struct KernelMemorySystem *arg2, NvU32 swizzId, const MIG_GPU_INSTANCE_MEMORY_CONFIG **arg4) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
1121 #else //__nvoc_kern_mem_sys_h_disabled
1122 #define kmemsysGetMIGGPUInstanceMemConfigFromSwizzId(arg1, arg2, swizzId, arg4) kmemsysGetMIGGPUInstanceMemConfigFromSwizzId_IMPL(arg1, arg2, swizzId, arg4)
1123 #endif //__nvoc_kern_mem_sys_h_disabled
1124
1125 NV_STATUS kmemsysInitMIGGPUInstanceMemConfigForSwizzId_IMPL(OBJGPU *arg1, struct KernelMemorySystem *arg2, NvU32 swizzId, NvU64 startingVmmuSegment, NvU64 memSizeInVmmuSegment);
1126
1127 #ifdef __nvoc_kern_mem_sys_h_disabled
// Disabled-build stub (__nvoc_kern_mem_sys_h_disabled): asserts and reports
// NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kmemsysInitMIGGPUInstanceMemConfigForSwizzId(OBJGPU *arg1, struct KernelMemorySystem *arg2, NvU32 swizzId, NvU64 startingVmmuSegment, NvU64 memSizeInVmmuSegment) {
    NV_ASSERT_FAILED_PRECOMP("KernelMemorySystem was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
1132 #else //__nvoc_kern_mem_sys_h_disabled
1133 #define kmemsysInitMIGGPUInstanceMemConfigForSwizzId(arg1, arg2, swizzId, startingVmmuSegment, memSizeInVmmuSegment) kmemsysInitMIGGPUInstanceMemConfigForSwizzId_IMPL(arg1, arg2, swizzId, startingVmmuSegment, memSizeInVmmuSegment)
1134 #endif //__nvoc_kern_mem_sys_h_disabled
1135
1136 #undef PRIVATE_FIELD
1137
1138
/*!
 * True iff [offset, offset + length) lies entirely within the NUMA-onlined
 * window of the coherent-CPU FB mapping (base + numaOnlineBase, size
 * numaOnlineSize). All macro arguments are now fully parenthesized so that
 * expression arguments expand safely (CERT PRE01-C); previously
 * `(NvU64)offset + length` applied the cast/comparison to only part of an
 * expression argument.
 */
#define IS_COHERENT_CPU_ATS_OFFSET(kmemsys, offset, length)                                    \
    ((kmemsys) &&                                                                              \
     ((offset) >= ((kmemsys)->coherentCpuFbBase + (kmemsys)->numaOnlineBase)) &&               \
     (((NvU64)(offset) + (length)) <=                                                          \
      ((kmemsys)->coherentCpuFbBase + (kmemsys)->numaOnlineBase + (kmemsys)->numaOnlineSize)))
1142
/*!
 * True iff the NUMA window is absent (numaOnlineSize == 0) and
 * [offset, offset + length) lies entirely inside the coherent-CPU FB range
 * [coherentCpuFbBase, coherentCpuFbEnd]. All macro arguments are now fully
 * parenthesized so expression arguments expand safely (CERT PRE01-C).
 */
#define IS_COHERENT_FB_OFFSET(kmemsys, offset, length)                  \
    ((kmemsys) &&                                                       \
     ((kmemsys)->numaOnlineSize == 0) &&                                \
     ((offset) >= ((kmemsys)->coherentCpuFbBase)) &&                    \
     (((NvU64)(offset) + (length)) <= ((kmemsys)->coherentCpuFbEnd)))
1147
1148 #endif // KERN_MEM_SYS_H
1149
1150 #ifdef __cplusplus
1151 } // extern "C"
1152 #endif
1153
1154 #endif // _G_KERN_MEM_SYS_NVOC_H_
1155