1 #ifndef _G_KERNEL_MIG_MANAGER_NVOC_H_
2 #define _G_KERNEL_MIG_MANAGER_NVOC_H_
3 #include "nvoc/runtime.h"
4 
5 #ifdef __cplusplus
6 extern "C" {
7 #endif
8 
9 /*
10  * SPDX-FileCopyrightText: Copyright (c) 2021-2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
11  * SPDX-License-Identifier: MIT
12  *
13  * Permission is hereby granted, free of charge, to any person obtaining a
14  * copy of this software and associated documentation files (the "Software"),
15  * to deal in the Software without restriction, including without limitation
16  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
17  * and/or sell copies of the Software, and to permit persons to whom the
18  * Software is furnished to do so, subject to the following conditions:
19  *
20  * The above copyright notice and this permission notice shall be included in
21  * all copies or substantial portions of the Software.
22  *
23  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
26  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
28  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
29  * DEALINGS IN THE SOFTWARE.
30  */
31 
32 #include "g_kernel_mig_manager_nvoc.h"
33 
34 #ifndef KERNEL_MIG_MANAGER_H
35 #define KERNEL_MIG_MANAGER_H
36 
37 #include "core/core.h"
38 #include "gpu/eng_state.h"
39 #include "gpu/gpu.h"
40 #include "gpu_mgr/gpu_mgr.h"
41 #include "kernel/gpu/gr/kernel_graphics_manager.h"
42 #include "kernel/gpu_mgr/gpu_mgr.h"
43 #include "kernel/gpu/mmu/kern_gmmu.h"
44 #include "kernel/gpu/nvbitmask.h"
45 
46 #include "ctrl/ctrlc637.h"
47 
48 typedef struct KERNEL_MIG_GPU_INSTANCE KERNEL_MIG_GPU_INSTANCE;
49 
50 // Forward declarations of opaque types
51 typedef struct KERNEL_MIG_MANAGER_PRIVATE_DATA KERNEL_MIG_MANAGER_PRIVATE_DATA;
52 typedef struct MIG_GPU_INSTANCE MIG_GPU_INSTANCE;
53 
54 #define  IS_MIG_ENABLED(pGpu) (((pGpu) != NULL) && (GPU_GET_KERNEL_MIG_MANAGER(pGpu) != NULL) && \
55                                kmigmgrIsMIGEnabled((pGpu), GPU_GET_KERNEL_MIG_MANAGER(pGpu)))
56 #define  IS_MIG_IN_USE(pGpu)  (((pGpu) != NULL) && (GPU_GET_KERNEL_MIG_MANAGER(pGpu) != NULL) && \
57                                kmigmgrIsMIGGpuInstancingEnabled((pGpu), GPU_GET_KERNEL_MIG_MANAGER(pGpu)))
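
/*
 * Usage sketch (illustrative only, not part of the generated interface):
 * a caller-side guard before taking a MIG-specific path. The surrounding
 * function name is hypothetical; IS_MIG_IN_USE and OBJGPU come from this
 * header and its includes.
 *
 *     void someMigAwarePath(OBJGPU *pGpu)
 *     {
 *         if (!IS_MIG_IN_USE(pGpu))
 *         {
 *             // No GPU instances exist; fall back to the non-MIG path.
 *             return;
 *         }
 *         // MIG partitioning is active; per-instance handling goes here.
 *     }
 */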
58 
59 #define FOR_EACH_VALID_GPU_INSTANCE(pGpu, pKernelMIGManager, pLocal)                 \
60     {                                                                                \
61         NvU32 i;                                                                     \
62         for (i = 0; i < KMIGMGR_MAX_GPU_INSTANCES; ++i)                              \
63         {                                                                            \
64             (pLocal) = kmigmgrGetMIGGpuInstanceSlot((pGpu), (pKernelMIGManager), i); \
65             if (((pLocal) == NULL) || !(pLocal)->bValid)                             \
66                 continue;
67 
68 #define FOR_EACH_VALID_GPU_INSTANCE_END()                                           \
69         }                                                                           \
70     }
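
/*
 * Usage sketch (illustrative only): iterating every valid GPU instance with
 * the macro pair above. The loop body and local variable names are
 * hypothetical; the third macro argument must be a KERNEL_MIG_GPU_INSTANCE
 * pointer, which the macro assigns and validity-checks for each slot.
 *
 *     KERNEL_MIG_GPU_INSTANCE *pKernelMIGGpuInstance;
 *     FOR_EACH_VALID_GPU_INSTANCE(pGpu, pKernelMIGManager, pKernelMIGGpuInstance)
 *     {
 *         // Only instances with bValid == NV_TRUE reach this point.
 *         NV_PRINTF(LEVEL_INFO, "swizzId %u in use\n", pKernelMIGGpuInstance->swizzId);
 *     }
 *     FOR_EACH_VALID_GPU_INSTANCE_END()
 */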
71 
72 #define KMIGMGR_SWIZZID_INVALID             0xFFFFFFFF
73 #define KMIGMGR_MAX_GPU_SWIZZID             15
74 #define KMIGMGR_MAX_GPU_INSTANCES           GPUMGR_MAX_GPU_INSTANCES
75 #define KMIGMGR_MAX_COMPUTE_INSTANCES       GPUMGR_MAX_COMPUTE_INSTANCES
76 #define KMIGMGR_COMPUTE_INSTANCE_ID_INVALID 0xFFFFFFFF
77 #define KMIGMGR_COMPUTE_SIZE_INVALID        NV2080_CTRL_GPU_PARTITION_FLAG_COMPUTE_SIZE__SIZE
78 #define KMIGMGR_MAX_GPU_CTSID               21
79 #define KMIGMGR_CTSID_INVALID               0xFFFFFFFFUL
80 #define KMIGMGR_SPAN_OFFSET_INVALID         KMIGMGR_CTSID_INVALID
81 
82 #define KMIGMGR_INSTANCE_ATTRIBUTION_ID_INVALID            \
83     ((KMIGMGR_MAX_GPU_SWIZZID * KMIGMGR_MAX_GPU_SWIZZID) + \
84      KMIGMGR_MAX_COMPUTE_INSTANCES)
85 
86 MAKE_BITVECTOR(GFID_BIT_VECTOR, VMMU_MAX_GFID);
87 
88 typedef struct KMIGMGR_INSTANCE_HANDLES
89 {
90     /*!
91      * Client handle to make calls into this instance
92      */
93     NvHandle hClient;
94 
95     /*!
96      * Device handle to make calls into this instance
97      */
98     NvHandle hDevice;
99 
100     /*!
101      * Subdevice handle to make calls into this instance
102      */
103     NvHandle hSubdevice;
104 
105     /*!
106      * Subscription handle to make calls into this instance
107      */
108     NvHandle hSubscription;
109 } KMIGMGR_INSTANCE_HANDLES;
110 
111 typedef struct MIG_RESOURCE_ALLOCATION
112 {
113     /*!
114      * Logical GPC-IDs which are associated with this instance
115      * The current assumption is that GPCs within an instance are always
116      * physically contiguous, so a start/count pair could be used instead to
117      * save some memory; however, that would enforce a contiguity restriction
118      * which may not hold in the future.
119      */
120     NvU32 gpcIds[KGRMGR_MAX_GPC];
121 
122     /*!
123      * Number of GPCs associated with this instance
124      */
125     NvU32 gpcCount;
126 
127     /*!
128      * Number of GFX GPCs associated with this instance. This should be a subset of the GPCs included in gpcCount.
129      */
130     NvU32 gfxGpcCount;
131 
132     /*!
133      * VEID start offset for this instance
134      */
135     NvU32 veidOffset;
136 
137     /*!
138      * Number of VEIDs associated with this instance
139      */
140     NvU32 veidCount;
141 
142     /*!
143      * Bitvector of partitionable engines associated with this instance.
144      */
145     ENGTYPE_BIT_VECTOR engines;
146 
147     /*!
148      * Bitvector of local engine IDs associated with this instance.
149      */
150     ENGTYPE_BIT_VECTOR localEngines;
151 
152     /*!
153      * Virtualized GPC Count
154      */
155     NvU32 virtualGpcCount;
156 
157     /*!
158      * Number of SMs
159      */
160     NvU32 smCount;
161 } MIG_RESOURCE_ALLOCATION;
162 
163 typedef struct MIG_COMPUTE_INSTANCE
164 {
165     /*!
166      * Resource allocated for this instance
167      */
168     MIG_RESOURCE_ALLOCATION resourceAllocation;
169 
170     /*!
171      * States that this is a valid compute instance
172      */
173     NvBool bValid;
174 
175     /*!
176      * Flags indicating which engines (if any) are shared across multiple compute
177      * instances. Bit positions in this flag correspond to
178      * NVC637_CTRL_EXEC_PARTITIONS_SHARED_FLAG_*
179      */
180     NvU32 sharedEngFlag;
181 
182     /*!
183      * Compute instance ID
184      */
185     NvU32 id;
186 
187     /*!
188      * Shared object to track instance reference count
189      */
190     struct RsShared *pShare;
191 
192     /*!
193      * Opaque pointer to os-specific capabilities
194      */
195     OS_RM_CAPS *pOsRmCaps;
196 
197     /*!
198      * Compute instance UUID
199      */
200     NvUuid uuid;
201 
202     /*!
203      * Handles for RPCs into this instance
204      */
205     KMIGMGR_INSTANCE_HANDLES instanceHandles;
206 
207     /*!
208      * Span start of this compute instance indicating the "position" of the
209      * instance within a GPU instance's view. For non-CTS ID enabled chips,
210      * this corresponds to the start of a VEID segment. For CTS-ID chips, this
211      * corresponds to the offset from the first CTS ID of a given profile size.
212      */
213     NvU32 spanStart;
214 
215     /*!
216      * Compute profile size associated with this MIG compute instance.
217      * Used to associate the instance with a given compute profile, since a
218      * CTS ID may not have been assigned.
219      */
220     NvU32 computeSize;
221 } MIG_COMPUTE_INSTANCE;
222 
223 /*!
224  * @brief Situational params for compute instance creation API
225  *
226  * This structure comes with two specializations:
227  *  TYPE_REQUEST
228  *      Parameter refers to request data passed in via EXEC_PARTITIONS_CREATE ctrl
229  *      call. All resources claimed by new compute instance are chosen via allocator,
230  *      and the API may create multiple compute instances.
231  *  TYPE_RESTORE
232  *      Parameter refers to saved compute instance data. Most resources claimed by new
233  *      compute instance are determined by the save data, and others are claimed via
234  *      allocator.
235  *  requestFlags
236  *      TYPE_REQUEST_WITH_IDS
237  *          Parameter refers to request data passed in via EXEC_PARTITIONS_CREATE ctrl
238  *          call. All resources claimed by the new instance are chosen via allocator
239  *          unless the _AT_SPAN flag is also specified.
240  *          RM also tries to allocate the instance with the compute instance ID
241  *          requested by the user. This flag is only supported on vGPU-enabled RM builds
242  *          and will be removed when the vGPU plugin implements virtualized compute
243  *          instance ID support. (bug 2938187)
244  *      TYPE_REQUEST_AT_SPAN
245  *          Parameter refers to request data passed in via EXEC_PARTITIONS_CREATE ctrl
246  *          call. All resources claimed by the new instance are attempted to be claimed
247  *          by the RM allocator starting at the specified resource span.
248  */
249 typedef struct KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS
250 {
251     enum
252     {
253         KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS_TYPE_REQUEST,
254         KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS_TYPE_RESTORE
255     } type;
256     union
257     {
258         struct
259         {
260             NvU32 count;
261             NVC637_CTRL_EXEC_PARTITIONS_INFO *pReqComputeInstanceInfo;
262             NvU32 requestFlags;
263         } request;
264         struct
265         {
266             struct GPUMGR_SAVE_COMPUTE_INSTANCE *pComputeInstanceSave;
267         } restore;
268     } inst;
269 } KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS;
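
/*
 * Illustrative sketch (not generated code): how a caller might populate the
 * TYPE_REQUEST specialization described above before passing it to
 * kmigmgrCreateComputeInstances(). The local variables and the single-entry
 * request are hypothetical.
 *
 *     NVC637_CTRL_EXEC_PARTITIONS_INFO ciInfo = {0};   // request data from the ctrl call
 *     KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS params = {0};
 *
 *     params.type = KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS_TYPE_REQUEST;
 *     params.inst.request.count = 1;
 *     params.inst.request.pReqComputeInstanceInfo = &ciInfo;
 *     params.inst.request.requestFlags = 0;            // no _WITH_IDS / _AT_SPAN behavior
 */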
270 
271 typedef struct KMIGMGR_CONFIGURE_INSTANCE_PARAMS
272 {
273     NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE profile;
274     NvU32 ctsId;
275     NvU32 veidSpanStart;
276 } KMIGMGR_CONFIGURE_INSTANCE_REQUEST;
277 
278 typedef struct KERNEL_MIG_GPU_INSTANCE
279 {
280     /*! Structure containing GPU instance profile */
281     const NV2080_CTRL_INTERNAL_MIGMGR_PROFILE_INFO *pProfile;
282 
283     /*!
284      * Resource allocated for this instance
285      */
286     MIG_RESOURCE_ALLOCATION resourceAllocation;
287 
288     /*!
289      * Mask of physical engines in this GPU instance which are assigned exclusively
290      * to some compute instance. Indexed via RM_ENGINE_TYPE_*
291      */
292     ENGTYPE_BIT_VECTOR exclusiveEngMask;
293 
294     /*!
295      * Mask of physical engines in this GPU instance which are assigned to at least
296      * one compute instance, but may be assigned to others.
297      * Indexed via RM_ENGINE_TYPE_*
298      */
299     ENGTYPE_BIT_VECTOR sharedEngMask;
300 
301     /*!
302      * Compute instance info.
303      */
304     MIG_COMPUTE_INSTANCE MIGComputeInstance[KMIGMGR_MAX_COMPUTE_INSTANCES];
305 
306     /*!
307      * Bitvector of GFIDs associated with this instance.
308      */
309     GFID_BIT_VECTOR gfidMap;
310 
311     /*!
312      * GPU instance ID
313      */
314     NvU32 swizzId;
315 
316     /*!
317      * Validated user-provided instance flags - NV2080_CTRL_GPU_PARTITION_FLAG_*
318      */
319     NvU32 partitionFlag;
320 
321     /*!
322      * Memory handle associated with partitioned memory
323      */
324     NvHandle hMemory;
325 
326     /*!
327      * Shared object to track instance reference count
328      */
329     struct RsShared *pShare;
330 
331     /*!
332      * Heap used for managing instance's memory
333      */
334     struct Heap *pMemoryPartitionHeap;
335 
336     /*!
337      * States that this instance is valid
338      */
339     NvBool bValid;
340 
341     /*!
342      * Indicates that the GPU instance scrubber is initialized and should be
343      * accounted for / ignored in the instance refcount when determining
344      * whether or not an instance can be destroyed.
345      */
346     NvBool bMemoryPartitionScrubberInitialized;
347 
348     /*!
349      * Physical memory address range for this instance.
350      */
351     NV_RANGE memRange;
352 
353     /*!
354      * Memory pool for client page table allocations
355      */
356     RM_POOL_ALLOC_MEM_RESERVE_INFO *pPageTableMemPool;
357 
358     /*!
359      * Physical MIG GPU Instance info for this instance
360      */
361     MIG_GPU_INSTANCE *pMIGGpuInstance;
362 
363     /*!
364      * Mask of runlistIds for engines that belong to this instance
365      */
366     NvU64 runlistIdMask;
367 
368     /*!
369      * Opaque pointer to os-specific capabilities
370      */
371     OS_RM_CAPS *pOsRmCaps;
372 
373     /*!
374      * Handles for RPCs into this instance
375      */
376     KMIGMGR_INSTANCE_HANDLES instanceHandles;
377 
378     /*!
379      * Mask of CTS IDs in use
380      */
381     NvU64 ctsIdsInUseMask;
382 
383     /*!
384      * GR to CTS ID mapping
385      */
386     NvU32 grCtsIdMap[KMIGMGR_MAX_COMPUTE_INSTANCES];
387 
388     /*!
389      * Mask tracking which compute spans are currently in-use
390      */
391     NvU32 spanInUseMask;
392 
393     /*!
394      * GPU Instance UUID
395      */
396     NvUuid uuid;
397 } KERNEL_MIG_GPU_INSTANCE;
398 
399 /*!
400  * @brief Situational params for GPU instance creation API
401  *
402  * This structure comes with two specializations:
403  *  TYPE_REQUEST
404  *      Parameter refers to request data passed in via SET_PARTITIONS ctrl
405  *      call. All resources claimed by new GPU instance are chosen via allocator.
406  *  TYPE_RESTORE
407  *      Parameter refers to saved GPU instance data. Most resources claimed by new
408  *      GPU instance are determined by the save data, and others are claimed via
409  *      allocator.
410  */
411 typedef struct KMIGMGR_CREATE_GPU_INSTANCE_PARAMS
412 {
413     enum
414     {
415         KMIGMGR_CREATE_GPU_INSTANCE_PARAMS_TYPE_REQUEST,
416         KMIGMGR_CREATE_GPU_INSTANCE_PARAMS_TYPE_RESTORE
417     } type;
418     union
419     {
420         struct
421         {
422             NvU32    partitionFlag;
423             NV_RANGE placement;
424             NvBool   bUsePlacement;
425         } request;
426         struct
427         {
428             struct GPUMGR_SAVE_GPU_INSTANCE *pGPUInstanceSave;
429         } restore;
430     } inst;
431 } KMIGMGR_CREATE_GPU_INSTANCE_PARAMS;
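
/*
 * Illustrative sketch (hypothetical values): populating the TYPE_REQUEST
 * specialization described above. `partitionFlag` is assumed to be a
 * validated NV2080_CTRL_GPU_PARTITION_FLAG_* value owned by the caller;
 * leaving bUsePlacement false lets the allocator pick the placement, so the
 * zero-initialized `placement` range is ignored.
 *
 *     KMIGMGR_CREATE_GPU_INSTANCE_PARAMS params = {0};
 *
 *     params.type = KMIGMGR_CREATE_GPU_INSTANCE_PARAMS_TYPE_REQUEST;
 *     params.inst.request.partitionFlag = partitionFlag;
 *     params.inst.request.bUsePlacement = NV_FALSE;
 */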
432 
433 /*!
434  * @brief Packed pointer to a GPU instance/compute instance combo
435  * @note  Having NULL pKernelMIGGpuInstance and non-NULL pMIGComputeInstance is never expected
436  */
437 struct MIG_INSTANCE_REF
438 {
439     KERNEL_MIG_GPU_INSTANCE *pKernelMIGGpuInstance;
440     MIG_COMPUTE_INSTANCE *pMIGComputeInstance;
441 };
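
/*
 * Usage sketch (illustrative only): building and validating references with
 * the helpers declared later in this header. `pKernelMIGGpuInstance` and
 * `pMIGComputeInstance` are hypothetical pointers owned by the caller.
 *
 *     struct MIG_INSTANCE_REF ref = kmigmgrMakeGIReference(pKernelMIGGpuInstance);
 *     // ...or, when a compute instance is also known:
 *     // ref = kmigmgrMakeCIReference(pKernelMIGGpuInstance, pMIGComputeInstance);
 *
 *     if (!kmigmgrIsMIGReferenceValid(&ref))
 *     {
 *         // Treat as a non-MIG (whole-GPU) reference.
 *     }
 */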
442 
443 typedef struct KERNEL_MIG_MANAGER_STATIC_INFO
444 {
445     /*! @ref NV2080_CTRL_CMD_INTERNAL_STATIC_MIGMGR_GET_PROFILES */
446     NV2080_CTRL_INTERNAL_STATIC_MIGMGR_GET_PROFILES_PARAMS *pProfiles;
447 
448     /*! Partitionable engines which are present on this GPU. */
449     ENGTYPE_BIT_VECTOR partitionableEngines;
450 
451     /*! Per swizzId FB memory page ranges */
452     NV2080_CTRL_INTERNAL_STATIC_MIGMGR_GET_SWIZZ_ID_FB_MEM_PAGE_RANGES_PARAMS *pSwizzIdFbMemPageRanges;
453 
454     /*! Compute instance profiles */
455     NV2080_CTRL_INTERNAL_STATIC_MIGMGR_GET_COMPUTE_PROFILES_PARAMS *pCIProfiles;
456 
457     /*! Skyline info used to determine GPU and compute instance resources available */
458     NV2080_CTRL_INTERNAL_STATIC_GRMGR_GET_SKYLINE_INFO_PARAMS *pSkylineInfo;
459 } KERNEL_MIG_MANAGER_STATIC_INFO;
460 
461 /*!
462  * KernelMIGManager provides kernel-side services for managing MIG instances.
463  * It also maintains state related to GPU partitioning.
464  */
465 
466 // Private field names are wrapped in PRIVATE_FIELD, which does nothing for
467 // the matching C source file, but causes diagnostics to be issued if another
468 // source file references the field.
469 #ifdef NVOC_KERNEL_MIG_MANAGER_H_PRIVATE_ACCESS_ALLOWED
470 #define PRIVATE_FIELD(x) x
471 #else
472 #define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
473 #endif
474 
475 struct KernelMIGManager {
476     const struct NVOC_RTTI *__nvoc_rtti;
477     struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
478     struct Object *__nvoc_pbase_Object;
479     struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;
480     struct KernelMIGManager *__nvoc_pbase_KernelMIGManager;
481     NV_STATUS (*__kmigmgrConstructEngine__)(OBJGPU *, struct KernelMIGManager *, ENGDESCRIPTOR);
482     NV_STATUS (*__kmigmgrStateInitLocked__)(OBJGPU *, struct KernelMIGManager *);
483     NV_STATUS (*__kmigmgrStateUnload__)(OBJGPU *, struct KernelMIGManager *, NvU32);
484     NV_STATUS (*__kmigmgrLoadStaticInfo__)(OBJGPU *, struct KernelMIGManager *);
485     NV_STATUS (*__kmigmgrSetStaticInfo__)(OBJGPU *, struct KernelMIGManager *);
486     void (*__kmigmgrClearStaticInfo__)(OBJGPU *, struct KernelMIGManager *);
487     NV_STATUS (*__kmigmgrSaveToPersistenceFromVgpuStaticInfo__)(OBJGPU *, struct KernelMIGManager *);
488     NV_STATUS (*__kmigmgrDeleteGPUInstanceRunlists__)(OBJGPU *, struct KernelMIGManager *, KERNEL_MIG_GPU_INSTANCE *);
489     NV_STATUS (*__kmigmgrCreateGPUInstanceRunlists__)(OBJGPU *, struct KernelMIGManager *, KERNEL_MIG_GPU_INSTANCE *);
490     NV_STATUS (*__kmigmgrRestoreFromPersistence__)(OBJGPU *, struct KernelMIGManager *);
491     NV_STATUS (*__kmigmgrCreateGPUInstanceCheck__)(OBJGPU *, struct KernelMIGManager *, NvBool);
492     NvBool (*__kmigmgrIsDevinitMIGBitSet__)(OBJGPU *, struct KernelMIGManager *);
493     NvBool (*__kmigmgrIsGPUInstanceCombinationValid__)(OBJGPU *, struct KernelMIGManager *, NvU32);
494     NvBool (*__kmigmgrIsGPUInstanceFlagValid__)(OBJGPU *, struct KernelMIGManager *, NvU32);
495     NV_STATUS (*__kmigmgrGenerateComputeInstanceUuid__)(OBJGPU *, struct KernelMIGManager *, NvU32, NvU32, NvUuid *);
496     NV_STATUS (*__kmigmgrGenerateGPUInstanceUuid__)(OBJGPU *, struct KernelMIGManager *, NvU32, NvUuid *);
497     NV_STATUS (*__kmigmgrCreateComputeInstances__)(OBJGPU *, struct KernelMIGManager *, KERNEL_MIG_GPU_INSTANCE *, NvBool, KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS, NvU32 *, NvBool);
498     NvBool (*__kmigmgrIsMemoryPartitioningRequested__)(OBJGPU *, struct KernelMIGManager *, NvU32);
499     NvBool (*__kmigmgrIsMemoryPartitioningNeeded__)(OBJGPU *, struct KernelMIGManager *, NvU32);
500     struct NV_RANGE (*__kmigmgrMemSizeFlagToSwizzIdRange__)(OBJGPU *, struct KernelMIGManager *, NvU32);
501     struct NV_RANGE (*__kmigmgrSwizzIdToSpan__)(OBJGPU *, struct KernelMIGManager *, NvU32);
502     NV_STATUS (*__kmigmgrSetMIGState__)(OBJGPU *, struct KernelMIGManager *, NvBool, NvBool, NvBool);
503     NvBool (*__kmigmgrIsCTSAlignmentRequired__)(OBJGPU *, struct KernelMIGManager *);
504     NV_STATUS (*__kmigmgrRestoreFromBootConfig__)(OBJGPU *, struct KernelMIGManager *);
505     NV_STATUS (*__kmigmgrStateLoad__)(POBJGPU, struct KernelMIGManager *, NvU32);
506     NV_STATUS (*__kmigmgrStatePreLoad__)(POBJGPU, struct KernelMIGManager *, NvU32);
507     NV_STATUS (*__kmigmgrStatePostUnload__)(POBJGPU, struct KernelMIGManager *, NvU32);
508     void (*__kmigmgrStateDestroy__)(POBJGPU, struct KernelMIGManager *);
509     NV_STATUS (*__kmigmgrStatePreUnload__)(POBJGPU, struct KernelMIGManager *, NvU32);
510     NV_STATUS (*__kmigmgrStateInitUnlocked__)(POBJGPU, struct KernelMIGManager *);
511     void (*__kmigmgrInitMissing__)(POBJGPU, struct KernelMIGManager *);
512     NV_STATUS (*__kmigmgrStatePreInitLocked__)(POBJGPU, struct KernelMIGManager *);
513     NV_STATUS (*__kmigmgrStatePreInitUnlocked__)(POBJGPU, struct KernelMIGManager *);
514     NV_STATUS (*__kmigmgrStatePostLoad__)(POBJGPU, struct KernelMIGManager *, NvU32);
515     NvBool (*__kmigmgrIsPresent__)(POBJGPU, struct KernelMIGManager *);
516     NvBool PRIVATE_FIELD(bIsA100ReducedConfig);
517     KERNEL_MIG_MANAGER_PRIVATE_DATA *PRIVATE_FIELD(pPrivate);
518     KERNEL_MIG_GPU_INSTANCE PRIVATE_FIELD(kernelMIGGpuInstance)[8];
519     NvBool PRIVATE_FIELD(bMIGEnabled);
520     NvU64 PRIVATE_FIELD(swizzIdInUseMask);
521     NvBool PRIVATE_FIELD(bRestoreWatchdog);
522     NvBool PRIVATE_FIELD(bReenableWatchdog);
523     union ENGTYPE_BIT_VECTOR PRIVATE_FIELD(partitionableEnginesInUse);
524     NvBool PRIVATE_FIELD(bDeviceProfilingInUse);
525     NvBool PRIVATE_FIELD(bMIGAutoOnlineEnabled);
526     NvBool PRIVATE_FIELD(bBootConfigSupported);
527     NvBool PRIVATE_FIELD(bAutoUpdateBootConfig);
528     NvBool PRIVATE_FIELD(bGlobalBootConfigUsed);
529     NvU64 PRIVATE_FIELD(validGlobalCTSIdMask);
530     NvU64 PRIVATE_FIELD(validGlobalGfxCTSIdMask);
531 };
532 
533 struct KernelMIGManager_PRIVATE {
534     const struct NVOC_RTTI *__nvoc_rtti;
535     struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
536     struct Object *__nvoc_pbase_Object;
537     struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;
538     struct KernelMIGManager *__nvoc_pbase_KernelMIGManager;
539     NV_STATUS (*__kmigmgrConstructEngine__)(OBJGPU *, struct KernelMIGManager *, ENGDESCRIPTOR);
540     NV_STATUS (*__kmigmgrStateInitLocked__)(OBJGPU *, struct KernelMIGManager *);
541     NV_STATUS (*__kmigmgrStateUnload__)(OBJGPU *, struct KernelMIGManager *, NvU32);
542     NV_STATUS (*__kmigmgrLoadStaticInfo__)(OBJGPU *, struct KernelMIGManager *);
543     NV_STATUS (*__kmigmgrSetStaticInfo__)(OBJGPU *, struct KernelMIGManager *);
544     void (*__kmigmgrClearStaticInfo__)(OBJGPU *, struct KernelMIGManager *);
545     NV_STATUS (*__kmigmgrSaveToPersistenceFromVgpuStaticInfo__)(OBJGPU *, struct KernelMIGManager *);
546     NV_STATUS (*__kmigmgrDeleteGPUInstanceRunlists__)(OBJGPU *, struct KernelMIGManager *, KERNEL_MIG_GPU_INSTANCE *);
547     NV_STATUS (*__kmigmgrCreateGPUInstanceRunlists__)(OBJGPU *, struct KernelMIGManager *, KERNEL_MIG_GPU_INSTANCE *);
548     NV_STATUS (*__kmigmgrRestoreFromPersistence__)(OBJGPU *, struct KernelMIGManager *);
549     NV_STATUS (*__kmigmgrCreateGPUInstanceCheck__)(OBJGPU *, struct KernelMIGManager *, NvBool);
550     NvBool (*__kmigmgrIsDevinitMIGBitSet__)(OBJGPU *, struct KernelMIGManager *);
551     NvBool (*__kmigmgrIsGPUInstanceCombinationValid__)(OBJGPU *, struct KernelMIGManager *, NvU32);
552     NvBool (*__kmigmgrIsGPUInstanceFlagValid__)(OBJGPU *, struct KernelMIGManager *, NvU32);
553     NV_STATUS (*__kmigmgrGenerateComputeInstanceUuid__)(OBJGPU *, struct KernelMIGManager *, NvU32, NvU32, NvUuid *);
554     NV_STATUS (*__kmigmgrGenerateGPUInstanceUuid__)(OBJGPU *, struct KernelMIGManager *, NvU32, NvUuid *);
555     NV_STATUS (*__kmigmgrCreateComputeInstances__)(OBJGPU *, struct KernelMIGManager *, KERNEL_MIG_GPU_INSTANCE *, NvBool, KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS, NvU32 *, NvBool);
556     NvBool (*__kmigmgrIsMemoryPartitioningRequested__)(OBJGPU *, struct KernelMIGManager *, NvU32);
557     NvBool (*__kmigmgrIsMemoryPartitioningNeeded__)(OBJGPU *, struct KernelMIGManager *, NvU32);
558     struct NV_RANGE (*__kmigmgrMemSizeFlagToSwizzIdRange__)(OBJGPU *, struct KernelMIGManager *, NvU32);
559     struct NV_RANGE (*__kmigmgrSwizzIdToSpan__)(OBJGPU *, struct KernelMIGManager *, NvU32);
560     NV_STATUS (*__kmigmgrSetMIGState__)(OBJGPU *, struct KernelMIGManager *, NvBool, NvBool, NvBool);
561     NvBool (*__kmigmgrIsCTSAlignmentRequired__)(OBJGPU *, struct KernelMIGManager *);
562     NV_STATUS (*__kmigmgrRestoreFromBootConfig__)(OBJGPU *, struct KernelMIGManager *);
563     NV_STATUS (*__kmigmgrStateLoad__)(POBJGPU, struct KernelMIGManager *, NvU32);
564     NV_STATUS (*__kmigmgrStatePreLoad__)(POBJGPU, struct KernelMIGManager *, NvU32);
565     NV_STATUS (*__kmigmgrStatePostUnload__)(POBJGPU, struct KernelMIGManager *, NvU32);
566     void (*__kmigmgrStateDestroy__)(POBJGPU, struct KernelMIGManager *);
567     NV_STATUS (*__kmigmgrStatePreUnload__)(POBJGPU, struct KernelMIGManager *, NvU32);
568     NV_STATUS (*__kmigmgrStateInitUnlocked__)(POBJGPU, struct KernelMIGManager *);
569     void (*__kmigmgrInitMissing__)(POBJGPU, struct KernelMIGManager *);
570     NV_STATUS (*__kmigmgrStatePreInitLocked__)(POBJGPU, struct KernelMIGManager *);
571     NV_STATUS (*__kmigmgrStatePreInitUnlocked__)(POBJGPU, struct KernelMIGManager *);
572     NV_STATUS (*__kmigmgrStatePostLoad__)(POBJGPU, struct KernelMIGManager *, NvU32);
573     NvBool (*__kmigmgrIsPresent__)(POBJGPU, struct KernelMIGManager *);
574     NvBool bIsA100ReducedConfig;
575     KERNEL_MIG_MANAGER_PRIVATE_DATA *pPrivate;
576     KERNEL_MIG_GPU_INSTANCE kernelMIGGpuInstance[8];
577     NvBool bMIGEnabled;
578     NvU64 swizzIdInUseMask;
579     NvBool bRestoreWatchdog;
580     NvBool bReenableWatchdog;
581     union ENGTYPE_BIT_VECTOR partitionableEnginesInUse;
582     NvBool bDeviceProfilingInUse;
583     NvBool bMIGAutoOnlineEnabled;
584     NvBool bBootConfigSupported;
585     NvBool bAutoUpdateBootConfig;
586     NvBool bGlobalBootConfigUsed;
587     NvU64 validGlobalCTSIdMask;
588     NvU64 validGlobalGfxCTSIdMask;
589 };
590 
591 #ifndef __NVOC_CLASS_KernelMIGManager_TYPEDEF__
592 #define __NVOC_CLASS_KernelMIGManager_TYPEDEF__
593 typedef struct KernelMIGManager KernelMIGManager;
594 #endif /* __NVOC_CLASS_KernelMIGManager_TYPEDEF__ */
595 
596 #ifndef __nvoc_class_id_KernelMIGManager
597 #define __nvoc_class_id_KernelMIGManager 0x01c1bf
598 #endif /* __nvoc_class_id_KernelMIGManager */
599 
600 extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelMIGManager;
601 
602 #define __staticCast_KernelMIGManager(pThis) \
603     ((pThis)->__nvoc_pbase_KernelMIGManager)
604 
605 #ifdef __nvoc_kernel_mig_manager_h_disabled
606 #define __dynamicCast_KernelMIGManager(pThis) ((KernelMIGManager*)NULL)
607 #else //__nvoc_kernel_mig_manager_h_disabled
608 #define __dynamicCast_KernelMIGManager(pThis) \
609     ((KernelMIGManager*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelMIGManager)))
610 #endif //__nvoc_kernel_mig_manager_h_disabled
611 
612 #define PDB_PROP_KMIGMGR_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
613 #define PDB_PROP_KMIGMGR_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING
614 
615 NV_STATUS __nvoc_objCreateDynamic_KernelMIGManager(KernelMIGManager**, Dynamic*, NvU32, va_list);
616 
617 NV_STATUS __nvoc_objCreate_KernelMIGManager(KernelMIGManager**, Dynamic*, NvU32);
618 #define __objCreate_KernelMIGManager(ppNewObj, pParent, createFlags) \
619     __nvoc_objCreate_KernelMIGManager((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
620 
621 #define kmigmgrConstructEngine(arg0, arg1, arg2) kmigmgrConstructEngine_DISPATCH(arg0, arg1, arg2)
622 #define kmigmgrStateInitLocked(arg0, arg1) kmigmgrStateInitLocked_DISPATCH(arg0, arg1)
623 #define kmigmgrStateUnload(arg0, arg1, flags) kmigmgrStateUnload_DISPATCH(arg0, arg1, flags)
624 #define kmigmgrLoadStaticInfo(arg0, arg1) kmigmgrLoadStaticInfo_DISPATCH(arg0, arg1)
625 #define kmigmgrLoadStaticInfo_HAL(arg0, arg1) kmigmgrLoadStaticInfo_DISPATCH(arg0, arg1)
626 #define kmigmgrSetStaticInfo(arg0, arg1) kmigmgrSetStaticInfo_DISPATCH(arg0, arg1)
627 #define kmigmgrSetStaticInfo_HAL(arg0, arg1) kmigmgrSetStaticInfo_DISPATCH(arg0, arg1)
628 #define kmigmgrClearStaticInfo(arg0, arg1) kmigmgrClearStaticInfo_DISPATCH(arg0, arg1)
629 #define kmigmgrClearStaticInfo_HAL(arg0, arg1) kmigmgrClearStaticInfo_DISPATCH(arg0, arg1)
630 #define kmigmgrSaveToPersistenceFromVgpuStaticInfo(arg0, arg1) kmigmgrSaveToPersistenceFromVgpuStaticInfo_DISPATCH(arg0, arg1)
631 #define kmigmgrSaveToPersistenceFromVgpuStaticInfo_HAL(arg0, arg1) kmigmgrSaveToPersistenceFromVgpuStaticInfo_DISPATCH(arg0, arg1)
632 #define kmigmgrDeleteGPUInstanceRunlists(arg0, arg1, arg2) kmigmgrDeleteGPUInstanceRunlists_DISPATCH(arg0, arg1, arg2)
633 #define kmigmgrDeleteGPUInstanceRunlists_HAL(arg0, arg1, arg2) kmigmgrDeleteGPUInstanceRunlists_DISPATCH(arg0, arg1, arg2)
634 #define kmigmgrCreateGPUInstanceRunlists(arg0, arg1, arg2) kmigmgrCreateGPUInstanceRunlists_DISPATCH(arg0, arg1, arg2)
635 #define kmigmgrCreateGPUInstanceRunlists_HAL(arg0, arg1, arg2) kmigmgrCreateGPUInstanceRunlists_DISPATCH(arg0, arg1, arg2)
636 #define kmigmgrRestoreFromPersistence(arg0, arg1) kmigmgrRestoreFromPersistence_DISPATCH(arg0, arg1)
637 #define kmigmgrRestoreFromPersistence_HAL(arg0, arg1) kmigmgrRestoreFromPersistence_DISPATCH(arg0, arg1)
638 #define kmigmgrCreateGPUInstanceCheck(arg0, arg1, bMemoryPartitioningNeeded) kmigmgrCreateGPUInstanceCheck_DISPATCH(arg0, arg1, bMemoryPartitioningNeeded)
639 #define kmigmgrCreateGPUInstanceCheck_HAL(arg0, arg1, bMemoryPartitioningNeeded) kmigmgrCreateGPUInstanceCheck_DISPATCH(arg0, arg1, bMemoryPartitioningNeeded)
640 #define kmigmgrIsDevinitMIGBitSet(arg0, arg1) kmigmgrIsDevinitMIGBitSet_DISPATCH(arg0, arg1)
641 #define kmigmgrIsDevinitMIGBitSet_HAL(arg0, arg1) kmigmgrIsDevinitMIGBitSet_DISPATCH(arg0, arg1)
642 #define kmigmgrIsGPUInstanceCombinationValid(arg0, arg1, gpuInstanceFlag) kmigmgrIsGPUInstanceCombinationValid_DISPATCH(arg0, arg1, gpuInstanceFlag)
643 #define kmigmgrIsGPUInstanceCombinationValid_HAL(arg0, arg1, gpuInstanceFlag) kmigmgrIsGPUInstanceCombinationValid_DISPATCH(arg0, arg1, gpuInstanceFlag)
644 #define kmigmgrIsGPUInstanceFlagValid(arg0, arg1, gpuInstanceFlag) kmigmgrIsGPUInstanceFlagValid_DISPATCH(arg0, arg1, gpuInstanceFlag)
645 #define kmigmgrIsGPUInstanceFlagValid_HAL(arg0, arg1, gpuInstanceFlag) kmigmgrIsGPUInstanceFlagValid_DISPATCH(arg0, arg1, gpuInstanceFlag)
646 #define kmigmgrGenerateComputeInstanceUuid(arg0, arg1, swizzId, globalGrIdx, arg2) kmigmgrGenerateComputeInstanceUuid_DISPATCH(arg0, arg1, swizzId, globalGrIdx, arg2)
647 #define kmigmgrGenerateComputeInstanceUuid_HAL(arg0, arg1, swizzId, globalGrIdx, arg2) kmigmgrGenerateComputeInstanceUuid_DISPATCH(arg0, arg1, swizzId, globalGrIdx, arg2)
648 #define kmigmgrGenerateGPUInstanceUuid(arg0, arg1, swizzId, arg2) kmigmgrGenerateGPUInstanceUuid_DISPATCH(arg0, arg1, swizzId, arg2)
649 #define kmigmgrGenerateGPUInstanceUuid_HAL(arg0, arg1, swizzId, arg2) kmigmgrGenerateGPUInstanceUuid_DISPATCH(arg0, arg1, swizzId, arg2)
650 #define kmigmgrCreateComputeInstances(arg0, arg1, arg2, bQuery, arg3, pCIIds, bCreateCap) kmigmgrCreateComputeInstances_DISPATCH(arg0, arg1, arg2, bQuery, arg3, pCIIds, bCreateCap)
651 #define kmigmgrCreateComputeInstances_HAL(arg0, arg1, arg2, bQuery, arg3, pCIIds, bCreateCap) kmigmgrCreateComputeInstances_DISPATCH(arg0, arg1, arg2, bQuery, arg3, pCIIds, bCreateCap)
652 #define kmigmgrIsMemoryPartitioningRequested(arg0, arg1, partitionFlags) kmigmgrIsMemoryPartitioningRequested_DISPATCH(arg0, arg1, partitionFlags)
653 #define kmigmgrIsMemoryPartitioningRequested_HAL(arg0, arg1, partitionFlags) kmigmgrIsMemoryPartitioningRequested_DISPATCH(arg0, arg1, partitionFlags)
654 #define kmigmgrIsMemoryPartitioningNeeded(arg0, arg1, swizzId) kmigmgrIsMemoryPartitioningNeeded_DISPATCH(arg0, arg1, swizzId)
655 #define kmigmgrIsMemoryPartitioningNeeded_HAL(arg0, arg1, swizzId) kmigmgrIsMemoryPartitioningNeeded_DISPATCH(arg0, arg1, swizzId)
656 #define kmigmgrMemSizeFlagToSwizzIdRange(arg0, arg1, memSizeFlag) kmigmgrMemSizeFlagToSwizzIdRange_DISPATCH(arg0, arg1, memSizeFlag)
657 #define kmigmgrMemSizeFlagToSwizzIdRange_HAL(arg0, arg1, memSizeFlag) kmigmgrMemSizeFlagToSwizzIdRange_DISPATCH(arg0, arg1, memSizeFlag)
658 #define kmigmgrSwizzIdToSpan(arg0, arg1, swizzId) kmigmgrSwizzIdToSpan_DISPATCH(arg0, arg1, swizzId)
659 #define kmigmgrSwizzIdToSpan_HAL(arg0, arg1, swizzId) kmigmgrSwizzIdToSpan_DISPATCH(arg0, arg1, swizzId)
660 #define kmigmgrSetMIGState(arg0, arg1, bMemoryPartitioningNeeded, bEnable, bUnload) kmigmgrSetMIGState_DISPATCH(arg0, arg1, bMemoryPartitioningNeeded, bEnable, bUnload)
661 #define kmigmgrSetMIGState_HAL(arg0, arg1, bMemoryPartitioningNeeded, bEnable, bUnload) kmigmgrSetMIGState_DISPATCH(arg0, arg1, bMemoryPartitioningNeeded, bEnable, bUnload)
662 #define kmigmgrIsCTSAlignmentRequired(arg0, arg1) kmigmgrIsCTSAlignmentRequired_DISPATCH(arg0, arg1)
663 #define kmigmgrIsCTSAlignmentRequired_HAL(arg0, arg1) kmigmgrIsCTSAlignmentRequired_DISPATCH(arg0, arg1)
664 #define kmigmgrRestoreFromBootConfig(pGpu, pKernelMIGManager) kmigmgrRestoreFromBootConfig_DISPATCH(pGpu, pKernelMIGManager)
665 #define kmigmgrRestoreFromBootConfig_HAL(pGpu, pKernelMIGManager) kmigmgrRestoreFromBootConfig_DISPATCH(pGpu, pKernelMIGManager)
666 #define kmigmgrStateLoad(pGpu, pEngstate, arg0) kmigmgrStateLoad_DISPATCH(pGpu, pEngstate, arg0)
667 #define kmigmgrStatePreLoad(pGpu, pEngstate, arg0) kmigmgrStatePreLoad_DISPATCH(pGpu, pEngstate, arg0)
668 #define kmigmgrStatePostUnload(pGpu, pEngstate, arg0) kmigmgrStatePostUnload_DISPATCH(pGpu, pEngstate, arg0)
669 #define kmigmgrStateDestroy(pGpu, pEngstate) kmigmgrStateDestroy_DISPATCH(pGpu, pEngstate)
670 #define kmigmgrStatePreUnload(pGpu, pEngstate, arg0) kmigmgrStatePreUnload_DISPATCH(pGpu, pEngstate, arg0)
671 #define kmigmgrStateInitUnlocked(pGpu, pEngstate) kmigmgrStateInitUnlocked_DISPATCH(pGpu, pEngstate)
672 #define kmigmgrInitMissing(pGpu, pEngstate) kmigmgrInitMissing_DISPATCH(pGpu, pEngstate)
673 #define kmigmgrStatePreInitLocked(pGpu, pEngstate) kmigmgrStatePreInitLocked_DISPATCH(pGpu, pEngstate)
674 #define kmigmgrStatePreInitUnlocked(pGpu, pEngstate) kmigmgrStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
675 #define kmigmgrStatePostLoad(pGpu, pEngstate, arg0) kmigmgrStatePostLoad_DISPATCH(pGpu, pEngstate, arg0)
676 #define kmigmgrIsPresent(pGpu, pEngstate) kmigmgrIsPresent_DISPATCH(pGpu, pEngstate)
677 void kmigmgrDetectReducedConfig_KERNEL(OBJGPU *arg0, struct KernelMIGManager *arg1);
678 
679 
680 #ifdef __nvoc_kernel_mig_manager_h_disabled
681 static inline void kmigmgrDetectReducedConfig(OBJGPU *arg0, struct KernelMIGManager *arg1) {
682     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
683 }
684 #else //__nvoc_kernel_mig_manager_h_disabled
685 #define kmigmgrDetectReducedConfig(arg0, arg1) kmigmgrDetectReducedConfig_KERNEL(arg0, arg1)
686 #endif //__nvoc_kernel_mig_manager_h_disabled
687 
688 #define kmigmgrDetectReducedConfig_HAL(arg0, arg1) kmigmgrDetectReducedConfig(arg0, arg1)
689 
690 NV_STATUS kmigmgrGetComputeProfileFromGpcCount_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 gpcCount, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile);
691 
692 
693 #ifdef __nvoc_kernel_mig_manager_h_disabled
694 static inline NV_STATUS kmigmgrGetComputeProfileFromGpcCount(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 gpcCount, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile) {
695     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
696     return NV_ERR_NOT_SUPPORTED;
697 }
698 #else //__nvoc_kernel_mig_manager_h_disabled
699 #define kmigmgrGetComputeProfileFromGpcCount(arg0, arg1, gpcCount, pProfile) kmigmgrGetComputeProfileFromGpcCount_IMPL(arg0, arg1, gpcCount, pProfile)
700 #endif //__nvoc_kernel_mig_manager_h_disabled
701 
702 #define kmigmgrGetComputeProfileFromGpcCount_HAL(arg0, arg1, gpcCount, pProfile) kmigmgrGetComputeProfileFromGpcCount(arg0, arg1, gpcCount, pProfile)
703 
704 NV_STATUS kmigmgrConstructEngine_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, ENGDESCRIPTOR arg2);
705 
706 static inline NV_STATUS kmigmgrConstructEngine_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1, ENGDESCRIPTOR arg2) {
707     return arg1->__kmigmgrConstructEngine__(arg0, arg1, arg2);
708 }
709 
710 NV_STATUS kmigmgrStateInitLocked_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);
711 
712 static inline NV_STATUS kmigmgrStateInitLocked_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1) {
713     return arg1->__kmigmgrStateInitLocked__(arg0, arg1);
714 }
715 
716 NV_STATUS kmigmgrStateUnload_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 flags);
717 
718 static inline NV_STATUS kmigmgrStateUnload_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 flags) {
719     return arg1->__kmigmgrStateUnload__(arg0, arg1, flags);
720 }
721 
722 NV_STATUS kmigmgrLoadStaticInfo_VF(OBJGPU *arg0, struct KernelMIGManager *arg1);
723 
724 NV_STATUS kmigmgrLoadStaticInfo_KERNEL(OBJGPU *arg0, struct KernelMIGManager *arg1);
725 
726 static inline NV_STATUS kmigmgrLoadStaticInfo_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1) {
727     return arg1->__kmigmgrLoadStaticInfo__(arg0, arg1);
728 }
729 
730 NV_STATUS kmigmgrSetStaticInfo_VF(OBJGPU *arg0, struct KernelMIGManager *arg1);
731 
732 static inline NV_STATUS kmigmgrSetStaticInfo_46f6a7(OBJGPU *arg0, struct KernelMIGManager *arg1) {
733     return NV_ERR_NOT_SUPPORTED;
734 }
735 
736 static inline NV_STATUS kmigmgrSetStaticInfo_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1) {
737     return arg1->__kmigmgrSetStaticInfo__(arg0, arg1);
738 }
739 
740 void kmigmgrClearStaticInfo_VF(OBJGPU *arg0, struct KernelMIGManager *arg1);
741 
742 static inline void kmigmgrClearStaticInfo_b3696a(OBJGPU *arg0, struct KernelMIGManager *arg1) {
743     return;
744 }
745 
746 static inline void kmigmgrClearStaticInfo_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1) {
747     arg1->__kmigmgrClearStaticInfo__(arg0, arg1);
748 }
749 
750 NV_STATUS kmigmgrSaveToPersistenceFromVgpuStaticInfo_VF(OBJGPU *arg0, struct KernelMIGManager *arg1);
751 
752 static inline NV_STATUS kmigmgrSaveToPersistenceFromVgpuStaticInfo_46f6a7(OBJGPU *arg0, struct KernelMIGManager *arg1) {
753     return NV_ERR_NOT_SUPPORTED;
754 }
755 
756 static inline NV_STATUS kmigmgrSaveToPersistenceFromVgpuStaticInfo_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1) {
757     return arg1->__kmigmgrSaveToPersistenceFromVgpuStaticInfo__(arg0, arg1);
758 }
759 
760 static inline NV_STATUS kmigmgrDeleteGPUInstanceRunlists_56cd7a(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2) {
761     return NV_OK;
762 }
763 
764 NV_STATUS kmigmgrDeleteGPUInstanceRunlists_FWCLIENT(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2);
765 
766 static inline NV_STATUS kmigmgrDeleteGPUInstanceRunlists_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2) {
767     return arg1->__kmigmgrDeleteGPUInstanceRunlists__(arg0, arg1, arg2);
768 }
769 
770 static inline NV_STATUS kmigmgrCreateGPUInstanceRunlists_56cd7a(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2) {
771     return NV_OK;
772 }
773 
774 NV_STATUS kmigmgrCreateGPUInstanceRunlists_FWCLIENT(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2);
775 
776 static inline NV_STATUS kmigmgrCreateGPUInstanceRunlists_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2) {
777     return arg1->__kmigmgrCreateGPUInstanceRunlists__(arg0, arg1, arg2);
778 }
779 
780 NV_STATUS kmigmgrRestoreFromPersistence_VF(OBJGPU *arg0, struct KernelMIGManager *arg1);
781 
782 NV_STATUS kmigmgrRestoreFromPersistence_PF(OBJGPU *arg0, struct KernelMIGManager *arg1);
783 
784 static inline NV_STATUS kmigmgrRestoreFromPersistence_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1) {
785     return arg1->__kmigmgrRestoreFromPersistence__(arg0, arg1);
786 }
787 
788 NV_STATUS kmigmgrCreateGPUInstanceCheck_GA100(OBJGPU *arg0, struct KernelMIGManager *arg1, NvBool bMemoryPartitioningNeeded);
789 
790 static inline NV_STATUS kmigmgrCreateGPUInstanceCheck_46f6a7(OBJGPU *arg0, struct KernelMIGManager *arg1, NvBool bMemoryPartitioningNeeded) {
791     return NV_ERR_NOT_SUPPORTED;
792 }
793 
794 static inline NV_STATUS kmigmgrCreateGPUInstanceCheck_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1, NvBool bMemoryPartitioningNeeded) {
795     return arg1->__kmigmgrCreateGPUInstanceCheck__(arg0, arg1, bMemoryPartitioningNeeded);
796 }
797 
798 NvBool kmigmgrIsDevinitMIGBitSet_VF(OBJGPU *arg0, struct KernelMIGManager *arg1);
799 
800 NvBool kmigmgrIsDevinitMIGBitSet_GA100(OBJGPU *arg0, struct KernelMIGManager *arg1);
801 
802 static inline NvBool kmigmgrIsDevinitMIGBitSet_491d52(OBJGPU *arg0, struct KernelMIGManager *arg1) {
803     return ((NvBool)(0 != 0));
804 }
805 
806 static inline NvBool kmigmgrIsDevinitMIGBitSet_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1) {
807     return arg1->__kmigmgrIsDevinitMIGBitSet__(arg0, arg1);
808 }
809 
810 NvBool kmigmgrIsGPUInstanceCombinationValid_GA100(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 gpuInstanceFlag);
811 
812 NvBool kmigmgrIsGPUInstanceCombinationValid_GH100(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 gpuInstanceFlag);
813 
814 static inline NvBool kmigmgrIsGPUInstanceCombinationValid_491d52(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 gpuInstanceFlag) {
815     return ((NvBool)(0 != 0));
816 }
817 
818 static inline NvBool kmigmgrIsGPUInstanceCombinationValid_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 gpuInstanceFlag) {
819     return arg1->__kmigmgrIsGPUInstanceCombinationValid__(arg0, arg1, gpuInstanceFlag);
820 }
821 
822 NvBool kmigmgrIsGPUInstanceFlagValid_GA100(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 gpuInstanceFlag);
823 
824 NvBool kmigmgrIsGPUInstanceFlagValid_GH100(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 gpuInstanceFlag);
825 
826 static inline NvBool kmigmgrIsGPUInstanceFlagValid_491d52(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 gpuInstanceFlag) {
827     return ((NvBool)(0 != 0));
828 }
829 
830 static inline NvBool kmigmgrIsGPUInstanceFlagValid_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 gpuInstanceFlag) {
831     return arg1->__kmigmgrIsGPUInstanceFlagValid__(arg0, arg1, gpuInstanceFlag);
832 }
833 
834 NV_STATUS kmigmgrGenerateComputeInstanceUuid_VF(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, NvU32 globalGrIdx, NvUuid *arg2);
835 
836 static inline NV_STATUS kmigmgrGenerateComputeInstanceUuid_5baef9(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, NvU32 globalGrIdx, NvUuid *arg2) {
837     NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
838 }
839 
840 static inline NV_STATUS kmigmgrGenerateComputeInstanceUuid_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, NvU32 globalGrIdx, NvUuid *arg2) {
841     return arg1->__kmigmgrGenerateComputeInstanceUuid__(arg0, arg1, swizzId, globalGrIdx, arg2);
842 }
843 
844 NV_STATUS kmigmgrGenerateGPUInstanceUuid_VF(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, NvUuid *arg2);
845 
846 static inline NV_STATUS kmigmgrGenerateGPUInstanceUuid_5baef9(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, NvUuid *arg2) {
847     NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
848 }
849 
850 static inline NV_STATUS kmigmgrGenerateGPUInstanceUuid_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, NvUuid *arg2) {
851     return arg1->__kmigmgrGenerateGPUInstanceUuid__(arg0, arg1, swizzId, arg2);
852 }
853 
854 NV_STATUS kmigmgrCreateComputeInstances_VF(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, NvBool bQuery, KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS arg3, NvU32 *pCIIds, NvBool bCreateCap);
855 
856 NV_STATUS kmigmgrCreateComputeInstances_FWCLIENT(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, NvBool bQuery, KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS arg3, NvU32 *pCIIds, NvBool bCreateCap);
857 
858 static inline NV_STATUS kmigmgrCreateComputeInstances_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, NvBool bQuery, KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS arg3, NvU32 *pCIIds, NvBool bCreateCap) {
859     return arg1->__kmigmgrCreateComputeInstances__(arg0, arg1, arg2, bQuery, arg3, pCIIds, bCreateCap);
860 }
861 
862 NvBool kmigmgrIsMemoryPartitioningRequested_GA100(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 partitionFlags);
863 
864 static inline NvBool kmigmgrIsMemoryPartitioningRequested_491d52(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 partitionFlags) {
865     return ((NvBool)(0 != 0));
866 }
867 
868 static inline NvBool kmigmgrIsMemoryPartitioningRequested_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 partitionFlags) {
869     return arg1->__kmigmgrIsMemoryPartitioningRequested__(arg0, arg1, partitionFlags);
870 }
871 
872 NvBool kmigmgrIsMemoryPartitioningNeeded_GA100(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId);
873 
874 static inline NvBool kmigmgrIsMemoryPartitioningNeeded_491d52(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId) {
875     return ((NvBool)(0 != 0));
876 }
877 
878 static inline NvBool kmigmgrIsMemoryPartitioningNeeded_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId) {
879     return arg1->__kmigmgrIsMemoryPartitioningNeeded__(arg0, arg1, swizzId);
880 }
881 
882 static inline struct NV_RANGE kmigmgrMemSizeFlagToSwizzIdRange_d64cd6(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 memSizeFlag) {
883     return NV_RANGE_EMPTY;
884 }
885 
886 struct NV_RANGE kmigmgrMemSizeFlagToSwizzIdRange_GA100(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 memSizeFlag);
887 
888 static inline struct NV_RANGE kmigmgrMemSizeFlagToSwizzIdRange_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 memSizeFlag) {
889     return arg1->__kmigmgrMemSizeFlagToSwizzIdRange__(arg0, arg1, memSizeFlag);
890 }
891 
892 struct NV_RANGE kmigmgrSwizzIdToSpan_GA100(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId);
893 
894 static inline struct NV_RANGE kmigmgrSwizzIdToSpan_d64cd6(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId) {
895     return NV_RANGE_EMPTY;
896 }
897 
898 static inline struct NV_RANGE kmigmgrSwizzIdToSpan_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId) {
899     return arg1->__kmigmgrSwizzIdToSpan__(arg0, arg1, swizzId);
900 }
901 
902 NV_STATUS kmigmgrSetMIGState_VF(OBJGPU *arg0, struct KernelMIGManager *arg1, NvBool bMemoryPartitioningNeeded, NvBool bEnable, NvBool bUnload);
903 
904 NV_STATUS kmigmgrSetMIGState_FWCLIENT(OBJGPU *arg0, struct KernelMIGManager *arg1, NvBool bMemoryPartitioningNeeded, NvBool bEnable, NvBool bUnload);
905 
906 static inline NV_STATUS kmigmgrSetMIGState_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1, NvBool bMemoryPartitioningNeeded, NvBool bEnable, NvBool bUnload) {
907     return arg1->__kmigmgrSetMIGState__(arg0, arg1, bMemoryPartitioningNeeded, bEnable, bUnload);
908 }
909 
910 NvBool kmigmgrIsCTSAlignmentRequired_VF(OBJGPU *arg0, struct KernelMIGManager *arg1);
911 
912 NvBool kmigmgrIsCTSAlignmentRequired_PF(OBJGPU *arg0, struct KernelMIGManager *arg1);
913 
914 static inline NvBool kmigmgrIsCTSAlignmentRequired_DISPATCH(OBJGPU *arg0, struct KernelMIGManager *arg1) {
915     return arg1->__kmigmgrIsCTSAlignmentRequired__(arg0, arg1);
916 }
917 
918 NV_STATUS kmigmgrRestoreFromBootConfig_PF(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager);
919 
920 static inline NV_STATUS kmigmgrRestoreFromBootConfig_56cd7a(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager) {
921     return NV_OK;
922 }
923 
924 static inline NV_STATUS kmigmgrRestoreFromBootConfig_DISPATCH(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager) {
925     return pKernelMIGManager->__kmigmgrRestoreFromBootConfig__(pGpu, pKernelMIGManager);
926 }
927 
928 static inline NV_STATUS kmigmgrStateLoad_DISPATCH(POBJGPU pGpu, struct KernelMIGManager *pEngstate, NvU32 arg0) {
929     return pEngstate->__kmigmgrStateLoad__(pGpu, pEngstate, arg0);
930 }
931 
932 static inline NV_STATUS kmigmgrStatePreLoad_DISPATCH(POBJGPU pGpu, struct KernelMIGManager *pEngstate, NvU32 arg0) {
933     return pEngstate->__kmigmgrStatePreLoad__(pGpu, pEngstate, arg0);
934 }
935 
936 static inline NV_STATUS kmigmgrStatePostUnload_DISPATCH(POBJGPU pGpu, struct KernelMIGManager *pEngstate, NvU32 arg0) {
937     return pEngstate->__kmigmgrStatePostUnload__(pGpu, pEngstate, arg0);
938 }
939 
940 static inline void kmigmgrStateDestroy_DISPATCH(POBJGPU pGpu, struct KernelMIGManager *pEngstate) {
941     pEngstate->__kmigmgrStateDestroy__(pGpu, pEngstate);
942 }
943 
944 static inline NV_STATUS kmigmgrStatePreUnload_DISPATCH(POBJGPU pGpu, struct KernelMIGManager *pEngstate, NvU32 arg0) {
945     return pEngstate->__kmigmgrStatePreUnload__(pGpu, pEngstate, arg0);
946 }
947 
948 static inline NV_STATUS kmigmgrStateInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelMIGManager *pEngstate) {
949     return pEngstate->__kmigmgrStateInitUnlocked__(pGpu, pEngstate);
950 }
951 
952 static inline void kmigmgrInitMissing_DISPATCH(POBJGPU pGpu, struct KernelMIGManager *pEngstate) {
953     pEngstate->__kmigmgrInitMissing__(pGpu, pEngstate);
954 }
955 
956 static inline NV_STATUS kmigmgrStatePreInitLocked_DISPATCH(POBJGPU pGpu, struct KernelMIGManager *pEngstate) {
957     return pEngstate->__kmigmgrStatePreInitLocked__(pGpu, pEngstate);
958 }
959 
960 static inline NV_STATUS kmigmgrStatePreInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelMIGManager *pEngstate) {
961     return pEngstate->__kmigmgrStatePreInitUnlocked__(pGpu, pEngstate);
962 }
963 
964 static inline NV_STATUS kmigmgrStatePostLoad_DISPATCH(POBJGPU pGpu, struct KernelMIGManager *pEngstate, NvU32 arg0) {
965     return pEngstate->__kmigmgrStatePostLoad__(pGpu, pEngstate, arg0);
966 }
967 
968 static inline NvBool kmigmgrIsPresent_DISPATCH(POBJGPU pGpu, struct KernelMIGManager *pEngstate) {
969     return pEngstate->__kmigmgrIsPresent__(pGpu, pEngstate);
970 }
971 
972 static inline NvBool kmigmgrUseLegacyVgpuPolicy(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager) {
973     return ((NvBool)(0 != 0));
974 }
975 
976 static inline NvBool kmigmgrIsMIGNvlinkP2PSupportOverridden(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager) {
977     return ((NvBool)(0 != 0));
978 }
979 
980 static inline void kmigmgrSetMIGEnabled(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager, NvBool bEnabled) {
981     struct KernelMIGManager_PRIVATE *pKernelMIGManager_PRIVATE = (struct KernelMIGManager_PRIVATE *)pKernelMIGManager;
982     pKernelMIGManager_PRIVATE->bMIGEnabled = bEnabled;
983 }
984 
985 static inline const union ENGTYPE_BIT_VECTOR *kmigmgrGetPartitionableEnginesInUse(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager) {
986     struct KernelMIGManager_PRIVATE *pKernelMIGManager_PRIVATE = (struct KernelMIGManager_PRIVATE *)pKernelMIGManager;
987     return &pKernelMIGManager_PRIVATE->partitionableEnginesInUse;
988 }
989 
990 static inline NvBool kmigmgrIsA100ReducedConfig(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager) {
991     struct KernelMIGManager_PRIVATE *pKernelMIGManager_PRIVATE = (struct KernelMIGManager_PRIVATE *)pKernelMIGManager;
992     return pKernelMIGManager_PRIVATE->bIsA100ReducedConfig;
993 }
994 
995 static inline void kmigmgrSetIsA100ReducedConfig(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager, NvBool bA100ReducedConfig) {
996     struct KernelMIGManager_PRIVATE *pKernelMIGManager_PRIVATE = (struct KernelMIGManager_PRIVATE *)pKernelMIGManager;
997     pKernelMIGManager_PRIVATE->bIsA100ReducedConfig = bA100ReducedConfig;
998 }
999 
1000 static inline NvU64 kmigmgrGetValidGlobalCTSIdMask(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager) {
1001     struct KernelMIGManager_PRIVATE *pKernelMIGManager_PRIVATE = (struct KernelMIGManager_PRIVATE *)pKernelMIGManager;
1002     return pKernelMIGManager_PRIVATE->validGlobalCTSIdMask;
1003 }
1004 
1005 static inline void kmigmgrSetValidGlobalCTSIdMask(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager, NvU64 validGlobalCTSIdMask) {
1006     struct KernelMIGManager_PRIVATE *pKernelMIGManager_PRIVATE = (struct KernelMIGManager_PRIVATE *)pKernelMIGManager;
1007     pKernelMIGManager_PRIVATE->validGlobalCTSIdMask = validGlobalCTSIdMask;
1008 }
1009 
1010 static inline NvU64 kmigmgrGetValidGlobalGfxCTSIdMask(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager) {
1011     struct KernelMIGManager_PRIVATE *pKernelMIGManager_PRIVATE = (struct KernelMIGManager_PRIVATE *)pKernelMIGManager;
1012     return pKernelMIGManager_PRIVATE->validGlobalGfxCTSIdMask;
1013 }
1014 
1015 static inline void kmigmgrSetValidGlobalGfxCTSIdMask(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager, NvU64 validGlobalGfxCTSIdMask) {
1016     struct KernelMIGManager_PRIVATE *pKernelMIGManager_PRIVATE = (struct KernelMIGManager_PRIVATE *)pKernelMIGManager;
1017     pKernelMIGManager_PRIVATE->validGlobalGfxCTSIdMask = validGlobalGfxCTSIdMask;
1018 }
1019 
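/*
 * The helpers below are always available: each *_IMPL routine is aliased by a
 * plain macro with no __nvoc_kernel_mig_manager_h_disabled guard, unlike the
 * per-GPU interfaces that follow.
 */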
1020 NV_STATUS kmigmgrIncRefCount_IMPL(struct RsShared *arg0);
1021 
1022 #define kmigmgrIncRefCount(arg0) kmigmgrIncRefCount_IMPL(arg0)
1023 NV_STATUS kmigmgrDecRefCount_IMPL(struct RsShared *arg0);
1024 
1025 #define kmigmgrDecRefCount(arg0) kmigmgrDecRefCount_IMPL(arg0)
1026 struct MIG_INSTANCE_REF kmigmgrMakeGIReference_IMPL(KERNEL_MIG_GPU_INSTANCE *arg0);
1027 
1028 #define kmigmgrMakeGIReference(arg0) kmigmgrMakeGIReference_IMPL(arg0)
1029 struct MIG_INSTANCE_REF kmigmgrMakeCIReference_IMPL(KERNEL_MIG_GPU_INSTANCE *arg0, MIG_COMPUTE_INSTANCE *arg1);
1030 
1031 #define kmigmgrMakeCIReference(arg0, arg1) kmigmgrMakeCIReference_IMPL(arg0, arg1)
1032 NV_STATUS kmigmgrEngineTypeXlate_IMPL(union ENGTYPE_BIT_VECTOR *pSrc, RM_ENGINE_TYPE srcEngineType, union ENGTYPE_BIT_VECTOR *pDst, RM_ENGINE_TYPE *pDstEngineType);
1033 
1034 #define kmigmgrEngineTypeXlate(pSrc, srcEngineType, pDst, pDstEngineType) kmigmgrEngineTypeXlate_IMPL(pSrc, srcEngineType, pDst, pDstEngineType)
1035 NvBool kmigmgrIsInstanceAttributionIdValid_IMPL(NvU16 id);
1036 
1037 #define kmigmgrIsInstanceAttributionIdValid(id) kmigmgrIsInstanceAttributionIdValid_IMPL(id)
1038 struct MIG_INSTANCE_REF kmigmgrMakeNoMIGReference_IMPL(void);
1039 
1040 #define kmigmgrMakeNoMIGReference() kmigmgrMakeNoMIGReference_IMPL()
1041 NvBool kmigmgrIsMIGReferenceValid_IMPL(struct MIG_INSTANCE_REF *arg0);
1042 
1043 #define kmigmgrIsMIGReferenceValid(arg0) kmigmgrIsMIGReferenceValid_IMPL(arg0)
1044 NvBool kmigmgrAreMIGReferencesSame_IMPL(struct MIG_INSTANCE_REF *arg0, struct MIG_INSTANCE_REF *arg1);
1045 
1046 #define kmigmgrAreMIGReferencesSame(arg0, arg1) kmigmgrAreMIGReferencesSame_IMPL(arg0, arg1)
1047 NvU32 kmigmgrCountEnginesOfType_IMPL(const union ENGTYPE_BIT_VECTOR *arg0, RM_ENGINE_TYPE arg1);
1048 
1049 #define kmigmgrCountEnginesOfType(arg0, arg1) kmigmgrCountEnginesOfType_IMPL(arg0, arg1)
1050 NvU16 kmigmgrGetAttributionIdFromMIGReference_IMPL(struct MIG_INSTANCE_REF arg0);
1051 
1052 #define kmigmgrGetAttributionIdFromMIGReference(arg0) kmigmgrGetAttributionIdFromMIGReference_IMPL(arg0)
1053 NV_STATUS kmigmgrAllocateInstanceEngines_IMPL(union ENGTYPE_BIT_VECTOR *pSourceEngines, NvBool bShared, struct NV_RANGE engTypeRange, NvU32 reqEngCount, union ENGTYPE_BIT_VECTOR *pOutEngines, union ENGTYPE_BIT_VECTOR *pExclusiveEngines, union ENGTYPE_BIT_VECTOR *pSharedEngines, union ENGTYPE_BIT_VECTOR *pAllocatableEngines);
1054 
1055 #define kmigmgrAllocateInstanceEngines(pSourceEngines, bShared, engTypeRange, reqEngCount, pOutEngines, pExclusiveEngines, pSharedEngines, pAllocatableEngines) kmigmgrAllocateInstanceEngines_IMPL(pSourceEngines, bShared, engTypeRange, reqEngCount, pOutEngines, pExclusiveEngines, pSharedEngines, pAllocatableEngines)
1056 void kmigmgrGetLocalEngineMask_IMPL(union ENGTYPE_BIT_VECTOR *pPhysicalEngineMask, union ENGTYPE_BIT_VECTOR *pLocalEngineMask);
1057 
1058 #define kmigmgrGetLocalEngineMask(pPhysicalEngineMask, pLocalEngineMask) kmigmgrGetLocalEngineMask_IMPL(pPhysicalEngineMask, pLocalEngineMask)
1059 NV_STATUS kmigmgrAllocGPUInstanceHandles_IMPL(OBJGPU *arg0, NvU32 swizzId, KERNEL_MIG_GPU_INSTANCE *arg1);
1060 
1061 #define kmigmgrAllocGPUInstanceHandles(arg0, swizzId, arg1) kmigmgrAllocGPUInstanceHandles_IMPL(arg0, swizzId, arg1)
1062 void kmigmgrFreeGPUInstanceHandles_IMPL(KERNEL_MIG_GPU_INSTANCE *arg0);
1063 
1064 #define kmigmgrFreeGPUInstanceHandles(arg0) kmigmgrFreeGPUInstanceHandles_IMPL(arg0)
1065 NvBool kmigmgrIsGPUInstanceReadyToBeDestroyed_IMPL(KERNEL_MIG_GPU_INSTANCE *arg0);
1066 
1067 #define kmigmgrIsGPUInstanceReadyToBeDestroyed(arg0) kmigmgrIsGPUInstanceReadyToBeDestroyed_IMPL(arg0)
1068 void kmigmgrDestruct_IMPL(struct KernelMIGManager *arg0);
1069 
1070 #define __nvoc_kmigmgrDestruct(arg0) kmigmgrDestruct_IMPL(arg0)
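/*
 * From here on every interface follows the same pattern: the real *_IMPL
 * prototype, then either a stub that fires NV_ASSERT_FAILED_PRECOMP (when the
 * header is built with __nvoc_kernel_mig_manager_h_disabled) or a macro that
 * forwards directly to the *_IMPL routine.
 */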
1071 void kmigmgrInitRegistryOverrides_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);
1072 
1073 #ifdef __nvoc_kernel_mig_manager_h_disabled
1074 static inline void kmigmgrInitRegistryOverrides(OBJGPU *arg0, struct KernelMIGManager *arg1) {
1075     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1076 }
1077 #else //__nvoc_kernel_mig_manager_h_disabled
1078 #define kmigmgrInitRegistryOverrides(arg0, arg1) kmigmgrInitRegistryOverrides_IMPL(arg0, arg1)
1079 #endif //__nvoc_kernel_mig_manager_h_disabled
1080 
1081 KERNEL_MIG_GPU_INSTANCE *kmigmgrGetMIGGpuInstanceSlot_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 i);
1082 
1083 #ifdef __nvoc_kernel_mig_manager_h_disabled
1084 static inline KERNEL_MIG_GPU_INSTANCE *kmigmgrGetMIGGpuInstanceSlot(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 i) {
1085     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1086     return NULL;
1087 }
1088 #else //__nvoc_kernel_mig_manager_h_disabled
1089 #define kmigmgrGetMIGGpuInstanceSlot(arg0, arg1, i) kmigmgrGetMIGGpuInstanceSlot_IMPL(arg0, arg1, i)
1090 #endif //__nvoc_kernel_mig_manager_h_disabled
1091 
1092 NvBool kmigmgrIsMIGSupported_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);
1093 
1094 #ifdef __nvoc_kernel_mig_manager_h_disabled
1095 static inline NvBool kmigmgrIsMIGSupported(OBJGPU *arg0, struct KernelMIGManager *arg1) {
1096     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1097     return NV_FALSE;
1098 }
1099 #else //__nvoc_kernel_mig_manager_h_disabled
1100 #define kmigmgrIsMIGSupported(arg0, arg1) kmigmgrIsMIGSupported_IMPL(arg0, arg1)
1101 #endif //__nvoc_kernel_mig_manager_h_disabled
1102 
1103 NvBool kmigmgrIsMIGEnabled_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);
1104 
1105 #ifdef __nvoc_kernel_mig_manager_h_disabled
1106 static inline NvBool kmigmgrIsMIGEnabled(OBJGPU *arg0, struct KernelMIGManager *arg1) {
1107     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1108     return NV_FALSE;
1109 }
1110 #else //__nvoc_kernel_mig_manager_h_disabled
1111 #define kmigmgrIsMIGEnabled(arg0, arg1) kmigmgrIsMIGEnabled_IMPL(arg0, arg1)
1112 #endif //__nvoc_kernel_mig_manager_h_disabled
1113 
1114 NvBool kmigmgrIsMIGGpuInstancingEnabled_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);
1115 
1116 #ifdef __nvoc_kernel_mig_manager_h_disabled
1117 static inline NvBool kmigmgrIsMIGGpuInstancingEnabled(OBJGPU *arg0, struct KernelMIGManager *arg1) {
1118     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1119     return NV_FALSE;
1120 }
1121 #else //__nvoc_kernel_mig_manager_h_disabled
1122 #define kmigmgrIsMIGGpuInstancingEnabled(arg0, arg1) kmigmgrIsMIGGpuInstancingEnabled_IMPL(arg0, arg1)
1123 #endif //__nvoc_kernel_mig_manager_h_disabled
1124 
1125 NvBool kmigmgrIsMIGMemPartitioningEnabled_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);
1126 
1127 #ifdef __nvoc_kernel_mig_manager_h_disabled
1128 static inline NvBool kmigmgrIsMIGMemPartitioningEnabled(OBJGPU *arg0, struct KernelMIGManager *arg1) {
1129     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1130     return NV_FALSE;
1131 }
1132 #else //__nvoc_kernel_mig_manager_h_disabled
1133 #define kmigmgrIsMIGMemPartitioningEnabled(arg0, arg1) kmigmgrIsMIGMemPartitioningEnabled_IMPL(arg0, arg1)
1134 #endif //__nvoc_kernel_mig_manager_h_disabled
1135 
1136 const KERNEL_MIG_MANAGER_STATIC_INFO *kmigmgrGetStaticInfo_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);
1137 
1138 #ifdef __nvoc_kernel_mig_manager_h_disabled
1139 static inline const KERNEL_MIG_MANAGER_STATIC_INFO *kmigmgrGetStaticInfo(OBJGPU *arg0, struct KernelMIGManager *arg1) {
1140     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1141     return NULL;
1142 }
1143 #else //__nvoc_kernel_mig_manager_h_disabled
1144 #define kmigmgrGetStaticInfo(arg0, arg1) kmigmgrGetStaticInfo_IMPL(arg0, arg1)
1145 #endif //__nvoc_kernel_mig_manager_h_disabled
1146 
1147 NV_STATUS kmigmgrSaveToPersistence_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);
1148 
1149 #ifdef __nvoc_kernel_mig_manager_h_disabled
1150 static inline NV_STATUS kmigmgrSaveToPersistence(OBJGPU *arg0, struct KernelMIGManager *arg1) {
1151     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1152     return NV_ERR_NOT_SUPPORTED;
1153 }
1154 #else //__nvoc_kernel_mig_manager_h_disabled
1155 #define kmigmgrSaveToPersistence(arg0, arg1) kmigmgrSaveToPersistence_IMPL(arg0, arg1)
1156 #endif //__nvoc_kernel_mig_manager_h_disabled
1157 
1158 NV_STATUS kmigmgrDisableWatchdog_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);
1159 
1160 #ifdef __nvoc_kernel_mig_manager_h_disabled
1161 static inline NV_STATUS kmigmgrDisableWatchdog(OBJGPU *arg0, struct KernelMIGManager *arg1) {
1162     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1163     return NV_ERR_NOT_SUPPORTED;
1164 }
1165 #else //__nvoc_kernel_mig_manager_h_disabled
1166 #define kmigmgrDisableWatchdog(arg0, arg1) kmigmgrDisableWatchdog_IMPL(arg0, arg1)
1167 #endif //__nvoc_kernel_mig_manager_h_disabled
1168 
1169 NV_STATUS kmigmgrRestoreWatchdog_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);
1170 
1171 #ifdef __nvoc_kernel_mig_manager_h_disabled
1172 static inline NV_STATUS kmigmgrRestoreWatchdog(OBJGPU *arg0, struct KernelMIGManager *arg1) {
1173     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1174     return NV_ERR_NOT_SUPPORTED;
1175 }
1176 #else //__nvoc_kernel_mig_manager_h_disabled
1177 #define kmigmgrRestoreWatchdog(arg0, arg1) kmigmgrRestoreWatchdog_IMPL(arg0, arg1)
1178 #endif //__nvoc_kernel_mig_manager_h_disabled
1179 
1180 NV_STATUS kmigmgrSetSwizzIdInUse_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId);
1181 
1182 #ifdef __nvoc_kernel_mig_manager_h_disabled
1183 static inline NV_STATUS kmigmgrSetSwizzIdInUse(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId) {
1184     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1185     return NV_ERR_NOT_SUPPORTED;
1186 }
1187 #else //__nvoc_kernel_mig_manager_h_disabled
1188 #define kmigmgrSetSwizzIdInUse(arg0, arg1, swizzId) kmigmgrSetSwizzIdInUse_IMPL(arg0, arg1, swizzId)
1189 #endif //__nvoc_kernel_mig_manager_h_disabled
1190 
1191 NV_STATUS kmigmgrClearSwizzIdInUse_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId);
1192 
1193 #ifdef __nvoc_kernel_mig_manager_h_disabled
1194 static inline NV_STATUS kmigmgrClearSwizzIdInUse(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId) {
1195     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1196     return NV_ERR_NOT_SUPPORTED;
1197 }
1198 #else //__nvoc_kernel_mig_manager_h_disabled
1199 #define kmigmgrClearSwizzIdInUse(arg0, arg1, swizzId) kmigmgrClearSwizzIdInUse_IMPL(arg0, arg1, swizzId)
1200 #endif //__nvoc_kernel_mig_manager_h_disabled
1201 
1202 NvBool kmigmgrIsSwizzIdInUse_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId);
1203 
1204 #ifdef __nvoc_kernel_mig_manager_h_disabled
1205 static inline NvBool kmigmgrIsSwizzIdInUse(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId) {
1206     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1207     return NV_FALSE;
1208 }
1209 #else //__nvoc_kernel_mig_manager_h_disabled
1210 #define kmigmgrIsSwizzIdInUse(arg0, arg1, swizzId) kmigmgrIsSwizzIdInUse_IMPL(arg0, arg1, swizzId)
1211 #endif //__nvoc_kernel_mig_manager_h_disabled
1212 
1213 NV_STATUS kmigmgrGetInvalidSwizzIdMask_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, NvU64 *pUnsupportedSwizzIdMask);
1214 
1215 #ifdef __nvoc_kernel_mig_manager_h_disabled
1216 static inline NV_STATUS kmigmgrGetInvalidSwizzIdMask(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, NvU64 *pUnsupportedSwizzIdMask) {
1217     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1218     return NV_ERR_NOT_SUPPORTED;
1219 }
1220 #else //__nvoc_kernel_mig_manager_h_disabled
1221 #define kmigmgrGetInvalidSwizzIdMask(arg0, arg1, swizzId, pUnsupportedSwizzIdMask) kmigmgrGetInvalidSwizzIdMask_IMPL(arg0, arg1, swizzId, pUnsupportedSwizzIdMask)
1222 #endif //__nvoc_kernel_mig_manager_h_disabled
1223 
1224 NvBool kmigmgrIsMIGNvlinkP2PSupported_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);
1225 
1226 #ifdef __nvoc_kernel_mig_manager_h_disabled
1227 static inline NvBool kmigmgrIsMIGNvlinkP2PSupported(OBJGPU *arg0, struct KernelMIGManager *arg1) {
1228     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1229     return NV_FALSE;
1230 }
1231 #else //__nvoc_kernel_mig_manager_h_disabled
1232 #define kmigmgrIsMIGNvlinkP2PSupported(arg0, arg1) kmigmgrIsMIGNvlinkP2PSupported_IMPL(arg0, arg1)
1233 #endif //__nvoc_kernel_mig_manager_h_disabled
1234 
1235 NvU64 kmigmgrGetSwizzIdInUseMask_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);
1236 
1237 #ifdef __nvoc_kernel_mig_manager_h_disabled
1238 static inline NvU64 kmigmgrGetSwizzIdInUseMask(OBJGPU *arg0, struct KernelMIGManager *arg1) {
1239     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1240     return 0;
1241 }
1242 #else //__nvoc_kernel_mig_manager_h_disabled
1243 #define kmigmgrGetSwizzIdInUseMask(arg0, arg1) kmigmgrGetSwizzIdInUseMask_IMPL(arg0, arg1)
1244 #endif //__nvoc_kernel_mig_manager_h_disabled
1245 
1246 NV_STATUS kmigmgrSetEnginesInUse_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, union ENGTYPE_BIT_VECTOR *pEngines);
1247 
1248 #ifdef __nvoc_kernel_mig_manager_h_disabled
1249 static inline NV_STATUS kmigmgrSetEnginesInUse(OBJGPU *arg0, struct KernelMIGManager *arg1, union ENGTYPE_BIT_VECTOR *pEngines) {
1250     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1251     return NV_ERR_NOT_SUPPORTED;
1252 }
1253 #else //__nvoc_kernel_mig_manager_h_disabled
1254 #define kmigmgrSetEnginesInUse(arg0, arg1, pEngines) kmigmgrSetEnginesInUse_IMPL(arg0, arg1, pEngines)
1255 #endif //__nvoc_kernel_mig_manager_h_disabled
1256 
1257 NV_STATUS kmigmgrClearEnginesInUse_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, union ENGTYPE_BIT_VECTOR *pEngines);
1258 
1259 #ifdef __nvoc_kernel_mig_manager_h_disabled
1260 static inline NV_STATUS kmigmgrClearEnginesInUse(OBJGPU *arg0, struct KernelMIGManager *arg1, union ENGTYPE_BIT_VECTOR *pEngines) {
1261     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1262     return NV_ERR_NOT_SUPPORTED;
1263 }
1264 #else //__nvoc_kernel_mig_manager_h_disabled
1265 #define kmigmgrClearEnginesInUse(arg0, arg1, pEngines) kmigmgrClearEnginesInUse_IMPL(arg0, arg1, pEngines)
1266 #endif //__nvoc_kernel_mig_manager_h_disabled
1267 
1268 NvBool kmigmgrIsEngineInUse_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, RM_ENGINE_TYPE rmEngineType);
1269 
1270 #ifdef __nvoc_kernel_mig_manager_h_disabled
1271 static inline NvBool kmigmgrIsEngineInUse(OBJGPU *arg0, struct KernelMIGManager *arg1, RM_ENGINE_TYPE rmEngineType) {
1272     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1273     return NV_FALSE;
1274 }
1275 #else //__nvoc_kernel_mig_manager_h_disabled
1276 #define kmigmgrIsEngineInUse(arg0, arg1, rmEngineType) kmigmgrIsEngineInUse_IMPL(arg0, arg1, rmEngineType)
1277 #endif //__nvoc_kernel_mig_manager_h_disabled
1278 
1279 NvBool kmigmgrIsEnginePartitionable_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, RM_ENGINE_TYPE rmEngineType);
1280 
1281 #ifdef __nvoc_kernel_mig_manager_h_disabled
1282 static inline NvBool kmigmgrIsEnginePartitionable(OBJGPU *arg0, struct KernelMIGManager *arg1, RM_ENGINE_TYPE rmEngineType) {
1283     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1284     return NV_FALSE;
1285 }
1286 #else //__nvoc_kernel_mig_manager_h_disabled
1287 #define kmigmgrIsEnginePartitionable(arg0, arg1, rmEngineType) kmigmgrIsEnginePartitionable_IMPL(arg0, arg1, rmEngineType)
1288 #endif //__nvoc_kernel_mig_manager_h_disabled
1289 
1290 NvBool kmigmgrIsEngineInInstance_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, RM_ENGINE_TYPE globalRmEngType, struct MIG_INSTANCE_REF arg2);
1291 
1292 #ifdef __nvoc_kernel_mig_manager_h_disabled
1293 static inline NvBool kmigmgrIsEngineInInstance(OBJGPU *arg0, struct KernelMIGManager *arg1, RM_ENGINE_TYPE globalRmEngType, struct MIG_INSTANCE_REF arg2) {
1294     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1295     return NV_FALSE;
1296 }
1297 #else //__nvoc_kernel_mig_manager_h_disabled
1298 #define kmigmgrIsEngineInInstance(arg0, arg1, globalRmEngType, arg2) kmigmgrIsEngineInInstance_IMPL(arg0, arg1, globalRmEngType, arg2)
1299 #endif //__nvoc_kernel_mig_manager_h_disabled
1300 
1301 NvBool kmigmgrIsLocalEngineInInstance_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, RM_ENGINE_TYPE localRmEngType, struct MIG_INSTANCE_REF arg2);
1302 
1303 #ifdef __nvoc_kernel_mig_manager_h_disabled
1304 static inline NvBool kmigmgrIsLocalEngineInInstance(OBJGPU *arg0, struct KernelMIGManager *arg1, RM_ENGINE_TYPE localRmEngType, struct MIG_INSTANCE_REF arg2) {
1305     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1306     return NV_FALSE;
1307 }
1308 #else //__nvoc_kernel_mig_manager_h_disabled
1309 #define kmigmgrIsLocalEngineInInstance(arg0, arg1, localRmEngType, arg2) kmigmgrIsLocalEngineInInstance_IMPL(arg0, arg1, localRmEngType, arg2)
1310 #endif //__nvoc_kernel_mig_manager_h_disabled
1311 
1312 NV_STATUS kmigmgrCreateGPUInstance_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, NvU8 *pUuid, KMIGMGR_CREATE_GPU_INSTANCE_PARAMS arg2, NvBool bValid, NvBool bCreateCap);
1313 
1314 #ifdef __nvoc_kernel_mig_manager_h_disabled
1315 static inline NV_STATUS kmigmgrCreateGPUInstance(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, NvU8 *pUuid, KMIGMGR_CREATE_GPU_INSTANCE_PARAMS arg2, NvBool bValid, NvBool bCreateCap) {
1316     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1317     return NV_ERR_NOT_SUPPORTED;
1318 }
1319 #else //__nvoc_kernel_mig_manager_h_disabled
1320 #define kmigmgrCreateGPUInstance(arg0, arg1, swizzId, pUuid, arg2, bValid, bCreateCap) kmigmgrCreateGPUInstance_IMPL(arg0, arg1, swizzId, pUuid, arg2, bValid, bCreateCap)
1321 #endif //__nvoc_kernel_mig_manager_h_disabled
1322 
1323 NV_STATUS kmigmgrInvalidateGPUInstance_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, NvBool bUnload);
1324 
1325 #ifdef __nvoc_kernel_mig_manager_h_disabled
1326 static inline NV_STATUS kmigmgrInvalidateGPUInstance(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, NvBool bUnload) {
1327     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1328     return NV_ERR_NOT_SUPPORTED;
1329 }
1330 #else //__nvoc_kernel_mig_manager_h_disabled
1331 #define kmigmgrInvalidateGPUInstance(arg0, arg1, swizzId, bUnload) kmigmgrInvalidateGPUInstance_IMPL(arg0, arg1, swizzId, bUnload)
1332 #endif //__nvoc_kernel_mig_manager_h_disabled
1333 
1334 NV_STATUS kmigmgrInitGPUInstanceScrubber_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2);
1335 
1336 #ifdef __nvoc_kernel_mig_manager_h_disabled
1337 static inline NV_STATUS kmigmgrInitGPUInstanceScrubber(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2) {
1338     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1339     return NV_ERR_NOT_SUPPORTED;
1340 }
1341 #else //__nvoc_kernel_mig_manager_h_disabled
1342 #define kmigmgrInitGPUInstanceScrubber(arg0, arg1, arg2) kmigmgrInitGPUInstanceScrubber_IMPL(arg0, arg1, arg2)
1343 #endif //__nvoc_kernel_mig_manager_h_disabled
1344 
1345 void kmigmgrDestroyGPUInstanceScrubber_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2);
1346 
1347 #ifdef __nvoc_kernel_mig_manager_h_disabled
1348 static inline void kmigmgrDestroyGPUInstanceScrubber(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2) {
1349     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1350 }
1351 #else //__nvoc_kernel_mig_manager_h_disabled
1352 #define kmigmgrDestroyGPUInstanceScrubber(arg0, arg1, arg2) kmigmgrDestroyGPUInstanceScrubber_IMPL(arg0, arg1, arg2)
1353 #endif //__nvoc_kernel_mig_manager_h_disabled
1354 
1355 NV_STATUS kmigmgrInitGPUInstanceBufPools_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2);
1356 
1357 #ifdef __nvoc_kernel_mig_manager_h_disabled
1358 static inline NV_STATUS kmigmgrInitGPUInstanceBufPools(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2) {
1359     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1360     return NV_ERR_NOT_SUPPORTED;
1361 }
1362 #else //__nvoc_kernel_mig_manager_h_disabled
1363 #define kmigmgrInitGPUInstanceBufPools(arg0, arg1, arg2) kmigmgrInitGPUInstanceBufPools_IMPL(arg0, arg1, arg2)
1364 #endif //__nvoc_kernel_mig_manager_h_disabled
1365 
1366 NV_STATUS kmigmgrInitGPUInstanceGrBufPools_IMPL(OBJGPU *pGpu, struct KernelMIGManager *arg0, KERNEL_MIG_GPU_INSTANCE *arg1);
1367 
1368 #ifdef __nvoc_kernel_mig_manager_h_disabled
1369 static inline NV_STATUS kmigmgrInitGPUInstanceGrBufPools(OBJGPU *pGpu, struct KernelMIGManager *arg0, KERNEL_MIG_GPU_INSTANCE *arg1) {
1370     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1371     return NV_ERR_NOT_SUPPORTED;
1372 }
1373 #else //__nvoc_kernel_mig_manager_h_disabled
1374 #define kmigmgrInitGPUInstanceGrBufPools(pGpu, arg0, arg1) kmigmgrInitGPUInstanceGrBufPools_IMPL(pGpu, arg0, arg1)
1375 #endif //__nvoc_kernel_mig_manager_h_disabled
1376 
1377 void kmigmgrDestroyGPUInstanceGrBufPools_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2);
1378 
1379 #ifdef __nvoc_kernel_mig_manager_h_disabled
1380 static inline void kmigmgrDestroyGPUInstanceGrBufPools(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2) {
1381     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1382 }
1383 #else //__nvoc_kernel_mig_manager_h_disabled
1384 #define kmigmgrDestroyGPUInstanceGrBufPools(arg0, arg1, arg2) kmigmgrDestroyGPUInstanceGrBufPools_IMPL(arg0, arg1, arg2)
1385 #endif //__nvoc_kernel_mig_manager_h_disabled
1386 
1387 NV_STATUS kmigmgrInitGPUInstancePool_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2);
1388 
1389 #ifdef __nvoc_kernel_mig_manager_h_disabled
1390 static inline NV_STATUS kmigmgrInitGPUInstancePool(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2) {
1391     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1392     return NV_ERR_NOT_SUPPORTED;
1393 }
1394 #else //__nvoc_kernel_mig_manager_h_disabled
1395 #define kmigmgrInitGPUInstancePool(arg0, arg1, arg2) kmigmgrInitGPUInstancePool_IMPL(arg0, arg1, arg2)
1396 #endif //__nvoc_kernel_mig_manager_h_disabled
1397 
1398 void kmigmgrDestroyGPUInstancePool_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2);
1399 
1400 #ifdef __nvoc_kernel_mig_manager_h_disabled
1401 static inline void kmigmgrDestroyGPUInstancePool(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2) {
1402     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1403 }
1404 #else //__nvoc_kernel_mig_manager_h_disabled
1405 #define kmigmgrDestroyGPUInstancePool(arg0, arg1, arg2) kmigmgrDestroyGPUInstancePool_IMPL(arg0, arg1, arg2)
1406 #endif //__nvoc_kernel_mig_manager_h_disabled
1407 
1408 NV_STATUS kmigmgrInitGPUInstanceRunlistBufPools_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2);
1409 
1410 #ifdef __nvoc_kernel_mig_manager_h_disabled
1411 static inline NV_STATUS kmigmgrInitGPUInstanceRunlistBufPools(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2) {
1412     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1413     return NV_ERR_NOT_SUPPORTED;
1414 }
1415 #else //__nvoc_kernel_mig_manager_h_disabled
1416 #define kmigmgrInitGPUInstanceRunlistBufPools(arg0, arg1, arg2) kmigmgrInitGPUInstanceRunlistBufPools_IMPL(arg0, arg1, arg2)
1417 #endif //__nvoc_kernel_mig_manager_h_disabled
1418 
1419 void kmigmgrDestroyGPUInstanceRunlistBufPools_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2);
1420 
1421 #ifdef __nvoc_kernel_mig_manager_h_disabled
1422 static inline void kmigmgrDestroyGPUInstanceRunlistBufPools(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2) {
1423     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1424 }
1425 #else //__nvoc_kernel_mig_manager_h_disabled
1426 #define kmigmgrDestroyGPUInstanceRunlistBufPools(arg0, arg1, arg2) kmigmgrDestroyGPUInstanceRunlistBufPools_IMPL(arg0, arg1, arg2)
1427 #endif //__nvoc_kernel_mig_manager_h_disabled
1428 
1429 void kmigmgrPrintSubscribingClients_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId);
1430 
1431 #ifdef __nvoc_kernel_mig_manager_h_disabled
1432 static inline void kmigmgrPrintSubscribingClients(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId) {
1433     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1434 }
1435 #else //__nvoc_kernel_mig_manager_h_disabled
1436 #define kmigmgrPrintSubscribingClients(arg0, arg1, swizzId) kmigmgrPrintSubscribingClients_IMPL(arg0, arg1, swizzId)
1437 #endif //__nvoc_kernel_mig_manager_h_disabled
1438 
1439 void kmigmgrInitGPUInstanceInfo_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2);
1440 
1441 #ifdef __nvoc_kernel_mig_manager_h_disabled
1442 static inline void kmigmgrInitGPUInstanceInfo(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2) {
1443     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1444 }
1445 #else //__nvoc_kernel_mig_manager_h_disabled
1446 #define kmigmgrInitGPUInstanceInfo(arg0, arg1, arg2) kmigmgrInitGPUInstanceInfo_IMPL(arg0, arg1, arg2)
1447 #endif //__nvoc_kernel_mig_manager_h_disabled
1448 
1449 void kmigmgrTrimInstanceRunlistBufPools_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2);
1450 
1451 #ifdef __nvoc_kernel_mig_manager_h_disabled
1452 static inline void kmigmgrTrimInstanceRunlistBufPools(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2) {
1453     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1454 }
1455 #else //__nvoc_kernel_mig_manager_h_disabled
1456 #define kmigmgrTrimInstanceRunlistBufPools(arg0, arg1, arg2) kmigmgrTrimInstanceRunlistBufPools_IMPL(arg0, arg1, arg2)
1457 #endif //__nvoc_kernel_mig_manager_h_disabled
1458 
1459 NV_STATUS kmigmgrSetDeviceProfilingInUse_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);
1460 
1461 #ifdef __nvoc_kernel_mig_manager_h_disabled
1462 static inline NV_STATUS kmigmgrSetDeviceProfilingInUse(OBJGPU *arg0, struct KernelMIGManager *arg1) {
1463     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1464     return NV_ERR_NOT_SUPPORTED;
1465 }
1466 #else //__nvoc_kernel_mig_manager_h_disabled
1467 #define kmigmgrSetDeviceProfilingInUse(arg0, arg1) kmigmgrSetDeviceProfilingInUse_IMPL(arg0, arg1)
1468 #endif //__nvoc_kernel_mig_manager_h_disabled
1469 
1470 void kmigmgrClearDeviceProfilingInUse_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);
1471 
1472 #ifdef __nvoc_kernel_mig_manager_h_disabled
1473 static inline void kmigmgrClearDeviceProfilingInUse(OBJGPU *arg0, struct KernelMIGManager *arg1) {
1474     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1475 }
1476 #else //__nvoc_kernel_mig_manager_h_disabled
1477 #define kmigmgrClearDeviceProfilingInUse(arg0, arg1) kmigmgrClearDeviceProfilingInUse_IMPL(arg0, arg1)
1478 #endif //__nvoc_kernel_mig_manager_h_disabled
1479 
1480 NvBool kmigmgrIsDeviceProfilingInUse_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);
1481 
1482 #ifdef __nvoc_kernel_mig_manager_h_disabled
1483 static inline NvBool kmigmgrIsDeviceProfilingInUse(OBJGPU *arg0, struct KernelMIGManager *arg1) {
1484     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1485     return NV_FALSE;
1486 }
1487 #else //__nvoc_kernel_mig_manager_h_disabled
1488 #define kmigmgrIsDeviceProfilingInUse(arg0, arg1) kmigmgrIsDeviceProfilingInUse_IMPL(arg0, arg1)
1489 #endif //__nvoc_kernel_mig_manager_h_disabled
1490 
1491 NvBool kmigmgrIsDeviceUsingDeviceProfiling_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, Device *pDevice);
1492 
1493 #ifdef __nvoc_kernel_mig_manager_h_disabled
1494 static inline NvBool kmigmgrIsDeviceUsingDeviceProfiling(OBJGPU *arg0, struct KernelMIGManager *arg1, Device *pDevice) {
1495     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1496     return NV_FALSE;
1497 }
1498 #else //__nvoc_kernel_mig_manager_h_disabled
1499 #define kmigmgrIsDeviceUsingDeviceProfiling(arg0, arg1, pDevice) kmigmgrIsDeviceUsingDeviceProfiling_IMPL(arg0, arg1, pDevice)
1500 #endif //__nvoc_kernel_mig_manager_h_disabled
1501 
1502 NV_STATUS kmigmgrEnableAllLCEs_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvBool bEnableAllLCEs);
1503 
1504 #ifdef __nvoc_kernel_mig_manager_h_disabled
1505 static inline NV_STATUS kmigmgrEnableAllLCEs(OBJGPU *arg0, struct KernelMIGManager *arg1, NvBool bEnableAllLCEs) {
1506     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1507     return NV_ERR_NOT_SUPPORTED;
1508 }
1509 #else //__nvoc_kernel_mig_manager_h_disabled
1510 #define kmigmgrEnableAllLCEs(arg0, arg1, bEnableAllLCEs) kmigmgrEnableAllLCEs_IMPL(arg0, arg1, bEnableAllLCEs)
1511 #endif //__nvoc_kernel_mig_manager_h_disabled
1512 
1513 NV_STATUS kmigmgrGetInstanceRefFromDevice_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, Device *arg2, struct MIG_INSTANCE_REF *arg3);
1514 
1515 #ifdef __nvoc_kernel_mig_manager_h_disabled
1516 static inline NV_STATUS kmigmgrGetInstanceRefFromDevice(OBJGPU *arg0, struct KernelMIGManager *arg1, Device *arg2, struct MIG_INSTANCE_REF *arg3) {
1517     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1518     return NV_ERR_NOT_SUPPORTED;
1519 }
1520 #else //__nvoc_kernel_mig_manager_h_disabled
1521 #define kmigmgrGetInstanceRefFromDevice(arg0, arg1, arg2, arg3) kmigmgrGetInstanceRefFromDevice_IMPL(arg0, arg1, arg2, arg3)
1522 #endif //__nvoc_kernel_mig_manager_h_disabled
1523 
1524 NV_STATUS kmigmgrGetMemoryPartitionHeapFromDevice_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, Device *arg2, struct Heap **arg3);
1525 
1526 #ifdef __nvoc_kernel_mig_manager_h_disabled
1527 static inline NV_STATUS kmigmgrGetMemoryPartitionHeapFromDevice(OBJGPU *arg0, struct KernelMIGManager *arg1, Device *arg2, struct Heap **arg3) {
1528     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1529     return NV_ERR_NOT_SUPPORTED;
1530 }
1531 #else //__nvoc_kernel_mig_manager_h_disabled
1532 #define kmigmgrGetMemoryPartitionHeapFromDevice(arg0, arg1, arg2, arg3) kmigmgrGetMemoryPartitionHeapFromDevice_IMPL(arg0, arg1, arg2, arg3)
1533 #endif //__nvoc_kernel_mig_manager_h_disabled
1534 
1535 NV_STATUS kmigmgrGetSwizzIdFromDevice_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, Device *pDevice, NvU32 *pSwizzId);
1536 
1537 #ifdef __nvoc_kernel_mig_manager_h_disabled
1538 static inline NV_STATUS kmigmgrGetSwizzIdFromDevice(OBJGPU *arg0, struct KernelMIGManager *arg1, Device *pDevice, NvU32 *pSwizzId) {
1539     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1540     return NV_ERR_NOT_SUPPORTED;
1541 }
1542 #else //__nvoc_kernel_mig_manager_h_disabled
1543 #define kmigmgrGetSwizzIdFromDevice(arg0, arg1, pDevice, pSwizzId) kmigmgrGetSwizzIdFromDevice_IMPL(arg0, arg1, pDevice, pSwizzId)
1544 #endif //__nvoc_kernel_mig_manager_h_disabled
1545 
1546 void kmigmgrPrintGPUInstanceInfo_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2);
1547 
1548 #ifdef __nvoc_kernel_mig_manager_h_disabled
1549 static inline void kmigmgrPrintGPUInstanceInfo(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2) {
1550     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1551 }
1552 #else //__nvoc_kernel_mig_manager_h_disabled
1553 #define kmigmgrPrintGPUInstanceInfo(arg0, arg1, arg2) kmigmgrPrintGPUInstanceInfo_IMPL(arg0, arg1, arg2)
1554 #endif //__nvoc_kernel_mig_manager_h_disabled
1555 
1556 NV_STATUS kmigmgrSetGPUInstanceInfo_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, NvU8 *pUuid, KMIGMGR_CREATE_GPU_INSTANCE_PARAMS arg2);
1557 
1558 #ifdef __nvoc_kernel_mig_manager_h_disabled
1559 static inline NV_STATUS kmigmgrSetGPUInstanceInfo(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, NvU8 *pUuid, KMIGMGR_CREATE_GPU_INSTANCE_PARAMS arg2) {
1560     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1561     return NV_ERR_NOT_SUPPORTED;
1562 }
1563 #else //__nvoc_kernel_mig_manager_h_disabled
1564 #define kmigmgrSetGPUInstanceInfo(arg0, arg1, swizzId, pUuid, arg2) kmigmgrSetGPUInstanceInfo_IMPL(arg0, arg1, swizzId, pUuid, arg2)
1565 #endif //__nvoc_kernel_mig_manager_h_disabled
1566 
1567 NV_STATUS kmigmgrGetGPUInstanceInfo_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, KERNEL_MIG_GPU_INSTANCE **arg2);
1568 
1569 #ifdef __nvoc_kernel_mig_manager_h_disabled
1570 static inline NV_STATUS kmigmgrGetGPUInstanceInfo(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, KERNEL_MIG_GPU_INSTANCE **arg2) {
1571     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1572     return NV_ERR_NOT_SUPPORTED;
1573 }
1574 #else //__nvoc_kernel_mig_manager_h_disabled
1575 #define kmigmgrGetGPUInstanceInfo(arg0, arg1, swizzId, arg2) kmigmgrGetGPUInstanceInfo_IMPL(arg0, arg1, swizzId, arg2)
1576 #endif //__nvoc_kernel_mig_manager_h_disabled
1577 
1578 NV_STATUS kmigmgrGetLocalToGlobalEngineType_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, struct MIG_INSTANCE_REF arg2, RM_ENGINE_TYPE localEngType, RM_ENGINE_TYPE *pGlobalEngType);
1579 
1580 #ifdef __nvoc_kernel_mig_manager_h_disabled
1581 static inline NV_STATUS kmigmgrGetLocalToGlobalEngineType(OBJGPU *arg0, struct KernelMIGManager *arg1, struct MIG_INSTANCE_REF arg2, RM_ENGINE_TYPE localEngType, RM_ENGINE_TYPE *pGlobalEngType) {
1582     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1583     return NV_ERR_NOT_SUPPORTED;
1584 }
1585 #else //__nvoc_kernel_mig_manager_h_disabled
1586 #define kmigmgrGetLocalToGlobalEngineType(arg0, arg1, arg2, localEngType, pGlobalEngType) kmigmgrGetLocalToGlobalEngineType_IMPL(arg0, arg1, arg2, localEngType, pGlobalEngType)
1587 #endif //__nvoc_kernel_mig_manager_h_disabled
1588 
1589 NV_STATUS kmigmgrGetGlobalToLocalEngineType_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, struct MIG_INSTANCE_REF arg2, RM_ENGINE_TYPE globalEngType, RM_ENGINE_TYPE *pLocalEngType);
1590 
1591 #ifdef __nvoc_kernel_mig_manager_h_disabled
1592 static inline NV_STATUS kmigmgrGetGlobalToLocalEngineType(OBJGPU *arg0, struct KernelMIGManager *arg1, struct MIG_INSTANCE_REF arg2, RM_ENGINE_TYPE globalEngType, RM_ENGINE_TYPE *pLocalEngType) {
1593     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1594     return NV_ERR_NOT_SUPPORTED;
1595 }
1596 #else //__nvoc_kernel_mig_manager_h_disabled
1597 #define kmigmgrGetGlobalToLocalEngineType(arg0, arg1, arg2, globalEngType, pLocalEngType) kmigmgrGetGlobalToLocalEngineType_IMPL(arg0, arg1, arg2, globalEngType, pLocalEngType)
1598 #endif //__nvoc_kernel_mig_manager_h_disabled
1599 
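/*
 * Illustrative usage sketch (not part of the generated API itself): a caller
 * holding a client Device would typically resolve its MIG instance reference
 * and then translate a global engine type into the instance-local one. The
 * variable names here (pDevice, globalEngType, localEngType) are hypothetical.
 *
 *     struct MIG_INSTANCE_REF ref;
 *     RM_ENGINE_TYPE localEngType;
 *     if (kmigmgrGetInstanceRefFromDevice(pGpu, pKernelMIGManager, pDevice, &ref) == NV_OK)
 *     {
 *         NV_STATUS status = kmigmgrGetGlobalToLocalEngineType(pGpu, pKernelMIGManager, ref,
 *                                                              globalEngType, &localEngType);
 *     }
 */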
1600 NV_STATUS kmigmgrFilterEngineList_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, struct Subdevice *arg2, RM_ENGINE_TYPE *pEngineTypes, NvU32 *pEngineCount);
1601 
1602 #ifdef __nvoc_kernel_mig_manager_h_disabled
1603 static inline NV_STATUS kmigmgrFilterEngineList(OBJGPU *arg0, struct KernelMIGManager *arg1, struct Subdevice *arg2, RM_ENGINE_TYPE *pEngineTypes, NvU32 *pEngineCount) {
1604     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1605     return NV_ERR_NOT_SUPPORTED;
1606 }
1607 #else //__nvoc_kernel_mig_manager_h_disabled
1608 #define kmigmgrFilterEngineList(arg0, arg1, arg2, pEngineTypes, pEngineCount) kmigmgrFilterEngineList_IMPL(arg0, arg1, arg2, pEngineTypes, pEngineCount)
1609 #endif //__nvoc_kernel_mig_manager_h_disabled
1610 
1611 NV_STATUS kmigmgrFilterEnginePartnerList_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, struct Subdevice *arg2, NV2080_CTRL_GPU_GET_ENGINE_PARTNERLIST_PARAMS *arg3);
1612 
1613 #ifdef __nvoc_kernel_mig_manager_h_disabled
1614 static inline NV_STATUS kmigmgrFilterEnginePartnerList(OBJGPU *arg0, struct KernelMIGManager *arg1, struct Subdevice *arg2, NV2080_CTRL_GPU_GET_ENGINE_PARTNERLIST_PARAMS *arg3) {
1615     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1616     return NV_ERR_NOT_SUPPORTED;
1617 }
1618 #else //__nvoc_kernel_mig_manager_h_disabled
1619 #define kmigmgrFilterEnginePartnerList(arg0, arg1, arg2, arg3) kmigmgrFilterEnginePartnerList_IMPL(arg0, arg1, arg2, arg3)
1620 #endif //__nvoc_kernel_mig_manager_h_disabled
1621 
1622 NV_STATUS kmigmgrGetProfileByPartitionFlag_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 partitionFlag, const NV2080_CTRL_INTERNAL_MIGMGR_PROFILE_INFO **arg2);
1623 
1624 #ifdef __nvoc_kernel_mig_manager_h_disabled
1625 static inline NV_STATUS kmigmgrGetProfileByPartitionFlag(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 partitionFlag, const NV2080_CTRL_INTERNAL_MIGMGR_PROFILE_INFO **arg2) {
1626     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1627     return NV_ERR_NOT_SUPPORTED;
1628 }
1629 #else //__nvoc_kernel_mig_manager_h_disabled
1630 #define kmigmgrGetProfileByPartitionFlag(arg0, arg1, partitionFlag, arg2) kmigmgrGetProfileByPartitionFlag_IMPL(arg0, arg1, partitionFlag, arg2)
1631 #endif //__nvoc_kernel_mig_manager_h_disabled
1632 
1633 NV_STATUS kmigmgrSaveComputeInstances_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, GPUMGR_SAVE_COMPUTE_INSTANCE *arg3);
1634 
1635 #ifdef __nvoc_kernel_mig_manager_h_disabled
1636 static inline NV_STATUS kmigmgrSaveComputeInstances(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, GPUMGR_SAVE_COMPUTE_INSTANCE *arg3) {
1637     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1638     return NV_ERR_NOT_SUPPORTED;
1639 }
1640 #else //__nvoc_kernel_mig_manager_h_disabled
1641 #define kmigmgrSaveComputeInstances(arg0, arg1, arg2, arg3) kmigmgrSaveComputeInstances_IMPL(arg0, arg1, arg2, arg3)
1642 #endif //__nvoc_kernel_mig_manager_h_disabled
1643 
1644 NV_STATUS kmigmgrSetPartitioningMode_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);
1645 
1646 #ifdef __nvoc_kernel_mig_manager_h_disabled
1647 static inline NV_STATUS kmigmgrSetPartitioningMode(OBJGPU *arg0, struct KernelMIGManager *arg1) {
1648     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1649     return NV_ERR_NOT_SUPPORTED;
1650 }
1651 #else //__nvoc_kernel_mig_manager_h_disabled
1652 #define kmigmgrSetPartitioningMode(arg0, arg1) kmigmgrSetPartitioningMode_IMPL(arg0, arg1)
1653 #endif //__nvoc_kernel_mig_manager_h_disabled
1654 
1655 NV_STATUS kmigmgrGetMIGReferenceFromEngineType_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, RM_ENGINE_TYPE rmEngineType, struct MIG_INSTANCE_REF *arg2);
1656 
1657 #ifdef __nvoc_kernel_mig_manager_h_disabled
1658 static inline NV_STATUS kmigmgrGetMIGReferenceFromEngineType(OBJGPU *arg0, struct KernelMIGManager *arg1, RM_ENGINE_TYPE rmEngineType, struct MIG_INSTANCE_REF *arg2) {
1659     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1660     return NV_ERR_NOT_SUPPORTED;
1661 }
1662 #else //__nvoc_kernel_mig_manager_h_disabled
1663 #define kmigmgrGetMIGReferenceFromEngineType(arg0, arg1, rmEngineType, arg2) kmigmgrGetMIGReferenceFromEngineType_IMPL(arg0, arg1, rmEngineType, arg2)
1664 #endif //__nvoc_kernel_mig_manager_h_disabled
1665 
1666 NV_STATUS kmigmgrGetSmallestGpuInstanceSize_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 *pComputeSizeFlag);
1667 
1668 #ifdef __nvoc_kernel_mig_manager_h_disabled
1669 static inline NV_STATUS kmigmgrGetSmallestGpuInstanceSize(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 *pComputeSizeFlag) {
1670     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1671     return NV_ERR_NOT_SUPPORTED;
1672 }
1673 #else //__nvoc_kernel_mig_manager_h_disabled
1674 #define kmigmgrGetSmallestGpuInstanceSize(arg0, arg1, pComputeSizeFlag) kmigmgrGetSmallestGpuInstanceSize_IMPL(arg0, arg1, pComputeSizeFlag)
1675 #endif //__nvoc_kernel_mig_manager_h_disabled
1676 
1677 NV_STATUS kmigmgrGetGPUInstanceScrubberCe_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, Device *pDevice, NvU32 *ceInst);
1678 
1679 #ifdef __nvoc_kernel_mig_manager_h_disabled
1680 static inline NV_STATUS kmigmgrGetGPUInstanceScrubberCe(OBJGPU *arg0, struct KernelMIGManager *arg1, Device *pDevice, NvU32 *ceInst) {
1681     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1682     return NV_ERR_NOT_SUPPORTED;
1683 }
1684 #else //__nvoc_kernel_mig_manager_h_disabled
1685 #define kmigmgrGetGPUInstanceScrubberCe(arg0, arg1, pDevice, ceInst) kmigmgrGetGPUInstanceScrubberCe_IMPL(arg0, arg1, pDevice, ceInst)
1686 #endif //__nvoc_kernel_mig_manager_h_disabled
1687 
1688 NV_STATUS kmigmgrDescribeGPUInstances_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NV2080_CTRL_GPU_DESCRIBE_PARTITIONS_PARAMS *arg2);
1689 
1690 #ifdef __nvoc_kernel_mig_manager_h_disabled
1691 static inline NV_STATUS kmigmgrDescribeGPUInstances(OBJGPU *arg0, struct KernelMIGManager *arg1, NV2080_CTRL_GPU_DESCRIBE_PARTITIONS_PARAMS *arg2) {
1692     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1693     return NV_ERR_NOT_SUPPORTED;
1694 }
1695 #else //__nvoc_kernel_mig_manager_h_disabled
1696 #define kmigmgrDescribeGPUInstances(arg0, arg1, arg2) kmigmgrDescribeGPUInstances_IMPL(arg0, arg1, arg2)
1697 #endif //__nvoc_kernel_mig_manager_h_disabled
1698 
1699 NV_STATUS kmigmgrSwizzIdToResourceAllocation_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, KMIGMGR_CREATE_GPU_INSTANCE_PARAMS arg2, KERNEL_MIG_GPU_INSTANCE *arg3, MIG_RESOURCE_ALLOCATION *arg4);
1700 
1701 #ifdef __nvoc_kernel_mig_manager_h_disabled
1702 static inline NV_STATUS kmigmgrSwizzIdToResourceAllocation(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, KMIGMGR_CREATE_GPU_INSTANCE_PARAMS arg2, KERNEL_MIG_GPU_INSTANCE *arg3, MIG_RESOURCE_ALLOCATION *arg4) {
1703     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1704     return NV_ERR_NOT_SUPPORTED;
1705 }
1706 #else //__nvoc_kernel_mig_manager_h_disabled
1707 #define kmigmgrSwizzIdToResourceAllocation(arg0, arg1, swizzId, arg2, arg3, arg4) kmigmgrSwizzIdToResourceAllocation_IMPL(arg0, arg1, swizzId, arg2, arg3, arg4)
1708 #endif //__nvoc_kernel_mig_manager_h_disabled
1709 
1710 NV_STATUS kmigmgrAllocComputeInstanceHandles_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, MIG_COMPUTE_INSTANCE *arg3);
1711 
1712 #ifdef __nvoc_kernel_mig_manager_h_disabled
1713 static inline NV_STATUS kmigmgrAllocComputeInstanceHandles(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, MIG_COMPUTE_INSTANCE *arg3) {
1714     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1715     return NV_ERR_NOT_SUPPORTED;
1716 }
1717 #else //__nvoc_kernel_mig_manager_h_disabled
1718 #define kmigmgrAllocComputeInstanceHandles(arg0, arg1, arg2, arg3) kmigmgrAllocComputeInstanceHandles_IMPL(arg0, arg1, arg2, arg3)
1719 #endif //__nvoc_kernel_mig_manager_h_disabled
1720 
1721 void kmigmgrFreeComputeInstanceHandles_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, MIG_COMPUTE_INSTANCE *arg3);
1722 
1723 #ifdef __nvoc_kernel_mig_manager_h_disabled
1724 static inline void kmigmgrFreeComputeInstanceHandles(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, MIG_COMPUTE_INSTANCE *arg3) {
1725     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1726 }
1727 #else //__nvoc_kernel_mig_manager_h_disabled
1728 #define kmigmgrFreeComputeInstanceHandles(arg0, arg1, arg2, arg3) kmigmgrFreeComputeInstanceHandles_IMPL(arg0, arg1, arg2, arg3)
1729 #endif //__nvoc_kernel_mig_manager_h_disabled
1730 
1731 void kmigmgrReleaseComputeInstanceEngines_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, MIG_COMPUTE_INSTANCE *arg3);
1732 
1733 #ifdef __nvoc_kernel_mig_manager_h_disabled
1734 static inline void kmigmgrReleaseComputeInstanceEngines(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, MIG_COMPUTE_INSTANCE *arg3) {
1735     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1736 }
1737 #else //__nvoc_kernel_mig_manager_h_disabled
1738 #define kmigmgrReleaseComputeInstanceEngines(arg0, arg1, arg2, arg3) kmigmgrReleaseComputeInstanceEngines_IMPL(arg0, arg1, arg2, arg3)
1739 #endif //__nvoc_kernel_mig_manager_h_disabled
1740 
1741 NV_STATUS kmigmgrDeleteComputeInstance_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, NvU32 CIId, NvBool bUnload);
1742 
1743 #ifdef __nvoc_kernel_mig_manager_h_disabled
1744 static inline NV_STATUS kmigmgrDeleteComputeInstance(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, NvU32 CIId, NvBool bUnload) {
1745     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1746     return NV_ERR_NOT_SUPPORTED;
1747 }
1748 #else //__nvoc_kernel_mig_manager_h_disabled
1749 #define kmigmgrDeleteComputeInstance(arg0, arg1, arg2, CIId, bUnload) kmigmgrDeleteComputeInstance_IMPL(arg0, arg1, arg2, CIId, bUnload)
1750 #endif //__nvoc_kernel_mig_manager_h_disabled
1751 
1752 NV_STATUS kmigmgrConfigureGPUInstance_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, const KMIGMGR_CONFIGURE_INSTANCE_REQUEST *pConfigRequestPerCi, NvU32 updateEngMask);
1753 
1754 #ifdef __nvoc_kernel_mig_manager_h_disabled
1755 static inline NV_STATUS kmigmgrConfigureGPUInstance(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 swizzId, const KMIGMGR_CONFIGURE_INSTANCE_REQUEST *pConfigRequestPerCi, NvU32 updateEngMask) {
1756     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1757     return NV_ERR_NOT_SUPPORTED;
1758 }
1759 #else //__nvoc_kernel_mig_manager_h_disabled
1760 #define kmigmgrConfigureGPUInstance(arg0, arg1, swizzId, pConfigRequestPerCi, updateEngMask) kmigmgrConfigureGPUInstance_IMPL(arg0, arg1, swizzId, pConfigRequestPerCi, updateEngMask)
1761 #endif //__nvoc_kernel_mig_manager_h_disabled
1762 
1763 NV_STATUS kmigmgrInvalidateGrGpcMapping_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, NvU32 grIdx);
1764 
1765 #ifdef __nvoc_kernel_mig_manager_h_disabled
1766 static inline NV_STATUS kmigmgrInvalidateGrGpcMapping(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, NvU32 grIdx) {
1767     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1768     return NV_ERR_NOT_SUPPORTED;
1769 }
1770 #else //__nvoc_kernel_mig_manager_h_disabled
1771 #define kmigmgrInvalidateGrGpcMapping(arg0, arg1, arg2, grIdx) kmigmgrInvalidateGrGpcMapping_IMPL(arg0, arg1, arg2, grIdx)
1772 #endif //__nvoc_kernel_mig_manager_h_disabled
1773 
1774 NV_STATUS kmigmgrInvalidateGr_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, NvU32 grIdx);
1775 
1776 #ifdef __nvoc_kernel_mig_manager_h_disabled
1777 static inline NV_STATUS kmigmgrInvalidateGr(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, NvU32 grIdx) {
1778     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1779     return NV_ERR_NOT_SUPPORTED;
1780 }
1781 #else //__nvoc_kernel_mig_manager_h_disabled
1782 #define kmigmgrInvalidateGr(arg0, arg1, arg2, grIdx) kmigmgrInvalidateGr_IMPL(arg0, arg1, arg2, grIdx)
1783 #endif //__nvoc_kernel_mig_manager_h_disabled
1784 
1785 NvU32 kmigmgrGetNextComputeSize_IMPL(NvBool bGetNextSmallest, NvU32 computeSize);
1786 
1787 #define kmigmgrGetNextComputeSize(bGetNextSmallest, computeSize) kmigmgrGetNextComputeSize_IMPL(bGetNextSmallest, computeSize)
1788 NV_STATUS kmigmgrGetSkylineFromSize_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 computeSize, const NV2080_CTRL_INTERNAL_GRMGR_SKYLINE_INFO **ppSkyline);
1789 
1790 #ifdef __nvoc_kernel_mig_manager_h_disabled
1791 static inline NV_STATUS kmigmgrGetSkylineFromSize(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 computeSize, const NV2080_CTRL_INTERNAL_GRMGR_SKYLINE_INFO **ppSkyline) {
1792     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1793     return NV_ERR_NOT_SUPPORTED;
1794 }
1795 #else //__nvoc_kernel_mig_manager_h_disabled
1796 #define kmigmgrGetSkylineFromSize(arg0, arg1, computeSize, ppSkyline) kmigmgrGetSkylineFromSize_IMPL(arg0, arg1, computeSize, ppSkyline)
1797 #endif //__nvoc_kernel_mig_manager_h_disabled
1798 
1799 NV_STATUS kmigmgrGetComputeProfileFromSize_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 computeSize, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile);
1800 
1801 #ifdef __nvoc_kernel_mig_manager_h_disabled
kmigmgrGetComputeProfileFromSize(OBJGPU * arg0,struct KernelMIGManager * arg1,NvU32 computeSize,NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE * pProfile)1802 static inline NV_STATUS kmigmgrGetComputeProfileFromSize(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 computeSize, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile) {
1803     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1804     return NV_ERR_NOT_SUPPORTED;
1805 }
1806 #else //__nvoc_kernel_mig_manager_h_disabled
1807 #define kmigmgrGetComputeProfileFromSize(arg0, arg1, computeSize, pProfile) kmigmgrGetComputeProfileFromSize_IMPL(arg0, arg1, computeSize, pProfile)
1808 #endif //__nvoc_kernel_mig_manager_h_disabled
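
/*
 * Illustrative sketch only (not generated by NVOC): walking the supported
 * compute sizes with kmigmgrGetNextComputeSize and fetching a matching
 * profile with kmigmgrGetComputeProfileFromSize. It assumes that
 * KMIGMGR_COMPUTE_SIZE_INVALID (defined earlier in this header) is returned
 * once no further size exists; the helper name exampleFindProfileAtOrBelow is
 * hypothetical and not part of the interface.
 */
#if 0
static NV_STATUS
exampleFindProfileAtOrBelow
(
    OBJGPU *pGpu,
    struct KernelMIGManager *pKernelMIGManager,
    NvU32 startingComputeSize,
    NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile
)
{
    NvU32 computeSize = startingComputeSize;

    while (computeSize != KMIGMGR_COMPUTE_SIZE_INVALID)
    {
        // A successful profile lookup ends the search.
        if (kmigmgrGetComputeProfileFromSize(pGpu, pKernelMIGManager,
                                             computeSize, pProfile) == NV_OK)
        {
            return NV_OK;
        }

        // Step to the next smaller supported compute size.
        computeSize = kmigmgrGetNextComputeSize(NV_TRUE /* bGetNextSmallest */,
                                                computeSize);
    }

    return NV_ERR_OBJECT_NOT_FOUND;
}
#endif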

NV_STATUS kmigmgrGetComputeProfileForRequest_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, NvU32 smCountRequest, NvU32 gpcCountRequest, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetComputeProfileForRequest(OBJGPU *arg0, struct KernelMIGManager *arg1, KERNEL_MIG_GPU_INSTANCE *arg2, NvU32 smCountRequest, NvU32 gpcCountRequest, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetComputeProfileForRequest(arg0, arg1, arg2, smCountRequest, gpcCountRequest, pProfile) kmigmgrGetComputeProfileForRequest_IMPL(arg0, arg1, arg2, smCountRequest, gpcCountRequest, pProfile)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetComputeProfileFromSmCount_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 smCount, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetComputeProfileFromSmCount(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 smCount, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetComputeProfileFromSmCount(arg0, arg1, smCount, pProfile) kmigmgrGetComputeProfileFromSmCount_IMPL(arg0, arg1, smCount, pProfile)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetComputeProfileFromCTSId_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 ctsId, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetComputeProfileFromCTSId(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 ctsId, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetComputeProfileFromCTSId(arg0, arg1, ctsId, pProfile) kmigmgrGetComputeProfileFromCTSId_IMPL(arg0, arg1, ctsId, pProfile)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetInvalidCTSIdMask_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 ctsId, NvU64 *pInvalidCTSIdMask);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetInvalidCTSIdMask(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 ctsId, NvU64 *pInvalidCTSIdMask) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetInvalidCTSIdMask(arg0, arg1, ctsId, pInvalidCTSIdMask) kmigmgrGetInvalidCTSIdMask_IMPL(arg0, arg1, ctsId, pInvalidCTSIdMask)
#endif //__nvoc_kernel_mig_manager_h_disabled

struct NV_RANGE kmigmgrComputeProfileSizeToCTSIdRange_IMPL(NvU32 computeSize);

#define kmigmgrComputeProfileSizeToCTSIdRange(computeSize) kmigmgrComputeProfileSizeToCTSIdRange_IMPL(computeSize)
struct NV_RANGE kmigmgrCtsIdToSpan_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 ctsId);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline struct NV_RANGE kmigmgrCtsIdToSpan(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 ctsId) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    struct NV_RANGE ret;
    portMemSet(&ret, 0, sizeof(struct NV_RANGE));
    return ret;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrCtsIdToSpan(arg0, arg1, ctsId) kmigmgrCtsIdToSpan_IMPL(arg0, arg1, ctsId)
#endif //__nvoc_kernel_mig_manager_h_disabled
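
/*
 * Illustrative sketch only (not generated by NVOC): checking whether a CTS ID
 * falls inside the CTS-ID range reserved for a given compute size, as returned
 * by kmigmgrComputeProfileSizeToCTSIdRange. The NV_RANGE field names (lo/hi,
 * treated as an inclusive range) and the helper name
 * exampleCtsIdMatchesComputeSize are assumptions made for this example.
 */
#if 0
static NvBool
exampleCtsIdMatchesComputeSize(NvU32 ctsId, NvU32 computeSize)
{
    struct NV_RANGE range = kmigmgrComputeProfileSizeToCTSIdRange(computeSize);

    // A CTS ID matches when it lies within the inclusive range for this size.
    return (ctsId >= range.lo) && (ctsId <= range.hi);
}
#endif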

NV_STATUS kmigmgrGetFreeCTSId_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 *pCtsId, NvU64 globalValidCtsMask, NvU64 globalValidGfxCtsMask, NvU64 ctsIdsInUseMask, NvU32 profileSize, NvBool bRestrictWithGfx, NvBool bGfxRequested);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetFreeCTSId(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 *pCtsId, NvU64 globalValidCtsMask, NvU64 globalValidGfxCtsMask, NvU64 ctsIdsInUseMask, NvU32 profileSize, NvBool bRestrictWithGfx, NvBool bGfxRequested) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetFreeCTSId(arg0, arg1, pCtsId, globalValidCtsMask, globalValidGfxCtsMask, ctsIdsInUseMask, profileSize, bRestrictWithGfx, bGfxRequested) kmigmgrGetFreeCTSId_IMPL(arg0, arg1, pCtsId, globalValidCtsMask, globalValidGfxCtsMask, ctsIdsInUseMask, profileSize, bRestrictWithGfx, bGfxRequested)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvU32 kmigmgrGetComputeSizeFromCTSId_IMPL(NvU32 ctsId);

#define kmigmgrGetComputeSizeFromCTSId(ctsId) kmigmgrGetComputeSizeFromCTSId_IMPL(ctsId)
NvU32 kmigmgrSmallestComputeProfileSize_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvU32 kmigmgrSmallestComputeProfileSize(OBJGPU *arg0, struct KernelMIGManager *arg1) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return 0;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrSmallestComputeProfileSize(arg0, arg1) kmigmgrSmallestComputeProfileSize_IMPL(arg0, arg1)
#endif //__nvoc_kernel_mig_manager_h_disabled

void kmigmgrSetCTSIdInUse_IMPL(KERNEL_MIG_GPU_INSTANCE *arg0, NvU32 ctsId, NvU32 grId, NvBool bInUse);

#define kmigmgrSetCTSIdInUse(arg0, ctsId, grId, bInUse) kmigmgrSetCTSIdInUse_IMPL(arg0, ctsId, grId, bInUse)
NV_STATUS kmigmgrXlateSpanStartToCTSId_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 computeSize, NvU32 spanStart, NvU32 *pCtsId);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrXlateSpanStartToCTSId(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 computeSize, NvU32 spanStart, NvU32 *pCtsId) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrXlateSpanStartToCTSId(arg0, arg1, computeSize, spanStart, pCtsId) kmigmgrXlateSpanStartToCTSId_IMPL(arg0, arg1, computeSize, spanStart, pCtsId)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetSlotBasisMask_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU64 *pMask);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetSlotBasisMask(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU64 *pMask) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetSlotBasisMask(arg0, arg1, pMask) kmigmgrGetSlotBasisMask_IMPL(arg0, arg1, pMask)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvU32 kmigmgrGetSpanStartFromCTSId_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 ctsId);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvU32 kmigmgrGetSpanStartFromCTSId(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU32 ctsId) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return 0;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetSpanStartFromCTSId(arg0, arg1, ctsId) kmigmgrGetSpanStartFromCTSId_IMPL(arg0, arg1, ctsId)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvBool kmigmgrIsCTSIdAvailable_IMPL(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU64 ctsIdValidMask, NvU64 ctsIdInUseMask, NvU32 ctsId);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvBool kmigmgrIsCTSIdAvailable(OBJGPU *arg0, struct KernelMIGManager *arg1, NvU64 ctsIdValidMask, NvU64 ctsIdInUseMask, NvU32 ctsId) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrIsCTSIdAvailable(arg0, arg1, ctsIdValidMask, ctsIdInUseMask, ctsId) kmigmgrIsCTSIdAvailable_IMPL(arg0, arg1, ctsIdValidMask, ctsIdInUseMask, ctsId)
#endif //__nvoc_kernel_mig_manager_h_disabled
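
/*
 * Illustrative sketch only (not generated by NVOC): one possible CTS-ID
 * selection flow that first tries the CTS ID corresponding to a requested span
 * start and falls back to any free ID of the same profile size before marking
 * the chosen ID as in use. The helper name exampleSelectAndReserveCtsId and
 * the exact mask/grId parameters are assumptions for the example; the real
 * selection policy lives in the _IMPL functions.
 */
#if 0
static NV_STATUS
exampleSelectAndReserveCtsId
(
    OBJGPU *pGpu,
    struct KernelMIGManager *pKernelMIGManager,
    KERNEL_MIG_GPU_INSTANCE *pKernelMIGGpuInstance,
    NvU32 profileSize,
    NvU32 spanStart,
    NvU64 validCtsMask,
    NvU64 validGfxCtsMask,
    NvU64 ctsIdsInUseMask,
    NvU32 grId,
    NvU32 *pCtsId
)
{
    // Prefer the CTS ID that maps to the caller-requested span start.
    if ((kmigmgrXlateSpanStartToCTSId(pGpu, pKernelMIGManager, profileSize,
                                      spanStart, pCtsId) == NV_OK) &&
        kmigmgrIsCTSIdAvailable(pGpu, pKernelMIGManager, validCtsMask,
                                ctsIdsInUseMask, *pCtsId))
    {
        kmigmgrSetCTSIdInUse(pKernelMIGGpuInstance, *pCtsId, grId, NV_TRUE);
        return NV_OK;
    }

    // Otherwise let the manager pick any free CTS ID of this profile size.
    NV_ASSERT_OK_OR_RETURN(
        kmigmgrGetFreeCTSId(pGpu, pKernelMIGManager, pCtsId, validCtsMask,
                            validGfxCtsMask, ctsIdsInUseMask, profileSize,
                            NV_FALSE /* bRestrictWithGfx */,
                            NV_FALSE /* bGfxRequested */));

    kmigmgrSetCTSIdInUse(pKernelMIGGpuInstance, *pCtsId, grId, NV_TRUE);
    return NV_OK;
}
#endif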

NV_STATUS kmigmgrUpdateCiConfigForVgpu_IMPL(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager, NvU32 execPartCount, NvU32 *pExecPartId, NvU32 gfid, NvBool bDelete);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrUpdateCiConfigForVgpu(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager, NvU32 execPartCount, NvU32 *pExecPartId, NvU32 gfid, NvBool bDelete) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrUpdateCiConfigForVgpu(pGpu, pKernelMIGManager, execPartCount, pExecPartId, gfid, bDelete) kmigmgrUpdateCiConfigForVgpu_IMPL(pGpu, pKernelMIGManager, execPartCount, pExecPartId, gfid, bDelete)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvBool kmigmgrIsPartitionVeidAllocationContiguous_IMPL(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager, KERNEL_MIG_GPU_INSTANCE *arg0);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvBool kmigmgrIsPartitionVeidAllocationContiguous(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager, KERNEL_MIG_GPU_INSTANCE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrIsPartitionVeidAllocationContiguous(pGpu, pKernelMIGManager, arg0) kmigmgrIsPartitionVeidAllocationContiguous_IMPL(pGpu, pKernelMIGManager, arg0)
#endif //__nvoc_kernel_mig_manager_h_disabled
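
/*
 * Illustrative sketch only (not generated by NVOC): checking every valid GPU
 * instance for a contiguous VEID allocation using the
 * FOR_EACH_VALID_GPU_INSTANCE iterator declared earlier in this header. The
 * helper name exampleAllInstancesHaveContiguousVeids is hypothetical.
 */
#if 0
static NvBool
exampleAllInstancesHaveContiguousVeids(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager)
{
    KERNEL_MIG_GPU_INSTANCE *pKernelMIGGpuInstance = NULL;
    NvBool bAllContiguous = NV_TRUE;

    FOR_EACH_VALID_GPU_INSTANCE(pGpu, pKernelMIGManager, pKernelMIGGpuInstance)
        // Any instance with a fragmented VEID allocation fails the check.
        if (!kmigmgrIsPartitionVeidAllocationContiguous(pGpu, pKernelMIGManager,
                                                        pKernelMIGGpuInstance))
        {
            bAllContiguous = NV_FALSE;
        }
    FOR_EACH_VALID_GPU_INSTANCE_END();

    return bAllContiguous;
}
#endif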

#undef PRIVATE_FIELD


#endif // KERNEL_MIG_MANAGER_H


#ifdef __cplusplus
} // extern "C"
#endif

#endif // _G_KERNEL_MIG_MANAGER_NVOC_H_