1 
2 #ifndef _G_KERNEL_MIG_MANAGER_NVOC_H_
3 #define _G_KERNEL_MIG_MANAGER_NVOC_H_
4 #include "nvoc/runtime.h"
5 
6 // Version of generated metadata structures
7 #ifdef NVOC_METADATA_VERSION
8 #undef NVOC_METADATA_VERSION
9 #endif
10 #define NVOC_METADATA_VERSION 0
11 
12 #ifdef __cplusplus
13 extern "C" {
14 #endif
15 
16 /*
17  * SPDX-FileCopyrightText: Copyright (c) 2021-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
18  * SPDX-License-Identifier: MIT
19  *
20  * Permission is hereby granted, free of charge, to any person obtaining a
21  * copy of this software and associated documentation files (the "Software"),
22  * to deal in the Software without restriction, including without limitation
23  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
24  * and/or sell copies of the Software, and to permit persons to whom the
25  * Software is furnished to do so, subject to the following conditions:
26  *
27  * The above copyright notice and this permission notice shall be included in
28  * all copies or substantial portions of the Software.
29  *
30  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
31  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
32  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
33  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
34  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
35  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
36  * DEALINGS IN THE SOFTWARE.
37  */
38 
39 #pragma once
40 #include "g_kernel_mig_manager_nvoc.h"
41 
42 #ifndef KERNEL_MIG_MANAGER_H
43 #define KERNEL_MIG_MANAGER_H
44 
45 #include "core/core.h"
46 #include "gpu/eng_state.h"
47 #include "gpu/gpu.h"
48 #include "gpu_mgr/gpu_mgr.h"
49 #include "kernel/gpu/gr/kernel_graphics_manager.h"
50 #include "kernel/gpu_mgr/gpu_mgr.h"
51 #include "kernel/gpu/mmu/kern_gmmu.h"
52 #include "kernel/gpu/nvbitmask.h"
53 
54 #include "ctrl/ctrlc637.h"
55 
56 typedef struct KERNEL_MIG_GPU_INSTANCE KERNEL_MIG_GPU_INSTANCE;
57 
// Forward declarations of opaque types
59 typedef struct KERNEL_MIG_MANAGER_PRIVATE_DATA KERNEL_MIG_MANAGER_PRIVATE_DATA;
60 typedef struct MIG_GPU_INSTANCE MIG_GPU_INSTANCE;
61 
62 #define  IS_MIG_ENABLED(pGpu) (((pGpu) != NULL) && (GPU_GET_KERNEL_MIG_MANAGER(pGpu) != NULL) && \
63                                kmigmgrIsMIGEnabled((pGpu), GPU_GET_KERNEL_MIG_MANAGER(pGpu)))
64 #define  IS_MIG_IN_USE(pGpu)  (((pGpu) != NULL) && (GPU_GET_KERNEL_MIG_MANAGER(pGpu) != NULL) && \
65                                kmigmgrIsMIGGpuInstancingEnabled((pGpu), GPU_GET_KERNEL_MIG_MANAGER(pGpu)))
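
/*
 * Illustrative usage sketch (not part of the generated API; the surrounding
 * control flow is hypothetical). Both macros are NULL-safe, so callers
 * typically guard MIG-specific paths with them directly:
 *
 *     if (IS_MIG_IN_USE(pGpu))
 *     {
 *         // GPU instancing is active; take the per-instance path.
 *     }
 *     else if (IS_MIG_ENABLED(pGpu))
 *     {
 *         // MIG mode is enabled but no GPU instances are in use yet.
 *     }
 */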
66 
67 #define FOR_EACH_VALID_GPU_INSTANCE(pGpu, pKernelMIGManager, pLocal)                 \
68     {                                                                                \
69         NvU32 i;                                                                     \
70         for (i = 0; i < KMIGMGR_MAX_GPU_INSTANCES; ++i)                              \
71         {                                                                            \
72             (pLocal) = kmigmgrGetMIGGpuInstanceSlot((pGpu), (pKernelMIGManager), i); \
73             if (((pLocal) == NULL) || !(pLocal)->bValid)                             \
74                 continue;
75 
76 #define FOR_EACH_VALID_GPU_INSTANCE_END()                                           \
77         }                                                                           \
78     }
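
/*
 * Illustrative usage sketch (assumption, not generated code): the iterator
 * macro above opens a nested block, so the _END() macro is required to close
 * it. Invalid slots are skipped automatically by the iterator.
 *
 *     KERNEL_MIG_GPU_INSTANCE *pKernelMIGGpuInstance;
 *     FOR_EACH_VALID_GPU_INSTANCE(pGpu, pKernelMIGManager, pKernelMIGGpuInstance)
 *     {
 *         // pKernelMIGGpuInstance refers to a valid GPU instance here.
 *     }
 *     FOR_EACH_VALID_GPU_INSTANCE_END();
 */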
79 
80 #define KMIGMGR_SWIZZID_INVALID             0xFFFFFFFF
81 #define KMIGMGR_MAX_GPU_SWIZZID             15
82 #define KMIGMGR_MAX_GPU_INSTANCES           GPUMGR_MAX_GPU_INSTANCES
83 #define KMIGMGR_MAX_COMPUTE_INSTANCES       GPUMGR_MAX_COMPUTE_INSTANCES
84 #define KMIGMGR_COMPUTE_INSTANCE_ID_INVALID 0xFFFFFFFF
85 #define KMIGMGR_COMPUTE_SIZE_INVALID        NV2080_CTRL_GPU_PARTITION_FLAG_COMPUTE_SIZE__SIZE
86 #define KMIGMGR_MAX_GPU_CTSID               21
87 #define KMIGMGR_CTSID_INVALID               0xFFFFFFFFUL
88 #define KMIGMGR_SPAN_OFFSET_INVALID         KMIGMGR_CTSID_INVALID
89 
90 #define KMIGMGR_INSTANCE_ATTRIBUTION_ID_INVALID            \
91     ((KMIGMGR_MAX_GPU_SWIZZID * KMIGMGR_MAX_GPU_SWIZZID) + \
92      KMIGMGR_MAX_COMPUTE_INSTANCES)
93 
94 MAKE_BITVECTOR(GFID_BIT_VECTOR, VMMU_MAX_GFID);
95 
96 typedef struct KMIGMGR_INSTANCE_HANDLES
97 {
98     /*!
99      * Client handle to make calls into this instance
100      */
101     NvHandle hClient;
102 
103     /*!
104      * Device handle to make calls into this instance
105      */
106     NvHandle hDevice;
107 
108     /*!
109      * Subdevice handle to make calls into this instance
110      */
111     NvHandle hSubdevice;
112 
113     /*!
114      * Subscription handle to make calls into this instance
115      */
116     NvHandle hSubscription;
117     /*!
118      * Internal ThirdPartyP2P handle for tracking persistent mappings under MIG.
119      */
120     NvHandle hThirdPartyP2P;
121 } KMIGMGR_INSTANCE_HANDLES;
122 
123 typedef struct MIG_RESOURCE_ALLOCATION
124 {
    /*!
     * Logical GPC-IDs associated with this instance.
     * The current assumption is that GPCs within an instance are always
     * physically contiguous, so a start/count pair could be used instead to
     * save some memory; however, that would enforce a contiguity restriction
     * which may not hold in the future.
     */
132     NvU32 gpcIds[KGRMGR_MAX_GPC];
133 
134     /*!
135      * Number of GPCs associated with this instance
136      */
137     NvU32 gpcCount;
138 
139     /*!
     * Number of GFX GPCs associated with this instance. This is a subset of the GPCs counted in gpcCount.
141      */
142     NvU32 gfxGpcCount;
143 
144     /*!
145      * VEID start offset for this instance
146      */
147     NvU32 veidOffset;
148 
149     /*!
150      * Number of VEIDs associated with this instance
151      */
152     NvU32 veidCount;
153 
154     /*!
155      * Bitvector of partitionable engines associated with this instance.
156      */
157     ENGTYPE_BIT_VECTOR engines;
158 
159     /*!
160      * Bitvector of local engine IDs associated with this instance.
161      */
162     ENGTYPE_BIT_VECTOR localEngines;
163 
164     /*!
165      * Virtualized GPC Count
166     */
167     NvU32 virtualGpcCount;
168 
169     /*!
170      * Number of SMs
171      */
172     NvU32 smCount;
173 } MIG_RESOURCE_ALLOCATION;
174 
175 typedef struct MIG_COMPUTE_INSTANCE
176 {
177     /*!
178      * Resource allocated for this instance
179      */
180     MIG_RESOURCE_ALLOCATION resourceAllocation;
181 
182     /*!
183      * States that this is a valid compute instance
184      */
185     NvBool bValid;
186 
187     /*!
188      * Flags indicating which engines (if any) are shared across multiple compute
189      * instances. Bit positions in this flag correspond to
190      * NVC637_CTRL_EXEC_PARTITIONS_SHARED_FLAG_*
191      */
192     NvU32 sharedEngFlag;
193 
194     /*!
195      * Compute instance ID
196      */
197     NvU32 id;
198 
199     /*!
200      * Shared object to track instance reference count
201      */
202     struct RsShared *pShare;
203 
204     /*!
205      * Opaque pointer to os-specific capabilities
206      */
207     OS_RM_CAPS *pOsRmCaps;
208 
209     /*!
210      * Compute instance UUID
211      */
212     NvUuid uuid;
213 
214     /*!
215      * Handles for RPC's into this instance
216      */
217     KMIGMGR_INSTANCE_HANDLES instanceHandles;
218 
219     /*!
220      * Span start of this compute instance indicating the "position" of the
221      * instance within a GPU instance's view. For non-CTS ID enabled chips,
222      * this corresponds to the start of a VEID segment. For CTS-ID chips, this
223      * corresponds to the offset from the first CTS ID of a given profile size.
224      */
225     NvU32 spanStart;
226 
227     /*!
     * Compute profile size associated with this MIG compute instance.
     * This is used to associate the instance with a given compute profile
     * even when a CTS ID has not been assigned.
231      */
232     NvU32 computeSize;
233 } MIG_COMPUTE_INSTANCE;
234 
235 /*!
236  * @brief Situational params for compute instance creation API
237  *
238  * This structure comes with two specializations:
239  *  TYPE_REQUEST
240  *      Parameter refers to request data passed in via EXEC_PARTITIONS_CREATE ctrl
241  *      call. All resources claimed by new compute instance are chosen via allocator,
242  *      and the API may create multiple compute instances.
243  *  TYPE_RESTORE
244  *      Parameter refers to saved compute instance data. Most resources claimed by new
245  *      compute instance are determined by the save data, and others are claimed via
246  *      allocator.
 *  requestFlags
 *      TYPE_REQUEST_WITH_IDS
 *          Parameter refers to request data passed in via EXEC_PARTITIONS_CREATE ctrl
 *          call. All resources claimed by the new instance are chosen via allocator
 *          unless the _AT_SPAN flag is also specified.
 *          RM also tries to allocate the instance with the compute instance ID
 *          requested by the user. This flag is only supported on vGPU-enabled RM
 *          builds and will be removed when the vGPU plugin implements virtualized
 *          compute instance ID support. (bug 2938187)
 *      TYPE_REQUEST_AT_SPAN
 *          Parameter refers to request data passed in via EXEC_PARTITIONS_CREATE ctrl
 *          call. All resources claimed by the new instance are attempted to be
 *          claimed by the RM allocator starting at the specified resource span.
260  */
261 typedef struct KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS
262 {
263     enum
264     {
265         KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS_TYPE_REQUEST,
266         KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS_TYPE_RESTORE
267     } type;
268     union
269     {
270         struct
271         {
272             NvU32 count;
273             NVC637_CTRL_EXEC_PARTITIONS_INFO *pReqComputeInstanceInfo;
274             NvU32 requestFlags;
275         } request;
276         struct
277         {
278             struct GPUMGR_SAVE_COMPUTE_INSTANCE *pComputeInstanceSave;
279         } restore;
280     } inst;
281 } KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS;
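
/*
 * Minimal sketch of how a TYPE_REQUEST parameter might be populated before
 * being passed (by value) to kmigmgrCreateComputeInstances(). This is an
 * illustration only; ciInfo and ciIdOut are hypothetical local variables.
 *
 *     NVC637_CTRL_EXEC_PARTITIONS_INFO ciInfo = { 0 };
 *     NvU32 ciIdOut;
 *     KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS params =
 *     {
 *         .type = KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS_TYPE_REQUEST,
 *         .inst.request.count = 1,
 *         .inst.request.pReqComputeInstanceInfo = &ciInfo,
 *         .inst.request.requestFlags = 0
 *     };
 *     // Sketch of a call site:
 *     // kmigmgrCreateComputeInstances(pGpu, pKernelMIGManager,
 *     //     pKernelMIGGpuInstance, NV_FALSE, params, &ciIdOut, NV_FALSE);
 */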
282 
283 typedef struct KMIGMGR_CONFIGURE_INSTANCE_PARAMS
284 {
285     NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE profile;
286     NvU32 ctsId;
287     NvU32 veidSpanStart;
288 } KMIGMGR_CONFIGURE_INSTANCE_REQUEST;
289 
290 typedef struct KERNEL_MIG_GPU_INSTANCE
291 {
292     /*! Structure containing GPU instance profile */
293     const NV2080_CTRL_INTERNAL_MIGMGR_PROFILE_INFO *pProfile;
294 
295     /*!
296      * Resource allocated for this instance
297      */
298     MIG_RESOURCE_ALLOCATION resourceAllocation;
299 
300     /*!
301      * Mask of physical engines in this GPU instance which are assigned exclusively
302      * to some compute instance. Indexed via RM_ENGINE_TYPE_*
303      */
304     ENGTYPE_BIT_VECTOR exclusiveEngMask;
305 
306     /*!
307      * Mask of physical engines in this GPU instance which are assigned to at least
308      * one compute instance, but may be assigned to others.
309      * Indexed via RM_ENGINE_TYPE_*
310      */
311     ENGTYPE_BIT_VECTOR sharedEngMask;
312 
313     /*!
314      * compute instance info.
315      */
316     MIG_COMPUTE_INSTANCE MIGComputeInstance[KMIGMGR_MAX_COMPUTE_INSTANCES];
317 
318     /*!
     * Bit vector of GFIDs associated with this instance.
320      */
321     GFID_BIT_VECTOR gfidMap;
322 
323     /*!
324      * GPU instance ID
325      */
326     NvU32 swizzId;
327 
328     /*!
329      * Validated user-provided instance flags - NV2080_CTRL_GPU_PARTITION_FLAG_*
330      */
331     NvU32 partitionFlag;
332 
333     /*!
334      * Memory handle associated with partitioned memory
335      */
336     NvHandle hMemory;
337 
338     /*!
339      * Shared object to track instance reference count
340      */
341     struct RsShared *pShare;
342 
343     /*!
344      * Heap used for managing instance's memory
345      */
346     struct Heap *pMemoryPartitionHeap;
347 
348     /*!
349      * States that this instance is valid
350      */
351     NvBool bValid;
352 
353     /*!
354      * Indicates that the GPU instance scrubber is initialized and should be
355      * accounted for / ignored in the instance refcount when determining
     * whether or not an instance can be destroyed.
357      */
358     NvBool bMemoryPartitionScrubberInitialized;
359 
360     /*!
361      * Physical memory address range for this instance.
362      */
363     NV_RANGE memRange;
364 
365     /*!
366      * Memory pool for client page table allocations
367      */
368     RM_POOL_ALLOC_MEM_RESERVE_INFO *pPageTableMemPool;
369 
370     /*!
371      * Physical MIG GPU Instance info for this instance
372      */
373     MIG_GPU_INSTANCE *pMIGGpuInstance;
374 
375     /*!
376      * Mask of runlistIds for engines that belong to this instance
377      */
378     NvU64 runlistIdMask;
379 
380     /*!
381      * Opaque pointer to os-specific capabilities
382      */
383     OS_RM_CAPS *pOsRmCaps;
384 
385     /*!
386      * Handles for RPC's into this instance
387      */
388     KMIGMGR_INSTANCE_HANDLES instanceHandles;
389 
390     /*!
391      * Mask of CTS IDs in use
392      */
393     NvU64 ctsIdsInUseMask;
394 
395     /*!
396      * GR to CTS ID mapping
397      */
398     NvU32 grCtsIdMap[KMIGMGR_MAX_COMPUTE_INSTANCES];
399 
400     /*!
401      * Mask tracking which compute spans are currently in-use
402      */
403     NvU32 spanInUseMask;
404 
405     /*!
406      * GPU Instance UUID
407      */
408     NvUuid uuid;
409 } KERNEL_MIG_GPU_INSTANCE;
410 
411 /*!
412  * @brief Situational params for GPU instance creation API
413  *
414  * This structure comes with two specializations:
415  *  TYPE_REQUEST
416  *      Parameter refers to request data passed in via SET_PARTITIONS ctrl
417  *      call. All resources claimed by new GPU instance are chosen via allocator.
418  *  TYPE_RESTORE
419  *      Parameter refers to saved GPU instance data. Most resources claimed by new
420  *      GPU instance are determined by the save data, and others are claimed via
421  *      allocator.
422  */
423 typedef struct KMIGMGR_CREATE_GPU_INSTANCE_PARAMS
424 {
425     enum
426     {
427         KMIGMGR_CREATE_GPU_INSTANCE_PARAMS_TYPE_REQUEST,
428         KMIGMGR_CREATE_GPU_INSTANCE_PARAMS_TYPE_RESTORE
429     } type;
430     union
431     {
432         struct
433         {
434             NvU32    partitionFlag;
435             NV_RANGE placement;
436             NvBool   bUsePlacement;
437         } request;
438         struct
439         {
440             struct GPUMGR_SAVE_GPU_INSTANCE *pGPUInstanceSave;
441         } restore;
442     } inst;
443 } KMIGMGR_CREATE_GPU_INSTANCE_PARAMS;
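
/*
 * Minimal sketch of a TYPE_REQUEST GPU instance parameter (illustration only;
 * partitionFlag is a hypothetical variable holding validated
 * NV2080_CTRL_GPU_PARTITION_FLAG_* values).
 *
 *     KMIGMGR_CREATE_GPU_INSTANCE_PARAMS params =
 *     {
 *         .type = KMIGMGR_CREATE_GPU_INSTANCE_PARAMS_TYPE_REQUEST,
 *         .inst.request.partitionFlag = partitionFlag,
 *         .inst.request.bUsePlacement = NV_FALSE   // let the allocator choose placement
 *     };
 */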
444 
445 /*!
446  * @brief Packed pointer to a GPU instance/compute instance combo
447  * @note  Having NULL pKernelMIGGpuInstance and non-NULL pMIGComputeInstance is never expected
448  */
449 struct MIG_INSTANCE_REF
450 {
451     KERNEL_MIG_GPU_INSTANCE *pKernelMIGGpuInstance;
452     MIG_COMPUTE_INSTANCE *pMIGComputeInstance;
453 };
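
/*
 * Illustrative sketch (assumption): a reference with a non-NULL GPU instance
 * but a NULL compute instance refers to the GPU instance as a whole.
 *
 *     struct MIG_INSTANCE_REF ref;
 *     // ... populated by a lookup routine ...
 *     if (ref.pKernelMIGGpuInstance != NULL)
 *     {
 *         NvU32 swizzId = ref.pKernelMIGGpuInstance->swizzId;
 *         if (ref.pMIGComputeInstance != NULL)
 *         {
 *             NvU32 ciId = ref.pMIGComputeInstance->id;
 *         }
 *     }
 */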
454 
455 typedef struct KERNEL_MIG_MANAGER_STATIC_INFO
456 {
457     /*! @ref NV2080_CTRL_CMD_INTERNAL_STATIC_MIGMGR_GET_PROFILES */
458     NV2080_CTRL_INTERNAL_STATIC_MIGMGR_GET_PROFILES_PARAMS *pProfiles;
459 
460     /*! Partitionable engines which are present on this GPU. */
461     ENGTYPE_BIT_VECTOR partitionableEngines;
462 
463     /*! Per swizzId FB memory page ranges */
464     NV2080_CTRL_INTERNAL_STATIC_MIGMGR_GET_SWIZZ_ID_FB_MEM_PAGE_RANGES_PARAMS *pSwizzIdFbMemPageRanges;
465 
466     /*! Compute instance profiles */
467     NV2080_CTRL_INTERNAL_STATIC_MIGMGR_GET_COMPUTE_PROFILES_PARAMS *pCIProfiles;
468 
469     /*! Skyline info used to determine GPU and compute instance resources available */
470     NV2080_CTRL_INTERNAL_STATIC_GRMGR_GET_SKYLINE_INFO_PARAMS *pSkylineInfo;
471 } KERNEL_MIG_MANAGER_STATIC_INFO;
472 
473 /*!
 * KernelMIGManager provides kernel-side services for managing MIG instances.
 * It also maintains state relating to GPU partitioning.
476  */
477 
478 // Private field names are wrapped in PRIVATE_FIELD, which does nothing for
479 // the matching C source file, but causes diagnostics to be issued if another
480 // source file references the field.
481 #ifdef NVOC_KERNEL_MIG_MANAGER_H_PRIVATE_ACCESS_ALLOWED
482 #define PRIVATE_FIELD(x) x
483 #else
484 #define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
485 #endif
486 
487 
488 struct KernelMIGManager {
489 
490     // Metadata
491     const struct NVOC_RTTI *__nvoc_rtti;
492 
493     // Parent (i.e. superclass or base class) object pointers
494     struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
495 
496     // Ancestor object pointers for `staticCast` feature
497     struct Object *__nvoc_pbase_Object;    // obj super^2
498     struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;    // engstate super
499     struct KernelMIGManager *__nvoc_pbase_KernelMIGManager;    // kmigmgr
500 
501     // Vtable with 35 per-object function pointers
502     NV_STATUS (*__kmigmgrConstructEngine__)(OBJGPU *, struct KernelMIGManager * /*this*/, ENGDESCRIPTOR);  // virtual override (engstate) base (engstate)
503     NV_STATUS (*__kmigmgrStateInitLocked__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // virtual override (engstate) base (engstate)
504     NV_STATUS (*__kmigmgrStateUnload__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // virtual override (engstate) base (engstate)
505     NV_STATUS (*__kmigmgrLoadStaticInfo__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // halified (2 hals)
506     NV_STATUS (*__kmigmgrSetStaticInfo__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // halified (2 hals)
507     void (*__kmigmgrClearStaticInfo__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // halified (2 hals)
508     NV_STATUS (*__kmigmgrSaveToPersistenceFromVgpuStaticInfo__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // halified (2 hals)
509     NV_STATUS (*__kmigmgrDeleteGPUInstanceRunlists__)(OBJGPU *, struct KernelMIGManager * /*this*/, KERNEL_MIG_GPU_INSTANCE *);  // halified (2 hals)
510     NV_STATUS (*__kmigmgrCreateGPUInstanceRunlists__)(OBJGPU *, struct KernelMIGManager * /*this*/, KERNEL_MIG_GPU_INSTANCE *);  // halified (2 hals)
511     NV_STATUS (*__kmigmgrRestoreFromPersistence__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // halified (2 hals)
512     NV_STATUS (*__kmigmgrCreateGPUInstanceCheck__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvBool);  // halified (2 hals)
513     NvBool (*__kmigmgrIsDevinitMIGBitSet__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // halified (3 hals)
514     NvBool (*__kmigmgrIsGPUInstanceCombinationValid__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // halified (3 hals)
515     NvBool (*__kmigmgrIsGPUInstanceFlagValid__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // halified (4 hals)
516     NV_STATUS (*__kmigmgrGenerateComputeInstanceUuid__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32, NvU32, NvUuid *);  // halified (2 hals)
517     NV_STATUS (*__kmigmgrGenerateGPUInstanceUuid__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32, NvUuid *);  // halified (2 hals)
518     NV_STATUS (*__kmigmgrCreateComputeInstances__)(OBJGPU *, struct KernelMIGManager * /*this*/, KERNEL_MIG_GPU_INSTANCE *, NvBool, KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS, NvU32 *, NvBool);  // halified (2 hals)
519     NvBool (*__kmigmgrIsMemoryPartitioningRequested__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // halified (2 hals)
520     NvBool (*__kmigmgrIsMemoryPartitioningNeeded__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // halified (2 hals)
521     struct NV_RANGE (*__kmigmgrMemSizeFlagToSwizzIdRange__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // halified (3 hals)
522     struct NV_RANGE (*__kmigmgrSwizzIdToSpan__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // halified (2 hals)
523     NV_STATUS (*__kmigmgrSetMIGState__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvBool, NvBool, NvBool);  // halified (2 hals)
524     NvBool (*__kmigmgrIsCTSAlignmentRequired__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // halified (3 hals)
525     NV_STATUS (*__kmigmgrRestoreFromBootConfig__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // halified (2 hals) body
526     void (*__kmigmgrInitMissing__)(struct OBJGPU *, struct KernelMIGManager * /*this*/);  // virtual inherited (engstate) base (engstate)
527     NV_STATUS (*__kmigmgrStatePreInitLocked__)(struct OBJGPU *, struct KernelMIGManager * /*this*/);  // virtual inherited (engstate) base (engstate)
528     NV_STATUS (*__kmigmgrStatePreInitUnlocked__)(struct OBJGPU *, struct KernelMIGManager * /*this*/);  // virtual inherited (engstate) base (engstate)
529     NV_STATUS (*__kmigmgrStateInitUnlocked__)(struct OBJGPU *, struct KernelMIGManager * /*this*/);  // virtual inherited (engstate) base (engstate)
530     NV_STATUS (*__kmigmgrStatePreLoad__)(struct OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
531     NV_STATUS (*__kmigmgrStateLoad__)(struct OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
532     NV_STATUS (*__kmigmgrStatePostLoad__)(struct OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
533     NV_STATUS (*__kmigmgrStatePreUnload__)(struct OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
534     NV_STATUS (*__kmigmgrStatePostUnload__)(struct OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
535     void (*__kmigmgrStateDestroy__)(struct OBJGPU *, struct KernelMIGManager * /*this*/);  // virtual inherited (engstate) base (engstate)
536     NvBool (*__kmigmgrIsPresent__)(struct OBJGPU *, struct KernelMIGManager * /*this*/);  // virtual inherited (engstate) base (engstate)
537 
538     // Data members
539     NvBool PRIVATE_FIELD(bIsA100ReducedConfig);
540     KERNEL_MIG_MANAGER_PRIVATE_DATA *PRIVATE_FIELD(pPrivate);
541     KERNEL_MIG_GPU_INSTANCE PRIVATE_FIELD(kernelMIGGpuInstance)[8];
542     NvBool PRIVATE_FIELD(bMIGEnabled);
543     NvU64 PRIVATE_FIELD(swizzIdInUseMask);
544     NvBool PRIVATE_FIELD(bRestoreWatchdog);
545     NvBool PRIVATE_FIELD(bReenableWatchdog);
546     union ENGTYPE_BIT_VECTOR PRIVATE_FIELD(partitionableEnginesInUse);
547     NvBool PRIVATE_FIELD(bDeviceProfilingInUse);
548     NvBool PRIVATE_FIELD(bMIGAutoOnlineEnabled);
549     NvBool PRIVATE_FIELD(bBootConfigSupported);
550     NvBool PRIVATE_FIELD(bAutoUpdateBootConfig);
551     NvBool PRIVATE_FIELD(bGlobalBootConfigUsed);
552     NvU64 PRIVATE_FIELD(validGlobalCTSIdMask);
553     NvU64 PRIVATE_FIELD(validGlobalGfxCTSIdMask);
554 };
555 
556 
557 struct KernelMIGManager_PRIVATE {
558 
559     // Metadata
560     const struct NVOC_RTTI *__nvoc_rtti;
561 
562     // Parent (i.e. superclass or base class) object pointers
563     struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
564 
565     // Ancestor object pointers for `staticCast` feature
566     struct Object *__nvoc_pbase_Object;    // obj super^2
567     struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;    // engstate super
568     struct KernelMIGManager *__nvoc_pbase_KernelMIGManager;    // kmigmgr
569 
570     // Vtable with 35 per-object function pointers
571     NV_STATUS (*__kmigmgrConstructEngine__)(OBJGPU *, struct KernelMIGManager * /*this*/, ENGDESCRIPTOR);  // virtual override (engstate) base (engstate)
572     NV_STATUS (*__kmigmgrStateInitLocked__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // virtual override (engstate) base (engstate)
573     NV_STATUS (*__kmigmgrStateUnload__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // virtual override (engstate) base (engstate)
574     NV_STATUS (*__kmigmgrLoadStaticInfo__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // halified (2 hals)
575     NV_STATUS (*__kmigmgrSetStaticInfo__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // halified (2 hals)
576     void (*__kmigmgrClearStaticInfo__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // halified (2 hals)
577     NV_STATUS (*__kmigmgrSaveToPersistenceFromVgpuStaticInfo__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // halified (2 hals)
578     NV_STATUS (*__kmigmgrDeleteGPUInstanceRunlists__)(OBJGPU *, struct KernelMIGManager * /*this*/, KERNEL_MIG_GPU_INSTANCE *);  // halified (2 hals)
579     NV_STATUS (*__kmigmgrCreateGPUInstanceRunlists__)(OBJGPU *, struct KernelMIGManager * /*this*/, KERNEL_MIG_GPU_INSTANCE *);  // halified (2 hals)
580     NV_STATUS (*__kmigmgrRestoreFromPersistence__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // halified (2 hals)
581     NV_STATUS (*__kmigmgrCreateGPUInstanceCheck__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvBool);  // halified (2 hals)
582     NvBool (*__kmigmgrIsDevinitMIGBitSet__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // halified (3 hals)
583     NvBool (*__kmigmgrIsGPUInstanceCombinationValid__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // halified (3 hals)
584     NvBool (*__kmigmgrIsGPUInstanceFlagValid__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // halified (4 hals)
585     NV_STATUS (*__kmigmgrGenerateComputeInstanceUuid__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32, NvU32, NvUuid *);  // halified (2 hals)
586     NV_STATUS (*__kmigmgrGenerateGPUInstanceUuid__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32, NvUuid *);  // halified (2 hals)
587     NV_STATUS (*__kmigmgrCreateComputeInstances__)(OBJGPU *, struct KernelMIGManager * /*this*/, KERNEL_MIG_GPU_INSTANCE *, NvBool, KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS, NvU32 *, NvBool);  // halified (2 hals)
588     NvBool (*__kmigmgrIsMemoryPartitioningRequested__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // halified (2 hals)
589     NvBool (*__kmigmgrIsMemoryPartitioningNeeded__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // halified (2 hals)
590     struct NV_RANGE (*__kmigmgrMemSizeFlagToSwizzIdRange__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // halified (3 hals)
591     struct NV_RANGE (*__kmigmgrSwizzIdToSpan__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // halified (2 hals)
592     NV_STATUS (*__kmigmgrSetMIGState__)(OBJGPU *, struct KernelMIGManager * /*this*/, NvBool, NvBool, NvBool);  // halified (2 hals)
593     NvBool (*__kmigmgrIsCTSAlignmentRequired__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // halified (3 hals)
594     NV_STATUS (*__kmigmgrRestoreFromBootConfig__)(OBJGPU *, struct KernelMIGManager * /*this*/);  // halified (2 hals) body
595     void (*__kmigmgrInitMissing__)(struct OBJGPU *, struct KernelMIGManager * /*this*/);  // virtual inherited (engstate) base (engstate)
596     NV_STATUS (*__kmigmgrStatePreInitLocked__)(struct OBJGPU *, struct KernelMIGManager * /*this*/);  // virtual inherited (engstate) base (engstate)
597     NV_STATUS (*__kmigmgrStatePreInitUnlocked__)(struct OBJGPU *, struct KernelMIGManager * /*this*/);  // virtual inherited (engstate) base (engstate)
598     NV_STATUS (*__kmigmgrStateInitUnlocked__)(struct OBJGPU *, struct KernelMIGManager * /*this*/);  // virtual inherited (engstate) base (engstate)
599     NV_STATUS (*__kmigmgrStatePreLoad__)(struct OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
600     NV_STATUS (*__kmigmgrStateLoad__)(struct OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
601     NV_STATUS (*__kmigmgrStatePostLoad__)(struct OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
602     NV_STATUS (*__kmigmgrStatePreUnload__)(struct OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
603     NV_STATUS (*__kmigmgrStatePostUnload__)(struct OBJGPU *, struct KernelMIGManager * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
604     void (*__kmigmgrStateDestroy__)(struct OBJGPU *, struct KernelMIGManager * /*this*/);  // virtual inherited (engstate) base (engstate)
605     NvBool (*__kmigmgrIsPresent__)(struct OBJGPU *, struct KernelMIGManager * /*this*/);  // virtual inherited (engstate) base (engstate)
606 
607     // Data members
608     NvBool bIsA100ReducedConfig;
609     KERNEL_MIG_MANAGER_PRIVATE_DATA *pPrivate;
610     KERNEL_MIG_GPU_INSTANCE kernelMIGGpuInstance[8];
611     NvBool bMIGEnabled;
612     NvU64 swizzIdInUseMask;
613     NvBool bRestoreWatchdog;
614     NvBool bReenableWatchdog;
615     union ENGTYPE_BIT_VECTOR partitionableEnginesInUse;
616     NvBool bDeviceProfilingInUse;
617     NvBool bMIGAutoOnlineEnabled;
618     NvBool bBootConfigSupported;
619     NvBool bAutoUpdateBootConfig;
620     NvBool bGlobalBootConfigUsed;
621     NvU64 validGlobalCTSIdMask;
622     NvU64 validGlobalGfxCTSIdMask;
623 };
624 
625 #ifndef __NVOC_CLASS_KernelMIGManager_TYPEDEF__
626 #define __NVOC_CLASS_KernelMIGManager_TYPEDEF__
627 typedef struct KernelMIGManager KernelMIGManager;
628 #endif /* __NVOC_CLASS_KernelMIGManager_TYPEDEF__ */
629 
630 #ifndef __nvoc_class_id_KernelMIGManager
631 #define __nvoc_class_id_KernelMIGManager 0x01c1bf
632 #endif /* __nvoc_class_id_KernelMIGManager */
633 
634 // Casting support
635 extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelMIGManager;
636 
637 #define __staticCast_KernelMIGManager(pThis) \
638     ((pThis)->__nvoc_pbase_KernelMIGManager)
639 
640 #ifdef __nvoc_kernel_mig_manager_h_disabled
641 #define __dynamicCast_KernelMIGManager(pThis) ((KernelMIGManager*)NULL)
642 #else //__nvoc_kernel_mig_manager_h_disabled
643 #define __dynamicCast_KernelMIGManager(pThis) \
644     ((KernelMIGManager*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelMIGManager)))
645 #endif //__nvoc_kernel_mig_manager_h_disabled
646 
647 // Property macros
648 #define PDB_PROP_KMIGMGR_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
649 #define PDB_PROP_KMIGMGR_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING
650 
651 NV_STATUS __nvoc_objCreateDynamic_KernelMIGManager(KernelMIGManager**, Dynamic*, NvU32, va_list);
652 
653 NV_STATUS __nvoc_objCreate_KernelMIGManager(KernelMIGManager**, Dynamic*, NvU32);
654 #define __objCreate_KernelMIGManager(ppNewObj, pParent, createFlags) \
655     __nvoc_objCreate_KernelMIGManager((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
656 
657 
658 // Wrapper macros
659 #define kmigmgrConstructEngine_FNPTR(arg_this) arg_this->__kmigmgrConstructEngine__
660 #define kmigmgrConstructEngine(arg1, arg_this, arg3) kmigmgrConstructEngine_DISPATCH(arg1, arg_this, arg3)
661 #define kmigmgrStateInitLocked_FNPTR(arg_this) arg_this->__kmigmgrStateInitLocked__
662 #define kmigmgrStateInitLocked(arg1, arg_this) kmigmgrStateInitLocked_DISPATCH(arg1, arg_this)
663 #define kmigmgrStateUnload_FNPTR(arg_this) arg_this->__kmigmgrStateUnload__
664 #define kmigmgrStateUnload(arg1, arg_this, flags) kmigmgrStateUnload_DISPATCH(arg1, arg_this, flags)
665 #define kmigmgrLoadStaticInfo_FNPTR(arg_this) arg_this->__kmigmgrLoadStaticInfo__
666 #define kmigmgrLoadStaticInfo(arg1, arg_this) kmigmgrLoadStaticInfo_DISPATCH(arg1, arg_this)
667 #define kmigmgrLoadStaticInfo_HAL(arg1, arg_this) kmigmgrLoadStaticInfo_DISPATCH(arg1, arg_this)
668 #define kmigmgrSetStaticInfo_FNPTR(arg_this) arg_this->__kmigmgrSetStaticInfo__
669 #define kmigmgrSetStaticInfo(arg1, arg_this) kmigmgrSetStaticInfo_DISPATCH(arg1, arg_this)
670 #define kmigmgrSetStaticInfo_HAL(arg1, arg_this) kmigmgrSetStaticInfo_DISPATCH(arg1, arg_this)
671 #define kmigmgrClearStaticInfo_FNPTR(arg_this) arg_this->__kmigmgrClearStaticInfo__
672 #define kmigmgrClearStaticInfo(arg1, arg_this) kmigmgrClearStaticInfo_DISPATCH(arg1, arg_this)
673 #define kmigmgrClearStaticInfo_HAL(arg1, arg_this) kmigmgrClearStaticInfo_DISPATCH(arg1, arg_this)
674 #define kmigmgrSaveToPersistenceFromVgpuStaticInfo_FNPTR(arg_this) arg_this->__kmigmgrSaveToPersistenceFromVgpuStaticInfo__
675 #define kmigmgrSaveToPersistenceFromVgpuStaticInfo(arg1, arg_this) kmigmgrSaveToPersistenceFromVgpuStaticInfo_DISPATCH(arg1, arg_this)
676 #define kmigmgrSaveToPersistenceFromVgpuStaticInfo_HAL(arg1, arg_this) kmigmgrSaveToPersistenceFromVgpuStaticInfo_DISPATCH(arg1, arg_this)
677 #define kmigmgrDeleteGPUInstanceRunlists_FNPTR(arg_this) arg_this->__kmigmgrDeleteGPUInstanceRunlists__
678 #define kmigmgrDeleteGPUInstanceRunlists(arg1, arg_this, arg3) kmigmgrDeleteGPUInstanceRunlists_DISPATCH(arg1, arg_this, arg3)
679 #define kmigmgrDeleteGPUInstanceRunlists_HAL(arg1, arg_this, arg3) kmigmgrDeleteGPUInstanceRunlists_DISPATCH(arg1, arg_this, arg3)
680 #define kmigmgrCreateGPUInstanceRunlists_FNPTR(arg_this) arg_this->__kmigmgrCreateGPUInstanceRunlists__
681 #define kmigmgrCreateGPUInstanceRunlists(arg1, arg_this, arg3) kmigmgrCreateGPUInstanceRunlists_DISPATCH(arg1, arg_this, arg3)
682 #define kmigmgrCreateGPUInstanceRunlists_HAL(arg1, arg_this, arg3) kmigmgrCreateGPUInstanceRunlists_DISPATCH(arg1, arg_this, arg3)
683 #define kmigmgrRestoreFromPersistence_FNPTR(arg_this) arg_this->__kmigmgrRestoreFromPersistence__
684 #define kmigmgrRestoreFromPersistence(arg1, arg_this) kmigmgrRestoreFromPersistence_DISPATCH(arg1, arg_this)
685 #define kmigmgrRestoreFromPersistence_HAL(arg1, arg_this) kmigmgrRestoreFromPersistence_DISPATCH(arg1, arg_this)
686 #define kmigmgrCreateGPUInstanceCheck_FNPTR(arg_this) arg_this->__kmigmgrCreateGPUInstanceCheck__
687 #define kmigmgrCreateGPUInstanceCheck(arg1, arg_this, bMemoryPartitioningNeeded) kmigmgrCreateGPUInstanceCheck_DISPATCH(arg1, arg_this, bMemoryPartitioningNeeded)
688 #define kmigmgrCreateGPUInstanceCheck_HAL(arg1, arg_this, bMemoryPartitioningNeeded) kmigmgrCreateGPUInstanceCheck_DISPATCH(arg1, arg_this, bMemoryPartitioningNeeded)
689 #define kmigmgrIsDevinitMIGBitSet_FNPTR(arg_this) arg_this->__kmigmgrIsDevinitMIGBitSet__
690 #define kmigmgrIsDevinitMIGBitSet(arg1, arg_this) kmigmgrIsDevinitMIGBitSet_DISPATCH(arg1, arg_this)
691 #define kmigmgrIsDevinitMIGBitSet_HAL(arg1, arg_this) kmigmgrIsDevinitMIGBitSet_DISPATCH(arg1, arg_this)
692 #define kmigmgrIsGPUInstanceCombinationValid_FNPTR(arg_this) arg_this->__kmigmgrIsGPUInstanceCombinationValid__
693 #define kmigmgrIsGPUInstanceCombinationValid(arg1, arg_this, gpuInstanceFlag) kmigmgrIsGPUInstanceCombinationValid_DISPATCH(arg1, arg_this, gpuInstanceFlag)
694 #define kmigmgrIsGPUInstanceCombinationValid_HAL(arg1, arg_this, gpuInstanceFlag) kmigmgrIsGPUInstanceCombinationValid_DISPATCH(arg1, arg_this, gpuInstanceFlag)
695 #define kmigmgrIsGPUInstanceFlagValid_FNPTR(arg_this) arg_this->__kmigmgrIsGPUInstanceFlagValid__
696 #define kmigmgrIsGPUInstanceFlagValid(arg1, arg_this, gpuInstanceFlag) kmigmgrIsGPUInstanceFlagValid_DISPATCH(arg1, arg_this, gpuInstanceFlag)
697 #define kmigmgrIsGPUInstanceFlagValid_HAL(arg1, arg_this, gpuInstanceFlag) kmigmgrIsGPUInstanceFlagValid_DISPATCH(arg1, arg_this, gpuInstanceFlag)
698 #define kmigmgrGenerateComputeInstanceUuid_FNPTR(arg_this) arg_this->__kmigmgrGenerateComputeInstanceUuid__
699 #define kmigmgrGenerateComputeInstanceUuid(arg1, arg_this, swizzId, globalGrIdx, arg5) kmigmgrGenerateComputeInstanceUuid_DISPATCH(arg1, arg_this, swizzId, globalGrIdx, arg5)
700 #define kmigmgrGenerateComputeInstanceUuid_HAL(arg1, arg_this, swizzId, globalGrIdx, arg5) kmigmgrGenerateComputeInstanceUuid_DISPATCH(arg1, arg_this, swizzId, globalGrIdx, arg5)
701 #define kmigmgrGenerateGPUInstanceUuid_FNPTR(arg_this) arg_this->__kmigmgrGenerateGPUInstanceUuid__
702 #define kmigmgrGenerateGPUInstanceUuid(arg1, arg_this, swizzId, arg4) kmigmgrGenerateGPUInstanceUuid_DISPATCH(arg1, arg_this, swizzId, arg4)
703 #define kmigmgrGenerateGPUInstanceUuid_HAL(arg1, arg_this, swizzId, arg4) kmigmgrGenerateGPUInstanceUuid_DISPATCH(arg1, arg_this, swizzId, arg4)
704 #define kmigmgrCreateComputeInstances_FNPTR(arg_this) arg_this->__kmigmgrCreateComputeInstances__
705 #define kmigmgrCreateComputeInstances(arg1, arg_this, arg3, bQuery, arg5, pCIIds, bCreateCap) kmigmgrCreateComputeInstances_DISPATCH(arg1, arg_this, arg3, bQuery, arg5, pCIIds, bCreateCap)
706 #define kmigmgrCreateComputeInstances_HAL(arg1, arg_this, arg3, bQuery, arg5, pCIIds, bCreateCap) kmigmgrCreateComputeInstances_DISPATCH(arg1, arg_this, arg3, bQuery, arg5, pCIIds, bCreateCap)
707 #define kmigmgrIsMemoryPartitioningRequested_FNPTR(arg_this) arg_this->__kmigmgrIsMemoryPartitioningRequested__
708 #define kmigmgrIsMemoryPartitioningRequested(arg1, arg_this, partitionFlags) kmigmgrIsMemoryPartitioningRequested_DISPATCH(arg1, arg_this, partitionFlags)
709 #define kmigmgrIsMemoryPartitioningRequested_HAL(arg1, arg_this, partitionFlags) kmigmgrIsMemoryPartitioningRequested_DISPATCH(arg1, arg_this, partitionFlags)
710 #define kmigmgrIsMemoryPartitioningNeeded_FNPTR(arg_this) arg_this->__kmigmgrIsMemoryPartitioningNeeded__
711 #define kmigmgrIsMemoryPartitioningNeeded(arg1, arg_this, swizzId) kmigmgrIsMemoryPartitioningNeeded_DISPATCH(arg1, arg_this, swizzId)
712 #define kmigmgrIsMemoryPartitioningNeeded_HAL(arg1, arg_this, swizzId) kmigmgrIsMemoryPartitioningNeeded_DISPATCH(arg1, arg_this, swizzId)
713 #define kmigmgrMemSizeFlagToSwizzIdRange_FNPTR(arg_this) arg_this->__kmigmgrMemSizeFlagToSwizzIdRange__
714 #define kmigmgrMemSizeFlagToSwizzIdRange(arg1, arg_this, memSizeFlag) kmigmgrMemSizeFlagToSwizzIdRange_DISPATCH(arg1, arg_this, memSizeFlag)
715 #define kmigmgrMemSizeFlagToSwizzIdRange_HAL(arg1, arg_this, memSizeFlag) kmigmgrMemSizeFlagToSwizzIdRange_DISPATCH(arg1, arg_this, memSizeFlag)
716 #define kmigmgrSwizzIdToSpan_FNPTR(arg_this) arg_this->__kmigmgrSwizzIdToSpan__
717 #define kmigmgrSwizzIdToSpan(arg1, arg_this, swizzId) kmigmgrSwizzIdToSpan_DISPATCH(arg1, arg_this, swizzId)
718 #define kmigmgrSwizzIdToSpan_HAL(arg1, arg_this, swizzId) kmigmgrSwizzIdToSpan_DISPATCH(arg1, arg_this, swizzId)
719 #define kmigmgrSetMIGState_FNPTR(arg_this) arg_this->__kmigmgrSetMIGState__
720 #define kmigmgrSetMIGState(arg1, arg_this, bMemoryPartitioningNeeded, bEnable, bUnload) kmigmgrSetMIGState_DISPATCH(arg1, arg_this, bMemoryPartitioningNeeded, bEnable, bUnload)
721 #define kmigmgrSetMIGState_HAL(arg1, arg_this, bMemoryPartitioningNeeded, bEnable, bUnload) kmigmgrSetMIGState_DISPATCH(arg1, arg_this, bMemoryPartitioningNeeded, bEnable, bUnload)
722 #define kmigmgrIsCTSAlignmentRequired_FNPTR(arg_this) arg_this->__kmigmgrIsCTSAlignmentRequired__
723 #define kmigmgrIsCTSAlignmentRequired(arg1, arg_this) kmigmgrIsCTSAlignmentRequired_DISPATCH(arg1, arg_this)
724 #define kmigmgrIsCTSAlignmentRequired_HAL(arg1, arg_this) kmigmgrIsCTSAlignmentRequired_DISPATCH(arg1, arg_this)
725 #define kmigmgrRestoreFromBootConfig_FNPTR(pKernelMIGManager) pKernelMIGManager->__kmigmgrRestoreFromBootConfig__
726 #define kmigmgrRestoreFromBootConfig(pGpu, pKernelMIGManager) kmigmgrRestoreFromBootConfig_DISPATCH(pGpu, pKernelMIGManager)
727 #define kmigmgrRestoreFromBootConfig_HAL(pGpu, pKernelMIGManager) kmigmgrRestoreFromBootConfig_DISPATCH(pGpu, pKernelMIGManager)
728 #define kmigmgrInitMissing_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateInitMissing__
729 #define kmigmgrInitMissing(pGpu, pEngstate) kmigmgrInitMissing_DISPATCH(pGpu, pEngstate)
730 #define kmigmgrStatePreInitLocked_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreInitLocked__
731 #define kmigmgrStatePreInitLocked(pGpu, pEngstate) kmigmgrStatePreInitLocked_DISPATCH(pGpu, pEngstate)
732 #define kmigmgrStatePreInitUnlocked_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreInitUnlocked__
733 #define kmigmgrStatePreInitUnlocked(pGpu, pEngstate) kmigmgrStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
734 #define kmigmgrStateInitUnlocked_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStateInitUnlocked__
735 #define kmigmgrStateInitUnlocked(pGpu, pEngstate) kmigmgrStateInitUnlocked_DISPATCH(pGpu, pEngstate)
736 #define kmigmgrStatePreLoad_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreLoad__
737 #define kmigmgrStatePreLoad(pGpu, pEngstate, arg3) kmigmgrStatePreLoad_DISPATCH(pGpu, pEngstate, arg3)
738 #define kmigmgrStateLoad_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStateLoad__
739 #define kmigmgrStateLoad(pGpu, pEngstate, arg3) kmigmgrStateLoad_DISPATCH(pGpu, pEngstate, arg3)
740 #define kmigmgrStatePostLoad_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePostLoad__
741 #define kmigmgrStatePostLoad(pGpu, pEngstate, arg3) kmigmgrStatePostLoad_DISPATCH(pGpu, pEngstate, arg3)
742 #define kmigmgrStatePreUnload_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreUnload__
743 #define kmigmgrStatePreUnload(pGpu, pEngstate, arg3) kmigmgrStatePreUnload_DISPATCH(pGpu, pEngstate, arg3)
744 #define kmigmgrStatePostUnload_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePostUnload__
745 #define kmigmgrStatePostUnload(pGpu, pEngstate, arg3) kmigmgrStatePostUnload_DISPATCH(pGpu, pEngstate, arg3)
746 #define kmigmgrStateDestroy_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStateDestroy__
747 #define kmigmgrStateDestroy(pGpu, pEngstate) kmigmgrStateDestroy_DISPATCH(pGpu, pEngstate)
748 #define kmigmgrIsPresent_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateIsPresent__
749 #define kmigmgrIsPresent(pGpu, pEngstate) kmigmgrIsPresent_DISPATCH(pGpu, pEngstate)
750 
// Dispatch functions
static inline NV_STATUS kmigmgrConstructEngine_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this, ENGDESCRIPTOR arg3) {
    return arg_this->__kmigmgrConstructEngine__(arg1, arg_this, arg3);
}

static inline NV_STATUS kmigmgrStateInitLocked_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this) {
    return arg_this->__kmigmgrStateInitLocked__(arg1, arg_this);
}

static inline NV_STATUS kmigmgrStateUnload_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this, NvU32 flags) {
    return arg_this->__kmigmgrStateUnload__(arg1, arg_this, flags);
}

static inline NV_STATUS kmigmgrLoadStaticInfo_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this) {
    return arg_this->__kmigmgrLoadStaticInfo__(arg1, arg_this);
}

static inline NV_STATUS kmigmgrSetStaticInfo_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this) {
    return arg_this->__kmigmgrSetStaticInfo__(arg1, arg_this);
}

static inline void kmigmgrClearStaticInfo_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this) {
    arg_this->__kmigmgrClearStaticInfo__(arg1, arg_this);
}

static inline NV_STATUS kmigmgrSaveToPersistenceFromVgpuStaticInfo_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this) {
    return arg_this->__kmigmgrSaveToPersistenceFromVgpuStaticInfo__(arg1, arg_this);
}

static inline NV_STATUS kmigmgrDeleteGPUInstanceRunlists_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this, KERNEL_MIG_GPU_INSTANCE *arg3) {
    return arg_this->__kmigmgrDeleteGPUInstanceRunlists__(arg1, arg_this, arg3);
}

static inline NV_STATUS kmigmgrCreateGPUInstanceRunlists_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this, KERNEL_MIG_GPU_INSTANCE *arg3) {
    return arg_this->__kmigmgrCreateGPUInstanceRunlists__(arg1, arg_this, arg3);
}

static inline NV_STATUS kmigmgrRestoreFromPersistence_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this) {
    return arg_this->__kmigmgrRestoreFromPersistence__(arg1, arg_this);
}

static inline NV_STATUS kmigmgrCreateGPUInstanceCheck_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this, NvBool bMemoryPartitioningNeeded) {
    return arg_this->__kmigmgrCreateGPUInstanceCheck__(arg1, arg_this, bMemoryPartitioningNeeded);
}

static inline NvBool kmigmgrIsDevinitMIGBitSet_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this) {
    return arg_this->__kmigmgrIsDevinitMIGBitSet__(arg1, arg_this);
}

static inline NvBool kmigmgrIsGPUInstanceCombinationValid_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this, NvU32 gpuInstanceFlag) {
    return arg_this->__kmigmgrIsGPUInstanceCombinationValid__(arg1, arg_this, gpuInstanceFlag);
}

static inline NvBool kmigmgrIsGPUInstanceFlagValid_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this, NvU32 gpuInstanceFlag) {
    return arg_this->__kmigmgrIsGPUInstanceFlagValid__(arg1, arg_this, gpuInstanceFlag);
}

static inline NV_STATUS kmigmgrGenerateComputeInstanceUuid_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this, NvU32 swizzId, NvU32 globalGrIdx, NvUuid *arg5) {
    return arg_this->__kmigmgrGenerateComputeInstanceUuid__(arg1, arg_this, swizzId, globalGrIdx, arg5);
}

static inline NV_STATUS kmigmgrGenerateGPUInstanceUuid_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this, NvU32 swizzId, NvUuid *arg4) {
    return arg_this->__kmigmgrGenerateGPUInstanceUuid__(arg1, arg_this, swizzId, arg4);
}

static inline NV_STATUS kmigmgrCreateComputeInstances_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this, KERNEL_MIG_GPU_INSTANCE *arg3, NvBool bQuery, KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS arg5, NvU32 *pCIIds, NvBool bCreateCap) {
    return arg_this->__kmigmgrCreateComputeInstances__(arg1, arg_this, arg3, bQuery, arg5, pCIIds, bCreateCap);
}

static inline NvBool kmigmgrIsMemoryPartitioningRequested_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this, NvU32 partitionFlags) {
    return arg_this->__kmigmgrIsMemoryPartitioningRequested__(arg1, arg_this, partitionFlags);
}

static inline NvBool kmigmgrIsMemoryPartitioningNeeded_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this, NvU32 swizzId) {
    return arg_this->__kmigmgrIsMemoryPartitioningNeeded__(arg1, arg_this, swizzId);
}

static inline struct NV_RANGE kmigmgrMemSizeFlagToSwizzIdRange_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this, NvU32 memSizeFlag) {
    return arg_this->__kmigmgrMemSizeFlagToSwizzIdRange__(arg1, arg_this, memSizeFlag);
}

static inline struct NV_RANGE kmigmgrSwizzIdToSpan_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this, NvU32 swizzId) {
    return arg_this->__kmigmgrSwizzIdToSpan__(arg1, arg_this, swizzId);
}

static inline NV_STATUS kmigmgrSetMIGState_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this, NvBool bMemoryPartitioningNeeded, NvBool bEnable, NvBool bUnload) {
    return arg_this->__kmigmgrSetMIGState__(arg1, arg_this, bMemoryPartitioningNeeded, bEnable, bUnload);
}

static inline NvBool kmigmgrIsCTSAlignmentRequired_DISPATCH(OBJGPU *arg1, struct KernelMIGManager *arg_this) {
    return arg_this->__kmigmgrIsCTSAlignmentRequired__(arg1, arg_this);
}

static inline NV_STATUS kmigmgrRestoreFromBootConfig_DISPATCH(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager) {
    return pKernelMIGManager->__kmigmgrRestoreFromBootConfig__(pGpu, pKernelMIGManager);
}

static inline void kmigmgrInitMissing_DISPATCH(struct OBJGPU *pGpu, struct KernelMIGManager *pEngstate) {
    pEngstate->__kmigmgrInitMissing__(pGpu, pEngstate);
}

static inline NV_STATUS kmigmgrStatePreInitLocked_DISPATCH(struct OBJGPU *pGpu, struct KernelMIGManager *pEngstate) {
    return pEngstate->__kmigmgrStatePreInitLocked__(pGpu, pEngstate);
}

static inline NV_STATUS kmigmgrStatePreInitUnlocked_DISPATCH(struct OBJGPU *pGpu, struct KernelMIGManager *pEngstate) {
    return pEngstate->__kmigmgrStatePreInitUnlocked__(pGpu, pEngstate);
}

static inline NV_STATUS kmigmgrStateInitUnlocked_DISPATCH(struct OBJGPU *pGpu, struct KernelMIGManager *pEngstate) {
    return pEngstate->__kmigmgrStateInitUnlocked__(pGpu, pEngstate);
}

static inline NV_STATUS kmigmgrStatePreLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelMIGManager *pEngstate, NvU32 arg3) {
    return pEngstate->__kmigmgrStatePreLoad__(pGpu, pEngstate, arg3);
}

static inline NV_STATUS kmigmgrStateLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelMIGManager *pEngstate, NvU32 arg3) {
    return pEngstate->__kmigmgrStateLoad__(pGpu, pEngstate, arg3);
}

static inline NV_STATUS kmigmgrStatePostLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelMIGManager *pEngstate, NvU32 arg3) {
    return pEngstate->__kmigmgrStatePostLoad__(pGpu, pEngstate, arg3);
}

static inline NV_STATUS kmigmgrStatePreUnload_DISPATCH(struct OBJGPU *pGpu, struct KernelMIGManager *pEngstate, NvU32 arg3) {
    return pEngstate->__kmigmgrStatePreUnload__(pGpu, pEngstate, arg3);
}

static inline NV_STATUS kmigmgrStatePostUnload_DISPATCH(struct OBJGPU *pGpu, struct KernelMIGManager *pEngstate, NvU32 arg3) {
    return pEngstate->__kmigmgrStatePostUnload__(pGpu, pEngstate, arg3);
}

static inline void kmigmgrStateDestroy_DISPATCH(struct OBJGPU *pGpu, struct KernelMIGManager *pEngstate) {
    pEngstate->__kmigmgrStateDestroy__(pGpu, pEngstate);
}

static inline NvBool kmigmgrIsPresent_DISPATCH(struct OBJGPU *pGpu, struct KernelMIGManager *pEngstate) {
    return pEngstate->__kmigmgrIsPresent__(pGpu, pEngstate);
}

892 void kmigmgrDetectReducedConfig_KERNEL(OBJGPU *arg1, struct KernelMIGManager *arg2);
893 
894 
895 #ifdef __nvoc_kernel_mig_manager_h_disabled
kmigmgrDetectReducedConfig(OBJGPU * arg1,struct KernelMIGManager * arg2)896 static inline void kmigmgrDetectReducedConfig(OBJGPU *arg1, struct KernelMIGManager *arg2) {
897     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
898 }
899 #else //__nvoc_kernel_mig_manager_h_disabled
900 #define kmigmgrDetectReducedConfig(arg1, arg2) kmigmgrDetectReducedConfig_KERNEL(arg1, arg2)
901 #endif //__nvoc_kernel_mig_manager_h_disabled
902 
903 #define kmigmgrDetectReducedConfig_HAL(arg1, arg2) kmigmgrDetectReducedConfig(arg1, arg2)
904 
905 NV_STATUS kmigmgrGetComputeProfileFromGpcCount_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 gpcCount, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile);
906 
907 
908 #ifdef __nvoc_kernel_mig_manager_h_disabled
kmigmgrGetComputeProfileFromGpcCount(OBJGPU * arg1,struct KernelMIGManager * arg2,NvU32 gpcCount,NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE * pProfile)909 static inline NV_STATUS kmigmgrGetComputeProfileFromGpcCount(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 gpcCount, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile) {
910     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
911     return NV_ERR_NOT_SUPPORTED;
912 }
913 #else //__nvoc_kernel_mig_manager_h_disabled
914 #define kmigmgrGetComputeProfileFromGpcCount(arg1, arg2, gpcCount, pProfile) kmigmgrGetComputeProfileFromGpcCount_IMPL(arg1, arg2, gpcCount, pProfile)
915 #endif //__nvoc_kernel_mig_manager_h_disabled
916 
917 #define kmigmgrGetComputeProfileFromGpcCount_HAL(arg1, arg2, gpcCount, pProfile) kmigmgrGetComputeProfileFromGpcCount(arg1, arg2, gpcCount, pProfile)
918 
919 NV_STATUS kmigmgrConstructEngine_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, ENGDESCRIPTOR arg3);
920 
921 NV_STATUS kmigmgrStateInitLocked_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);
922 
923 NV_STATUS kmigmgrStateUnload_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 flags);
924 
925 NV_STATUS kmigmgrLoadStaticInfo_VF(OBJGPU *arg1, struct KernelMIGManager *arg2);
926 
927 NV_STATUS kmigmgrLoadStaticInfo_KERNEL(OBJGPU *arg1, struct KernelMIGManager *arg2);
928 
929 NV_STATUS kmigmgrSetStaticInfo_VF(OBJGPU *arg1, struct KernelMIGManager *arg2);
930 
kmigmgrSetStaticInfo_46f6a7(OBJGPU * arg1,struct KernelMIGManager * arg2)931 static inline NV_STATUS kmigmgrSetStaticInfo_46f6a7(OBJGPU *arg1, struct KernelMIGManager *arg2) {
932     return NV_ERR_NOT_SUPPORTED;
933 }
934 
void kmigmgrClearStaticInfo_VF(OBJGPU *arg1, struct KernelMIGManager *arg2);

static inline void kmigmgrClearStaticInfo_b3696a(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    return;
}

NV_STATUS kmigmgrSaveToPersistenceFromVgpuStaticInfo_VF(OBJGPU *arg1, struct KernelMIGManager *arg2);

static inline NV_STATUS kmigmgrSaveToPersistenceFromVgpuStaticInfo_46f6a7(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kmigmgrDeleteGPUInstanceRunlists_56cd7a(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3) {
    return NV_OK;
}

NV_STATUS kmigmgrDeleteGPUInstanceRunlists_FWCLIENT(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3);

static inline NV_STATUS kmigmgrCreateGPUInstanceRunlists_56cd7a(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3) {
    return NV_OK;
}

NV_STATUS kmigmgrCreateGPUInstanceRunlists_FWCLIENT(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3);

NV_STATUS kmigmgrRestoreFromPersistence_VF(OBJGPU *arg1, struct KernelMIGManager *arg2);

NV_STATUS kmigmgrRestoreFromPersistence_PF(OBJGPU *arg1, struct KernelMIGManager *arg2);

NV_STATUS kmigmgrCreateGPUInstanceCheck_GA100(OBJGPU *arg1, struct KernelMIGManager *arg2, NvBool bMemoryPartitioningNeeded);

static inline NV_STATUS kmigmgrCreateGPUInstanceCheck_46f6a7(OBJGPU *arg1, struct KernelMIGManager *arg2, NvBool bMemoryPartitioningNeeded) {
    return NV_ERR_NOT_SUPPORTED;
}

NvBool kmigmgrIsDevinitMIGBitSet_VF(OBJGPU *arg1, struct KernelMIGManager *arg2);

NvBool kmigmgrIsDevinitMIGBitSet_GA100(OBJGPU *arg1, struct KernelMIGManager *arg2);

static inline NvBool kmigmgrIsDevinitMIGBitSet_491d52(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    return ((NvBool)(0 != 0));
}

NvBool kmigmgrIsGPUInstanceCombinationValid_GA100(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 gpuInstanceFlag);

NvBool kmigmgrIsGPUInstanceCombinationValid_GH100(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 gpuInstanceFlag);

static inline NvBool kmigmgrIsGPUInstanceCombinationValid_491d52(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 gpuInstanceFlag) {
    return ((NvBool)(0 != 0));
}

NvBool kmigmgrIsGPUInstanceFlagValid_GA100(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 gpuInstanceFlag);

NvBool kmigmgrIsGPUInstanceFlagValid_GH100(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 gpuInstanceFlag);

NvBool kmigmgrIsGPUInstanceFlagValid_GB100(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 gpuInstanceFlag);

static inline NvBool kmigmgrIsGPUInstanceFlagValid_491d52(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 gpuInstanceFlag) {
    return ((NvBool)(0 != 0));
}

NV_STATUS kmigmgrGenerateComputeInstanceUuid_VF(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, NvU32 globalGrIdx, NvUuid *arg5);

static inline NV_STATUS kmigmgrGenerateComputeInstanceUuid_5baef9(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, NvU32 globalGrIdx, NvUuid *arg5) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS kmigmgrGenerateGPUInstanceUuid_VF(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, NvUuid *arg4);

static inline NV_STATUS kmigmgrGenerateGPUInstanceUuid_5baef9(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, NvUuid *arg4) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS kmigmgrCreateComputeInstances_VF(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, NvBool bQuery, KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS arg5, NvU32 *pCIIds, NvBool bCreateCap);

NV_STATUS kmigmgrCreateComputeInstances_FWCLIENT(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, NvBool bQuery, KMIGMGR_CREATE_COMPUTE_INSTANCE_PARAMS arg5, NvU32 *pCIIds, NvBool bCreateCap);

NvBool kmigmgrIsMemoryPartitioningRequested_GA100(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 partitionFlags);

static inline NvBool kmigmgrIsMemoryPartitioningRequested_491d52(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 partitionFlags) {
    return ((NvBool)(0 != 0));
}

NvBool kmigmgrIsMemoryPartitioningNeeded_GA100(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId);

static inline NvBool kmigmgrIsMemoryPartitioningNeeded_491d52(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId) {
    return ((NvBool)(0 != 0));
}

static inline struct NV_RANGE kmigmgrMemSizeFlagToSwizzIdRange_d64cd6(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 memSizeFlag) {
    return NV_RANGE_EMPTY;
}

struct NV_RANGE kmigmgrMemSizeFlagToSwizzIdRange_GA100(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 memSizeFlag);

struct NV_RANGE kmigmgrSwizzIdToSpan_GA100(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId);

static inline struct NV_RANGE kmigmgrSwizzIdToSpan_d64cd6(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId) {
    return NV_RANGE_EMPTY;
}

NV_STATUS kmigmgrSetMIGState_VF(OBJGPU *arg1, struct KernelMIGManager *arg2, NvBool bMemoryPartitioningNeeded, NvBool bEnable, NvBool bUnload);

NV_STATUS kmigmgrSetMIGState_FWCLIENT(OBJGPU *arg1, struct KernelMIGManager *arg2, NvBool bMemoryPartitioningNeeded, NvBool bEnable, NvBool bUnload);

static inline NvBool kmigmgrIsCTSAlignmentRequired_cbe027(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    return ((NvBool)(0 == 0));
}

NvBool kmigmgrIsCTSAlignmentRequired_VF(OBJGPU *arg1, struct KernelMIGManager *arg2);

NvBool kmigmgrIsCTSAlignmentRequired_PF(OBJGPU *arg1, struct KernelMIGManager *arg2);

NV_STATUS kmigmgrRestoreFromBootConfig_PF(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager);

static inline NV_STATUS kmigmgrRestoreFromBootConfig_56cd7a(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager) {
    return NV_OK;
}

static inline NvBool kmigmgrUseLegacyVgpuPolicy(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager) {
    return ((NvBool)(0 != 0));
}

static inline NvBool kmigmgrIsMIGNvlinkP2PSupportOverridden(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager) {
    return ((NvBool)(0 != 0));
}

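/*
 * The inline accessors below reach into the NVOC-generated private view of
 * the class (KernelMIGManager_PRIVATE) via a cast; they are plain
 * getters/setters over that private state.
 */
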
static inline void kmigmgrSetMIGEnabled(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager, NvBool bEnabled) {
    struct KernelMIGManager_PRIVATE *pKernelMIGManager_PRIVATE = (struct KernelMIGManager_PRIVATE *)pKernelMIGManager;
    pKernelMIGManager_PRIVATE->bMIGEnabled = bEnabled;
}

static inline const union ENGTYPE_BIT_VECTOR *kmigmgrGetPartitionableEnginesInUse(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager) {
    struct KernelMIGManager_PRIVATE *pKernelMIGManager_PRIVATE = (struct KernelMIGManager_PRIVATE *)pKernelMIGManager;
    return &pKernelMIGManager_PRIVATE->partitionableEnginesInUse;
}

static inline NvBool kmigmgrIsA100ReducedConfig(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager) {
    struct KernelMIGManager_PRIVATE *pKernelMIGManager_PRIVATE = (struct KernelMIGManager_PRIVATE *)pKernelMIGManager;
    return pKernelMIGManager_PRIVATE->bIsA100ReducedConfig;
}

static inline void kmigmgrSetIsA100ReducedConfig(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager, NvBool bA100ReducedConfig) {
    struct KernelMIGManager_PRIVATE *pKernelMIGManager_PRIVATE = (struct KernelMIGManager_PRIVATE *)pKernelMIGManager;
    pKernelMIGManager_PRIVATE->bIsA100ReducedConfig = bA100ReducedConfig;
}

static inline NvU64 kmigmgrGetValidGlobalCTSIdMask(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager) {
    struct KernelMIGManager_PRIVATE *pKernelMIGManager_PRIVATE = (struct KernelMIGManager_PRIVATE *)pKernelMIGManager;
    return pKernelMIGManager_PRIVATE->validGlobalCTSIdMask;
}

static inline void kmigmgrSetValidGlobalCTSIdMask(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager, NvU64 validGlobalCTSIdMask) {
    struct KernelMIGManager_PRIVATE *pKernelMIGManager_PRIVATE = (struct KernelMIGManager_PRIVATE *)pKernelMIGManager;
    pKernelMIGManager_PRIVATE->validGlobalCTSIdMask = validGlobalCTSIdMask;
}

static inline NvU64 kmigmgrGetValidGlobalGfxCTSIdMask(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager) {
    struct KernelMIGManager_PRIVATE *pKernelMIGManager_PRIVATE = (struct KernelMIGManager_PRIVATE *)pKernelMIGManager;
    return pKernelMIGManager_PRIVATE->validGlobalGfxCTSIdMask;
}

static inline void kmigmgrSetValidGlobalGfxCTSIdMask(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager, NvU64 validGlobalGfxCTSIdMask) {
    struct KernelMIGManager_PRIVATE *pKernelMIGManager_PRIVATE = (struct KernelMIGManager_PRIVATE *)pKernelMIGManager;
    pKernelMIGManager_PRIVATE->validGlobalGfxCTSIdMask = validGlobalGfxCTSIdMask;
}

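/*
 * The utility helpers below are bound unconditionally to their _IMPL
 * definitions; they are not wrapped in the __nvoc_kernel_mig_manager_h_disabled
 * stub pattern used for most other entry points in this header.
 */
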
NV_STATUS kmigmgrIncRefCount_IMPL(struct RsShared *arg1);

#define kmigmgrIncRefCount(arg1) kmigmgrIncRefCount_IMPL(arg1)
NV_STATUS kmigmgrDecRefCount_IMPL(struct RsShared *arg1);

#define kmigmgrDecRefCount(arg1) kmigmgrDecRefCount_IMPL(arg1)
struct MIG_INSTANCE_REF kmigmgrMakeGIReference_IMPL(KERNEL_MIG_GPU_INSTANCE *arg1);

#define kmigmgrMakeGIReference(arg1) kmigmgrMakeGIReference_IMPL(arg1)
struct MIG_INSTANCE_REF kmigmgrMakeCIReference_IMPL(KERNEL_MIG_GPU_INSTANCE *arg1, MIG_COMPUTE_INSTANCE *arg2);

#define kmigmgrMakeCIReference(arg1, arg2) kmigmgrMakeCIReference_IMPL(arg1, arg2)
NV_STATUS kmigmgrEngineTypeXlate_IMPL(union ENGTYPE_BIT_VECTOR *pSrc, RM_ENGINE_TYPE srcEngineType, union ENGTYPE_BIT_VECTOR *pDst, RM_ENGINE_TYPE *pDstEngineType);

#define kmigmgrEngineTypeXlate(pSrc, srcEngineType, pDst, pDstEngineType) kmigmgrEngineTypeXlate_IMPL(pSrc, srcEngineType, pDst, pDstEngineType)
NV_STATUS kmigmgrEngBitVectorXlate_IMPL(union ENGTYPE_BIT_VECTOR *pSrcRef, union ENGTYPE_BIT_VECTOR *pSrc, union ENGTYPE_BIT_VECTOR *pDstRef, union ENGTYPE_BIT_VECTOR *pDst);

#define kmigmgrEngBitVectorXlate(pSrcRef, pSrc, pDstRef, pDst) kmigmgrEngBitVectorXlate_IMPL(pSrcRef, pSrc, pDstRef, pDst)
NvBool kmigmgrIsInstanceAttributionIdValid_IMPL(NvU16 id);

#define kmigmgrIsInstanceAttributionIdValid(id) kmigmgrIsInstanceAttributionIdValid_IMPL(id)
struct MIG_INSTANCE_REF kmigmgrMakeNoMIGReference_IMPL(void);

#define kmigmgrMakeNoMIGReference() kmigmgrMakeNoMIGReference_IMPL()
NvBool kmigmgrIsMIGReferenceValid_IMPL(struct MIG_INSTANCE_REF *arg1);

#define kmigmgrIsMIGReferenceValid(arg1) kmigmgrIsMIGReferenceValid_IMPL(arg1)
NvBool kmigmgrAreMIGReferencesSame_IMPL(struct MIG_INSTANCE_REF *arg1, struct MIG_INSTANCE_REF *arg2);

#define kmigmgrAreMIGReferencesSame(arg1, arg2) kmigmgrAreMIGReferencesSame_IMPL(arg1, arg2)
NvU32 kmigmgrCountEnginesOfType_IMPL(const union ENGTYPE_BIT_VECTOR *arg1, RM_ENGINE_TYPE arg2);

#define kmigmgrCountEnginesOfType(arg1, arg2) kmigmgrCountEnginesOfType_IMPL(arg1, arg2)
NvU32 kmigmgrCountEnginesInRange_IMPL(const union ENGTYPE_BIT_VECTOR *arg1, struct NV_RANGE arg2);

#define kmigmgrCountEnginesInRange(arg1, arg2) kmigmgrCountEnginesInRange_IMPL(arg1, arg2)
struct NV_RANGE kmigmgrGetAsyncCERange_IMPL(OBJGPU *arg1);

#define kmigmgrGetAsyncCERange(arg1) kmigmgrGetAsyncCERange_IMPL(arg1)
NvU16 kmigmgrGetAttributionIdFromMIGReference_IMPL(struct MIG_INSTANCE_REF arg1);

#define kmigmgrGetAttributionIdFromMIGReference(arg1) kmigmgrGetAttributionIdFromMIGReference_IMPL(arg1)
NV_STATUS kmigmgrAllocateInstanceEngines_IMPL(union ENGTYPE_BIT_VECTOR *pSourceEngines, NvBool bShared, struct NV_RANGE engTypeRange, NvU32 reqEngCount, union ENGTYPE_BIT_VECTOR *pOutEngines, union ENGTYPE_BIT_VECTOR *pExclusiveEngines, union ENGTYPE_BIT_VECTOR *pSharedEngines, union ENGTYPE_BIT_VECTOR *pAllocatableEngines);

#define kmigmgrAllocateInstanceEngines(pSourceEngines, bShared, engTypeRange, reqEngCount, pOutEngines, pExclusiveEngines, pSharedEngines, pAllocatableEngines) kmigmgrAllocateInstanceEngines_IMPL(pSourceEngines, bShared, engTypeRange, reqEngCount, pOutEngines, pExclusiveEngines, pSharedEngines, pAllocatableEngines)
void kmigmgrGetLocalEngineMask_IMPL(OBJGPU *arg1, union ENGTYPE_BIT_VECTOR *pPhysicalEngineMask, union ENGTYPE_BIT_VECTOR *pLocalEngineMask);

#define kmigmgrGetLocalEngineMask(arg1, pPhysicalEngineMask, pLocalEngineMask) kmigmgrGetLocalEngineMask_IMPL(arg1, pPhysicalEngineMask, pLocalEngineMask)
NV_STATUS kmigmgrAllocGPUInstanceHandles_IMPL(OBJGPU *arg1, NvU32 swizzId, KERNEL_MIG_GPU_INSTANCE *arg3);

#define kmigmgrAllocGPUInstanceHandles(arg1, swizzId, arg3) kmigmgrAllocGPUInstanceHandles_IMPL(arg1, swizzId, arg3)
void kmigmgrFreeGPUInstanceHandles_IMPL(KERNEL_MIG_GPU_INSTANCE *arg1);

#define kmigmgrFreeGPUInstanceHandles(arg1) kmigmgrFreeGPUInstanceHandles_IMPL(arg1)
NvBool kmigmgrIsGPUInstanceReadyToBeDestroyed_IMPL(KERNEL_MIG_GPU_INSTANCE *arg1);

#define kmigmgrIsGPUInstanceReadyToBeDestroyed(arg1) kmigmgrIsGPUInstanceReadyToBeDestroyed_IMPL(arg1)
void kmigmgrDestruct_IMPL(struct KernelMIGManager *arg1);

#define __nvoc_kmigmgrDestruct(arg1) kmigmgrDestruct_IMPL(arg1)
void kmigmgrInitRegistryOverrides_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline void kmigmgrInitRegistryOverrides(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrInitRegistryOverrides(arg1, arg2) kmigmgrInitRegistryOverrides_IMPL(arg1, arg2)
#endif //__nvoc_kernel_mig_manager_h_disabled

KERNEL_MIG_GPU_INSTANCE *kmigmgrGetMIGGpuInstanceSlot_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 i);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline KERNEL_MIG_GPU_INSTANCE *kmigmgrGetMIGGpuInstanceSlot(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 i) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NULL;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetMIGGpuInstanceSlot(arg1, arg2, i) kmigmgrGetMIGGpuInstanceSlot_IMPL(arg1, arg2, i)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvBool kmigmgrIsMIGSupported_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvBool kmigmgrIsMIGSupported(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrIsMIGSupported(arg1, arg2) kmigmgrIsMIGSupported_IMPL(arg1, arg2)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvBool kmigmgrIsMIGEnabled_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvBool kmigmgrIsMIGEnabled(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrIsMIGEnabled(arg1, arg2) kmigmgrIsMIGEnabled_IMPL(arg1, arg2)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvBool kmigmgrIsMIGGpuInstancingEnabled_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvBool kmigmgrIsMIGGpuInstancingEnabled(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrIsMIGGpuInstancingEnabled(arg1, arg2) kmigmgrIsMIGGpuInstancingEnabled_IMPL(arg1, arg2)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvBool kmigmgrIsMIGMemPartitioningEnabled_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvBool kmigmgrIsMIGMemPartitioningEnabled(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrIsMIGMemPartitioningEnabled(arg1, arg2) kmigmgrIsMIGMemPartitioningEnabled_IMPL(arg1, arg2)
#endif //__nvoc_kernel_mig_manager_h_disabled

const KERNEL_MIG_MANAGER_STATIC_INFO *kmigmgrGetStaticInfo_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline const KERNEL_MIG_MANAGER_STATIC_INFO *kmigmgrGetStaticInfo(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NULL;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetStaticInfo(arg1, arg2) kmigmgrGetStaticInfo_IMPL(arg1, arg2)
#endif //__nvoc_kernel_mig_manager_h_disabled

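/*
 * Illustrative usage sketch only (not emitted by NVOC): callers typically
 * fetch the static info snapshot and treat a NULL result as fatal, e.g.
 *
 *     const KERNEL_MIG_MANAGER_STATIC_INFO *pStaticInfo =
 *         kmigmgrGetStaticInfo(pGpu, pKernelMIGManager);
 *     NV_ASSERT_OR_RETURN(pStaticInfo != NULL, NV_ERR_INVALID_STATE);
 */
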
NV_STATUS kmigmgrSaveToPersistence_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrSaveToPersistence(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrSaveToPersistence(arg1, arg2) kmigmgrSaveToPersistence_IMPL(arg1, arg2)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrDisableWatchdog_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrDisableWatchdog(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrDisableWatchdog(arg1, arg2) kmigmgrDisableWatchdog_IMPL(arg1, arg2)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrRestoreWatchdog_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrRestoreWatchdog(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrRestoreWatchdog(arg1, arg2) kmigmgrRestoreWatchdog_IMPL(arg1, arg2)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrSetSwizzIdInUse_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrSetSwizzIdInUse(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrSetSwizzIdInUse(arg1, arg2, swizzId) kmigmgrSetSwizzIdInUse_IMPL(arg1, arg2, swizzId)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrClearSwizzIdInUse_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrClearSwizzIdInUse(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrClearSwizzIdInUse(arg1, arg2, swizzId) kmigmgrClearSwizzIdInUse_IMPL(arg1, arg2, swizzId)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvBool kmigmgrIsSwizzIdInUse_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvBool kmigmgrIsSwizzIdInUse(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrIsSwizzIdInUse(arg1, arg2, swizzId) kmigmgrIsSwizzIdInUse_IMPL(arg1, arg2, swizzId)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetInvalidSwizzIdMask_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, NvU64 *pUnsupportedSwizzIdMask);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetInvalidSwizzIdMask(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, NvU64 *pUnsupportedSwizzIdMask) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetInvalidSwizzIdMask(arg1, arg2, swizzId, pUnsupportedSwizzIdMask) kmigmgrGetInvalidSwizzIdMask_IMPL(arg1, arg2, swizzId, pUnsupportedSwizzIdMask)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvBool kmigmgrIsMIGNvlinkP2PSupported_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvBool kmigmgrIsMIGNvlinkP2PSupported(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrIsMIGNvlinkP2PSupported(arg1, arg2) kmigmgrIsMIGNvlinkP2PSupported_IMPL(arg1, arg2)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvU64 kmigmgrGetSwizzIdInUseMask_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvU64 kmigmgrGetSwizzIdInUseMask(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return 0;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetSwizzIdInUseMask(arg1, arg2) kmigmgrGetSwizzIdInUseMask_IMPL(arg1, arg2)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrSetEnginesInUse_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, union ENGTYPE_BIT_VECTOR *pEngines);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrSetEnginesInUse(OBJGPU *arg1, struct KernelMIGManager *arg2, union ENGTYPE_BIT_VECTOR *pEngines) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrSetEnginesInUse(arg1, arg2, pEngines) kmigmgrSetEnginesInUse_IMPL(arg1, arg2, pEngines)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrClearEnginesInUse_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, union ENGTYPE_BIT_VECTOR *pEngines);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrClearEnginesInUse(OBJGPU *arg1, struct KernelMIGManager *arg2, union ENGTYPE_BIT_VECTOR *pEngines) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrClearEnginesInUse(arg1, arg2, pEngines) kmigmgrClearEnginesInUse_IMPL(arg1, arg2, pEngines)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvBool kmigmgrIsEngineInUse_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, RM_ENGINE_TYPE rmEngineType);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvBool kmigmgrIsEngineInUse(OBJGPU *arg1, struct KernelMIGManager *arg2, RM_ENGINE_TYPE rmEngineType) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrIsEngineInUse(arg1, arg2, rmEngineType) kmigmgrIsEngineInUse_IMPL(arg1, arg2, rmEngineType)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvBool kmigmgrIsEnginePartitionable_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, RM_ENGINE_TYPE rmEngineType);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvBool kmigmgrIsEnginePartitionable(OBJGPU *arg1, struct KernelMIGManager *arg2, RM_ENGINE_TYPE rmEngineType) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrIsEnginePartitionable(arg1, arg2, rmEngineType) kmigmgrIsEnginePartitionable_IMPL(arg1, arg2, rmEngineType)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvBool kmigmgrIsEngineInInstance_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, RM_ENGINE_TYPE globalRmEngType, struct MIG_INSTANCE_REF arg4);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvBool kmigmgrIsEngineInInstance(OBJGPU *arg1, struct KernelMIGManager *arg2, RM_ENGINE_TYPE globalRmEngType, struct MIG_INSTANCE_REF arg4) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrIsEngineInInstance(arg1, arg2, globalRmEngType, arg4) kmigmgrIsEngineInInstance_IMPL(arg1, arg2, globalRmEngType, arg4)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvBool kmigmgrIsLocalEngineInInstance_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, RM_ENGINE_TYPE localRmEngType, struct MIG_INSTANCE_REF arg4);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvBool kmigmgrIsLocalEngineInInstance(OBJGPU *arg1, struct KernelMIGManager *arg2, RM_ENGINE_TYPE localRmEngType, struct MIG_INSTANCE_REF arg4) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrIsLocalEngineInInstance(arg1, arg2, localRmEngType, arg4) kmigmgrIsLocalEngineInInstance_IMPL(arg1, arg2, localRmEngType, arg4)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrCreateGPUInstance_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, NvU8 *pUuid, KMIGMGR_CREATE_GPU_INSTANCE_PARAMS arg5, NvBool bValid, NvBool bCreateCap);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrCreateGPUInstance(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, NvU8 *pUuid, KMIGMGR_CREATE_GPU_INSTANCE_PARAMS arg5, NvBool bValid, NvBool bCreateCap) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrCreateGPUInstance(arg1, arg2, swizzId, pUuid, arg5, bValid, bCreateCap) kmigmgrCreateGPUInstance_IMPL(arg1, arg2, swizzId, pUuid, arg5, bValid, bCreateCap)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrInvalidateGPUInstance_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, NvBool bUnload);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrInvalidateGPUInstance(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, NvBool bUnload) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrInvalidateGPUInstance(arg1, arg2, swizzId, bUnload) kmigmgrInvalidateGPUInstance_IMPL(arg1, arg2, swizzId, bUnload)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrInitGPUInstanceScrubber_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrInitGPUInstanceScrubber(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrInitGPUInstanceScrubber(arg1, arg2, arg3) kmigmgrInitGPUInstanceScrubber_IMPL(arg1, arg2, arg3)
#endif //__nvoc_kernel_mig_manager_h_disabled

void kmigmgrDestroyGPUInstanceScrubber_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline void kmigmgrDestroyGPUInstanceScrubber(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrDestroyGPUInstanceScrubber(arg1, arg2, arg3) kmigmgrDestroyGPUInstanceScrubber_IMPL(arg1, arg2, arg3)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrInitGPUInstanceBufPools_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrInitGPUInstanceBufPools(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrInitGPUInstanceBufPools(arg1, arg2, arg3) kmigmgrInitGPUInstanceBufPools_IMPL(arg1, arg2, arg3)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrInitGPUInstanceGrBufPools_IMPL(OBJGPU *pGpu, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrInitGPUInstanceGrBufPools(OBJGPU *pGpu, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrInitGPUInstanceGrBufPools(pGpu, arg2, arg3) kmigmgrInitGPUInstanceGrBufPools_IMPL(pGpu, arg2, arg3)
#endif //__nvoc_kernel_mig_manager_h_disabled

void kmigmgrDestroyGPUInstanceGrBufPools_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline void kmigmgrDestroyGPUInstanceGrBufPools(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrDestroyGPUInstanceGrBufPools(arg1, arg2, arg3) kmigmgrDestroyGPUInstanceGrBufPools_IMPL(arg1, arg2, arg3)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrInitGPUInstancePool_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrInitGPUInstancePool(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrInitGPUInstancePool(arg1, arg2, arg3) kmigmgrInitGPUInstancePool_IMPL(arg1, arg2, arg3)
#endif //__nvoc_kernel_mig_manager_h_disabled

void kmigmgrDestroyGPUInstancePool_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline void kmigmgrDestroyGPUInstancePool(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrDestroyGPUInstancePool(arg1, arg2, arg3) kmigmgrDestroyGPUInstancePool_IMPL(arg1, arg2, arg3)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrInitGPUInstanceRunlistBufPools_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrInitGPUInstanceRunlistBufPools(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrInitGPUInstanceRunlistBufPools(arg1, arg2, arg3) kmigmgrInitGPUInstanceRunlistBufPools_IMPL(arg1, arg2, arg3)
#endif //__nvoc_kernel_mig_manager_h_disabled

void kmigmgrDestroyGPUInstanceRunlistBufPools_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline void kmigmgrDestroyGPUInstanceRunlistBufPools(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrDestroyGPUInstanceRunlistBufPools(arg1, arg2, arg3) kmigmgrDestroyGPUInstanceRunlistBufPools_IMPL(arg1, arg2, arg3)
#endif //__nvoc_kernel_mig_manager_h_disabled

void kmigmgrPrintSubscribingClients_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline void kmigmgrPrintSubscribingClients(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrPrintSubscribingClients(arg1, arg2, swizzId) kmigmgrPrintSubscribingClients_IMPL(arg1, arg2, swizzId)
#endif //__nvoc_kernel_mig_manager_h_disabled

void kmigmgrInitGPUInstanceInfo_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline void kmigmgrInitGPUInstanceInfo(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrInitGPUInstanceInfo(arg1, arg2, arg3) kmigmgrInitGPUInstanceInfo_IMPL(arg1, arg2, arg3)
#endif //__nvoc_kernel_mig_manager_h_disabled

void kmigmgrTrimInstanceRunlistBufPools_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline void kmigmgrTrimInstanceRunlistBufPools(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrTrimInstanceRunlistBufPools(arg1, arg2, arg3) kmigmgrTrimInstanceRunlistBufPools_IMPL(arg1, arg2, arg3)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrSetDeviceProfilingInUse_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrSetDeviceProfilingInUse(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrSetDeviceProfilingInUse(arg1, arg2) kmigmgrSetDeviceProfilingInUse_IMPL(arg1, arg2)
#endif //__nvoc_kernel_mig_manager_h_disabled

void kmigmgrClearDeviceProfilingInUse_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline void kmigmgrClearDeviceProfilingInUse(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrClearDeviceProfilingInUse(arg1, arg2) kmigmgrClearDeviceProfilingInUse_IMPL(arg1, arg2)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvBool kmigmgrIsDeviceProfilingInUse_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvBool kmigmgrIsDeviceProfilingInUse(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrIsDeviceProfilingInUse(arg1, arg2) kmigmgrIsDeviceProfilingInUse_IMPL(arg1, arg2)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvBool kmigmgrIsDeviceUsingDeviceProfiling_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, Device *pDevice);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvBool kmigmgrIsDeviceUsingDeviceProfiling(OBJGPU *arg1, struct KernelMIGManager *arg2, Device *pDevice) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrIsDeviceUsingDeviceProfiling(arg1, arg2, pDevice) kmigmgrIsDeviceUsingDeviceProfiling_IMPL(arg1, arg2, pDevice)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrEnableAllLCEs_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvBool bEnableAllLCEs);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrEnableAllLCEs(OBJGPU *arg1, struct KernelMIGManager *arg2, NvBool bEnableAllLCEs) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrEnableAllLCEs(arg1, arg2, bEnableAllLCEs) kmigmgrEnableAllLCEs_IMPL(arg1, arg2, bEnableAllLCEs)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetInstanceRefFromDevice_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, Device *arg3, struct MIG_INSTANCE_REF *arg4);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetInstanceRefFromDevice(OBJGPU *arg1, struct KernelMIGManager *arg2, Device *arg3, struct MIG_INSTANCE_REF *arg4) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetInstanceRefFromDevice(arg1, arg2, arg3, arg4) kmigmgrGetInstanceRefFromDevice_IMPL(arg1, arg2, arg3, arg4)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetMemoryPartitionHeapFromDevice_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, Device *arg3, struct Heap **arg4);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetMemoryPartitionHeapFromDevice(OBJGPU *arg1, struct KernelMIGManager *arg2, Device *arg3, struct Heap **arg4) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetMemoryPartitionHeapFromDevice(arg1, arg2, arg3, arg4) kmigmgrGetMemoryPartitionHeapFromDevice_IMPL(arg1, arg2, arg3, arg4)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetSwizzIdFromDevice_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, Device *pDevice, NvU32 *pSwizzId);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetSwizzIdFromDevice(OBJGPU *arg1, struct KernelMIGManager *arg2, Device *pDevice, NvU32 *pSwizzId) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetSwizzIdFromDevice(arg1, arg2, pDevice, pSwizzId) kmigmgrGetSwizzIdFromDevice_IMPL(arg1, arg2, pDevice, pSwizzId)
#endif //__nvoc_kernel_mig_manager_h_disabled

void kmigmgrPrintGPUInstanceInfo_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline void kmigmgrPrintGPUInstanceInfo(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrPrintGPUInstanceInfo(arg1, arg2, arg3) kmigmgrPrintGPUInstanceInfo_IMPL(arg1, arg2, arg3)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrSetGPUInstanceInfo_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, NvU8 *pUuid, KMIGMGR_CREATE_GPU_INSTANCE_PARAMS arg5);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrSetGPUInstanceInfo(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, NvU8 *pUuid, KMIGMGR_CREATE_GPU_INSTANCE_PARAMS arg5) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrSetGPUInstanceInfo(arg1, arg2, swizzId, pUuid, arg5) kmigmgrSetGPUInstanceInfo_IMPL(arg1, arg2, swizzId, pUuid, arg5)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetGPUInstanceInfo_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, KERNEL_MIG_GPU_INSTANCE **arg4);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetGPUInstanceInfo(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, KERNEL_MIG_GPU_INSTANCE **arg4) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetGPUInstanceInfo(arg1, arg2, swizzId, arg4) kmigmgrGetGPUInstanceInfo_IMPL(arg1, arg2, swizzId, arg4)
#endif //__nvoc_kernel_mig_manager_h_disabled

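/*
 * Illustrative usage sketch only (not generated by NVOC); assumes swizzId
 * names an existing GPU instance:
 *
 *     KERNEL_MIG_GPU_INSTANCE *pKernelMIGGpuInstance = NULL;
 *     NV_ASSERT_OK_OR_RETURN(
 *         kmigmgrGetGPUInstanceInfo(pGpu, pKernelMIGManager, swizzId,
 *                                   &pKernelMIGGpuInstance));
 */
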
NV_STATUS kmigmgrGetLocalToGlobalEngineType_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, struct MIG_INSTANCE_REF arg3, RM_ENGINE_TYPE localEngType, RM_ENGINE_TYPE *pGlobalEngType);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetLocalToGlobalEngineType(OBJGPU *arg1, struct KernelMIGManager *arg2, struct MIG_INSTANCE_REF arg3, RM_ENGINE_TYPE localEngType, RM_ENGINE_TYPE *pGlobalEngType) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetLocalToGlobalEngineType(arg1, arg2, arg3, localEngType, pGlobalEngType) kmigmgrGetLocalToGlobalEngineType_IMPL(arg1, arg2, arg3, localEngType, pGlobalEngType)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetGlobalToLocalEngineType_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, struct MIG_INSTANCE_REF arg3, RM_ENGINE_TYPE globalEngType, RM_ENGINE_TYPE *pLocalEngType);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetGlobalToLocalEngineType(OBJGPU *arg1, struct KernelMIGManager *arg2, struct MIG_INSTANCE_REF arg3, RM_ENGINE_TYPE globalEngType, RM_ENGINE_TYPE *pLocalEngType) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetGlobalToLocalEngineType(arg1, arg2, arg3, globalEngType, pLocalEngType) kmigmgrGetGlobalToLocalEngineType_IMPL(arg1, arg2, arg3, globalEngType, pLocalEngType)
#endif //__nvoc_kernel_mig_manager_h_disabled

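/*
 * Illustrative usage sketch only; pDevice, localEngType, and the surrounding
 * error handling are placeholders, not part of the generated interface:
 *
 *     struct MIG_INSTANCE_REF ref;
 *     RM_ENGINE_TYPE globalEngType;
 *     NV_ASSERT_OK_OR_RETURN(
 *         kmigmgrGetInstanceRefFromDevice(pGpu, pKernelMIGManager, pDevice, &ref));
 *     NV_ASSERT_OK_OR_RETURN(
 *         kmigmgrGetLocalToGlobalEngineType(pGpu, pKernelMIGManager, ref,
 *                                           localEngType, &globalEngType));
 */
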
1690 NV_STATUS kmigmgrFilterEngineList_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, struct Subdevice *arg3, RM_ENGINE_TYPE *pEngineTypes, NvU32 *pEngineCount);
1691 
1692 #ifdef __nvoc_kernel_mig_manager_h_disabled
kmigmgrFilterEngineList(OBJGPU * arg1,struct KernelMIGManager * arg2,struct Subdevice * arg3,RM_ENGINE_TYPE * pEngineTypes,NvU32 * pEngineCount)1693 static inline NV_STATUS kmigmgrFilterEngineList(OBJGPU *arg1, struct KernelMIGManager *arg2, struct Subdevice *arg3, RM_ENGINE_TYPE *pEngineTypes, NvU32 *pEngineCount) {
1694     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1695     return NV_ERR_NOT_SUPPORTED;
1696 }
1697 #else //__nvoc_kernel_mig_manager_h_disabled
1698 #define kmigmgrFilterEngineList(arg1, arg2, arg3, pEngineTypes, pEngineCount) kmigmgrFilterEngineList_IMPL(arg1, arg2, arg3, pEngineTypes, pEngineCount)
1699 #endif //__nvoc_kernel_mig_manager_h_disabled
1700 
1701 NV_STATUS kmigmgrFilterEnginePartnerList_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, struct Subdevice *arg3, NV2080_CTRL_GPU_GET_ENGINE_PARTNERLIST_PARAMS *arg4);
1702 
1703 #ifdef __nvoc_kernel_mig_manager_h_disabled
kmigmgrFilterEnginePartnerList(OBJGPU * arg1,struct KernelMIGManager * arg2,struct Subdevice * arg3,NV2080_CTRL_GPU_GET_ENGINE_PARTNERLIST_PARAMS * arg4)1704 static inline NV_STATUS kmigmgrFilterEnginePartnerList(OBJGPU *arg1, struct KernelMIGManager *arg2, struct Subdevice *arg3, NV2080_CTRL_GPU_GET_ENGINE_PARTNERLIST_PARAMS *arg4) {
1705     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1706     return NV_ERR_NOT_SUPPORTED;
1707 }
1708 #else //__nvoc_kernel_mig_manager_h_disabled
1709 #define kmigmgrFilterEnginePartnerList(arg1, arg2, arg3, arg4) kmigmgrFilterEnginePartnerList_IMPL(arg1, arg2, arg3, arg4)
1710 #endif //__nvoc_kernel_mig_manager_h_disabled
1711 
1712 NV_STATUS kmigmgrGetProfileByPartitionFlag_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 partitionFlag, const NV2080_CTRL_INTERNAL_MIGMGR_PROFILE_INFO **arg4);
1713 
1714 #ifdef __nvoc_kernel_mig_manager_h_disabled
kmigmgrGetProfileByPartitionFlag(OBJGPU * arg1,struct KernelMIGManager * arg2,NvU32 partitionFlag,const NV2080_CTRL_INTERNAL_MIGMGR_PROFILE_INFO ** arg4)1715 static inline NV_STATUS kmigmgrGetProfileByPartitionFlag(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 partitionFlag, const NV2080_CTRL_INTERNAL_MIGMGR_PROFILE_INFO **arg4) {
1716     NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
1717     return NV_ERR_NOT_SUPPORTED;
1718 }
1719 #else //__nvoc_kernel_mig_manager_h_disabled
1720 #define kmigmgrGetProfileByPartitionFlag(arg1, arg2, partitionFlag, arg4) kmigmgrGetProfileByPartitionFlag_IMPL(arg1, arg2, partitionFlag, arg4)
1721 #endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrSaveComputeInstances_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, GPUMGR_SAVE_COMPUTE_INSTANCE *arg4);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrSaveComputeInstances(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, GPUMGR_SAVE_COMPUTE_INSTANCE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrSaveComputeInstances(arg1, arg2, arg3, arg4) kmigmgrSaveComputeInstances_IMPL(arg1, arg2, arg3, arg4)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrSetPartitioningMode_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrSetPartitioningMode(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrSetPartitioningMode(arg1, arg2) kmigmgrSetPartitioningMode_IMPL(arg1, arg2)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetMIGReferenceFromEngineType_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, RM_ENGINE_TYPE rmEngineType, struct MIG_INSTANCE_REF *arg4);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetMIGReferenceFromEngineType(OBJGPU *arg1, struct KernelMIGManager *arg2, RM_ENGINE_TYPE rmEngineType, struct MIG_INSTANCE_REF *arg4) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetMIGReferenceFromEngineType(arg1, arg2, rmEngineType, arg4) kmigmgrGetMIGReferenceFromEngineType_IMPL(arg1, arg2, rmEngineType, arg4)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetSmallestGpuInstanceSize_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 *pComputeSizeFlag);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetSmallestGpuInstanceSize(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 *pComputeSizeFlag) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetSmallestGpuInstanceSize(arg1, arg2, pComputeSizeFlag) kmigmgrGetSmallestGpuInstanceSize_IMPL(arg1, arg2, pComputeSizeFlag)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetGPUInstanceScrubberCe_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, Device *pDevice, NvU32 *ceInst);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetGPUInstanceScrubberCe(OBJGPU *arg1, struct KernelMIGManager *arg2, Device *pDevice, NvU32 *ceInst) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetGPUInstanceScrubberCe(arg1, arg2, pDevice, ceInst) kmigmgrGetGPUInstanceScrubberCe_IMPL(arg1, arg2, pDevice, ceInst)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrDescribeGPUInstances_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NV2080_CTRL_GPU_DESCRIBE_PARTITIONS_PARAMS *arg3);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrDescribeGPUInstances(OBJGPU *arg1, struct KernelMIGManager *arg2, NV2080_CTRL_GPU_DESCRIBE_PARTITIONS_PARAMS *arg3) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrDescribeGPUInstances(arg1, arg2, arg3) kmigmgrDescribeGPUInstances_IMPL(arg1, arg2, arg3)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrSwizzIdToResourceAllocation_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, KMIGMGR_CREATE_GPU_INSTANCE_PARAMS arg4, KERNEL_MIG_GPU_INSTANCE *arg5, MIG_RESOURCE_ALLOCATION *arg6);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrSwizzIdToResourceAllocation(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, KMIGMGR_CREATE_GPU_INSTANCE_PARAMS arg4, KERNEL_MIG_GPU_INSTANCE *arg5, MIG_RESOURCE_ALLOCATION *arg6) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrSwizzIdToResourceAllocation(arg1, arg2, swizzId, arg4, arg5, arg6) kmigmgrSwizzIdToResourceAllocation_IMPL(arg1, arg2, swizzId, arg4, arg5, arg6)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrAllocComputeInstanceHandles_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, MIG_COMPUTE_INSTANCE *arg4);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrAllocComputeInstanceHandles(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, MIG_COMPUTE_INSTANCE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrAllocComputeInstanceHandles(arg1, arg2, arg3, arg4) kmigmgrAllocComputeInstanceHandles_IMPL(arg1, arg2, arg3, arg4)
#endif //__nvoc_kernel_mig_manager_h_disabled

void kmigmgrFreeComputeInstanceHandles_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, MIG_COMPUTE_INSTANCE *arg4);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline void kmigmgrFreeComputeInstanceHandles(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, MIG_COMPUTE_INSTANCE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrFreeComputeInstanceHandles(arg1, arg2, arg3, arg4) kmigmgrFreeComputeInstanceHandles_IMPL(arg1, arg2, arg3, arg4)
#endif //__nvoc_kernel_mig_manager_h_disabled

void kmigmgrReleaseComputeInstanceEngines_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, MIG_COMPUTE_INSTANCE *arg4);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline void kmigmgrReleaseComputeInstanceEngines(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, MIG_COMPUTE_INSTANCE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrReleaseComputeInstanceEngines(arg1, arg2, arg3, arg4) kmigmgrReleaseComputeInstanceEngines_IMPL(arg1, arg2, arg3, arg4)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrDeleteComputeInstance_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, NvU32 CIId, NvBool bUnload);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrDeleteComputeInstance(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, NvU32 CIId, NvBool bUnload) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrDeleteComputeInstance(arg1, arg2, arg3, CIId, bUnload) kmigmgrDeleteComputeInstance_IMPL(arg1, arg2, arg3, CIId, bUnload)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrConfigureGPUInstance_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, const KMIGMGR_CONFIGURE_INSTANCE_REQUEST *pConfigRequestPerCi, NvU32 updateEngMask);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrConfigureGPUInstance(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 swizzId, const KMIGMGR_CONFIGURE_INSTANCE_REQUEST *pConfigRequestPerCi, NvU32 updateEngMask) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrConfigureGPUInstance(arg1, arg2, swizzId, pConfigRequestPerCi, updateEngMask) kmigmgrConfigureGPUInstance_IMPL(arg1, arg2, swizzId, pConfigRequestPerCi, updateEngMask)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrInvalidateGrGpcMapping_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, NvU32 grIdx);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrInvalidateGrGpcMapping(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, NvU32 grIdx) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrInvalidateGrGpcMapping(arg1, arg2, arg3, grIdx) kmigmgrInvalidateGrGpcMapping_IMPL(arg1, arg2, arg3, grIdx)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrInvalidateGr_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, NvU32 grIdx);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrInvalidateGr(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, NvU32 grIdx) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrInvalidateGr(arg1, arg2, arg3, grIdx) kmigmgrInvalidateGr_IMPL(arg1, arg2, arg3, grIdx)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvU32 kmigmgrGetNextComputeSize_IMPL(NvBool bGetNextSmallest, NvU32 computeSize);

#define kmigmgrGetNextComputeSize(bGetNextSmallest, computeSize) kmigmgrGetNextComputeSize_IMPL(bGetNextSmallest, computeSize)
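
/*
 * Illustrative sketch only: kmigmgrGetNextComputeSize() can be used to walk
 * the ladder of supported compute sizes.  The sentinel
 * KMIGMGR_COMPUTE_SIZE_INVALID and the walk direction implied by
 * bGetNextSmallest are assumptions based on definitions earlier in this
 * header, not guarantees made here:
 *
 *     NvU32 computeSize;
 *     for (computeSize = kmigmgrGetNextComputeSize(NV_TRUE, KMIGMGR_COMPUTE_SIZE_INVALID);
 *          computeSize != KMIGMGR_COMPUTE_SIZE_INVALID;
 *          computeSize = kmigmgrGetNextComputeSize(NV_TRUE, computeSize))
 *     {
 *         // Inspect each advertised compute size, e.g. via
 *         // kmigmgrGetComputeProfileFromSize(), declared below.
 *     }
 */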
NV_STATUS kmigmgrGetSkylineFromSize_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 computeSize, const NV2080_CTRL_INTERNAL_GRMGR_SKYLINE_INFO **ppSkyline);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetSkylineFromSize(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 computeSize, const NV2080_CTRL_INTERNAL_GRMGR_SKYLINE_INFO **ppSkyline) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetSkylineFromSize(arg1, arg2, computeSize, ppSkyline) kmigmgrGetSkylineFromSize_IMPL(arg1, arg2, computeSize, ppSkyline)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetComputeProfileFromSize_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 computeSize, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetComputeProfileFromSize(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 computeSize, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetComputeProfileFromSize(arg1, arg2, computeSize, pProfile) kmigmgrGetComputeProfileFromSize_IMPL(arg1, arg2, computeSize, pProfile)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetComputeProfileForRequest_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, NvU32 smCountRequest, NvU32 gpcCountRequest, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetComputeProfileForRequest(OBJGPU *arg1, struct KernelMIGManager *arg2, KERNEL_MIG_GPU_INSTANCE *arg3, NvU32 smCountRequest, NvU32 gpcCountRequest, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetComputeProfileForRequest(arg1, arg2, arg3, smCountRequest, gpcCountRequest, pProfile) kmigmgrGetComputeProfileForRequest_IMPL(arg1, arg2, arg3, smCountRequest, gpcCountRequest, pProfile)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetComputeProfileFromSmCount_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 smCount, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetComputeProfileFromSmCount(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 smCount, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetComputeProfileFromSmCount(arg1, arg2, smCount, pProfile) kmigmgrGetComputeProfileFromSmCount_IMPL(arg1, arg2, smCount, pProfile)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetComputeProfileFromCTSId_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 ctsId, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetComputeProfileFromCTSId(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 ctsId, NV2080_CTRL_INTERNAL_MIGMGR_COMPUTE_PROFILE *pProfile) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetComputeProfileFromCTSId(arg1, arg2, ctsId, pProfile) kmigmgrGetComputeProfileFromCTSId_IMPL(arg1, arg2, ctsId, pProfile)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetInvalidCTSIdMask_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 ctsId, NvU64 *pInvalidCTSIdMask);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetInvalidCTSIdMask(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 ctsId, NvU64 *pInvalidCTSIdMask) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetInvalidCTSIdMask(arg1, arg2, ctsId, pInvalidCTSIdMask) kmigmgrGetInvalidCTSIdMask_IMPL(arg1, arg2, ctsId, pInvalidCTSIdMask)
#endif //__nvoc_kernel_mig_manager_h_disabled

struct NV_RANGE kmigmgrComputeProfileSizeToCTSIdRange_IMPL(NvU32 computeSize);

#define kmigmgrComputeProfileSizeToCTSIdRange(computeSize) kmigmgrComputeProfileSizeToCTSIdRange_IMPL(computeSize)
struct NV_RANGE kmigmgrCtsIdToSpan_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 ctsId);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline struct NV_RANGE kmigmgrCtsIdToSpan(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 ctsId) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    struct NV_RANGE ret;
    portMemSet(&ret, 0, sizeof(struct NV_RANGE));
    return ret;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrCtsIdToSpan(arg1, arg2, ctsId) kmigmgrCtsIdToSpan_IMPL(arg1, arg2, ctsId)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetFreeCTSId_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 *pCtsId, NvU64 globalValidCtsMask, NvU64 globalValidGfxCtsMask, NvU64 ctsIdsInUseMask, NvU32 profileSize, NvBool bRestrictWithGfx, NvBool bGfxRequested);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetFreeCTSId(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 *pCtsId, NvU64 globalValidCtsMask, NvU64 globalValidGfxCtsMask, NvU64 ctsIdsInUseMask, NvU32 profileSize, NvBool bRestrictWithGfx, NvBool bGfxRequested) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetFreeCTSId(arg1, arg2, pCtsId, globalValidCtsMask, globalValidGfxCtsMask, ctsIdsInUseMask, profileSize, bRestrictWithGfx, bGfxRequested) kmigmgrGetFreeCTSId_IMPL(arg1, arg2, pCtsId, globalValidCtsMask, globalValidGfxCtsMask, ctsIdsInUseMask, profileSize, bRestrictWithGfx, bGfxRequested)
#endif //__nvoc_kernel_mig_manager_h_disabled
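
/*
 * Illustrative sketch only: kmigmgrGetFreeCTSId() selects an unused CTS ID
 * for a compute profile of the given size.  The mask and size values below
 * are hypothetical placeholders; real callers derive them from GPU instance
 * state:
 *
 *     NvU32 ctsId;
 *     NV_STATUS status = kmigmgrGetFreeCTSId(pGpu, pKernelMIGManager, &ctsId,
 *                                            globalValidCtsMask,     // valid CTS IDs for the instance
 *                                            globalValidGfxCtsMask,  // subset usable for graphics
 *                                            ctsIdsInUseMask,        // CTS IDs already assigned
 *                                            profileSize,
 *                                            NV_FALSE,               // bRestrictWithGfx
 *                                            NV_FALSE);              // bGfxRequested
 *     if (status == NV_OK)
 *     {
 *         // ctsId can now be claimed, e.g. with kmigmgrSetCTSIdInUse(),
 *         // declared later in this header.
 *     }
 */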

NvU32 kmigmgrGetComputeSizeFromCTSId_IMPL(NvU32 ctsId);

#define kmigmgrGetComputeSizeFromCTSId(ctsId) kmigmgrGetComputeSizeFromCTSId_IMPL(ctsId)
NvU32 kmigmgrSmallestComputeProfileSize_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvU32 kmigmgrSmallestComputeProfileSize(OBJGPU *arg1, struct KernelMIGManager *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return 0;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrSmallestComputeProfileSize(arg1, arg2) kmigmgrSmallestComputeProfileSize_IMPL(arg1, arg2)
#endif //__nvoc_kernel_mig_manager_h_disabled

void kmigmgrSetCTSIdInUse_IMPL(KERNEL_MIG_GPU_INSTANCE *arg1, NvU32 ctsId, NvU32 grId, NvBool bInUse);

#define kmigmgrSetCTSIdInUse(arg1, ctsId, grId, bInUse) kmigmgrSetCTSIdInUse_IMPL(arg1, ctsId, grId, bInUse)
NV_STATUS kmigmgrXlateSpanStartToCTSId_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 computeSize, NvU32 spanStart, NvU32 *pCtsId);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrXlateSpanStartToCTSId(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 computeSize, NvU32 spanStart, NvU32 *pCtsId) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrXlateSpanStartToCTSId(arg1, arg2, computeSize, spanStart, pCtsId) kmigmgrXlateSpanStartToCTSId_IMPL(arg1, arg2, computeSize, spanStart, pCtsId)
#endif //__nvoc_kernel_mig_manager_h_disabled

NV_STATUS kmigmgrGetSlotBasisMask_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU64 *pMask);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrGetSlotBasisMask(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU64 *pMask) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetSlotBasisMask(arg1, arg2, pMask) kmigmgrGetSlotBasisMask_IMPL(arg1, arg2, pMask)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvU32 kmigmgrGetSpanStartFromCTSId_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 ctsId);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvU32 kmigmgrGetSpanStartFromCTSId(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU32 ctsId) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return 0;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrGetSpanStartFromCTSId(arg1, arg2, ctsId) kmigmgrGetSpanStartFromCTSId_IMPL(arg1, arg2, ctsId)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvBool kmigmgrIsCTSIdAvailable_IMPL(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU64 ctsIdValidMask, NvU64 ctsIdInUseMask, NvU32 ctsId);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvBool kmigmgrIsCTSIdAvailable(OBJGPU *arg1, struct KernelMIGManager *arg2, NvU64 ctsIdValidMask, NvU64 ctsIdInUseMask, NvU32 ctsId) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrIsCTSIdAvailable(arg1, arg2, ctsIdValidMask, ctsIdInUseMask, ctsId) kmigmgrIsCTSIdAvailable_IMPL(arg1, arg2, ctsIdValidMask, ctsIdInUseMask, ctsId)
#endif //__nvoc_kernel_mig_manager_h_disabled
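
/*
 * Illustrative sketch only: translating a requested span start into a CTS ID
 * and checking that it is still free before claiming it.  The mask variables,
 * grId, and pKernelMIGGpuInstance are hypothetical placeholders for state
 * tracked per GPU instance:
 *
 *     NvU32 ctsId;
 *     if ((kmigmgrXlateSpanStartToCTSId(pGpu, pKernelMIGManager,
 *                                       computeSize, spanStart, &ctsId) == NV_OK) &&
 *         kmigmgrIsCTSIdAvailable(pGpu, pKernelMIGManager,
 *                                 ctsIdValidMask, ctsIdInUseMask, ctsId))
 *     {
 *         // Claim the CTS ID for graphics engine index grId.
 *         kmigmgrSetCTSIdInUse(pKernelMIGGpuInstance, ctsId, grId, NV_TRUE);
 *     }
 */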

NV_STATUS kmigmgrUpdateCiConfigForVgpu_IMPL(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager, NvU32 execPartCount, NvU32 *pExecPartId, NvU32 gfid, NvBool bDelete);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NV_STATUS kmigmgrUpdateCiConfigForVgpu(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager, NvU32 execPartCount, NvU32 *pExecPartId, NvU32 gfid, NvBool bDelete) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrUpdateCiConfigForVgpu(pGpu, pKernelMIGManager, execPartCount, pExecPartId, gfid, bDelete) kmigmgrUpdateCiConfigForVgpu_IMPL(pGpu, pKernelMIGManager, execPartCount, pExecPartId, gfid, bDelete)
#endif //__nvoc_kernel_mig_manager_h_disabled

NvBool kmigmgrIsPartitionVeidAllocationContiguous_IMPL(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager, KERNEL_MIG_GPU_INSTANCE *arg3);

#ifdef __nvoc_kernel_mig_manager_h_disabled
static inline NvBool kmigmgrIsPartitionVeidAllocationContiguous(OBJGPU *pGpu, struct KernelMIGManager *pKernelMIGManager, KERNEL_MIG_GPU_INSTANCE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("KernelMIGManager was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_mig_manager_h_disabled
#define kmigmgrIsPartitionVeidAllocationContiguous(pGpu, pKernelMIGManager, arg3) kmigmgrIsPartitionVeidAllocationContiguous_IMPL(pGpu, pKernelMIGManager, arg3)
#endif //__nvoc_kernel_mig_manager_h_disabled

#undef PRIVATE_FIELD


#endif // KERNEL_MIG_MANAGER_H


#ifdef __cplusplus
} // extern "C"
#endif

#endif // _G_KERNEL_MIG_MANAGER_NVOC_H_