1 #define NVOC_KERNEL_GRAPHICS_CONTEXT_H_PRIVATE_ACCESS_ALLOWED
2 #include "nvoc/runtime.h"
3 #include "nvoc/rtti.h"
4 #include "nvtypes.h"
5 #include "nvport/nvport.h"
6 #include "nvport/inline/util_valist.h"
7 #include "utils/nvassert.h"
8 #include "g_kernel_graphics_context_nvoc.h"
9
#ifdef DEBUG
// Link-time guard: if two classes were generated with the same 0x7ead09
// class id, the duplicate symbol definition fails the debug build.
char __nvoc_class_id_uniqueness_check_0x7ead09 = 1;
#endif
13
14 extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelGraphicsContext;
15
16 extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
17
18 extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
19
20 extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
21
22 extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
23
24 extern const struct NVOC_CLASS_DEF __nvoc_class_def_GpuResource;
25
26 void __nvoc_init_KernelGraphicsContext(KernelGraphicsContext*, RmHalspecOwner* );
27 void __nvoc_init_funcTable_KernelGraphicsContext(KernelGraphicsContext*, RmHalspecOwner* );
28 NV_STATUS __nvoc_ctor_KernelGraphicsContext(KernelGraphicsContext*, RmHalspecOwner* , struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
29 void __nvoc_init_dataField_KernelGraphicsContext(KernelGraphicsContext*, RmHalspecOwner* );
30 void __nvoc_dtor_KernelGraphicsContext(KernelGraphicsContext*);
31 extern const struct NVOC_EXPORT_INFO __nvoc_export_info_KernelGraphicsContext;
32
// RTTI identity entry: KernelGraphicsContext viewed as itself (offset 0),
// carrying the real (most-derived) destructor.
static const struct NVOC_RTTI __nvoc_rtti_KernelGraphicsContext_KernelGraphicsContext = {
    /*pClassDef=*/ &__nvoc_class_def_KernelGraphicsContext,
    /*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_KernelGraphicsContext,
    /*offset=*/ 0,
};
38
// RTTI entry for the deepest ancestor Object; `offset` is the byte distance
// from a KernelGraphicsContext pointer to its embedded Object base, used by
// dynamicCast/thunks to adjust pointers.
static const struct NVOC_RTTI __nvoc_rtti_KernelGraphicsContext_Object = {
    /*pClassDef=*/ &__nvoc_class_def_Object,
    /*dtor=*/ &__nvoc_destructFromBase,
    /*offset=*/ NV_OFFSETOF(KernelGraphicsContext, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object),
};
44
// RTTI entry for the RsResource ancestor (pointer-adjustment offset into the
// nested base chain GpuResource -> RmResource -> RsResource).
static const struct NVOC_RTTI __nvoc_rtti_KernelGraphicsContext_RsResource = {
    /*pClassDef=*/ &__nvoc_class_def_RsResource,
    /*dtor=*/ &__nvoc_destructFromBase,
    /*offset=*/ NV_OFFSETOF(KernelGraphicsContext, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource),
};
50
// RTTI entry for the RmResourceCommon mixin embedded inside RmResource.
static const struct NVOC_RTTI __nvoc_rtti_KernelGraphicsContext_RmResourceCommon = {
    /*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
    /*dtor=*/ &__nvoc_destructFromBase,
    /*offset=*/ NV_OFFSETOF(KernelGraphicsContext, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon),
};
56
// RTTI entry for the RmResource ancestor.
static const struct NVOC_RTTI __nvoc_rtti_KernelGraphicsContext_RmResource = {
    /*pClassDef=*/ &__nvoc_class_def_RmResource,
    /*dtor=*/ &__nvoc_destructFromBase,
    /*offset=*/ NV_OFFSETOF(KernelGraphicsContext, __nvoc_base_GpuResource.__nvoc_base_RmResource),
};
62
// RTTI entry for the immediate base class GpuResource.
static const struct NVOC_RTTI __nvoc_rtti_KernelGraphicsContext_GpuResource = {
    /*pClassDef=*/ &__nvoc_class_def_GpuResource,
    /*dtor=*/ &__nvoc_destructFromBase,
    /*offset=*/ NV_OFFSETOF(KernelGraphicsContext, __nvoc_base_GpuResource),
};
68
// Cast table consulted by dynamicCast: the class itself first, then its
// ancestors ordered from most-derived to least-derived.
static const struct NVOC_CASTINFO __nvoc_castinfo_KernelGraphicsContext = {
    /*numRelatives=*/ 6,
    /*relatives=*/ {
        &__nvoc_rtti_KernelGraphicsContext_KernelGraphicsContext,
        &__nvoc_rtti_KernelGraphicsContext_GpuResource,
        &__nvoc_rtti_KernelGraphicsContext_RmResource,
        &__nvoc_rtti_KernelGraphicsContext_RmResourceCommon,
        &__nvoc_rtti_KernelGraphicsContext_RsResource,
        &__nvoc_rtti_KernelGraphicsContext_Object,
    },
};
80
// Class descriptor: size/id/name plus the dynamic-creation entry point,
// the cast table above, and the exported (RM control) method table below.
const struct NVOC_CLASS_DEF __nvoc_class_def_KernelGraphicsContext =
{
    /*classInfo=*/ {
        /*size=*/ sizeof(KernelGraphicsContext),
        /*classId=*/ classId(KernelGraphicsContext),
        /*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
        /*name=*/ "KernelGraphicsContext",
#endif
    },
    /*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_KernelGraphicsContext,
    /*pCastInfo=*/ &__nvoc_castinfo_KernelGraphicsContext,
    /*pExportInfo=*/ &__nvoc_export_info_KernelGraphicsContext
};
95
__nvoc_thunk_KernelGraphicsContext_resCanCopy(struct RsResource * arg0)96 static NvBool __nvoc_thunk_KernelGraphicsContext_resCanCopy(struct RsResource *arg0) {
97 return kgrctxCanCopy((struct KernelGraphicsContext *)(((unsigned char *)arg0) - __nvoc_rtti_KernelGraphicsContext_RsResource.offset));
98 }
99
__nvoc_thunk_KernelGraphicsContext_gpuresGetInternalObjectHandle(struct GpuResource * arg0)100 static NvHandle __nvoc_thunk_KernelGraphicsContext_gpuresGetInternalObjectHandle(struct GpuResource *arg0) {
101 return kgrctxGetInternalObjectHandle((struct KernelGraphicsContext *)(((unsigned char *)arg0) - __nvoc_rtti_KernelGraphicsContext_GpuResource.offset));
102 }
103
__nvoc_thunk_GpuResource_kgrctxShareCallback(struct KernelGraphicsContext * pGpuResource,struct RsClient * pInvokingClient,struct RsResourceRef * pParentRef,RS_SHARE_POLICY * pSharePolicy)104 static NvBool __nvoc_thunk_GpuResource_kgrctxShareCallback(struct KernelGraphicsContext *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
105 return gpuresShareCallback((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_KernelGraphicsContext_GpuResource.offset), pInvokingClient, pParentRef, pSharePolicy);
106 }
107
__nvoc_thunk_RmResource_kgrctxCheckMemInterUnmap(struct KernelGraphicsContext * pRmResource,NvBool bSubdeviceHandleProvided)108 static NV_STATUS __nvoc_thunk_RmResource_kgrctxCheckMemInterUnmap(struct KernelGraphicsContext *pRmResource, NvBool bSubdeviceHandleProvided) {
109 return rmresCheckMemInterUnmap((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_KernelGraphicsContext_RmResource.offset), bSubdeviceHandleProvided);
110 }
111
__nvoc_thunk_RsResource_kgrctxMapTo(struct KernelGraphicsContext * pResource,RS_RES_MAP_TO_PARAMS * pParams)112 static NV_STATUS __nvoc_thunk_RsResource_kgrctxMapTo(struct KernelGraphicsContext *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
113 return resMapTo((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_KernelGraphicsContext_RsResource.offset), pParams);
114 }
115
__nvoc_thunk_GpuResource_kgrctxGetMapAddrSpace(struct KernelGraphicsContext * pGpuResource,struct CALL_CONTEXT * pCallContext,NvU32 mapFlags,NV_ADDRESS_SPACE * pAddrSpace)116 static NV_STATUS __nvoc_thunk_GpuResource_kgrctxGetMapAddrSpace(struct KernelGraphicsContext *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
117 return gpuresGetMapAddrSpace((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_KernelGraphicsContext_GpuResource.offset), pCallContext, mapFlags, pAddrSpace);
118 }
119
__nvoc_thunk_RsResource_kgrctxGetRefCount(struct KernelGraphicsContext * pResource)120 static NvU32 __nvoc_thunk_RsResource_kgrctxGetRefCount(struct KernelGraphicsContext *pResource) {
121 return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_KernelGraphicsContext_RsResource.offset));
122 }
123
// Thunk: forward kgrctxAddAdditionalDependants to the RsResource base
// implementation (note: the object pointer is the second parameter here).
static void __nvoc_thunk_RsResource_kgrctxAddAdditionalDependants(struct RsClient *pClient, struct KernelGraphicsContext *pResource, RsResourceRef *pReference) {
    // Upcast: add the RTTI offset to reach the embedded RsResource base.
    struct RsResource *pBase =
        (struct RsResource *)((unsigned char *)pResource + __nvoc_rtti_KernelGraphicsContext_RsResource.offset);
    resAddAdditionalDependants(pClient, pBase, pReference);
}
127
__nvoc_thunk_RmResource_kgrctxControl_Prologue(struct KernelGraphicsContext * pResource,struct CALL_CONTEXT * pCallContext,struct RS_RES_CONTROL_PARAMS_INTERNAL * pParams)128 static NV_STATUS __nvoc_thunk_RmResource_kgrctxControl_Prologue(struct KernelGraphicsContext *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
129 return rmresControl_Prologue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_KernelGraphicsContext_RmResource.offset), pCallContext, pParams);
130 }
131
__nvoc_thunk_GpuResource_kgrctxGetRegBaseOffsetAndSize(struct KernelGraphicsContext * pGpuResource,struct OBJGPU * pGpu,NvU32 * pOffset,NvU32 * pSize)132 static NV_STATUS __nvoc_thunk_GpuResource_kgrctxGetRegBaseOffsetAndSize(struct KernelGraphicsContext *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
133 return gpuresGetRegBaseOffsetAndSize((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_KernelGraphicsContext_GpuResource.offset), pGpu, pOffset, pSize);
134 }
135
__nvoc_thunk_GpuResource_kgrctxInternalControlForward(struct KernelGraphicsContext * pGpuResource,NvU32 command,void * pParams,NvU32 size)136 static NV_STATUS __nvoc_thunk_GpuResource_kgrctxInternalControlForward(struct KernelGraphicsContext *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
137 return gpuresInternalControlForward((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_KernelGraphicsContext_GpuResource.offset), command, pParams, size);
138 }
139
__nvoc_thunk_RsResource_kgrctxUnmapFrom(struct KernelGraphicsContext * pResource,RS_RES_UNMAP_FROM_PARAMS * pParams)140 static NV_STATUS __nvoc_thunk_RsResource_kgrctxUnmapFrom(struct KernelGraphicsContext *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
141 return resUnmapFrom((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_KernelGraphicsContext_RsResource.offset), pParams);
142 }
143
__nvoc_thunk_RmResource_kgrctxControl_Epilogue(struct KernelGraphicsContext * pResource,struct CALL_CONTEXT * pCallContext,struct RS_RES_CONTROL_PARAMS_INTERNAL * pParams)144 static void __nvoc_thunk_RmResource_kgrctxControl_Epilogue(struct KernelGraphicsContext *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
145 rmresControl_Epilogue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_KernelGraphicsContext_RmResource.offset), pCallContext, pParams);
146 }
147
__nvoc_thunk_GpuResource_kgrctxControl(struct KernelGraphicsContext * pGpuResource,struct CALL_CONTEXT * pCallContext,struct RS_RES_CONTROL_PARAMS_INTERNAL * pParams)148 static NV_STATUS __nvoc_thunk_GpuResource_kgrctxControl(struct KernelGraphicsContext *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
149 return gpuresControl((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_KernelGraphicsContext_GpuResource.offset), pCallContext, pParams);
150 }
151
__nvoc_thunk_GpuResource_kgrctxUnmap(struct KernelGraphicsContext * pGpuResource,struct CALL_CONTEXT * pCallContext,struct RsCpuMapping * pCpuMapping)152 static NV_STATUS __nvoc_thunk_GpuResource_kgrctxUnmap(struct KernelGraphicsContext *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
153 return gpuresUnmap((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_KernelGraphicsContext_GpuResource.offset), pCallContext, pCpuMapping);
154 }
155
__nvoc_thunk_RmResource_kgrctxGetMemInterMapParams(struct KernelGraphicsContext * pRmResource,RMRES_MEM_INTER_MAP_PARAMS * pParams)156 static NV_STATUS __nvoc_thunk_RmResource_kgrctxGetMemInterMapParams(struct KernelGraphicsContext *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
157 return rmresGetMemInterMapParams((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_KernelGraphicsContext_RmResource.offset), pParams);
158 }
159
__nvoc_thunk_RmResource_kgrctxGetMemoryMappingDescriptor(struct KernelGraphicsContext * pRmResource,struct MEMORY_DESCRIPTOR ** ppMemDesc)160 static NV_STATUS __nvoc_thunk_RmResource_kgrctxGetMemoryMappingDescriptor(struct KernelGraphicsContext *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
161 return rmresGetMemoryMappingDescriptor((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_KernelGraphicsContext_RmResource.offset), ppMemDesc);
162 }
163
__nvoc_thunk_RsResource_kgrctxControlFilter(struct KernelGraphicsContext * pResource,struct CALL_CONTEXT * pCallContext,struct RS_RES_CONTROL_PARAMS_INTERNAL * pParams)164 static NV_STATUS __nvoc_thunk_RsResource_kgrctxControlFilter(struct KernelGraphicsContext *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
165 return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_KernelGraphicsContext_RsResource.offset), pCallContext, pParams);
166 }
167
__nvoc_thunk_RmResource_kgrctxControlSerialization_Prologue(struct KernelGraphicsContext * pResource,struct CALL_CONTEXT * pCallContext,struct RS_RES_CONTROL_PARAMS_INTERNAL * pParams)168 static NV_STATUS __nvoc_thunk_RmResource_kgrctxControlSerialization_Prologue(struct KernelGraphicsContext *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
169 return rmresControlSerialization_Prologue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_KernelGraphicsContext_RmResource.offset), pCallContext, pParams);
170 }
171
__nvoc_thunk_RsResource_kgrctxIsPartialUnmapSupported(struct KernelGraphicsContext * pResource)172 static NvBool __nvoc_thunk_RsResource_kgrctxIsPartialUnmapSupported(struct KernelGraphicsContext *pResource) {
173 return resIsPartialUnmapSupported((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_KernelGraphicsContext_RsResource.offset));
174 }
175
__nvoc_thunk_RsResource_kgrctxPreDestruct(struct KernelGraphicsContext * pResource)176 static void __nvoc_thunk_RsResource_kgrctxPreDestruct(struct KernelGraphicsContext *pResource) {
177 resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_KernelGraphicsContext_RsResource.offset));
178 }
179
__nvoc_thunk_RsResource_kgrctxIsDuplicate(struct KernelGraphicsContext * pResource,NvHandle hMemory,NvBool * pDuplicate)180 static NV_STATUS __nvoc_thunk_RsResource_kgrctxIsDuplicate(struct KernelGraphicsContext *pResource, NvHandle hMemory, NvBool *pDuplicate) {
181 return resIsDuplicate((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_KernelGraphicsContext_RsResource.offset), hMemory, pDuplicate);
182 }
183
__nvoc_thunk_RmResource_kgrctxControlSerialization_Epilogue(struct KernelGraphicsContext * pResource,struct CALL_CONTEXT * pCallContext,struct RS_RES_CONTROL_PARAMS_INTERNAL * pParams)184 static void __nvoc_thunk_RmResource_kgrctxControlSerialization_Epilogue(struct KernelGraphicsContext *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
185 rmresControlSerialization_Epilogue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_KernelGraphicsContext_RmResource.offset), pCallContext, pParams);
186 }
187
__nvoc_thunk_GpuResource_kgrctxMap(struct KernelGraphicsContext * pGpuResource,struct CALL_CONTEXT * pCallContext,struct RS_CPU_MAP_PARAMS * pParams,struct RsCpuMapping * pCpuMapping)188 static NV_STATUS __nvoc_thunk_GpuResource_kgrctxMap(struct KernelGraphicsContext *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
189 return gpuresMap((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_KernelGraphicsContext_GpuResource.offset), pCallContext, pParams, pCpuMapping);
190 }
191
__nvoc_thunk_RmResource_kgrctxAccessCallback(struct KernelGraphicsContext * pResource,struct RsClient * pInvokingClient,void * pAllocParams,RsAccessRight accessRight)192 static NvBool __nvoc_thunk_RmResource_kgrctxAccessCallback(struct KernelGraphicsContext *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
193 return rmresAccessCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_KernelGraphicsContext_RmResource.offset), pInvokingClient, pAllocParams, accessRight);
194 }
195
196 #if !defined(NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG)
197 #define NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(x) (0)
198 #endif
199
// Table of RM control methods exported by KernelGraphicsContext.
// Each entry maps an NV0090_CTRL_CMD method id to its handler; an entry's
// handler is compiled out (NULL) when its flag bits are disabled at build
// time. numEntries in __nvoc_export_info below must match this table (4).
static const struct NVOC_EXPORTED_METHOD_DEF __nvoc_exported_method_def_KernelGraphicsContext[] =
{
    { /* [0] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
        /*pFunc=*/ (void (*)(void)) NULL,
#else
        /*pFunc=*/ (void (*)(void)) kgrctxCtrlSetTpcPartitionMode_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
        /*flags=*/ 0x10u,
        /*accessRight=*/0x0u,
        /*methodId=*/ 0x900101u,
        /*paramSize=*/ sizeof(NV0090_CTRL_TPC_PARTITION_MODE_PARAMS),
        /*pClassInfo=*/ &(__nvoc_class_def_KernelGraphicsContext.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
        /*func=*/ "kgrctxCtrlSetTpcPartitionMode"
#endif
    },
    { /* [1] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
        /*pFunc=*/ (void (*)(void)) NULL,
#else
        /*pFunc=*/ (void (*)(void)) kgrctxCtrlGetTpcPartitionMode_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
        /*flags=*/ 0x10u,
        /*accessRight=*/0x0u,
        /*methodId=*/ 0x900103u,
        /*paramSize=*/ sizeof(NV0090_CTRL_TPC_PARTITION_MODE_PARAMS),
        /*pClassInfo=*/ &(__nvoc_class_def_KernelGraphicsContext.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
        /*func=*/ "kgrctxCtrlGetTpcPartitionMode"
#endif
    },
    { /* [2] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
        /*pFunc=*/ (void (*)(void)) NULL,
#else
        /*pFunc=*/ (void (*)(void)) kgrctxCtrlGetMMUDebugMode_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
        /*flags=*/ 0x10u,
        /*accessRight=*/0x0u,
        /*methodId=*/ 0x900105u,
        /*paramSize=*/ sizeof(NV0090_CTRL_GET_MMU_DEBUG_MODE_PARAMS),
        /*pClassInfo=*/ &(__nvoc_class_def_KernelGraphicsContext.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
        /*func=*/ "kgrctxCtrlGetMMUDebugMode"
#endif
    },
    { /* [3] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
        /*pFunc=*/ (void (*)(void)) NULL,
#else
        /*pFunc=*/ (void (*)(void)) kgrctxCtrlProgramVidmemPromote_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
        /*flags=*/ 0x10u,
        /*accessRight=*/0x0u,
        /*methodId=*/ 0x900107u,
        /*paramSize=*/ sizeof(NV0090_CTRL_PROGRAM_VIDMEM_PROMOTE_PARAMS),
        /*pClassInfo=*/ &(__nvoc_class_def_KernelGraphicsContext.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
        /*func=*/ "kgrctxCtrlProgramVidmemPromote"
#endif
    },

};
264
// Export descriptor: entry count must stay in sync with the method table above.
const struct NVOC_EXPORT_INFO __nvoc_export_info_KernelGraphicsContext =
{
    /*numEntries=*/ 4,
    /*pExportEntries=*/ __nvoc_exported_method_def_KernelGraphicsContext
};
270
void __nvoc_dtor_GpuResource(GpuResource*);

// Destructor: run this class's own teardown first, then destroy the
// GpuResource base (reverse of construction order).
void __nvoc_dtor_KernelGraphicsContext(KernelGraphicsContext *pThis) {
    __nvoc_kgrctxDestruct(pThis);
    __nvoc_dtor_GpuResource(&pThis->__nvoc_base_GpuResource);
    PORT_UNREFERENCED_VARIABLE(pThis);
}
277
// Initialize hal-dependent data fields. KernelGraphicsContext currently has
// none, so the body only computes the halspec indices (kept for generator
// uniformity) and silences unused-variable warnings.
void __nvoc_init_dataField_KernelGraphicsContext(KernelGraphicsContext *pThis, RmHalspecOwner *pRmhalspecowner) {
    ChipHal *chipHal = &pRmhalspecowner->chipHal;
    const unsigned long chipHal_HalVarIdx = (unsigned long)chipHal->__nvoc_HalVarIdx;
    RmVariantHal *rmVariantHal = &pRmhalspecowner->rmVariantHal;
    const unsigned long rmVariantHal_HalVarIdx = (unsigned long)rmVariantHal->__nvoc_HalVarIdx;
    PORT_UNREFERENCED_VARIABLE(pThis);
    PORT_UNREFERENCED_VARIABLE(pRmhalspecowner);
    PORT_UNREFERENCED_VARIABLE(chipHal);
    PORT_UNREFERENCED_VARIABLE(chipHal_HalVarIdx);
    PORT_UNREFERENCED_VARIABLE(rmVariantHal);
    PORT_UNREFERENCED_VARIABLE(rmVariantHal_HalVarIdx);
}
290
NV_STATUS __nvoc_ctor_GpuResource(GpuResource* , struct CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);

// Constructor: construct the GpuResource base, initialize hal data fields,
// then run this class's constructor. On failure, the goto chain tears down
// exactly the pieces that were already constructed, in reverse order.
NV_STATUS __nvoc_ctor_KernelGraphicsContext(KernelGraphicsContext *pThis, RmHalspecOwner *pRmhalspecowner, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
    NV_STATUS status = NV_OK;
    status = __nvoc_ctor_GpuResource(&pThis->__nvoc_base_GpuResource, arg_pCallContext, arg_pParams);
    if (status != NV_OK) goto __nvoc_ctor_KernelGraphicsContext_fail_GpuResource;
    __nvoc_init_dataField_KernelGraphicsContext(pThis, pRmhalspecowner);

    status = __nvoc_kgrctxConstruct(pThis, arg_pCallContext, arg_pParams);
    if (status != NV_OK) goto __nvoc_ctor_KernelGraphicsContext_fail__init;
    goto __nvoc_ctor_KernelGraphicsContext_exit; // Success

__nvoc_ctor_KernelGraphicsContext_fail__init:
    // Own constructor failed: undo the base-class construction.
    __nvoc_dtor_GpuResource(&pThis->__nvoc_base_GpuResource);
__nvoc_ctor_KernelGraphicsContext_fail_GpuResource:
__nvoc_ctor_KernelGraphicsContext_exit:

    return status;
}
309
// Populate the virtual function table: first this class's own methods
// (including hal-variant selection), then the thunks that wire base-class
// vtable slots to this class and this class's slots to base implementations.
static void __nvoc_init_funcTable_KernelGraphicsContext_1(KernelGraphicsContext *pThis, RmHalspecOwner *pRmhalspecowner) {
    ChipHal *chipHal = &pRmhalspecowner->chipHal;
    const unsigned long chipHal_HalVarIdx = (unsigned long)chipHal->__nvoc_HalVarIdx;
    RmVariantHal *rmVariantHal = &pRmhalspecowner->rmVariantHal;
    const unsigned long rmVariantHal_HalVarIdx = (unsigned long)rmVariantHal->__nvoc_HalVarIdx;
    PORT_UNREFERENCED_VARIABLE(pThis);
    PORT_UNREFERENCED_VARIABLE(pRmhalspecowner);
    PORT_UNREFERENCED_VARIABLE(chipHal);
    PORT_UNREFERENCED_VARIABLE(chipHal_HalVarIdx);
    PORT_UNREFERENCED_VARIABLE(rmVariantHal);
    PORT_UNREFERENCED_VARIABLE(rmVariantHal_HalVarIdx);

    pThis->__kgrctxCanCopy__ = &kgrctxCanCopy_0c883b;

    pThis->__kgrctxGetInternalObjectHandle__ = &kgrctxGetInternalObjectHandle_IMPL;

    // Hal function -- kgrctxShouldPreAllocPmBuffer
    // Selected by (chip, RM variant); the bitmask tests the halspec index.
    if (((( ((chipHal_HalVarIdx >> 5) == 1UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x11f0ffe0UL) )) /* ChipHal: TU102 | TU104 | TU106 | TU116 | TU117 | GA100 | GA102 | GA103 | GA104 | GA106 | GA107 | AD102 | AD103 | AD104 | AD106 | AD107 | GH100 */ && (( ((rmVariantHal_HalVarIdx >> 5) == 0UL) && ((1UL << (rmVariantHal_HalVarIdx & 0x1f)) & 0x00000001UL) )) /* RmVariantHal: VF */ ))
    {
        pThis->__kgrctxShouldPreAllocPmBuffer__ = &kgrctxShouldPreAllocPmBuffer_VF;
    }
    else
    {
        pThis->__kgrctxShouldPreAllocPmBuffer__ = &kgrctxShouldPreAllocPmBuffer_PF;
    }

    // Hal function -- kgrctxGetRegisterAccessMapId
    if (( ((rmVariantHal_HalVarIdx >> 5) == 0UL) && ((1UL << (rmVariantHal_HalVarIdx & 0x1f)) & 0x00000001UL) )) /* RmVariantHal: VF */
    {
        pThis->__kgrctxGetRegisterAccessMapId__ = &kgrctxGetRegisterAccessMapId_aa21e9;
    }
    // default
    else
    {
        pThis->__kgrctxGetRegisterAccessMapId__ = &kgrctxGetRegisterAccessMapId_IMPL;
    }

    // Exported control handlers (compiled out if the 0x10 flag is disabled;
    // must mirror the exported-method table above).
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
    pThis->__kgrctxCtrlGetTpcPartitionMode__ = &kgrctxCtrlGetTpcPartitionMode_IMPL;
#endif

#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
    pThis->__kgrctxCtrlSetTpcPartitionMode__ = &kgrctxCtrlSetTpcPartitionMode_IMPL;
#endif

#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
    pThis->__kgrctxCtrlGetMMUDebugMode__ = &kgrctxCtrlGetMMUDebugMode_IMPL;
#endif

#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
    pThis->__kgrctxCtrlProgramVidmemPromote__ = &kgrctxCtrlProgramVidmemPromote_IMPL;
#endif

    // Base-class slots overridden by this class (downcast thunks).
    pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__resCanCopy__ = &__nvoc_thunk_KernelGraphicsContext_resCanCopy;

    pThis->__nvoc_base_GpuResource.__gpuresGetInternalObjectHandle__ = &__nvoc_thunk_KernelGraphicsContext_gpuresGetInternalObjectHandle;

    // This class's slots inherited from the bases (upcast thunks).
    pThis->__kgrctxShareCallback__ = &__nvoc_thunk_GpuResource_kgrctxShareCallback;

    pThis->__kgrctxCheckMemInterUnmap__ = &__nvoc_thunk_RmResource_kgrctxCheckMemInterUnmap;

    pThis->__kgrctxMapTo__ = &__nvoc_thunk_RsResource_kgrctxMapTo;

    pThis->__kgrctxGetMapAddrSpace__ = &__nvoc_thunk_GpuResource_kgrctxGetMapAddrSpace;

    pThis->__kgrctxGetRefCount__ = &__nvoc_thunk_RsResource_kgrctxGetRefCount;

    pThis->__kgrctxAddAdditionalDependants__ = &__nvoc_thunk_RsResource_kgrctxAddAdditionalDependants;

    pThis->__kgrctxControl_Prologue__ = &__nvoc_thunk_RmResource_kgrctxControl_Prologue;

    pThis->__kgrctxGetRegBaseOffsetAndSize__ = &__nvoc_thunk_GpuResource_kgrctxGetRegBaseOffsetAndSize;

    pThis->__kgrctxInternalControlForward__ = &__nvoc_thunk_GpuResource_kgrctxInternalControlForward;

    pThis->__kgrctxUnmapFrom__ = &__nvoc_thunk_RsResource_kgrctxUnmapFrom;

    pThis->__kgrctxControl_Epilogue__ = &__nvoc_thunk_RmResource_kgrctxControl_Epilogue;

    pThis->__kgrctxControl__ = &__nvoc_thunk_GpuResource_kgrctxControl;

    pThis->__kgrctxUnmap__ = &__nvoc_thunk_GpuResource_kgrctxUnmap;

    pThis->__kgrctxGetMemInterMapParams__ = &__nvoc_thunk_RmResource_kgrctxGetMemInterMapParams;

    pThis->__kgrctxGetMemoryMappingDescriptor__ = &__nvoc_thunk_RmResource_kgrctxGetMemoryMappingDescriptor;

    pThis->__kgrctxControlFilter__ = &__nvoc_thunk_RsResource_kgrctxControlFilter;

    pThis->__kgrctxControlSerialization_Prologue__ = &__nvoc_thunk_RmResource_kgrctxControlSerialization_Prologue;

    pThis->__kgrctxIsPartialUnmapSupported__ = &__nvoc_thunk_RsResource_kgrctxIsPartialUnmapSupported;

    pThis->__kgrctxPreDestruct__ = &__nvoc_thunk_RsResource_kgrctxPreDestruct;

    pThis->__kgrctxIsDuplicate__ = &__nvoc_thunk_RsResource_kgrctxIsDuplicate;

    pThis->__kgrctxControlSerialization_Epilogue__ = &__nvoc_thunk_RmResource_kgrctxControlSerialization_Epilogue;

    pThis->__kgrctxMap__ = &__nvoc_thunk_GpuResource_kgrctxMap;

    pThis->__kgrctxAccessCallback__ = &__nvoc_thunk_RmResource_kgrctxAccessCallback;
}
413
// Public entry point for vtable initialization; the work lives in the _1
// helper (the generator splits large tables into numbered parts).
void __nvoc_init_funcTable_KernelGraphicsContext(KernelGraphicsContext *pThis, RmHalspecOwner *pRmhalspecowner) {
    __nvoc_init_funcTable_KernelGraphicsContext_1(pThis, pRmhalspecowner);
}
417
void __nvoc_init_GpuResource(GpuResource*);

// Wire up the per-instance base-class pointers, initialize the base chain,
// then fill this object's function table.
void __nvoc_init_KernelGraphicsContext(KernelGraphicsContext *pThis, RmHalspecOwner *pRmhalspecowner) {
    pThis->__nvoc_pbase_KernelGraphicsContext = pThis;
    pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object;
    pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource;
    pThis->__nvoc_pbase_RmResourceCommon = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon;
    pThis->__nvoc_pbase_RmResource = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource;
    pThis->__nvoc_pbase_GpuResource = &pThis->__nvoc_base_GpuResource;
    __nvoc_init_GpuResource(&pThis->__nvoc_base_GpuResource);
    __nvoc_init_funcTable_KernelGraphicsContext(pThis, pRmhalspecowner);
}
429
__nvoc_objCreate_KernelGraphicsContext(KernelGraphicsContext ** ppThis,Dynamic * pParent,NvU32 createFlags,struct CALL_CONTEXT * arg_pCallContext,struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams)430 NV_STATUS __nvoc_objCreate_KernelGraphicsContext(KernelGraphicsContext **ppThis, Dynamic *pParent, NvU32 createFlags, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams)
431 {
432 NV_STATUS status;
433 Object *pParentObj = NULL;
434 KernelGraphicsContext *pThis;
435 RmHalspecOwner *pRmhalspecowner;
436
437 // Assign `pThis`, allocating memory unless suppressed by flag.
438 status = __nvoc_handleObjCreateMemAlloc(createFlags, sizeof(KernelGraphicsContext), (void**)&pThis, (void**)ppThis);
439 if (status != NV_OK)
440 return status;
441
442 // Zero is the initial value for everything.
443 portMemSet(pThis, 0, sizeof(KernelGraphicsContext));
444
445 // Initialize runtime type information.
446 __nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_KernelGraphicsContext);
447
448 pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object.createFlags = createFlags;
449
450 // pParent must be a valid object that derives from a halspec owner class.
451 NV_ASSERT_OR_RETURN(pParent != NULL, NV_ERR_INVALID_ARGUMENT);
452
453 // Link the child into the parent unless flagged not to do so.
454 if (!(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
455 {
456 pParentObj = dynamicCast(pParent, Object);
457 objAddChild(pParentObj, &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object);
458 }
459 else
460 {
461 pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
462 }
463
464 if ((pRmhalspecowner = dynamicCast(pParent, RmHalspecOwner)) == NULL)
465 pRmhalspecowner = objFindAncestorOfType(RmHalspecOwner, pParent);
466 NV_ASSERT_OR_RETURN(pRmhalspecowner != NULL, NV_ERR_INVALID_ARGUMENT);
467
468 __nvoc_init_KernelGraphicsContext(pThis, pRmhalspecowner);
469 status = __nvoc_ctor_KernelGraphicsContext(pThis, pRmhalspecowner, arg_pCallContext, arg_pParams);
470 if (status != NV_OK) goto __nvoc_objCreate_KernelGraphicsContext_cleanup;
471
472 // Assignment has no effect if NVOC_OBJ_CREATE_FLAGS_IN_PLACE_CONSTRUCT is set.
473 *ppThis = pThis;
474
475 return NV_OK;
476
477 __nvoc_objCreate_KernelGraphicsContext_cleanup:
478
479 // Unlink the child from the parent if it was linked above.
480 if (pParentObj != NULL)
481 objRemoveChild(pParentObj, &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object);
482
483 // Do not call destructors here since the constructor already called them.
484 if (createFlags & NVOC_OBJ_CREATE_FLAGS_IN_PLACE_CONSTRUCT)
485 portMemSet(pThis, 0, sizeof(KernelGraphicsContext));
486 else
487 {
488 portMemFree(pThis);
489 *ppThis = NULL;
490 }
491
492 // coverity[leaked_storage:FALSE]
493 return status;
494 }
495
__nvoc_objCreateDynamic_KernelGraphicsContext(KernelGraphicsContext ** ppThis,Dynamic * pParent,NvU32 createFlags,va_list args)496 NV_STATUS __nvoc_objCreateDynamic_KernelGraphicsContext(KernelGraphicsContext **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
497 NV_STATUS status;
498 struct CALL_CONTEXT * arg_pCallContext = va_arg(args, struct CALL_CONTEXT *);
499 struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
500
501 status = __nvoc_objCreate_KernelGraphicsContext(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
502
503 return status;
504 }
505
#ifdef DEBUG
// Link-time guard: duplicate 0xe7abeb class ids produce a duplicate-symbol
// error in debug builds.
char __nvoc_class_id_uniqueness_check_0xe7abeb = 1;
#endif
509
510 extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelGraphicsContextShared;
511
512 extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
513
514 extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsShared;
515
516 void __nvoc_init_KernelGraphicsContextShared(KernelGraphicsContextShared*);
517 void __nvoc_init_funcTable_KernelGraphicsContextShared(KernelGraphicsContextShared*);
518 NV_STATUS __nvoc_ctor_KernelGraphicsContextShared(KernelGraphicsContextShared*);
519 void __nvoc_init_dataField_KernelGraphicsContextShared(KernelGraphicsContextShared*);
520 void __nvoc_dtor_KernelGraphicsContextShared(KernelGraphicsContextShared*);
521 extern const struct NVOC_EXPORT_INFO __nvoc_export_info_KernelGraphicsContextShared;
522
// RTTI identity entry: KernelGraphicsContextShared viewed as itself (offset 0).
static const struct NVOC_RTTI __nvoc_rtti_KernelGraphicsContextShared_KernelGraphicsContextShared = {
    /*pClassDef=*/ &__nvoc_class_def_KernelGraphicsContextShared,
    /*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_KernelGraphicsContextShared,
    /*offset=*/ 0,
};
528
529 static const struct NVOC_RTTI __nvoc_rtti_KernelGraphicsContextShared_Object = {
530 /*pClassDef=*/ &__nvoc_class_def_Object,
531 /*dtor=*/ &__nvoc_destructFromBase,
532 /*offset=*/ NV_OFFSETOF(KernelGraphicsContextShared, __nvoc_base_RsShared.__nvoc_base_Object),
533 };
534
535 static const struct NVOC_RTTI __nvoc_rtti_KernelGraphicsContextShared_RsShared = {
536 /*pClassDef=*/ &__nvoc_class_def_RsShared,
537 /*dtor=*/ &__nvoc_destructFromBase,
538 /*offset=*/ NV_OFFSETOF(KernelGraphicsContextShared, __nvoc_base_RsShared),
539 };
540
541 static const struct NVOC_CASTINFO __nvoc_castinfo_KernelGraphicsContextShared = {
542 /*numRelatives=*/ 3,
543 /*relatives=*/ {
544 &__nvoc_rtti_KernelGraphicsContextShared_KernelGraphicsContextShared,
545 &__nvoc_rtti_KernelGraphicsContextShared_RsShared,
546 &__nvoc_rtti_KernelGraphicsContextShared_Object,
547 },
548 };
549
// Global class descriptor: size/id/name of KernelGraphicsContextShared plus
// the dynamic-create, cast-info, and export tables the NVOC runtime consumes.
const struct NVOC_CLASS_DEF __nvoc_class_def_KernelGraphicsContextShared = 
{
    /*classInfo=*/ {
        /*size=*/ sizeof(KernelGraphicsContextShared),
        /*classId=*/ classId(KernelGraphicsContextShared),
        /*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
        /*name=*/ "KernelGraphicsContextShared",
#endif
    },
    /*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_KernelGraphicsContextShared,
    /*pCastInfo=*/ &__nvoc_castinfo_KernelGraphicsContextShared,
    /*pExportInfo=*/ &__nvoc_export_info_KernelGraphicsContextShared
};
564
// Empty export table: this class exposes no exported (RM API) entry points.
const struct NVOC_EXPORT_INFO __nvoc_export_info_KernelGraphicsContextShared = 
{
    /*numEntries=*/ 0,
    /*pExportEntries=*/ 0
};
570
void __nvoc_dtor_RsShared(RsShared*);

// Destructor: run the user-supplied cleanup first, then destroy the RsShared
// base — the reverse of construction order in __nvoc_ctor below.
void __nvoc_dtor_KernelGraphicsContextShared(KernelGraphicsContextShared *pThis) {
    __nvoc_shrkgrctxDestruct(pThis);
    __nvoc_dtor_RsShared(&pThis->__nvoc_base_RsShared);
    PORT_UNREFERENCED_VARIABLE(pThis);
}
577
__nvoc_init_dataField_KernelGraphicsContextShared(KernelGraphicsContextShared * pThis)578 void __nvoc_init_dataField_KernelGraphicsContextShared(KernelGraphicsContextShared *pThis) {
579 PORT_UNREFERENCED_VARIABLE(pThis);
580 }
581
NV_STATUS __nvoc_ctor_RsShared(RsShared* );

// Constructor: build the RsShared base, initialize this class's data fields,
// then run the user-supplied constructor. On failure, pieces constructed so
// far are torn down in reverse order via the goto cleanup chain.
NV_STATUS __nvoc_ctor_KernelGraphicsContextShared(KernelGraphicsContextShared *pThis) {
    NV_STATUS status = NV_OK;
    status = __nvoc_ctor_RsShared(&pThis->__nvoc_base_RsShared);
    if (status != NV_OK) goto __nvoc_ctor_KernelGraphicsContextShared_fail_RsShared;
    __nvoc_init_dataField_KernelGraphicsContextShared(pThis);

    status = __nvoc_shrkgrctxConstruct(pThis);
    if (status != NV_OK) goto __nvoc_ctor_KernelGraphicsContextShared_fail__init;
    goto __nvoc_ctor_KernelGraphicsContextShared_exit; // Success

__nvoc_ctor_KernelGraphicsContextShared_fail__init:
    // User construct failed: only the base needs to be destroyed.
    __nvoc_dtor_RsShared(&pThis->__nvoc_base_RsShared);
__nvoc_ctor_KernelGraphicsContextShared_fail_RsShared:
__nvoc_ctor_KernelGraphicsContextShared_exit:

    return status;
}
600
__nvoc_init_funcTable_KernelGraphicsContextShared_1(KernelGraphicsContextShared * pThis)601 static void __nvoc_init_funcTable_KernelGraphicsContextShared_1(KernelGraphicsContextShared *pThis) {
602 PORT_UNREFERENCED_VARIABLE(pThis);
603 }
604
__nvoc_init_funcTable_KernelGraphicsContextShared(KernelGraphicsContextShared * pThis)605 void __nvoc_init_funcTable_KernelGraphicsContextShared(KernelGraphicsContextShared *pThis) {
606 __nvoc_init_funcTable_KernelGraphicsContextShared_1(pThis);
607 }
608
void __nvoc_init_RsShared(RsShared*);

// Initialize the object: wire up the per-ancestor "pbase" shortcut pointers,
// then initialize the RsShared base and finally this class's function table.
void __nvoc_init_KernelGraphicsContextShared(KernelGraphicsContextShared *pThis) {
    pThis->__nvoc_pbase_KernelGraphicsContextShared = pThis;
    pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_RsShared.__nvoc_base_Object;
    pThis->__nvoc_pbase_RsShared = &pThis->__nvoc_base_RsShared;
    __nvoc_init_RsShared(&pThis->__nvoc_base_RsShared);
    __nvoc_init_funcTable_KernelGraphicsContextShared(pThis);
}
617
// Allocate (unless constructing in place), zero, and fully construct a
// KernelGraphicsContextShared, optionally linking it as a child of pParent.
// On constructor failure the object is unlinked, then freed and *ppThis
// NULLed (heap path) or re-zeroed (in-place path).
NV_STATUS __nvoc_objCreate_KernelGraphicsContextShared(KernelGraphicsContextShared **ppThis, Dynamic *pParent, NvU32 createFlags)
{
    NV_STATUS status;
    Object *pParentObj = NULL;
    KernelGraphicsContextShared *pThis;

    // Assign `pThis`, allocating memory unless suppressed by flag.
    status = __nvoc_handleObjCreateMemAlloc(createFlags, sizeof(KernelGraphicsContextShared), (void**)&pThis, (void**)ppThis);
    if (status != NV_OK)
        return status;

    // Zero is the initial value for everything.
    portMemSet(pThis, 0, sizeof(KernelGraphicsContextShared));

    // Initialize runtime type information.
    __nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_KernelGraphicsContextShared);

    // Stash the creation flags so the destructor path can see them later.
    pThis->__nvoc_base_RsShared.__nvoc_base_Object.createFlags = createFlags;

    // Link the child into the parent if there is one unless flagged not to do so.
    if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
    {
        pParentObj = dynamicCast(pParent, Object);
        objAddChild(pParentObj, &pThis->__nvoc_base_RsShared.__nvoc_base_Object);
    }
    else
    {
        pThis->__nvoc_base_RsShared.__nvoc_base_Object.pParent = NULL;
    }

    // Initialization (pbase pointers, vtable) must precede construction.
    __nvoc_init_KernelGraphicsContextShared(pThis);
    status = __nvoc_ctor_KernelGraphicsContextShared(pThis);
    if (status != NV_OK) goto __nvoc_objCreate_KernelGraphicsContextShared_cleanup;

    // Assignment has no effect if NVOC_OBJ_CREATE_FLAGS_IN_PLACE_CONSTRUCT is set.
    *ppThis = pThis;

    return NV_OK;

__nvoc_objCreate_KernelGraphicsContextShared_cleanup:

    // Unlink the child from the parent if it was linked above.
    if (pParentObj != NULL)
        objRemoveChild(pParentObj, &pThis->__nvoc_base_RsShared.__nvoc_base_Object);

    // Do not call destructors here since the constructor already called them.
    if (createFlags & NVOC_OBJ_CREATE_FLAGS_IN_PLACE_CONSTRUCT)
        portMemSet(pThis, 0, sizeof(KernelGraphicsContextShared));
    else
    {
        portMemFree(pThis);
        *ppThis = NULL;
    }

    // coverity[leaked_storage:FALSE]
    return status;
}
675
__nvoc_objCreateDynamic_KernelGraphicsContextShared(KernelGraphicsContextShared ** ppThis,Dynamic * pParent,NvU32 createFlags,va_list args)676 NV_STATUS __nvoc_objCreateDynamic_KernelGraphicsContextShared(KernelGraphicsContextShared **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
677 NV_STATUS status;
678
679 status = __nvoc_objCreate_KernelGraphicsContextShared(ppThis, pParent, createFlags);
680
681 return status;
682 }
683
684