1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
4 /*
5 * CallHelpers.CPP: helpers to call managed code
6 *
7
8 */
9
10 #include "common.h"
11 #include "dbginterface.h"
12
13 // To include declaration of "AppDomainTransitionExceptionFilter"
14 #include "excep.h"
15
16 // To include declaration of "SignatureNative"
17 #include "runtimehandles.h"
18
19
20 #if defined(FEATURE_MULTICOREJIT) && defined(_DEBUG)
21
22 // Allow system module, and first party WinMD files for Appx
23
AssertMulticoreJitAllowedModule(PCODE pTarget)24 void AssertMulticoreJitAllowedModule(PCODE pTarget)
25 {
26 CONTRACTL
27 {
28 SO_NOT_MAINLINE;
29 }
30 CONTRACTL_END;
31
32 MethodDesc* pMethod = Entry2MethodDesc(pTarget, NULL);
33
34 Module * pModule = pMethod->GetModule_NoLogging();
35
36 #if defined(FEATURE_APPX_BINDER)
37
38 // For Appx process, allow certain modules to load on background thread
39 if (AppX::IsAppXProcess())
40 {
41 if (MulticoreJitManager::IsLoadOkay(pModule))
42 {
43 return;
44 }
45 }
46 #endif
47
48 _ASSERTE(pModule->IsSystem());
49 }
50
51 #endif
52
53 // For X86, INSTALL_COMPLUS_EXCEPTION_HANDLER grants us sufficient protection to call into
54 // managed code.
55 //
56 // But on 64-bit, the personality routine will not pop frames or trackers as exceptions unwind
57 // out of managed code. Instead, we rely on explicit cleanup like CLRException::HandlerState::CleanupTry
58 // or UMThunkUnwindFrameChainHandler.
59 //
60 // So most callers should call through CallDescrWorkerWithHandler (or a wrapper like MethodDesc::Call)
61 // and get the platform-appropriate exception handling. A few places try to optimize by calling direct
62 // to managed methods (see ArrayInitializeWorker or FastCallFinalize). This sort of thing is
63 // dangerous. You have to worry about marking yourself as a legal managed caller and you have to
64 // worry about how exceptions will be handled on a WIN64EXCEPTIONS plan. It is generally only suitable
65 // for X86.
66
67 //*******************************************************************************
// Dispatches a managed call described by pCallDescrData, bracketed by the
// platform-appropriate EE-to-managed transition macros so that exceptions
// unwinding out of managed code are handled correctly (see the comment block
// above about X86 vs. 64-bit exception handling).
//
// pCallDescrData - fully-populated call descriptor (target, args, return info).
// fCriticalCall  - TRUE selects the EEToManagedCriticalCall transition flavor.
void CallDescrWorkerWithHandler(
                CallDescrData *   pCallDescrData,
                BOOL      fCriticalCall)
{
    STATIC_CONTRACT_SO_INTOLERANT;

#if defined(FEATURE_MULTICOREJIT) && defined(_DEBUG)

    // For multicore JITting, background thread should not call managed code, except when calling system code (e.g. throwing managed exception)
    if (GetThread()->HasThreadStateNC(Thread::TSNC_CallingManagedCodeDisabled))
    {
        AssertMulticoreJitAllowedModule(pCallDescrData->pTarget);
    }

#endif


    // The BEGIN/END macros install the exception-handling machinery required
    // around any transition into managed code; CallDescrWorker itself must run
    // strictly inside this bracket.
    BEGIN_CALL_TO_MANAGEDEX(fCriticalCall ? EEToManagedCriticalCall : EEToManagedDefault);

    CallDescrWorker(pCallDescrData);

    END_CALL_TO_MANAGED();
}
91
92
93 #if !defined(_WIN64) && defined(_DEBUG)
94
95 //*******************************************************************************
96 // assembly code, in i386/asmhelpers.asm
// Debug-build wrapper around the assembly routine CallDescrWorkerInternal
// (i386/asmhelpers.asm). Performs debug-only validation and preserves the
// thread's dangerousObjRefs table across the managed call.
void CallDescrWorker(CallDescrData * pCallDescrData)
{
    //
    // This function must not have a contract ... its caller has pushed an FS:0 frame (COMPlusFrameHandler) that must
    // be the first handler on the stack. The contract causes, at a minimum, a C++ exception handler to be pushed to
    // handle the destruction of the contract object. If there is an exception in the managed code called from here,
    // and that exception is handled in that same block of managed code, then the COMPlusFrameHandler will actually
    // unwind the C++ handler before branching to the catch clause in managed code. That essentially causes an
    // out-of-order destruction of the contract object, resulting in very odd crashes later.
    //
#if 0
    CONTRACTL {
        THROWS;
        GC_TRIGGERS;
    } CONTRACTL_END;
#endif // 0
    STATIC_CONTRACT_THROWS;
    STATIC_CONTRACT_GC_TRIGGERS;
    STATIC_CONTRACT_SO_TOLERANT;

    _ASSERTE(!NingenEnabled() && "You cannot invoke managed code inside the ngen compilation process.");

    TRIGGERSGC_NOSTOMP();           // Can't stomp object refs because they are args to the function

    // Save a copy of dangerousObjRefs in table.
    Thread* curThread;
    DWORD_PTR ObjRefTable[OBJREF_TABSIZE];

    curThread = GetThread();
    _ASSERTE(curThread != NULL);

    // The two buffers must match exactly for the memcpy save/restore to be valid.
    static_assert_no_msg(sizeof(curThread->dangerousObjRefs) == sizeof(ObjRefTable));
    memcpy(ObjRefTable, curThread->dangerousObjRefs, sizeof(ObjRefTable));

#ifndef FEATURE_INTERPRETER
    // When the interpreter is used, this may be called from preemptive code.
    _ASSERTE(curThread->PreemptiveGCDisabled());  // Jitted code expects to be in cooperative mode
#endif

    // If the current thread owns spinlock or unbreakable lock, it cannot call managed code.
    _ASSERTE(!curThread->HasUnbreakableLock() &&
             (curThread->m_StateNC & Thread::TSNC_OwnsSpinLock) == 0);

#ifdef _TARGET_ARM_
    // ARM managed code runs in Thumb mode; the target address must have the Thumb bit set.
    _ASSERTE(IsThumbCode(pCallDescrData->pTarget));
#endif

    CallDescrWorkerInternal(pCallDescrData);

    // Restore dangerousObjRefs when we return back to EE after call
    memcpy(curThread->dangerousObjRefs, ObjRefTable, sizeof(ObjRefTable));

    TRIGGERSGC();

    ENABLESTRESSHEAP();
}
153 #endif // !defined(_WIN64) && defined(_DEBUG)
154
// Wraps CallDescrWorkerWithHandler in a PAL_TRY whose filter
// (AppDomainTransitionExceptionFilter) delivers debugger catch-handler-found
// notifications as exceptions cross this frame. The filter always continues
// the search, so the __except body itself is unreachable.
//
// pCallDescrData - call descriptor to dispatch.
// pFrame         - context transition frame passed through to the filter (may be NULL).
// fCriticalCall  - forwarded to CallDescrWorkerWithHandler.
void DispatchCallDebuggerWrapper(
    CallDescrData * pCallDescrData,
    ContextTransitionFrame* pFrame,
    BOOL fCriticalCall
)
{
    // Use static contracts b/c we have SEH.
    STATIC_CONTRACT_THROWS;
    STATIC_CONTRACT_GC_TRIGGERS;
    STATIC_CONTRACT_MODE_COOPERATIVE;

    // PAL_TRY only passes one pointer into the filter, so bundle everything
    // the filter and the try body need into a single Param struct.
    struct Param : NotifyOfCHFFilterWrapperParam
    {
        CallDescrData * pCallDescrData;
        BOOL fCriticalCall;
    } param;

    param.pFrame = pFrame;
    param.pCallDescrData = pCallDescrData;
    param.fCriticalCall = fCriticalCall;

    PAL_TRY(Param *, pParam, &param)
    {
        CallDescrWorkerWithHandler(
            pParam->pCallDescrData,
            pParam->fCriticalCall);
    }
    PAL_EXCEPT_FILTER(AppDomainTransitionExceptionFilter)
    {
        // Should never reach here b/c handler should always continue search.
        _ASSERTE(!"Unreachable");
    }
    PAL_ENDTRY
}
189
190 // Helper for VM->managed calls with simple signatures.
// Helper for VM->managed calls with simple signatures.
//
// pSrc                      - argument buffer; on CALLDESCR_ARGREGS targets the
//                             first NUM_ARGUMENT_REGISTERS slots are the register
//                             arguments and the rest are stack arguments.
// numStackSlotsToCopy       - number of stack slots to copy to the outgoing frame.
// pTargetAddress            - managed entry point to call.
// dwDispatchCallSimpleFlags - DispatchCallSimple_* flags (critical call, debugger
//                             catch-handler-found notification).
//
// Returns the first pointer-sized piece of the managed return value.
void * DispatchCallSimple(
                    SIZE_T *pSrc,
                    DWORD numStackSlotsToCopy,
                    PCODE pTargetAddress,
                    DWORD dwDispatchCallSimpleFlags)
{
    CONTRACTL
    {
        GC_TRIGGERS;
        THROWS;
        MODE_COOPERATIVE;
    }
    CONTRACTL_END;

#ifdef DEBUGGING_SUPPORTED
    // Let an attached debugger trace the call into managed code.
    if (CORDebuggerTraceCall())
        g_pDebugInterface->TraceCall((const BYTE *)pTargetAddress);
#endif // DEBUGGING_SUPPORTED

    CallDescrData callDescrData;

#ifdef CALLDESCR_ARGREGS
    // Register args occupy the front of pSrc; stack args follow them.
    callDescrData.pSrc = pSrc + NUM_ARGUMENT_REGISTERS;
    callDescrData.numStackSlots = numStackSlotsToCopy;
    callDescrData.pArgumentRegisters = (ArgumentRegisters *)pSrc;
#else
    callDescrData.pSrc = pSrc;
    callDescrData.numStackSlots = numStackSlotsToCopy;
#endif
#ifdef CALLDESCR_FPARGREGS
    // "Simple" signatures never pass arguments in floating point registers.
    callDescrData.pFloatArgumentRegisters = NULL;
#endif
#ifdef CALLDESCR_REGTYPEMAP
    callDescrData.dwRegTypeMap = 0;
#endif
    callDescrData.fpReturnSize = 0;
    callDescrData.pTarget = pTargetAddress;

    if ((dwDispatchCallSimpleFlags & DispatchCallSimple_CatchHandlerFoundNotification) != 0)
    {
        // Route through the debugger wrapper so catch-handler-found
        // notifications fire if an exception escapes the managed call.
        DispatchCallDebuggerWrapper(
            &callDescrData,
            NULL,
            dwDispatchCallSimpleFlags & DispatchCallSimple_CriticalCall);
    }
    else
    {
        CallDescrWorkerWithHandler(&callDescrData, dwDispatchCallSimpleFlags & DispatchCallSimple_CriticalCall);
    }

    return *(void **)(&callDescrData.returnValue);
}
243
244 // This method performs the proper profiler and debugger callbacks before dispatching the
245 // call. The caller has the responsibility of furnishing the target address, register and stack arguments.
246 // Stack arguments should be in reverse order, and pSrc should point to past the last argument
247 // Returns the return value or the exception object if one was thrown.
// This method performs the proper profiler and debugger callbacks before dispatching the
// call. The caller has the responsibility of furnishing the target address, register and stack arguments.
// Stack arguments should be in reverse order, and pSrc should point to past the last argument.
// If the managed call throws, the throwable is caught and returned via *pRefException
// (and, under FEATURE_CORRUPTING_EXCEPTIONS, its corruption severity via *pSeverity);
// transient exceptions are rethrown by EX_END_CATCH.
void DispatchCall(
                    CallDescrData * pCallDescrData,
                    OBJECTREF *pRefException,
                    ContextTransitionFrame* pFrame /* = NULL */
#ifdef FEATURE_CORRUPTING_EXCEPTIONS
                    , CorruptionSeverity *pSeverity /*= NULL*/
#endif // FEATURE_CORRUPTING_EXCEPTIONS
                    )
{
    CONTRACTL
    {
        GC_TRIGGERS;
        THROWS;
        MODE_COOPERATIVE;
    }
    CONTRACTL_END;

#ifdef DEBUGGING_SUPPORTED
    // Let an attached debugger trace the call into managed code.
    if (CORDebuggerTraceCall())
        g_pDebugInterface->TraceCall((const BYTE *)pCallDescrData->pTarget);
#endif // DEBUGGING_SUPPORTED

#ifdef FEATURE_CORRUPTING_EXCEPTIONS
    if (pSeverity != NULL)
    {
        // By default, assume any exception that comes out is NotCorrupting
        *pSeverity = NotCorrupting;
    }
#endif // FEATURE_CORRUPTING_EXCEPTIONS

    EX_TRY
    {
        DispatchCallDebuggerWrapper(pCallDescrData,
                                    pFrame,
                                    FALSE);
    }
    EX_CATCH
    {
        *pRefException = GET_THROWABLE();

#ifdef FEATURE_CORRUPTING_EXCEPTIONS
        if (pSeverity != NULL)
        {
            // Report the corruption severity of the exception we just caught.
            *pSeverity = GetThread()->GetExceptionState()->GetLastActiveExceptionCorruptionSeverity();
        }
#endif // FEATURE_CORRUPTING_EXCEPTIONS

    }
    EX_END_CATCH(RethrowTransientExceptions);
}
299
300 #ifdef CALLDESCR_REGTYPEMAP
301 //*******************************************************************************
FillInRegTypeMap(int argOffset,CorElementType typ,BYTE * pMap)302 void FillInRegTypeMap(int argOffset, CorElementType typ, BYTE * pMap)
303 {
304 CONTRACTL
305 {
306 WRAPPER(THROWS);
307 WRAPPER(GC_TRIGGERS);
308 MODE_ANY;
309 PRECONDITION(CheckPointer(pMap, NULL_NOT_OK));
310 }
311 CONTRACTL_END;
312
313 int regArgNum = TransitionBlock::GetArgumentIndexFromOffset(argOffset);
314
315 // Create a map of the first 8 argument types. This is used in
316 // CallDescrWorkerInternal to load args into general registers or
317 // floating point registers.
318 //
319 // we put these in order from the LSB to the MSB so that we can keep
320 // the map in a register and just examine the low byte and then shift
321 // right for each arg.
322
323 if (regArgNum < NUM_ARGUMENT_REGISTERS)
324 {
325 pMap[regArgNum] = typ;
326 }
327 }
328 #endif // CALLDESCR_REGTYPEMAP
329
330 #if defined(_DEBUG) && defined(FEATURE_COMINTEROP)
331 extern int g_fMainThreadApartmentStateSet;
332 extern int g_fInitializingInitialAD;
333 extern Volatile<LONG> g_fInExecuteMainMethod;
334 #endif
335
336 //*******************************************************************************
// Core VM->managed call path for a MethodDescCallSite: builds a fake
// FramedMethodFrame-style argument area on the stack from the caller-supplied
// ARG_SLOT array, then dispatches via CallDescrWorkerWithHandler (or, for
// interpreter IL stubs, directly via CallDescrWorkerInternal in preemptive mode).
//
// pArguments             - unprotected array of ARG_SLOTs in signature order
//                          (this / ret-buff / cookie slots first where present).
// transitionToPreemptive - (FEATURE_INTERPRETER only) switch to preemptive GC
//                          around the call and capture the interp last-error.
//
// Returns the managed return value as an ARG_SLOT; if the method returns a
// struct via pvRetBuff (HFA case), the value is also copied to that buffer.
#ifdef FEATURE_INTERPRETER
ARG_SLOT MethodDescCallSite::CallTargetWorker(const ARG_SLOT *pArguments, bool transitionToPreemptive)
#else
ARG_SLOT MethodDescCallSite::CallTargetWorker(const ARG_SLOT *pArguments)
#endif
{
    //
    // WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING
    //
    // This method needs to have a GC_TRIGGERS contract because it
    // calls managed code. However, IT MAY NOT TRIGGER A GC ITSELF
    // because the argument array is not protected and may contain gc
    // refs.
    //
    // WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING
    //
    CONTRACTL
    {
        THROWS;
        GC_TRIGGERS;
        INJECT_FAULT(COMPlusThrowOM(););
        MODE_COOPERATIVE;
        PRECONDITION(GetAppDomain()->CheckCanExecuteManagedCode(m_pMD));
        PRECONDITION(m_pMD->CheckActivated());          // EnsureActive will trigger, so we must already be activated

#ifdef FEATURE_COMINTEROP
        // If we're an exe, then we must either be initializing the first AD, or have already setup the main thread's
        // COM apartment state.
        // If you hit this assert, then you likely introduced code during startup that could inadvertently
        // initialize the COM apartment state of the main thread before we set it based on the user attribute.
        PRECONDITION(g_fInExecuteMainMethod ? (g_fMainThreadApartmentStateSet || g_fInitializingInitialAD) : TRUE);
#endif // FEATURE_COMINTEROP
    }
    CONTRACTL_END;

    _ASSERTE(!NingenEnabled() && "You cannot invoke managed code inside the ngen compilation process.");

    // If we're invoking an mscorlib method, lift the restriction on type load limits. Calls into mscorlib are
    // typically calls into specific and controlled helper methods for security checks and other linktime tasks.
    //
    // @todo: In an ideal world, we would require each of those sites to do the override rather than disabling
    // the assert broadly here. However, by limiting the override to mscorlib methods, we should still be able
    // to effectively enforce the more general rule about loader recursion.
    MAYBE_OVERRIDE_TYPE_LOAD_LEVEL_LIMIT(CLASS_LOADED, m_pMD->GetModule()->IsSystem());

    // These are computed inside the GCX_FORBID scope below but consumed after it.
    LPBYTE pTransitionBlock;
    UINT   nStackBytes;
    UINT   fpReturnSize;
#ifdef CALLDESCR_REGTYPEMAP
    UINT64 dwRegTypeMap;
#endif
#ifdef CALLDESCR_FPARGREGS
    FloatArgumentRegisters *pFloatArgumentRegisters = NULL;
#endif
    void* pvRetBuff = NULL;

    {
        //
        // the incoming argument array is not gc-protected, so we
        // may not trigger a GC before we actually call managed code
        //
        GCX_FORBID();

        // Record this call if required
        g_IBCLogger.LogMethodDescAccess(m_pMD);

        //
        // All types must already be loaded. This macro also sets up a FAULT_FORBID region which is
        // also required for critical calls since we cannot inject any failure points between the
        // caller of MethodDesc::CallDescr and the actual transition to managed code.
        //
        ENABLE_FORBID_GC_LOADER_USE_IN_THIS_SCOPE();

        _ASSERTE(GetAppDomain()->ShouldHaveCode());

#ifdef FEATURE_INTERPRETER
        // The interpreter path tolerates a wider set of calling conventions.
        _ASSERTE(isCallConv(m_methodSig.GetCallingConvention(), IMAGE_CEE_CS_CALLCONV_DEFAULT)
                 || isCallConv(m_methodSig.GetCallingConvention(), CorCallingConvention(IMAGE_CEE_CS_CALLCONV_C))
                 || isCallConv(m_methodSig.GetCallingConvention(), CorCallingConvention(IMAGE_CEE_CS_CALLCONV_VARARG))
                 || isCallConv(m_methodSig.GetCallingConvention(), CorCallingConvention(IMAGE_CEE_CS_CALLCONV_NATIVEVARARG))
                 || isCallConv(m_methodSig.GetCallingConvention(), CorCallingConvention(IMAGE_CEE_CS_CALLCONV_STDCALL)));
#else
        _ASSERTE(isCallConv(m_methodSig.GetCallingConvention(), IMAGE_CEE_CS_CALLCONV_DEFAULT));
        _ASSERTE(!(m_methodSig.GetCallingConventionInfo() & CORINFO_CALLCONV_PARAMTYPE));
#endif

#ifdef DEBUGGING_SUPPORTED
        // Let an attached debugger trace the call into managed code.
        if (CORDebuggerTraceCall())
        {
            g_pDebugInterface->TraceCall((const BYTE *)m_pCallTarget);
        }
#endif // DEBUGGING_SUPPORTED

#if CHECK_APP_DOMAIN_LEAKS
        if (g_pConfig->AppDomainLeaks())
        {
            // See if we are in the correct domain to call on the object
            if (m_methodSig.HasThis() && !m_pMD->GetMethodTable()->IsValueType())
            {
                CONTRACT_VIOLATION(ThrowsViolation|GCViolation|FaultViolation);
                OBJECTREF pThis = ArgSlotToObj(pArguments[0]);
                if (!pThis->AssignAppDomain(GetAppDomain()))
                    _ASSERTE(!"Attempt to call method on object in wrong domain");
            }
        }
#endif // CHECK_APP_DOMAIN_LEAKS

#ifdef _DEBUG
        {
            // The metasig should be reset
            _ASSERTE(m_methodSig.GetArgNum() == 0);

            // Check to see that any value type args have been loaded and restored.
            // This is because we may be calling a FramedMethodFrame which will use the sig
            // to trace the args, but if any are unloaded we will be stuck if a GC occurs.
            _ASSERTE(m_pMD->IsRestored_NoLogging());
            CorElementType argType;
            while ((argType = m_methodSig.NextArg()) != ELEMENT_TYPE_END)
            {
                if (argType == ELEMENT_TYPE_VALUETYPE)
                {
                    TypeHandle th = m_methodSig.GetLastTypeHandleThrowing(ClassLoader::DontLoadTypes);
                    CONSISTENCY_CHECK(th.CheckFullyLoaded());
                    CONSISTENCY_CHECK(th.IsRestored_NoLogging());
                }
            }
            m_methodSig.Reset();
        }
#endif // _DEBUG

        // Index of the next ARG_SLOT to consume from pArguments.
        DWORD arg = 0;

        nStackBytes = m_argIt.SizeOfFrameArgumentArray();

        // Create a fake FramedMethodFrame on the stack.

        // Note that SizeOfFrameArgumentArray does overflow checks with sufficient margin to prevent overflows here
        DWORD dwAllocaSize = TransitionBlock::GetNegSpaceSize() + sizeof(TransitionBlock) + nStackBytes;

        LPBYTE pAlloc = (LPBYTE)_alloca(dwAllocaSize);

        // The transition block sits after the negative-offset ("neg space") area.
        pTransitionBlock = pAlloc + TransitionBlock::GetNegSpaceSize();

#ifdef CALLDESCR_REGTYPEMAP
        dwRegTypeMap            = 0;
        BYTE*   pMap            = (BYTE*)&dwRegTypeMap;
#endif // CALLDESCR_REGTYPEMAP

        // Fixed leading slots of the ARG_SLOT array: this pointer, then the
        // return buffer pointer, consumed in that order when present.
        if (m_argIt.HasThis())
        {
            *((LPVOID*)(pTransitionBlock + m_argIt.GetThisOffset())) = ArgSlotToPtr(pArguments[arg++]);
        }

        if (m_argIt.HasRetBuffArg())
        {
            *((LPVOID*)(pTransitionBlock + m_argIt.GetRetBuffArgOffset())) = ArgSlotToPtr(pArguments[arg++]);
        }
#ifdef FEATURE_HFA
#ifdef FEATURE_INTERPRETER
        // Something is necessary for HFA's, but what's below (in the FEATURE_INTERPRETER ifdef)
        // doesn't seem to do the proper test. It fires,
        // incorrectly, for a one-word struct that *doesn't* have a ret buff. So we'll try this, instead:
        // We're here because it doesn't have a ret buff. If it would, except that the struct being returned
        // is an HFA, *then* assume the invoker made this slot a ret buff pointer.
        // It's an HFA if the return type is a struct, but it has a non-zero FP return size.
        // (If it were an HFA, but had a ret buff because it was varargs, then we wouldn't be here.
        // Also this test won't work for float enums.)
        else if (m_methodSig.GetReturnType() == ELEMENT_TYPE_VALUETYPE
                 && m_argIt.GetFPReturnSize() > 0)
#else // FEATURE_INTERPRETER
        else if (ELEMENT_TYPE_VALUETYPE == m_methodSig.GetReturnTypeNormalized())
#endif // FEATURE_INTERPRETER
        {
            // Caller supplied a buffer for the HFA struct return; remember it so
            // we can copy the register-returned value into it after the call.
            pvRetBuff = ArgSlotToPtr(pArguments[arg++]);
        }
#endif // FEATURE_HFA


#ifdef FEATURE_INTERPRETER
        // Vararg cookie and generic instantiation parameter slots, when present.
        if (m_argIt.IsVarArg())
        {
            *((LPVOID*)(pTransitionBlock + m_argIt.GetVASigCookieOffset())) = ArgSlotToPtr(pArguments[arg++]);
        }

        if (m_argIt.HasParamType())
        {
            *((LPVOID*)(pTransitionBlock + m_argIt.GetParamTypeArgOffset())) = ArgSlotToPtr(pArguments[arg++]);
        }
#endif

        // Copy each remaining argument into its slot in the transition block.
        int ofs;
        for (; TransitionBlock::InvalidOffset != (ofs = m_argIt.GetNextOffset()); arg++)
        {
#ifdef CALLDESCR_REGTYPEMAP
            FillInRegTypeMap(ofs, m_argIt.GetArgType(), pMap);
#endif

#ifdef CALLDESCR_FPARGREGS
            // Under CALLDESCR_FPARGREGS -ve offsets indicate arguments in floating point registers. If we
            // have at least one such argument we point the call worker at the floating point area of the
            // frame (we leave it null otherwise since the worker can perform a useful optimization if it
            // knows no floating point registers need to be set up).
            if ((ofs < 0) && (pFloatArgumentRegisters == NULL))
                pFloatArgumentRegisters = (FloatArgumentRegisters*)(pTransitionBlock +
                                                                    TransitionBlock::GetOffsetOfFloatArgumentRegisters());
#endif

#if CHECK_APP_DOMAIN_LEAKS
            // Make sure the arg is in the right app domain
            if (g_pConfig->AppDomainLeaks() && m_argIt.GetArgType() == ELEMENT_TYPE_CLASS)
            {
                CONTRACT_VIOLATION(ThrowsViolation|GCViolation|FaultViolation);
                OBJECTREF objRef = ArgSlotToObj(pArguments[arg]);
                if (!objRef->AssignAppDomain(GetAppDomain()))
                    _ASSERTE(!"Attempt to pass object in wrong app domain to method");
            }
#endif // CHECK_APP_DOMAIN_LEAKS

            PVOID pDest = pTransitionBlock + ofs;

            UINT32 stackSize = m_argIt.GetArgSize();
            switch (stackSize)
            {
                // Small primitives are stored in the ARG_SLOT by value.
                case 1:
                case 2:
                case 4:
                    *((INT32*)pDest) = (INT32)pArguments[arg];
                    break;

                case 8:
                    *((INT64*)pDest) = pArguments[arg];
                    break;

                default:
                    // The ARG_SLOT contains a pointer to the value-type
#ifdef ENREGISTERED_PARAMTYPE_MAXSIZE
                    if (m_argIt.IsArgPassedByRef())
                    {
                        // We need to pass in a pointer, but be careful of the ARG_SLOT calling convention.
                        // We might already have a pointer in the ARG_SLOT
                        *(PVOID*)pDest = stackSize>sizeof(ARG_SLOT) ?
                                (LPVOID)ArgSlotToPtr(pArguments[arg]) :
                                (LPVOID)ArgSlotEndianessFixup((ARG_SLOT*)&pArguments[arg], stackSize);
                    }
                    else
#endif // ENREGISTERED_PARAMTYPE_MAXSIZE
                    if (stackSize>sizeof(ARG_SLOT))
                    {
                        // Large value type: the slot holds a pointer to the data.
                        CopyMemory(pDest, ArgSlotToPtr(pArguments[arg]), stackSize);
                    }
                    else
                    {
                        // Small value type stored inline in the ARG_SLOT.
                        CopyMemory(pDest, (LPVOID) (&pArguments[arg]), stackSize);
                    }
                    break;
            }
        }

        fpReturnSize = m_argIt.GetFPReturnSize();

    } // END GCX_FORBID & ENABLE_FORBID_GC_LOADER_USE_IN_THIS_SCOPE

    CallDescrData callDescrData;

    callDescrData.pSrc = pTransitionBlock + sizeof(TransitionBlock);
    callDescrData.numStackSlots = nStackBytes / STACK_ELEM_SIZE;
#ifdef CALLDESCR_ARGREGS
    callDescrData.pArgumentRegisters = (ArgumentRegisters*)(pTransitionBlock + TransitionBlock::GetOffsetOfArgumentRegisters());
#endif
#ifdef CALLDESCR_FPARGREGS
    callDescrData.pFloatArgumentRegisters = pFloatArgumentRegisters;
#endif
#ifdef CALLDESCR_REGTYPEMAP
    callDescrData.dwRegTypeMap = dwRegTypeMap;
#endif
    callDescrData.fpReturnSize = fpReturnSize;
    callDescrData.pTarget = m_pCallTarget;

#ifdef FEATURE_INTERPRETER
    if (transitionToPreemptive)
    {
        // IL-stub call on behalf of the interpreter: run in preemptive mode and
        // capture the stub's last-error into the interp-specific slot.
        GCPreemp transitionIfILStub(transitionToPreemptive);
        DWORD* pLastError = &GetThread()->m_dwLastErrorInterp;
        CallDescrWorkerInternal(&callDescrData);
        *pLastError = GetLastError();
    }
    else
#endif // FEATURE_INTERPRETER
    {
        CallDescrWorkerWithHandler(&callDescrData);
    }

    if (pvRetBuff != NULL)
    {
        // HFA return: copy the register-returned struct into the caller's buffer.
        memcpyNoGCRefs(pvRetBuff, &callDescrData.returnValue, sizeof(callDescrData.returnValue));
    }

    ARG_SLOT retval = *(ARG_SLOT *)(&callDescrData.returnValue);

#if !defined(_WIN64) && BIGENDIAN
    {
        GCX_FORBID();

        // On 32-bit big-endian targets a 32-bit return value lives in the high
        // half of the 64-bit ARG_SLOT; shift it down.
        if (!m_methodSig.Is64BitReturn())
        {
            retval >>= 32;
        }
    }
#endif // !defined(_WIN64) && BIGENDIAN

    return retval;
}
649
// Invokes the parameterless instance constructor (.ctor) on an already
// allocated managed object. Throws MissingMethodException if the object's
// (true) type has no default constructor.
//
// ref - the object to construct; GC-protected here for the duration of the call.
void CallDefaultConstructor(OBJECTREF ref)
{
    CONTRACTL
    {
        THROWS;
        GC_TRIGGERS;
        MODE_COOPERATIVE;
    }
    CONTRACTL_END;

    // Use the true method table so proxies/transparent wrappers resolve to the
    // actual type being constructed.
    MethodTable *pMT = ref->GetTrueMethodTable();

    PREFIX_ASSUME(pMT != NULL);

    if (!pMT->HasDefaultConstructor())
    {
        SString ctorMethodName(SString::Utf8, COR_CTOR_METHOD_NAME);
        COMPlusThrowNonLocalized(kMissingMethodException, ctorMethodName.GetUnicode());
    }

    // The callsite macros below may trigger a GC; keep ref reported.
    GCPROTECT_BEGIN (ref);

    MethodDesc *pMD = pMT->GetDefaultConstructor();

    PREPARE_NONVIRTUAL_CALLSITE_USING_METHODDESC(pMD);
    DECLARE_ARGHOLDER_ARRAY(CtorArgs, 1);
    CtorArgs[ARGNUM_0]  = OBJECTREF_TO_ARGHOLDER(ref);

    // Call the ctor...
    CATCH_HANDLER_FOUND_NOTIFICATION_CALLSITE;
    CALL_MANAGED_METHOD_NORET(CtorArgs);

    GCPROTECT_END ();
}
684