// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_IA32

#include "src/api-arguments.h"
#include "src/assembler-inl.h"
#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/frame-constants.h"
#include "src/frames.h"
#include "src/heap/heap-inl.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/objects/api-callbacks.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
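  // Note on the register state assumed here (inferred from the Array stub
  // conventions used elsewhere in this file): eax holds argc, edi the
  // constructor and ebx the AllocationSite. The return address is popped so
  // that the constructor and the allocation site can be pushed as additional
  // arguments for Runtime::kNewArray; eax is then adjusted by three to cover
  // the extra stack slots.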
  __ pop(ecx);
  __ mov(MemOperand(esp, eax, times_4, 0), edi);
  __ push(edi);
  __ push(ebx);
  __ push(ecx);
  __ add(eax, Immediate(3));
  __ TailCallRuntime(Runtime::kNewArray);
}

void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  // It is important that the store buffer overflow stubs are generated first.
  CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
  StoreFastElementStub::GenerateAheadOfTime(isolate);
}

void JSEntryStub::Generate(MacroAssembler* masm) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Set up frame.
  __ push(ebp);
  __ mov(ebp, esp);

  // Push marker in two places.
  StackFrame::Type marker = type();
  __ push(Immediate(StackFrame::TypeToMarker(marker)));  // marker
  ExternalReference context_address =
      ExternalReference::Create(IsolateAddressId::kContextAddress, isolate());
  __ push(Operand::StaticVariable(context_address));  // context
  // Save callee-saved registers (C calling conventions).
  __ push(edi);
  __ push(esi);
  __ push(ebx);

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp =
      ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate());
  __ push(Operand::StaticVariable(c_entry_fp));

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp =
      ExternalReference::Create(IsolateAddressId::kJSEntrySPAddress, isolate());
  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ j(not_equal, &not_outermost_js, Label::kNear);
  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
  __ push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ jmp(&invoke, Label::kNear);
  __ bind(&not_outermost_js);
  __ push(Immediate(StackFrame::INNER_JSENTRY_FRAME));

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();
  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception = ExternalReference::Create(
      IsolateAddressId::kPendingExceptionAddress, isolate());
  __ mov(Operand::StaticVariable(pending_exception), eax);
  __ mov(eax, Immediate(isolate()->factory()->exception()));
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler();

  // Invoke the function by calling through JS entry trampoline builtin and
  // pop the faked function when we return. Notice that we cannot store a
  // reference to the trampoline code directly in this stub, because the
  // builtin stubs may not have been generated yet.
  __ Call(EntryTrampoline(), RelocInfo::CODE_TARGET);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);
  // Check if the current stack frame is marked as the outermost JS frame.
  __ pop(ebx);
  __ cmp(ebx, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ j(not_equal, &not_outermost_js_2);
  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  __ pop(Operand::StaticVariable(ExternalReference::Create(
      IsolateAddressId::kCEntryFPAddress, isolate())));

  // Restore callee-saved registers (C calling conventions).
  __ pop(ebx);
  __ pop(esi);
  __ pop(edi);
  __ add(esp, Immediate(2 * kPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ pop(ebp);
  __ ret(0);
}

void ProfileEntryHookStub::MaybeCallEntryHookDelayed(TurboAssembler* tasm,
                                                     Zone* zone) {
  if (tasm->isolate()->function_entry_hook() != nullptr) {
    tasm->CallStubDelayed(new (zone) ProfileEntryHookStub(nullptr));
  }
}

void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != nullptr) {
    ProfileEntryHookStub stub(masm->isolate());
    masm->CallStub(&stub);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // Save volatile registers.
  const int kNumSavedRegisters = 3;
  __ push(eax);
  __ push(ecx);
  __ push(edx);

  // Calculate and push the original stack pointer.
  __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ push(eax);

  // Retrieve our return address and use it to calculate the calling
  // function's address.
  __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
  __ push(eax);

  // Call the entry hook.
  DCHECK_NOT_NULL(isolate()->function_entry_hook());
  __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
          RelocInfo::RUNTIME_ENTRY);
  __ add(esp, Immediate(2 * kPointerSize));

  // Restore the saved volatile registers.
  __ pop(edx);
  __ pop(ecx);
  __ pop(eax);

  __ ret(0);
}


template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(masm->isolate(),
           GetInitialFastElementsKind(),
           mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
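    // Probe the fast elements kinds in their canonical order; edx is assumed
    // to hold the requested kind (see the register comments in
    // CreateArrayDispatchOneArgument below), and the first match tail calls
    // the matching stub.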
    int last_index =
        GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
  // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // eax - number of arguments
  // edi - constructor?
  // esp[0] - return address
  // esp[4] - last argument
  STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
  STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(PACKED_ELEMENTS == 2);
  STATIC_ASSERT(HOLEY_ELEMENTS == 3);
  STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4);
  STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
                                                  holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);
  } else if (mode == DONT_OVERRIDE) {
    // Is the low bit set? If so, we are holey and that is good.
    Label normal_sequence;
    __ test_b(edx, Immediate(1));
    __ j(not_zero, &normal_sequence);

    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry.
    __ inc(edx);

    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
      __ Assert(equal, AbortReason::kExpectedAllocationSite);
    }

    // Save the resulting elements kind in type info. We can't just store edx
    // in the AllocationSite::transition_info field, because the elements kind
    // is restricted to a portion of the field; the upper bits must be left
    // alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ add(
        FieldOperand(ebx, AllocationSite::kTransitionInfoOrBoilerplateOffset),
        Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));

    __ bind(&normal_sequence);
    int last_index =
        GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
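  // Pre-generate a stub for every fast elements kind in sequence; for kinds
  // that track allocation sites, also generate the DISABLE_ALLOCATION_SITES
  // variant so both are available ahead of time.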
  int to_index =
      GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(isolate, kind);
    stub.GetCode();
    if (AllocationSite::ShouldTrack(kind)) {
      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
      stub1.GetCode();
    }
  }
}

void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayNArgumentsConstructorStub stub(isolate);
  stub.GetCode();

  ElementsKind kinds[2] = {PACKED_ELEMENTS, HOLEY_ELEMENTS};
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things
    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
    stubh1.GetCode();
    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
    stubh2.GetCode();
  }
}

void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm, AllocationSiteOverrideMode mode) {
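  // Dispatch on the argument count in eax: no arguments, a single argument,
  // or the generic N-argument constructor stub.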
  Label not_zero_case, not_one_case;
  __ test(eax, eax);
  __ j(not_zero, &not_zero_case);
  CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

  __ bind(&not_zero_case);
  __ cmp(eax, 1);
  __ j(greater, &not_one_case);
  CreateArrayDispatchOneArgument(masm, mode);

  __ bind(&not_one_case);
  ArrayNArgumentsConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argc (only if argument_count() is ANY or MORE_THAN_ONE)
  //  -- ebx : AllocationSite or undefined
  //  -- edi : constructor
  //  -- edx : Original constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, AbortReason::kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in ebx or a valid AllocationSite
    __ AssertUndefinedOrAllocationSite(ebx);
  }

  Label subclassing;

  // Enter the context of the Array function.
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  __ cmp(edx, edi);
  __ j(not_equal, &subclassing);

  Label no_info;
  // If ebx holds undefined rather than an AllocationSite, call an array
  // constructor that doesn't use AllocationSites.
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(equal, &no_info);

  // Extract the elements kind from the low bits of the transition info.
  __ mov(edx,
         FieldOperand(ebx, AllocationSite::kTransitionInfoOrBoilerplateOffset));
  __ SmiUntag(edx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);

  // Subclassing.
  __ bind(&subclassing);
  __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
  __ add(eax, Immediate(3));
  __ PopReturnAddressTo(ecx);
  __ Push(edx);
  __ Push(ebx);
  __ PushReturnAddressFrom(ecx);
  __ JumpToExternalReference(ExternalReference::Create(Runtime::kNewArray));
}


void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
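  // Same argc-based dispatch as the public Array constructor above, but the
  // internal variants do not consult an AllocationSite.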
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ test(eax, eax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmp(eax, 1);
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    __ mov(ecx, Operand(esp, kPointerSize));
    __ test(ecx, ecx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  ArrayNArgumentsConstructorStub stubN(isolate());
  __ TailCallStub(&stubN);
}


void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argc
  //  -- edi : constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, AbortReason::kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind
  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into ecx. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(ecx);

  if (FLAG_debug_code) {
    Label done;
    __ cmp(ecx, Immediate(PACKED_ELEMENTS));
    __ j(equal, &done);
    __ cmp(ecx, Immediate(HOLEY_ELEMENTS));
    __ Assert(
        equal,
        AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmp(ecx, Immediate(PACKED_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, PACKED_ELEMENTS);
}

// Generates an Operand for saving parameters after PrepareCallApiFunction.
static Operand ApiParameterOperand(int index) {
  return Operand(esp, index * kPointerSize);
}


// Prepares stack to put arguments (aligns and so on). Reserves
// space for return value if needed (assumes the return value is a handle).
// Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
// etc. Saves context (esi). If space was reserved for return value then
// stores the pointer to the reserved slot into esi.
static void PrepareCallApiFunction(MacroAssembler* masm, int argc) {
  __ EnterApiExitFrame(argc);
  if (__ emit_debug_code()) {
    __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}


// Calls an API function.  Allocates HandleScope, extracts returned value
// from handle and propagates exceptions.  Clobbers ebx, edi and
// caller-save registers.  Restores context.  On return removes
// stack_space * kPointerSize (GCed).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     Operand thunk_last_arg, int stack_space,
                                     Operand* stack_space_operand,
                                     Operand return_value_operand) {
  Isolate* isolate = masm->isolate();

  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address(isolate);
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address(isolate);

  DCHECK(edx == function_address);
  // Allocate HandleScope in callee-save registers.
  __ mov(ebx, Operand::StaticVariable(next_address));
  __ mov(edi, Operand::StaticVariable(limit_address));
  __ add(Operand::StaticVariable(level_address), Immediate(1));

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, eax);
    __ mov(Operand(esp, 0),
           Immediate(ExternalReference::isolate_address(isolate)));
    __ CallCFunction(ExternalReference::log_enter_external_function(), 1);
    __ PopSafepointRegisters();
  }

  Label profiler_disabled;
  Label end_profiler_check;
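  // If the profiler is active, route the call through the profiling thunk so
  // the profiler sees the callback address; otherwise call the API function
  // directly.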
  __ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
  __ cmpb(Operand(eax, 0), Immediate(0));
  __ j(zero, &profiler_disabled);

  // Additional parameter is the address of the actual getter function.
  __ mov(thunk_last_arg, function_address);
  // Call the api function.
  __ mov(eax, Immediate(thunk_ref));
  __ call(eax);
  __ jmp(&end_profiler_check);

  __ bind(&profiler_disabled);
  // Call the api function.
  __ call(function_address);
  __ bind(&end_profiler_check);

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, eax);
    __ mov(Operand(esp, 0),
           Immediate(ExternalReference::isolate_address(isolate)));
    __ CallCFunction(ExternalReference::log_leave_external_function(), 1);
    __ PopSafepointRegisters();
  }

  Label prologue;
  // Load the value from ReturnValue
  __ mov(eax, return_value_operand);

  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  __ bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ mov(Operand::StaticVariable(next_address), ebx);
  __ sub(Operand::StaticVariable(level_address), Immediate(1));
  __ Assert(above_equal, AbortReason::kInvalidHandleScopeLevel);
  __ cmp(edi, Operand::StaticVariable(limit_address));
  __ j(not_equal, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  if (stack_space_operand != nullptr) {
    __ mov(ebx, *stack_space_operand);
  }
  __ LeaveApiExitFrame();

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);
  __ cmp(Operand::StaticVariable(scheduled_exception_address),
         Immediate(isolate->factory()->the_hole_value()));
  __ j(not_equal, &promote_scheduled_exception);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ cmp(map, isolate->factory()->heap_number_map());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->undefined_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->true_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->false_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->null_value());
  __ j(equal, &ok, Label::kNear);

  __ Abort(AbortReason::kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

  if (stack_space_operand != nullptr) {
    DCHECK_EQ(0, stack_space);
    __ pop(ecx);
    __ add(esp, ebx);
    __ jmp(ecx);
  } else {
    __ ret(stack_space * kPointerSize);
  }

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions();
  __ bind(&delete_allocated_handles);
  __ mov(Operand::StaticVariable(limit_address), edi);
  __ mov(edi, eax);
  __ mov(Operand(esp, 0),
         Immediate(ExternalReference::isolate_address(isolate)));
  __ mov(eax, Immediate(delete_extensions));
  __ call(eax);
  __ mov(eax, edi);
  __ jmp(&leave_exit_frame);
}

void CallApiCallbackStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ebx                 : call_data
  //  -- ecx                 : holder
  //  -- edx                 : api_function_address
  //  -- esi                 : context
  //  --
  //  -- esp[0]              : return address
  //  -- esp[4]              : last argument
  //  -- ...
  //  -- esp[argc * 4]       : first argument
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  Register call_data = ebx;
  Register holder = ecx;
  Register api_function_address = edx;
  Register return_address = eax;

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kArgsLength == 6);
  STATIC_ASSERT(FCA::kNewTargetIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);

  __ pop(return_address);

  // new target
  __ PushRoot(Heap::kUndefinedValueRootIndex);

  // call data
  __ push(call_data);

  // return value
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  // return value default
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  // isolate
  __ push(Immediate(ExternalReference::isolate_address(isolate())));
  // holder
  __ push(holder);

  Register scratch = call_data;

  __ mov(scratch, esp);
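  // scratch (which aliases call_data) now points at the implicit argument
  // block just pushed; esp currently addresses the holder slot, the base of
  // the FunctionCallbackArguments.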

  // push return address
  __ push(return_address);

  // The API function gets a reference to the v8::Arguments. If the CPU
  // profiler is enabled, a wrapper function will be called instead and the
  // callback address must be passed as an additional parameter, so always
  // allocate space for it.
  const int kApiArgc = 1 + 1;

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 3;

  PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ mov(ApiParameterOperand(2), scratch);
  __ add(scratch, Immediate((argc() + FCA::kArgsLength - 1) * kPointerSize));
  // FunctionCallbackInfo::values_.
  __ mov(ApiParameterOperand(3), scratch);
  // FunctionCallbackInfo::length_.
  __ Move(ApiParameterOperand(4), Immediate(argc()));

  // v8::InvocationCallback's argument.
  __ lea(scratch, ApiParameterOperand(2));
  __ mov(ApiParameterOperand(0), scratch);

  ExternalReference thunk_ref = ExternalReference::invoke_function_callback();

  // The ReturnValue slot lives in the FunctionCallbackArguments built above;
  // the +2 skips the exit frame's saved ebp and the pushed return address.
  int return_value_offset = 2 + FCA::kReturnValueOffset;
  Operand return_value_operand(ebp, return_value_offset * kPointerSize);
  const int stack_space = argc() + FCA::kArgsLength + 1;
  Operand* stack_space_operand = nullptr;
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           ApiParameterOperand(1), stack_space,
                           stack_space_operand, return_value_operand);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
  // name below the exit frame to make GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = ebx;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  __ pop(scratch);  // Pop return address to extend the frame.
  __ push(receiver);
  __ push(FieldOperand(callback, AccessorInfo::kDataOffset));
  __ PushRoot(Heap::kUndefinedValueRootIndex);  // ReturnValue
  // ReturnValue default value
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  __ push(Immediate(ExternalReference::isolate_address(isolate())));
  __ push(holder);
  __ push(Immediate(Smi::kZero));  // should_throw_on_error -> false
  __ push(FieldOperand(callback, AccessorInfo::kNameOffset));
  __ push(scratch);  // Restore return address.

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::PropertyCallbackInfo object, arguments for callback and
  // space for optional callback address parameter (in case CPU profiler is
  // active) in non-GCed stack space.
  const int kApiArgc = 3 + 1;

  // Load address of v8::PropertyAccessorInfo::args_ array.
  __ lea(scratch, Operand(esp, 2 * kPointerSize));

  PrepareCallApiFunction(masm, kApiArgc);
  // Create the v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  Operand info_object = ApiParameterOperand(3);
  __ mov(info_object, scratch);

  // Name as handle.
  __ sub(scratch, Immediate(kPointerSize));
  __ mov(ApiParameterOperand(0), scratch);
  // Arguments pointer.
  __ lea(scratch, info_object);
  __ mov(ApiParameterOperand(1), scratch);
  // Reserve space for optional callback address parameter.
  Operand thunk_last_arg = ApiParameterOperand(2);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback();

  __ mov(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
  Register function_address = edx;
  __ mov(function_address,
         FieldOperand(scratch, Foreign::kForeignAddressOffset));
  // +3 is to skip prolog, return address and name handle.
  Operand return_value_operand(
      ebp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, function_address, thunk_ref, thunk_last_arg,
                           kStackUnwindSpace, nullptr, return_value_operand);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_IA32