1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #if V8_TARGET_ARCH_IA32
6 
7 #include "src/base/adapters.h"
8 #include "src/code-factory.h"
9 #include "src/debug/debug.h"
10 #include "src/deoptimizer.h"
11 #include "src/frame-constants.h"
12 #include "src/frames.h"
13 #include "src/objects-inl.h"
14 #include "src/wasm/wasm-linkage.h"
15 
16 namespace v8 {
17 namespace internal {
18 
19 #define __ ACCESS_MASM(masm)
20 
Generate_Adaptor(MacroAssembler * masm,Address address,ExitFrameType exit_frame_type)21 void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
22                                 ExitFrameType exit_frame_type) {
23   __ mov(ebx, Immediate(ExternalReference::Create(address)));
24   if (exit_frame_type == BUILTIN_EXIT) {
25     __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
26             RelocInfo::CODE_TARGET);
27   } else {
28     DCHECK(exit_frame_type == EXIT);
29     __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
30             RelocInfo::CODE_TARGET);
31   }
32 }
33 
34 namespace {
35 
AdaptorWithExitFrameType(MacroAssembler * masm,Builtins::ExitFrameType exit_frame_type)36 void AdaptorWithExitFrameType(MacroAssembler* masm,
37                               Builtins::ExitFrameType exit_frame_type) {
38   // ----------- S t a t e -------------
39   //  -- eax                : number of arguments excluding receiver
40   //  -- ebx                : entry point
41   //  -- edi                : target
42   //  -- edx                : new.target
43   //  -- esp[0]             : return address
44   //  -- esp[4]             : last argument
45   //  -- ...
46   //  -- esp[4 * argc]      : first argument
47   //  -- esp[4 * (argc +1)] : receiver
48   // -----------------------------------
49   __ AssertFunction(edi);
50 
51   // Make sure we operate in the context of the called function (for example
52   // ConstructStubs implemented in C++ will be run in the context of the caller
53   // instead of the callee, due to the way that [[Construct]] is defined for
54   // ordinary functions).
55   __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
56 
57   // CEntry expects eax to contain the number of arguments including the
58   // receiver and the extra arguments.
59   __ add(eax, Immediate(BuiltinExitFrameConstants::kNumExtraArgsWithReceiver));
60 
61   // Insert extra arguments.
62   __ PopReturnAddressTo(ecx);
63   __ SmiTag(eax);
64   __ PushRoot(Heap::kTheHoleValueRootIndex);  // Padding.
65   __ Push(eax);
66   __ SmiUntag(eax);
67   __ Push(edi);
68   __ Push(edx);
69   __ PushReturnAddressFrom(ecx);
70 
71   // Jump to the C entry runtime stub directly here instead of using
72   // JumpToExternalReference because ebx is loaded by Generate_adaptor.
73   Handle<Code> code =
74       CodeFactory::CEntry(masm->isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
75                           exit_frame_type == Builtins::BUILTIN_EXIT);
76   __ Jump(code, RelocInfo::CODE_TARGET);
77 }
78 }  // namespace
79 
Generate_AdaptorWithExitFrame(MacroAssembler * masm)80 void Builtins::Generate_AdaptorWithExitFrame(MacroAssembler* masm) {
81   AdaptorWithExitFrameType(masm, EXIT);
82 }
83 
Generate_AdaptorWithBuiltinExitFrame(MacroAssembler * masm)84 void Builtins::Generate_AdaptorWithBuiltinExitFrame(MacroAssembler* masm) {
85   AdaptorWithExitFrameType(masm, BUILTIN_EXIT);
86 }
87 
GenerateTailCallToReturnedCode(MacroAssembler * masm,Runtime::FunctionId function_id)88 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
89                                            Runtime::FunctionId function_id) {
90   // ----------- S t a t e -------------
91   //  -- eax : argument count (preserved for callee)
92   //  -- edx : new target (preserved for callee)
93   //  -- edi : target function (preserved for callee)
94   // -----------------------------------
95   {
96     FrameScope scope(masm, StackFrame::INTERNAL);
97     // Push the number of arguments to the callee.
98     __ SmiTag(eax);
99     __ push(eax);
100     // Push a copy of the target function and the new target.
101     __ push(edi);
102     __ push(edx);
103     // Function is also the parameter to the runtime call.
104     __ push(edi);
105 
106     __ CallRuntime(function_id, 1);
107     __ mov(ecx, eax);
108 
109     // Restore target function and new target.
110     __ pop(edx);
111     __ pop(edi);
112     __ pop(eax);
113     __ SmiUntag(eax);
114   }
115 
116   static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
117   __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
118   __ jmp(ecx);
119 }
120 
121 namespace {
122 
Generate_JSBuiltinsConstructStubHelper(MacroAssembler * masm)123 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
124   // ----------- S t a t e -------------
125   //  -- eax: number of arguments
126   //  -- edi: constructor function
127   //  -- edx: new target
128   //  -- esi: context
129   // -----------------------------------
130 
131   // Enter a construct frame.
132   {
133     FrameScope scope(masm, StackFrame::CONSTRUCT);
134 
135     // Preserve the incoming parameters on the stack.
136     __ SmiTag(eax);
137     __ push(esi);
138     __ push(eax);
139     __ SmiUntag(eax);
140 
141     // The receiver for the builtin/api call.
142     __ PushRoot(Heap::kTheHoleValueRootIndex);
143 
144     // Set up pointer to last argument.
145     __ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
146 
147     // Copy arguments and receiver to the expression stack.
148     Label loop, entry;
149     __ mov(ecx, eax);
150     // ----------- S t a t e -------------
151     //  --                eax: number of arguments (untagged)
152     //  --                edi: constructor function
153     //  --                edx: new target
154     //  --                ebx: pointer to last argument
155     //  --                ecx: counter
156     //  -- sp[0*kPointerSize]: the hole (receiver)
157     //  -- sp[1*kPointerSize]: number of arguments (tagged)
158     //  -- sp[2*kPointerSize]: context
159     // -----------------------------------
160     __ jmp(&entry);
161     __ bind(&loop);
162     __ push(Operand(ebx, ecx, times_4, 0));
163     __ bind(&entry);
164     __ dec(ecx);
165     __ j(greater_equal, &loop);
166 
167     // Call the function.
168     // eax: number of arguments (untagged)
169     // edi: constructor function
170     // edx: new target
171     ParameterCount actual(eax);
172     __ InvokeFunction(edi, edx, actual, CALL_FUNCTION);
173 
174     // Restore context from the frame.
175     __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
176     // Restore smi-tagged arguments count from the frame.
177     __ mov(ebx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
178     // Leave construct frame.
179   }
180 
181   // Remove caller arguments from the stack and return.
182   STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
183   __ pop(ecx);
184   __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
185   __ push(ecx);
186   __ ret(0);
187 }
188 
189 }  // namespace
190 
191 // The construct stub for ES5 constructor functions and ES6 class constructors.
Generate_JSConstructStubGeneric(MacroAssembler * masm)192 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
193   // ----------- S t a t e -------------
194   //  -- eax: number of arguments (untagged)
195   //  -- edi: constructor function
196   //  -- edx: new target
197   //  -- esi: context
198   //  -- sp[...]: constructor arguments
199   // -----------------------------------
200 
201   // Enter a construct frame.
202   {
203     FrameScope scope(masm, StackFrame::CONSTRUCT);
204     Label post_instantiation_deopt_entry, not_create_implicit_receiver;
205 
206     // Preserve the incoming parameters on the stack.
207     __ mov(ecx, eax);
208     __ SmiTag(ecx);
209     __ Push(esi);
210     __ Push(ecx);
211     __ Push(edi);
212     __ PushRoot(Heap::kTheHoleValueRootIndex);
213     __ Push(edx);
214 
215     // ----------- S t a t e -------------
216     //  --         sp[0*kPointerSize]: new target
217     //  --         sp[1*kPointerSize]: padding
218     //  -- edi and sp[2*kPointerSize]: constructor function
219     //  --         sp[3*kPointerSize]: argument count
220     //  --         sp[4*kPointerSize]: context
221     // -----------------------------------
222 
223     __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
224     __ test(FieldOperand(ebx, SharedFunctionInfo::kFlagsOffset),
225             Immediate(SharedFunctionInfo::IsDerivedConstructorBit::kMask));
226     __ j(not_zero, &not_create_implicit_receiver);
227 
228     // If not derived class constructor: Allocate the new receiver object.
229     __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
230     __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
231             RelocInfo::CODE_TARGET);
232     __ jmp(&post_instantiation_deopt_entry, Label::kNear);
233 
234     // Else: use TheHoleValue as receiver for constructor call
235     __ bind(&not_create_implicit_receiver);
236     __ LoadRoot(eax, Heap::kTheHoleValueRootIndex);
237 
238     // ----------- S t a t e -------------
239     //  --                         eax: implicit receiver
240     //  -- Slot 4 / sp[0*kPointerSize]: new target
241     //  -- Slot 3 / sp[1*kPointerSize]: padding
242     //  -- Slot 2 / sp[2*kPointerSize]: constructor function
243     //  -- Slot 1 / sp[3*kPointerSize]: number of arguments (tagged)
244     //  -- Slot 0 / sp[4*kPointerSize]: context
245     // -----------------------------------
246     // Deoptimizer enters here.
247     masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
248         masm->pc_offset());
249     __ bind(&post_instantiation_deopt_entry);
250 
251     // Restore new target.
252     __ Pop(edx);
253 
254     // Push the allocated receiver to the stack. We need two copies
255     // because we may have to return the original one and the calling
256     // conventions dictate that the called function pops the receiver.
257     __ Push(eax);
258     __ Push(eax);
259 
260     // ----------- S t a t e -------------
261     //  --                edx: new target
262     //  -- sp[0*kPointerSize]: implicit receiver
263     //  -- sp[1*kPointerSize]: implicit receiver
264     //  -- sp[2*kPointerSize]: padding
265     //  -- sp[3*kPointerSize]: constructor function
266     //  -- sp[4*kPointerSize]: number of arguments (tagged)
267     //  -- sp[5*kPointerSize]: context
268     // -----------------------------------
269 
270     // Restore constructor function and argument count.
271     __ mov(edi, Operand(ebp, ConstructFrameConstants::kConstructorOffset));
272     __ mov(eax, Operand(ebp, ConstructFrameConstants::kLengthOffset));
273     __ SmiUntag(eax);
274 
275     // Set up pointer to last argument.
276     __ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
277 
278     // Copy arguments and receiver to the expression stack.
279     Label loop, entry;
280     __ mov(ecx, eax);
281     // ----------- S t a t e -------------
282     //  --                        eax: number of arguments (untagged)
283     //  --                        edx: new target
284     //  --                        ebx: pointer to last argument
285     //  --                        ecx: counter (tagged)
286     //  --         sp[0*kPointerSize]: implicit receiver
287     //  --         sp[1*kPointerSize]: implicit receiver
288     //  --         sp[2*kPointerSize]: padding
289     //  -- edi and sp[3*kPointerSize]: constructor function
290     //  --         sp[4*kPointerSize]: number of arguments (tagged)
291     //  --         sp[5*kPointerSize]: context
292     // -----------------------------------
293     __ jmp(&entry, Label::kNear);
294     __ bind(&loop);
295     __ Push(Operand(ebx, ecx, times_pointer_size, 0));
296     __ bind(&entry);
297     __ dec(ecx);
298     __ j(greater_equal, &loop);
299 
300     // Call the function.
301     ParameterCount actual(eax);
302     __ InvokeFunction(edi, edx, actual, CALL_FUNCTION);
303 
304     // ----------- S t a t e -------------
305     //  --                eax: constructor result
306     //  -- sp[0*kPointerSize]: implicit receiver
307     //  -- sp[1*kPointerSize]: padding
308     //  -- sp[2*kPointerSize]: constructor function
309     //  -- sp[3*kPointerSize]: number of arguments
310     //  -- sp[4*kPointerSize]: context
311     // -----------------------------------
312 
313     // Store offset of return address for deoptimizer.
314     masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
315         masm->pc_offset());
316 
317     // Restore context from the frame.
318     __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
319 
320     // If the result is an object (in the ECMA sense), we should get rid
321     // of the receiver and use the result; see ECMA-262 section 13.2.2-7
322     // on page 74.
323     Label use_receiver, do_throw, leave_frame;
324 
325     // If the result is undefined, we jump out to using the implicit receiver.
326     __ JumpIfRoot(eax, Heap::kUndefinedValueRootIndex, &use_receiver,
327                   Label::kNear);
328 
329     // Otherwise we do a smi check and fall through to check if the return value
330     // is a valid receiver.
331 
332     // If the result is a smi, it is *not* an object in the ECMA sense.
333     __ JumpIfSmi(eax, &use_receiver, Label::kNear);
334 
335     // If the type of the result (stored in its map) is less than
336     // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
337     STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
338     __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
339     __ j(above_equal, &leave_frame, Label::kNear);
340     __ jmp(&use_receiver, Label::kNear);
341 
342     __ bind(&do_throw);
343     __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
344 
345     // Throw away the result of the constructor invocation and use the
346     // on-stack receiver as the result.
347     __ bind(&use_receiver);
348     __ mov(eax, Operand(esp, 0 * kPointerSize));
349     __ JumpIfRoot(eax, Heap::kTheHoleValueRootIndex, &do_throw);
350 
351     __ bind(&leave_frame);
352     // Restore smi-tagged arguments count from the frame.
353     __ mov(ebx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
354     // Leave construct frame.
355   }
356   // Remove caller arguments from the stack and return.
357   STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
358   __ pop(ecx);
359   __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
360   __ push(ecx);
361   __ ret(0);
362 }
363 
Generate_JSBuiltinsConstructStub(MacroAssembler * masm)364 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
365   Generate_JSBuiltinsConstructStubHelper(masm);
366 }
367 
Generate_ConstructedNonConstructable(MacroAssembler * masm)368 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
369   FrameScope scope(masm, StackFrame::INTERNAL);
370   __ push(edi);
371   __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
372 }
373 
Generate_StackOverflowCheck(MacroAssembler * masm,Register num_args,Register scratch1,Register scratch2,Label * stack_overflow,bool include_receiver=false)374 static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
375                                         Register scratch1, Register scratch2,
376                                         Label* stack_overflow,
377                                         bool include_receiver = false) {
378   // Check the stack for overflow. We are not trying to catch
379   // interruptions (e.g. debug break and preemption) here, so the "real stack
380   // limit" is checked.
381   ExternalReference real_stack_limit =
382       ExternalReference::address_of_real_stack_limit(masm->isolate());
383   __ mov(scratch1, Operand::StaticVariable(real_stack_limit));
384   // Make scratch2 the space we have left. The stack might already be overflowed
385   // here which will cause scratch2 to become negative.
386   __ mov(scratch2, esp);
387   __ sub(scratch2, scratch1);
388   // Make scratch1 the space we need for the array when it is unrolled onto the
389   // stack.
390   __ mov(scratch1, num_args);
391   if (include_receiver) {
392     __ add(scratch1, Immediate(1));
393   }
394   __ shl(scratch1, kPointerSizeLog2);
395   // Check if the arguments will overflow the stack.
396   __ cmp(scratch2, scratch1);
397   __ j(less_equal, stack_overflow);  // Signed comparison.
398 }
399 
Generate_JSEntryTrampolineHelper(MacroAssembler * masm,bool is_construct)400 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
401                                              bool is_construct) {
402   ProfileEntryHookStub::MaybeCallEntryHook(masm);
403 
404   {
405     FrameScope scope(masm, StackFrame::INTERNAL);
406 
407     // Setup the context (we need to use the caller context from the isolate).
408     ExternalReference context_address = ExternalReference::Create(
409         IsolateAddressId::kContextAddress, masm->isolate());
410     __ mov(esi, Operand::StaticVariable(context_address));
411 
412     // Load the previous frame pointer (ebx) to access C arguments
413     __ mov(ebx, Operand(ebp, 0));
414 
415     // Push the function and the receiver onto the stack.
416     __ push(Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
417     __ push(Operand(ebx, EntryFrameConstants::kReceiverArgOffset));
418 
419     // Load the number of arguments and setup pointer to the arguments.
420     __ mov(eax, Operand(ebx, EntryFrameConstants::kArgcOffset));
421     __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset));
422 
423     // Check if we have enough stack space to push all arguments.
424     // Argument count in eax. Clobbers ecx and edx.
425     Label enough_stack_space, stack_overflow;
426     Generate_StackOverflowCheck(masm, eax, ecx, edx, &stack_overflow);
427     __ jmp(&enough_stack_space);
428 
429     __ bind(&stack_overflow);
430     __ CallRuntime(Runtime::kThrowStackOverflow);
431     // This should be unreachable.
432     __ int3();
433 
434     __ bind(&enough_stack_space);
435 
436     // Copy arguments to the stack in a loop.
437     Label loop, entry;
438     __ Move(ecx, Immediate(0));
439     __ jmp(&entry, Label::kNear);
440     __ bind(&loop);
441     __ mov(edx, Operand(ebx, ecx, times_4, 0));  // push parameter from argv
442     __ push(Operand(edx, 0));                    // dereference handle
443     __ inc(ecx);
444     __ bind(&entry);
445     __ cmp(ecx, eax);
446     __ j(not_equal, &loop);
447 
448     // Load the previous frame pointer (ebx) to access C arguments
449     __ mov(ebx, Operand(ebp, 0));
450 
451     // Get the new.target and function from the frame.
452     __ mov(edx, Operand(ebx, EntryFrameConstants::kNewTargetArgOffset));
453     __ mov(edi, Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
454 
455     // Invoke the code.
456     Handle<Code> builtin = is_construct
457                                ? BUILTIN_CODE(masm->isolate(), Construct)
458                                : masm->isolate()->builtins()->Call();
459     __ Call(builtin, RelocInfo::CODE_TARGET);
460 
461     // Exit the internal frame. Notice that this also removes the empty.
462     // context and the function left on the stack by the code
463     // invocation.
464   }
465   __ ret(0);
466 }
467 
Generate_JSEntryTrampoline(MacroAssembler * masm)468 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
469   Generate_JSEntryTrampolineHelper(masm, false);
470 }
471 
Generate_JSConstructEntryTrampoline(MacroAssembler * masm)472 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
473   Generate_JSEntryTrampolineHelper(masm, true);
474 }
475 
GetSharedFunctionInfoBytecode(MacroAssembler * masm,Register sfi_data,Register scratch1)476 static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
477                                           Register sfi_data,
478                                           Register scratch1) {
479   Label done;
480 
481   __ CmpObjectType(sfi_data, INTERPRETER_DATA_TYPE, scratch1);
482   __ j(not_equal, &done, Label::kNear);
483   __ mov(sfi_data,
484          FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
485 
486   __ bind(&done);
487 }
488 
489 // static
Generate_ResumeGeneratorTrampoline(MacroAssembler * masm)490 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
491   // ----------- S t a t e -------------
492   //  -- eax    : the value to pass to the generator
493   //  -- edx    : the JSGeneratorObject to resume
494   //  -- esp[0] : return address
495   // -----------------------------------
496   __ AssertGeneratorObject(edx);
497 
498   // Store input value into generator object.
499   __ mov(FieldOperand(edx, JSGeneratorObject::kInputOrDebugPosOffset), eax);
500   __ RecordWriteField(edx, JSGeneratorObject::kInputOrDebugPosOffset, eax, ecx,
501                       kDontSaveFPRegs);
502 
503   // Load suspended function and context.
504   __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
505   __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
506 
507   // Flood function if we are stepping.
508   Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
509   Label stepping_prepared;
510   ExternalReference debug_hook =
511       ExternalReference::debug_hook_on_function_call_address(masm->isolate());
512   __ cmpb(Operand::StaticVariable(debug_hook), Immediate(0));
513   __ j(not_equal, &prepare_step_in_if_stepping);
514 
515   // Flood function if we need to continue stepping in the suspended generator.
516   ExternalReference debug_suspended_generator =
517       ExternalReference::debug_suspended_generator_address(masm->isolate());
518   __ cmp(edx, Operand::StaticVariable(debug_suspended_generator));
519   __ j(equal, &prepare_step_in_suspended_generator);
520   __ bind(&stepping_prepared);
521 
522   // Check the stack for overflow. We are not trying to catch interruptions
523   // (i.e. debug break and preemption) here, so check the "real stack limit".
524   Label stack_overflow;
525   __ CompareRoot(esp, ecx, Heap::kRealStackLimitRootIndex);
526   __ j(below, &stack_overflow);
527 
528   // Pop return address.
529   __ PopReturnAddressTo(eax);
530 
531   // Push receiver.
532   __ Push(FieldOperand(edx, JSGeneratorObject::kReceiverOffset));
533 
534   // ----------- S t a t e -------------
535   //  -- eax    : return address
536   //  -- edx    : the JSGeneratorObject to resume
537   //  -- edi    : generator function
538   //  -- esi    : generator context
539   //  -- esp[0] : generator receiver
540   // -----------------------------------
541 
542   // Push holes for arguments to generator function. Since the parser forced
543   // context allocation for any variables in generators, the actual argument
544   // values have already been copied into the context and these dummy values
545   // will never be used.
546   __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
547   __ mov(ecx,
548          FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
549   {
550     Label done_loop, loop;
551     __ bind(&loop);
552     __ sub(ecx, Immediate(1));
553     __ j(carry, &done_loop, Label::kNear);
554     __ PushRoot(Heap::kTheHoleValueRootIndex);
555     __ jmp(&loop);
556     __ bind(&done_loop);
557   }
558 
559   // Underlying function needs to have bytecode available.
560   if (FLAG_debug_code) {
561     __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
562     __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFunctionDataOffset));
563     __ Push(eax);
564     GetSharedFunctionInfoBytecode(masm, ecx, eax);
565     __ Pop(eax);
566     __ CmpObjectType(ecx, BYTECODE_ARRAY_TYPE, ecx);
567     __ Assert(equal, AbortReason::kMissingBytecodeArray);
568   }
569 
570   // Resume (Ignition/TurboFan) generator object.
571   {
572     __ PushReturnAddressFrom(eax);
573     __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
574     __ mov(eax,
575            FieldOperand(eax, SharedFunctionInfo::kFormalParameterCountOffset));
576     // We abuse new.target both to indicate that this is a resume call and to
577     // pass in the generator object.  In ordinary calls, new.target is always
578     // undefined because generator functions are non-constructable.
579     static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
580     __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
581     __ add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
582     __ jmp(ecx);
583   }
584 
585   __ bind(&prepare_step_in_if_stepping);
586   {
587     FrameScope scope(masm, StackFrame::INTERNAL);
588     __ Push(edx);
589     __ Push(edi);
590     // Push hole as receiver since we do not use it for stepping.
591     __ PushRoot(Heap::kTheHoleValueRootIndex);
592     __ CallRuntime(Runtime::kDebugOnFunctionCall);
593     __ Pop(edx);
594     __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
595   }
596   __ jmp(&stepping_prepared);
597 
598   __ bind(&prepare_step_in_suspended_generator);
599   {
600     FrameScope scope(masm, StackFrame::INTERNAL);
601     __ Push(edx);
602     __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
603     __ Pop(edx);
604     __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
605   }
606   __ jmp(&stepping_prepared);
607 
608   __ bind(&stack_overflow);
609   {
610     FrameScope scope(masm, StackFrame::INTERNAL);
611     __ CallRuntime(Runtime::kThrowStackOverflow);
612     __ int3();  // This should be unreachable.
613   }
614 }
615 
ReplaceClosureCodeWithOptimizedCode(MacroAssembler * masm,Register optimized_code,Register closure,Register scratch1,Register scratch2,Register scratch3)616 static void ReplaceClosureCodeWithOptimizedCode(
617     MacroAssembler* masm, Register optimized_code, Register closure,
618     Register scratch1, Register scratch2, Register scratch3) {
619 
620   // Store the optimized code in the closure.
621   __ mov(FieldOperand(closure, JSFunction::kCodeOffset), optimized_code);
622   __ mov(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
623   __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
624                       kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
625 }
626 
LeaveInterpreterFrame(MacroAssembler * masm,Register scratch1,Register scratch2)627 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
628                                   Register scratch2) {
629   Register args_count = scratch1;
630   Register return_pc = scratch2;
631 
632   // Get the arguments + receiver count.
633   __ mov(args_count,
634          Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
635   __ mov(args_count,
636          FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));
637 
638   // Leave the frame (also dropping the register file).
639   __ leave();
640 
641   // Drop receiver + arguments.
642   __ pop(return_pc);
643   __ add(esp, args_count);
644   __ push(return_pc);
645 }
646 
647 // Tail-call |function_id| if |smi_entry| == |marker|
TailCallRuntimeIfMarkerEquals(MacroAssembler * masm,Register smi_entry,OptimizationMarker marker,Runtime::FunctionId function_id)648 static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
649                                           Register smi_entry,
650                                           OptimizationMarker marker,
651                                           Runtime::FunctionId function_id) {
652   Label no_match;
653   __ cmp(smi_entry, Immediate(Smi::FromEnum(marker)));
654   __ j(not_equal, &no_match, Label::kNear);
655   GenerateTailCallToReturnedCode(masm, function_id);
656   __ bind(&no_match);
657 }
658 
MaybeTailCallOptimizedCodeSlot(MacroAssembler * masm,Register feedback_vector,Register scratch)659 static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
660                                            Register feedback_vector,
661                                            Register scratch) {
662   // ----------- S t a t e -------------
663   //  -- eax : argument count (preserved for callee if needed, and caller)
664   //  -- edx : new target (preserved for callee if needed, and caller)
665   //  -- edi : target function (preserved for callee if needed, and caller)
666   //  -- feedback vector (preserved for caller if needed)
667   // -----------------------------------
668   DCHECK(!AreAliased(feedback_vector, eax, edx, edi, scratch));
669 
670   Label optimized_code_slot_is_weak_ref, fallthrough;
671 
672   Register closure = edi;
673   Register optimized_code_entry = scratch;
674 
675   __ mov(optimized_code_entry,
676          FieldOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
677 
678   // Check if the code entry is a Smi. If yes, we interpret it as an
679   // optimisation marker. Otherwise, interpret it as a weak reference to a code
680   // object.
681   __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
682 
683   {
684     // Optimized code slot is an optimization marker.
685 
686     // Fall through if no optimization trigger.
687     __ cmp(optimized_code_entry,
688            Immediate(Smi::FromEnum(OptimizationMarker::kNone)));
689     __ j(equal, &fallthrough);
690 
691     TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
692                                   OptimizationMarker::kLogFirstExecution,
693                                   Runtime::kFunctionFirstExecution);
694     TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
695                                   OptimizationMarker::kCompileOptimized,
696                                   Runtime::kCompileOptimized_NotConcurrent);
697     TailCallRuntimeIfMarkerEquals(
698         masm, optimized_code_entry,
699         OptimizationMarker::kCompileOptimizedConcurrent,
700         Runtime::kCompileOptimized_Concurrent);
701 
702     {
703       // Otherwise, the marker is InOptimizationQueue, so fall through hoping
704       // that an interrupt will eventually update the slot with optimized code.
705       if (FLAG_debug_code) {
706         __ cmp(
707             optimized_code_entry,
708             Immediate(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
709         __ Assert(equal, AbortReason::kExpectedOptimizationSentinel);
710       }
711       __ jmp(&fallthrough);
712     }
713   }
714 
715   {
716     // Optimized code slot is a weak reference.
717     __ bind(&optimized_code_slot_is_weak_ref);
718 
719     __ LoadWeakValue(optimized_code_entry, &fallthrough);
720 
721     __ push(eax);
722     __ push(edx);
723 
724     // Check if the optimized code is marked for deopt. If it is, bailout to a
725     // given label.
726     Label found_deoptimized_code;
727     __ mov(eax,
728            FieldOperand(optimized_code_entry, Code::kCodeDataContainerOffset));
729     __ test(FieldOperand(eax, CodeDataContainer::kKindSpecificFlagsOffset),
730             Immediate(1 << Code::kMarkedForDeoptimizationBit));
731     __ j(not_zero, &found_deoptimized_code);
732 
733     // Optimized code is good, get it into the closure and link the closure into
734     // the optimized functions list, then tail call the optimized code.
735     // The feedback vector is no longer used, so re-use it as a scratch
736     // register.
737     ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
738                                         edx, eax, feedback_vector);
739     static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
740     __ Move(ecx, optimized_code_entry);
741     __ add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
742     __ pop(edx);
743     __ pop(eax);
744     __ jmp(ecx);
745 
746     // Optimized code slot contains deoptimized code, evict it and re-enter the
747     // closure's code.
748     __ bind(&found_deoptimized_code);
749     __ pop(edx);
750     __ pop(eax);
751     GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
752   }
753 
754   // Fall-through if the optimized code cell is clear and there is no
755   // optimization marker.
756   __ bind(&fallthrough);
757 }
758 
759 // Advance the current bytecode offset. This simulates what all bytecode
760 // handlers do upon completion of the underlying operation. Will bail out to a
761 // label if the bytecode (without prefix) is a return bytecode.
AdvanceBytecodeOffsetOrReturn(MacroAssembler * masm,Register bytecode_array,Register bytecode_offset,Register bytecode,Register scratch1,Label * if_return)762 static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
763                                           Register bytecode_array,
764                                           Register bytecode_offset,
765                                           Register bytecode, Register scratch1,
766                                           Label* if_return) {
767   Register bytecode_size_table = scratch1;
768   DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
769                      bytecode));
770 
771   __ Move(bytecode_size_table,
772           Immediate(ExternalReference::bytecode_size_table_address()));
773 
774   // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
775   Label process_bytecode, extra_wide;
776   STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
777   STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
778   STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
779   STATIC_ASSERT(3 ==
780                 static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
781   __ cmpb(bytecode, Immediate(0x3));
782   __ j(above, &process_bytecode, Label::kNear);
783   __ test(bytecode, Immediate(0x1));
784   __ j(not_equal, &extra_wide, Label::kNear);
785 
786   // Load the next bytecode and update table to the wide scaled table.
787   __ inc(bytecode_offset);
788   __ movzx_b(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
789   __ add(bytecode_size_table,
790          Immediate(kIntSize * interpreter::Bytecodes::kBytecodeCount));
791   __ jmp(&process_bytecode, Label::kNear);
792 
793   __ bind(&extra_wide);
794   // Load the next bytecode and update table to the extra wide scaled table.
795   __ inc(bytecode_offset);
796   __ movzx_b(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
797   __ add(bytecode_size_table,
798          Immediate(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
799 
800   __ bind(&process_bytecode);
801 
802 // Bailout to the return label if this is a return bytecode.
803 #define JUMP_IF_EQUAL(NAME)                                             \
804   __ cmpb(bytecode,                                                     \
805           Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
806   __ j(equal, if_return, Label::kNear);
807   RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
808 #undef JUMP_IF_EQUAL
809 
810   // Otherwise, load the size of the current bytecode and advance the offset.
811   __ add(bytecode_offset, Operand(bytecode_size_table, bytecode, times_4, 0));
812 }
813 
814 // Generate code for entering a JS function with the interpreter.
815 // On entry to the function the receiver and arguments have been pushed on the
816 // stack left to right.  The actual argument count matches the formal parameter
817 // count expected by the function.
818 //
819 // The live registers are:
820 //   o edi: the JS function object being called
821 //   o edx: the incoming new target or generator object
822 //   o esi: our context
823 //   o ebp: the caller's frame pointer
824 //   o esp: stack pointer (pointing to return address)
825 //
826 // The function builds an interpreter frame.  See InterpreterFrameConstants in
827 // frames.h for its layout.
Generate_InterpreterEntryTrampoline(MacroAssembler * masm)828 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
829   ProfileEntryHookStub::MaybeCallEntryHook(masm);
830 
831   Register closure = edi;
832   Register feedback_vector = ebx;
833 
834   // Load the feedback vector from the closure.
835   __ mov(feedback_vector,
836          FieldOperand(closure, JSFunction::kFeedbackCellOffset));
837   __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
838   // Read off the optimized code slot in the feedback vector, and if there
839   // is optimized code or an optimization marker, call that instead.
840   MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, ecx);
841 
842   // Open a frame scope to indicate that there is a frame on the stack.  The
843   // MANUAL indicates that the scope shouldn't actually generate code to set
844   // up the frame (that is done below).
845   FrameScope frame_scope(masm, StackFrame::MANUAL);
846   __ push(ebp);  // Caller's frame pointer.
847   __ mov(ebp, esp);
848   __ push(esi);  // Callee's context.
849   __ push(edi);  // Callee's JS function.
850 
851   // Get the bytecode array from the function object (or from the DebugInfo if
852   // it is present) and load it into kInterpreterBytecodeArrayRegister.
853   Label maybe_load_debug_bytecode_array, bytecode_array_loaded,
854       apply_instrumentation;
855   __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
856   __ mov(kInterpreterBytecodeArrayRegister,
857          FieldOperand(eax, SharedFunctionInfo::kFunctionDataOffset));
858   __ Push(eax);
859   GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, eax);
860   __ Pop(eax);
861   __ JumpIfNotSmi(FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset),
862                   &maybe_load_debug_bytecode_array);
863   __ bind(&bytecode_array_loaded);
864 
865   __ inc(FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
866 
867   // Check function data field is actually a BytecodeArray object.
868   if (FLAG_debug_code) {
869     __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
870     __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
871                      eax);
872     __ Assert(
873         equal,
874         AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
875   }
876 
877   // Reset code age.
878   __ mov_b(FieldOperand(kInterpreterBytecodeArrayRegister,
879                         BytecodeArray::kBytecodeAgeOffset),
880            Immediate(BytecodeArray::kNoAgeBytecodeAge));
881 
882   // Push bytecode array.
883   __ push(kInterpreterBytecodeArrayRegister);
884   // Push Smi tagged initial bytecode array offset.
885   __ push(Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag)));
886 
887   // Allocate the local and temporary register file on the stack.
888   {
889     // Load frame size from the BytecodeArray object.
890     __ mov(ebx, FieldOperand(kInterpreterBytecodeArrayRegister,
891                              BytecodeArray::kFrameSizeOffset));
892 
893     // Do a stack check to ensure we don't go over the limit.
894     Label ok;
895     __ mov(ecx, esp);
896     __ sub(ecx, ebx);
897     ExternalReference stack_limit =
898         ExternalReference::address_of_real_stack_limit(masm->isolate());
899     __ cmp(ecx, Operand::StaticVariable(stack_limit));
900     __ j(above_equal, &ok);
901     __ CallRuntime(Runtime::kThrowStackOverflow);
902     __ bind(&ok);
903 
904     // If ok, push undefined as the initial value for all register file entries.
905     Label loop_header;
906     Label loop_check;
907     __ mov(eax, Immediate(masm->isolate()->factory()->undefined_value()));
908     __ jmp(&loop_check);
909     __ bind(&loop_header);
910     // TODO(rmcilroy): Consider doing more than one push per loop iteration.
911     __ push(eax);
912     // Continue loop if not done.
913     __ bind(&loop_check);
914     __ sub(ebx, Immediate(kPointerSize));
915     __ j(greater_equal, &loop_header);
916   }
917 
918   // If the bytecode array has a valid incoming new target or generator object
919   // register, initialize it with incoming value which was passed in edx.
920   Label no_incoming_new_target_or_generator_register;
921   __ mov(eax, FieldOperand(
922                   kInterpreterBytecodeArrayRegister,
923                   BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
924   __ test(eax, eax);
925   __ j(zero, &no_incoming_new_target_or_generator_register);
926   __ mov(Operand(ebp, eax, times_pointer_size, 0), edx);
927   __ bind(&no_incoming_new_target_or_generator_register);
928 
929   // Load accumulator and bytecode offset into registers.
930   __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
931   __ mov(kInterpreterBytecodeOffsetRegister,
932          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
933 
934   // Load the dispatch table into a register and dispatch to the bytecode
935   // handler at the current bytecode offset.
936   Label do_dispatch;
937   __ bind(&do_dispatch);
938   __ mov(kInterpreterDispatchTableRegister,
939          Immediate(ExternalReference::interpreter_dispatch_table_address(
940              masm->isolate())));
941   __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
942                           kInterpreterBytecodeOffsetRegister, times_1, 0));
943   __ mov(
944       kJavaScriptCallCodeStartRegister,
945       Operand(kInterpreterDispatchTableRegister, ebx, times_pointer_size, 0));
946   __ call(kJavaScriptCallCodeStartRegister);
947   masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
948 
949   // Any returns to the entry trampoline are either due to the return bytecode
950   // or the interpreter tail calling a builtin and then a dispatch.
951 
952   // Get bytecode array and bytecode offset from the stack frame.
953   __ mov(kInterpreterBytecodeArrayRegister,
954          Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
955   __ mov(kInterpreterBytecodeOffsetRegister,
956          Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
957   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
958 
959   // Either return, or advance to the next bytecode and dispatch.
960   Label do_return;
961   __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
962                           kInterpreterBytecodeOffsetRegister, times_1, 0));
963   AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
964                                 kInterpreterBytecodeOffsetRegister, ebx, ecx,
965                                 &do_return);
966   __ jmp(&do_dispatch);
967 
968   __ bind(&do_return);
969   // The return value is in eax.
970   LeaveInterpreterFrame(masm, ebx, ecx);
971   __ ret(0);
972 
973   // Load debug copy of the bytecode array if it exists.
974   // kInterpreterBytecodeArrayRegister is already loaded with
975   // SharedFunctionInfo::kFunctionDataOffset.
976   __ bind(&maybe_load_debug_bytecode_array);
977   __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset));
978   __ mov(ecx, FieldOperand(eax, DebugInfo::kDebugBytecodeArrayOffset));
979   __ JumpIfRoot(ecx, Heap::kUndefinedValueRootIndex, &bytecode_array_loaded);
980 
981   __ mov(kInterpreterBytecodeArrayRegister, ecx);
982   __ mov(ecx, FieldOperand(eax, DebugInfo::kFlagsOffset));
983   __ SmiUntag(ecx);
984   __ and_(ecx, Immediate(DebugInfo::kDebugExecutionMode));
985   STATIC_ASSERT(static_cast<int>(DebugInfo::kDebugExecutionMode) ==
986                 static_cast<int>(DebugInfo::kSideEffects));
987   ExternalReference debug_execution_mode =
988       ExternalReference::debug_execution_mode_address(masm->isolate());
989   __ cmp(ecx, Operand::StaticVariable(debug_execution_mode));
990   __ j(equal, &bytecode_array_loaded);
991 
992   __ pop(ecx);  // get JSFunction from stack
993   __ push(ecx);
994   __ push(ebx);  // preserve feedback_vector and bytecode array register
995   __ push(kInterpreterBytecodeArrayRegister);
996   __ push(ecx);  // pass function as argument
997   __ CallRuntime(Runtime::kDebugApplyInstrumentation);
998   __ pop(kInterpreterBytecodeArrayRegister);
999   __ pop(ebx);
1000   __ jmp(&bytecode_array_loaded);
1001 }
1002 
1003 
Generate_InterpreterPushArgs(MacroAssembler * masm,Register array_limit,Register start_address)1004 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
1005                                          Register array_limit,
1006                                          Register start_address) {
1007   // ----------- S t a t e -------------
1008   //  -- start_address : Pointer to the last argument in the args array.
1009   //  -- array_limit : Pointer to one before the first argument in the
1010   //                   args array.
1011   // -----------------------------------
1012   Label loop_header, loop_check;
1013   __ jmp(&loop_check);
1014   __ bind(&loop_header);
1015   __ Push(Operand(start_address, 0));
1016   __ sub(start_address, Immediate(kPointerSize));
1017   __ bind(&loop_check);
1018   __ cmp(start_address, array_limit);
1019   __ j(greater, &loop_header, Label::kNear);
1020 }
1021 
1022 // static
Generate_InterpreterPushArgsThenCallImpl(MacroAssembler * masm,ConvertReceiverMode receiver_mode,InterpreterPushArgsMode mode)1023 void Builtins::Generate_InterpreterPushArgsThenCallImpl(
1024     MacroAssembler* masm, ConvertReceiverMode receiver_mode,
1025     InterpreterPushArgsMode mode) {
1026   DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
1027   // ----------- S t a t e -------------
1028   //  -- eax : the number of arguments (not including the receiver)
1029   //  -- ebx : the address of the first argument to be pushed. Subsequent
1030   //           arguments should be consecutive above this, in the same order as
1031   //           they are to be pushed onto the stack.
1032   //  -- edi : the target to call (can be any Object).
1033   // -----------------------------------
1034   Label stack_overflow;
1035   // Compute the expected number of arguments.
1036   __ mov(ecx, eax);
1037   __ add(ecx, Immediate(1));  // Add one for receiver.
1038 
1039   // Add a stack check before pushing the arguments. We need an extra register
1040   // to perform a stack check. So push it onto the stack temporarily. This
1041   // might cause stack overflow, but it will be detected by the check.
1042   __ Push(edi);
1043   Generate_StackOverflowCheck(masm, ecx, edx, edi, &stack_overflow);
1044   __ Pop(edi);
1045 
1046   // Pop return address to allow tail-call after pushing arguments.
1047   __ Pop(edx);
1048 
1049   // Push "undefined" as the receiver arg if we need to.
1050   if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1051     __ PushRoot(Heap::kUndefinedValueRootIndex);
1052     __ sub(ecx, Immediate(1));  // Subtract one for receiver.
1053   }
1054 
1055   // Find the address of the last argument.
1056   __ shl(ecx, kPointerSizeLog2);
1057   __ neg(ecx);
1058   __ add(ecx, ebx);
1059   Generate_InterpreterPushArgs(masm, ecx, ebx);
1060 
1061   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1062     __ Pop(ebx);                // Pass the spread in a register
1063     __ sub(eax, Immediate(1));  // Subtract one for spread
1064   }
1065 
1066   // Call the target.
1067   __ Push(edx);  // Re-push return address.
1068 
1069   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1070     __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
1071             RelocInfo::CODE_TARGET);
1072   } else {
1073     __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
1074             RelocInfo::CODE_TARGET);
1075   }
1076 
1077   __ bind(&stack_overflow);
1078   {
1079     // Pop the temporary registers, so that return address is on top of stack.
1080     __ Pop(edi);
1081 
1082     __ TailCallRuntime(Runtime::kThrowStackOverflow);
1083 
1084     // This should be unreachable.
1085     __ int3();
1086   }
1087 }
1088 
1089 namespace {
1090 
1091 // This function modified start_addr, and only reads the contents of num_args
1092 // register. scratch1 and scratch2 are used as temporary registers. Their
1093 // original values are restored after the use.
Generate_InterpreterPushZeroAndArgsAndReturnAddress(MacroAssembler * masm,Register num_args,Register start_addr,Register scratch1,Register scratch2,int num_slots_above_ret_addr,Label * stack_overflow)1094 void Generate_InterpreterPushZeroAndArgsAndReturnAddress(
1095     MacroAssembler* masm, Register num_args, Register start_addr,
1096     Register scratch1, Register scratch2, int num_slots_above_ret_addr,
1097     Label* stack_overflow) {
1098   // We have to move return address and the temporary registers above it
1099   // before we can copy arguments onto the stack. To achieve this:
1100   // Step 1: Increment the stack pointer by num_args + 1 (for receiver).
1101   // Step 2: Move the return address and values above it to the top of stack.
1102   // Step 3: Copy the arguments into the correct locations.
1103   //  current stack    =====>    required stack layout
1104   // |             |            | scratch1      | (2) <-- esp(1)
1105   // |             |            | ....          | (2)
1106   // |             |            | scratch-n     | (2)
1107   // |             |            | return addr   | (2)
1108   // |             |            | arg N         | (3)
1109   // | scratch1    | <-- esp    | ....          |
1110   // | ....        |            | arg 1         |
1111   // | scratch-n   |            | arg 0         |
1112   // | return addr |            | receiver slot |
1113 
1114   // Check for stack overflow before we increment the stack pointer.
1115   Generate_StackOverflowCheck(masm, num_args, scratch1, scratch2,
1116                               stack_overflow, true);
1117 
1118 // Step 1 - Update the stack pointer. scratch1 already contains the required
1119 // increment to the stack. i.e. num_args + 1 stack slots. This is computed in
1120 // the Generate_StackOverflowCheck.
1121 
1122 #ifdef _MSC_VER
1123   // TODO(mythria): Move it to macro assembler.
1124   // In windows, we cannot increment the stack size by more than one page
1125   // (mimimum page size is 4KB) without accessing at least one byte on the
1126   // page. Check this:
1127   // https://msdn.microsoft.com/en-us/library/aa227153(v=vs.60).aspx.
1128   const int page_size = 4 * 1024;
1129   Label check_offset, update_stack_pointer;
1130   __ bind(&check_offset);
1131   __ cmp(scratch1, page_size);
1132   __ j(less, &update_stack_pointer);
1133   __ sub(esp, Immediate(page_size));
1134   // Just to touch the page, before we increment further.
1135   __ mov(Operand(esp, 0), Immediate(0));
1136   __ sub(scratch1, Immediate(page_size));
1137   __ jmp(&check_offset);
1138   __ bind(&update_stack_pointer);
1139 #endif
1140 
1141   __ sub(esp, scratch1);
1142 
1143   // Step 2 move return_address and slots above it to the correct locations.
1144   // Move from top to bottom, otherwise we may overwrite when num_args = 0 or 1,
1145   // basically when the source and destination overlap. We at least need one
1146   // extra slot for receiver, so no extra checks are required to avoid copy.
1147   for (int i = 0; i < num_slots_above_ret_addr + 1; i++) {
1148     __ mov(scratch1,
1149            Operand(esp, num_args, times_pointer_size, (i + 1) * kPointerSize));
1150     __ mov(Operand(esp, i * kPointerSize), scratch1);
1151   }
1152 
1153   // Step 3 copy arguments to correct locations.
1154   // Slot meant for receiver contains return address. Reset it so that
1155   // we will not incorrectly interpret return address as an object.
1156   __ mov(Operand(esp, num_args, times_pointer_size,
1157                  (num_slots_above_ret_addr + 1) * kPointerSize),
1158          Immediate(0));
1159   __ mov(scratch1, num_args);
1160 
1161   Label loop_header, loop_check;
1162   __ jmp(&loop_check);
1163   __ bind(&loop_header);
1164   __ mov(scratch2, Operand(start_addr, 0));
1165   __ mov(Operand(esp, scratch1, times_pointer_size,
1166                  num_slots_above_ret_addr * kPointerSize),
1167          scratch2);
1168   __ sub(start_addr, Immediate(kPointerSize));
1169   __ sub(scratch1, Immediate(1));
1170   __ bind(&loop_check);
1171   __ cmp(scratch1, Immediate(0));
1172   __ j(greater, &loop_header, Label::kNear);
1173 }
1174 
1175 }  // end anonymous namespace
1176 
1177 // static
Generate_InterpreterPushArgsThenConstructImpl(MacroAssembler * masm,InterpreterPushArgsMode mode)1178 void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
1179     MacroAssembler* masm, InterpreterPushArgsMode mode) {
1180   // ----------- S t a t e -------------
1181   //  -- eax : the number of arguments (not including the receiver)
1182   //  -- edx : the new target
1183   //  -- edi : the constructor
1184   //  -- ebx : allocation site feedback (if available or undefined)
1185   //  -- ecx : the address of the first argument to be pushed. Subsequent
1186   //           arguments should be consecutive above this, in the same order as
1187   //           they are to be pushed onto the stack.
1188   // -----------------------------------
1189   Label stack_overflow;
1190   // We need two scratch registers. Push edi and edx onto stack.
1191   __ Push(edi);
1192   __ Push(edx);
1193 
1194   // Push arguments and move return address to the top of stack.
1195   // The eax register is readonly. The ecx register will be modified. The edx
1196   // and edi registers will be modified but restored to their original values.
1197   Generate_InterpreterPushZeroAndArgsAndReturnAddress(masm, eax, ecx, edx, edi,
1198                                                       2, &stack_overflow);
1199 
1200   // Restore edi and edx
1201   __ Pop(edx);
1202   __ Pop(edi);
1203 
1204   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1205     __ PopReturnAddressTo(ecx);
1206     __ Pop(ebx);  // Pass the spread in a register
1207     __ PushReturnAddressFrom(ecx);
1208     __ sub(eax, Immediate(1));  // Subtract one for spread
1209   } else {
1210     __ AssertUndefinedOrAllocationSite(ebx);
1211   }
1212 
1213   if (mode == InterpreterPushArgsMode::kArrayFunction) {
1214     // Tail call to the array construct stub (still in the caller
1215     // context at this point).
1216     __ AssertFunction(edi);
1217     ArrayConstructorStub array_constructor_stub(masm->isolate());
1218     __ Jump(array_constructor_stub.GetCode(), RelocInfo::CODE_TARGET);
1219   } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1220     // Call the constructor with unmodified eax, edi, edx values.
1221     __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
1222             RelocInfo::CODE_TARGET);
1223   } else {
1224     DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1225     // Call the constructor with unmodified eax, edi, edx values.
1226     __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
1227   }
1228 
1229   __ bind(&stack_overflow);
1230   {
1231     // Pop the temporary registers, so that return address is on top of stack.
1232     __ Pop(edx);
1233     __ Pop(edi);
1234 
1235     __ TailCallRuntime(Runtime::kThrowStackOverflow);
1236 
1237     // This should be unreachable.
1238     __ int3();
1239   }
1240 }
1241 
Generate_InterpreterEnterBytecode(MacroAssembler * masm)1242 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1243   // Set the return address to the correct point in the interpreter entry
1244   // trampoline.
1245   Label builtin_trampoline, trampoline_loaded;
1246   Smi* interpreter_entry_return_pc_offset(
1247       masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1248   DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
1249 
1250   // If the SFI function_data is an InterpreterData, get the trampoline stored
1251   // in it, otherwise get the trampoline from the builtins list.
1252   __ mov(ebx, Operand(ebp, StandardFrameConstants::kFunctionOffset));
1253   __ mov(ebx, FieldOperand(ebx, JSFunction::kSharedFunctionInfoOffset));
1254   __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFunctionDataOffset));
1255   __ Push(eax);
1256   __ CmpObjectType(ebx, INTERPRETER_DATA_TYPE, eax);
1257   __ j(not_equal, &builtin_trampoline, Label::kNear);
1258 
1259   __ mov(ebx, FieldOperand(ebx, InterpreterData::kInterpreterTrampolineOffset));
1260   __ jmp(&trampoline_loaded, Label::kNear);
1261 
1262   __ bind(&builtin_trampoline);
1263   __ Move(ebx, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
1264 
1265   __ bind(&trampoline_loaded);
1266   __ Pop(eax);
1267   __ add(ebx, Immediate(interpreter_entry_return_pc_offset->value() +
1268                         Code::kHeaderSize - kHeapObjectTag));
1269   __ push(ebx);
1270 
1271   // Initialize the dispatch table register.
1272   __ mov(kInterpreterDispatchTableRegister,
1273          Immediate(ExternalReference::interpreter_dispatch_table_address(
1274              masm->isolate())));
1275 
1276   // Get the bytecode array pointer from the frame.
1277   __ mov(kInterpreterBytecodeArrayRegister,
1278          Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1279 
1280   if (FLAG_debug_code) {
1281     // Check function data field is actually a BytecodeArray object.
1282     __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
1283     __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
1284                      ebx);
1285     __ Assert(
1286         equal,
1287         AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1288   }
1289 
1290   // Get the target bytecode offset from the frame.
1291   __ mov(kInterpreterBytecodeOffsetRegister,
1292          Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1293   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1294 
1295   // Dispatch to the target bytecode.
1296   __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
1297                           kInterpreterBytecodeOffsetRegister, times_1, 0));
1298   __ mov(
1299       kJavaScriptCallCodeStartRegister,
1300       Operand(kInterpreterDispatchTableRegister, ebx, times_pointer_size, 0));
1301   __ jmp(kJavaScriptCallCodeStartRegister);
1302 }
1303 
Generate_InterpreterEnterBytecodeAdvance(MacroAssembler * masm)1304 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1305   // Get bytecode array and bytecode offset from the stack frame.
1306   __ mov(kInterpreterBytecodeArrayRegister,
1307          Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1308   __ mov(kInterpreterBytecodeOffsetRegister,
1309          Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1310   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1311 
1312   // Load the current bytecode
1313   __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
1314                           kInterpreterBytecodeOffsetRegister, times_1, 0));
1315 
1316   // Advance to the next bytecode.
1317   Label if_return;
1318   AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1319                                 kInterpreterBytecodeOffsetRegister, ebx, ecx,
1320                                 &if_return);
1321 
1322   // Convert new bytecode offset to a Smi and save in the stackframe.
1323   __ mov(ebx, kInterpreterBytecodeOffsetRegister);
1324   __ SmiTag(ebx);
1325   __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp), ebx);
1326 
1327   Generate_InterpreterEnterBytecode(masm);
1328 
1329   // We should never take the if_return path.
1330   __ bind(&if_return);
1331   __ Abort(AbortReason::kInvalidBytecodeAdvance);
1332 }
1333 
Generate_InterpreterEnterBytecodeDispatch(MacroAssembler * masm)1334 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1335   Generate_InterpreterEnterBytecode(masm);
1336 }
1337 
Generate_CompileLazyDeoptimizedCode(MacroAssembler * masm)1338 void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
1339   // Set the code slot inside the JSFunction to CompileLazy.
1340   __ Move(ecx, BUILTIN_CODE(masm->isolate(), CompileLazy));
1341   __ mov(FieldOperand(edi, JSFunction::kCodeOffset), ecx);
1342   __ RecordWriteField(edi, JSFunction::kCodeOffset, ecx, ebx, kDontSaveFPRegs,
1343                       OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
1344   // Jump to compile lazy.
1345   Generate_CompileLazy(masm);
1346 }
1347 
GetSharedFunctionInfoCode(MacroAssembler * masm,Register sfi_data,Register scratch1)1348 static void GetSharedFunctionInfoCode(MacroAssembler* masm, Register sfi_data,
1349                                       Register scratch1) {
1350   // Figure out the SFI's code object.
1351   Label done;
1352   Label check_is_bytecode_array;
1353   Label check_is_exported_function_data;
1354   Label check_is_fixed_array;
1355   Label check_is_pre_parsed_scope_data;
1356   Label check_is_function_template_info;
1357   Label check_is_interpreter_data;
1358 
1359   Register data_type = scratch1;
1360 
1361   // IsSmi: Is builtin
1362   __ JumpIfNotSmi(sfi_data, &check_is_bytecode_array);
1363   __ mov(scratch1,
1364          Immediate(ExternalReference::builtins_address(masm->isolate())));
1365   // Avoid untagging the Smi unnecessarily.
1366   STATIC_ASSERT(times_2 == times_pointer_size - kSmiTagSize);
1367   __ mov(sfi_data, Operand(scratch1, sfi_data, times_2, 0));
1368   __ jmp(&done);
1369 
1370   // Get map for subsequent checks.
1371   __ bind(&check_is_bytecode_array);
1372   __ mov(data_type, FieldOperand(sfi_data, HeapObject::kMapOffset));
1373   __ mov(data_type, FieldOperand(data_type, Map::kInstanceTypeOffset));
1374 
1375   // IsBytecodeArray: Interpret bytecode
1376   __ cmpw(data_type, Immediate(BYTECODE_ARRAY_TYPE));
1377   __ j(not_equal, &check_is_exported_function_data);
1378   __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
1379   __ jmp(&done);
1380 
1381   // IsWasmExportedFunctionData: Use the wrapper code
1382   __ bind(&check_is_exported_function_data);
1383   __ cmpw(data_type, Immediate(WASM_EXPORTED_FUNCTION_DATA_TYPE));
1384   __ j(not_equal, &check_is_fixed_array);
1385   __ mov(sfi_data,
1386          FieldOperand(sfi_data, WasmExportedFunctionData::kWrapperCodeOffset));
1387   __ jmp(&done);
1388 
1389   // IsFixedArray: Instantiate using AsmWasmData
1390   __ bind(&check_is_fixed_array);
1391   __ cmpw(data_type, Immediate(FIXED_ARRAY_TYPE));
1392   __ j(not_equal, &check_is_pre_parsed_scope_data);
1393   __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InstantiateAsmJs));
1394   __ jmp(&done);
1395 
1396   // IsPreParsedScopeData: Compile lazy
1397   __ bind(&check_is_pre_parsed_scope_data);
1398   __ cmpw(data_type, Immediate(TUPLE2_TYPE));
1399   __ j(not_equal, &check_is_function_template_info);
1400   __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), CompileLazy));
1401   __ jmp(&done);
1402 
1403   // IsFunctionTemplateInfo: API call
1404   __ bind(&check_is_function_template_info);
1405   __ cmpw(data_type, Immediate(FUNCTION_TEMPLATE_INFO_TYPE));
1406   __ j(not_equal, &check_is_interpreter_data);
1407   __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), HandleApiCall));
1408   __ jmp(&done);
1409 
1410   // IsInterpreterData: Interpret bytecode
1411   __ bind(&check_is_interpreter_data);
1412   if (FLAG_debug_code) {
1413     __ cmpw(data_type, Immediate(INTERPRETER_DATA_TYPE));
1414     __ Check(equal, AbortReason::kInvalidSharedFunctionInfoData);
1415   }
1416   __ mov(sfi_data,
1417          FieldOperand(sfi_data, InterpreterData::kInterpreterTrampolineOffset));
1418 
1419   __ bind(&done);
1420 }
1421 
Generate_CompileLazy(MacroAssembler * masm)1422 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
1423   // ----------- S t a t e -------------
1424   //  -- eax : argument count (preserved for callee)
1425   //  -- edx : new target (preserved for callee)
1426   //  -- edi : target function (preserved for callee)
1427   // -----------------------------------
1428   // First lookup code, maybe we don't need to compile!
1429   Label gotta_call_runtime;
1430 
1431   Register closure = edi;
1432   Register feedback_vector = ebx;
1433 
1434   // Do we have a valid feedback vector?
1435   __ mov(feedback_vector,
1436          FieldOperand(closure, JSFunction::kFeedbackCellOffset));
1437   __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
1438   __ JumpIfRoot(feedback_vector, Heap::kUndefinedValueRootIndex,
1439                 &gotta_call_runtime);
1440 
1441   // Is there an optimization marker or optimized code in the feedback vector?
1442   MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, ecx);
1443 
1444   // We found no optimized code. Infer the code object needed for the SFI.
1445   Register entry = ecx;
1446   __ mov(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1447   __ mov(entry, FieldOperand(entry, SharedFunctionInfo::kFunctionDataOffset));
1448   GetSharedFunctionInfoCode(masm, entry, ebx);
1449 
1450   // If code entry points to anything other than CompileLazy, install that.
1451   __ Move(ebx, masm->CodeObject());
1452   __ cmp(entry, ebx);
1453   __ j(equal, &gotta_call_runtime);
1454 
1455   // Install the SFI's code entry.
1456   __ mov(FieldOperand(closure, JSFunction::kCodeOffset), entry);
1457   __ RecordWriteField(closure, JSFunction::kCodeOffset, entry, ebx,
1458                       kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
1459   __ mov(entry, FieldOperand(closure, JSFunction::kCodeOffset));
1460   __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
1461   __ jmp(entry);
1462 
1463   __ bind(&gotta_call_runtime);
1464   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1465 }
1466 
1467 // Lazy deserialization design doc: http://goo.gl/dxkYDZ.
Generate_DeserializeLazy(MacroAssembler * masm)1468 void Builtins::Generate_DeserializeLazy(MacroAssembler* masm) {
1469   // ----------- S t a t e -------------
1470   //  -- eax : argument count (preserved for callee)
1471   //  -- edx : new target (preserved for callee)
1472   //  -- edi : target function (preserved for callee)
1473   // -----------------------------------
1474 
1475   Label deserialize_in_runtime;
1476 
1477   Register target = edi;  // Must be preserved
1478   Register scratch0 = ebx;
1479   Register scratch1 = ecx;
1480 
1481   CHECK(scratch0 != eax && scratch0 != edx && scratch0 != edi);
1482   CHECK(scratch1 != eax && scratch1 != edx && scratch1 != edi);
1483   CHECK(scratch0 != scratch1);
1484 
1485   // Load the builtin id for lazy deserialization from SharedFunctionInfo.
1486 
1487   __ AssertFunction(target);
1488   __ mov(scratch0, FieldOperand(target, JSFunction::kSharedFunctionInfoOffset));
1489 
1490   __ mov(scratch1,
1491          FieldOperand(scratch0, SharedFunctionInfo::kFunctionDataOffset));
1492   __ AssertSmi(scratch1);
1493 
1494   // The builtin may already have been deserialized. If that is the case, it is
1495   // stored in the builtins table, and we can copy to correct code object to
1496   // both the shared function info and function without calling into runtime.
1497   //
1498   // Otherwise, we need to call into runtime to deserialize.
1499 
1500   {
1501     // Load the code object at builtins_table[builtin_id] into scratch1.
1502 
1503     __ SmiUntag(scratch1);
1504     __ mov(scratch0,
1505            Immediate(ExternalReference::builtins_address(masm->isolate())));
1506     __ mov(scratch1, Operand(scratch0, scratch1, times_pointer_size, 0));
1507 
1508     // Check if the loaded code object has already been deserialized. This is
1509     // the case iff it does not equal DeserializeLazy.
1510 
1511     __ Move(scratch0, masm->CodeObject());
1512     __ cmp(scratch1, scratch0);
1513     __ j(equal, &deserialize_in_runtime);
1514   }
1515 
1516   {
1517     // If we've reached this spot, the target builtin has been deserialized and
1518     // we simply need to copy it over to the target function.
1519 
1520     Register target_builtin = scratch1;
1521 
1522     __ mov(FieldOperand(target, JSFunction::kCodeOffset), target_builtin);
1523     __ push(eax);  // Write barrier clobbers these below.
1524     __ push(target_builtin);
1525     __ RecordWriteField(target, JSFunction::kCodeOffset, target_builtin, eax,
1526                         kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
1527     __ pop(target_builtin);
1528     __ pop(eax);
1529 
1530     // All copying is done. Jump to the deserialized code object.
1531 
1532     __ lea(target_builtin, FieldOperand(target_builtin, Code::kHeaderSize));
1533     __ jmp(target_builtin);
1534   }
1535 
1536   __ bind(&deserialize_in_runtime);
1537   GenerateTailCallToReturnedCode(masm, Runtime::kDeserializeLazy);
1538 }
1539 
Generate_InstantiateAsmJs(MacroAssembler * masm)1540 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1541   // ----------- S t a t e -------------
1542   //  -- eax : argument count (preserved for callee)
1543   //  -- edx : new target (preserved for callee)
1544   //  -- edi : target function (preserved for callee)
1545   // -----------------------------------
1546   Label failed;
1547   {
1548     FrameScope scope(masm, StackFrame::INTERNAL);
1549     // Preserve argument count for later compare.
1550     __ mov(ecx, eax);
1551     // Push the number of arguments to the callee.
1552     __ SmiTag(eax);
1553     __ push(eax);
1554     // Push a copy of the target function and the new target.
1555     __ push(edi);
1556     __ push(edx);
1557 
1558     // The function.
1559     __ push(edi);
1560     // Copy arguments from caller (stdlib, foreign, heap).
1561     Label args_done;
1562     for (int j = 0; j < 4; ++j) {
1563       Label over;
1564       if (j < 3) {
1565         __ cmp(ecx, Immediate(j));
1566         __ j(not_equal, &over, Label::kNear);
1567       }
1568       for (int i = j - 1; i >= 0; --i) {
1569         __ Push(Operand(
1570             ebp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
1571       }
1572       for (int i = 0; i < 3 - j; ++i) {
1573         __ PushRoot(Heap::kUndefinedValueRootIndex);
1574       }
1575       if (j < 3) {
1576         __ jmp(&args_done, Label::kNear);
1577         __ bind(&over);
1578       }
1579     }
1580     __ bind(&args_done);
1581 
1582     // Call runtime, on success unwind frame, and parent frame.
1583     __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1584     // A smi 0 is returned on failure, an object on success.
1585     __ JumpIfSmi(eax, &failed, Label::kNear);
1586 
1587     __ Drop(2);
1588     __ Pop(ecx);
1589     __ SmiUntag(ecx);
1590     scope.GenerateLeaveFrame();
1591 
1592     __ PopReturnAddressTo(ebx);
1593     __ inc(ecx);
1594     __ lea(esp, Operand(esp, ecx, times_pointer_size, 0));
1595     __ PushReturnAddressFrom(ebx);
1596     __ ret(0);
1597 
1598     __ bind(&failed);
1599     // Restore target function and new target.
1600     __ pop(edx);
1601     __ pop(edi);
1602     __ pop(eax);
1603     __ SmiUntag(eax);
1604   }
1605   // On failure, tail call back to regular js by re-calling the function
1606   // which has be reset to the compile lazy builtin.
1607   static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
1608   __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
1609   __ add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1610   __ jmp(ecx);
1611 }
1612 
1613 namespace {
Generate_ContinueToBuiltinHelper(MacroAssembler * masm,bool java_script_builtin,bool with_result)1614 void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
1615                                       bool java_script_builtin,
1616                                       bool with_result) {
1617   const RegisterConfiguration* config(RegisterConfiguration::Default());
1618   int allocatable_register_count = config->num_allocatable_general_registers();
1619   if (with_result) {
1620     // Overwrite the hole inserted by the deoptimizer with the return value from
1621     // the LAZY deopt point.
1622     __ mov(Operand(esp,
1623                    config->num_allocatable_general_registers() * kPointerSize +
1624                        BuiltinContinuationFrameConstants::kFixedFrameSize),
1625            eax);
1626   }
1627   for (int i = allocatable_register_count - 1; i >= 0; --i) {
1628     int code = config->GetAllocatableGeneralCode(i);
1629     __ pop(Register::from_code(code));
1630     if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
1631       __ SmiUntag(Register::from_code(code));
1632     }
1633   }
1634   __ mov(
1635       ebp,
1636       Operand(esp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1637   const int offsetToPC =
1638       BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp - kPointerSize;
1639   __ pop(Operand(esp, offsetToPC));
1640   __ Drop(offsetToPC / kPointerSize);
1641   __ add(Operand(esp, 0), Immediate(Code::kHeaderSize - kHeapObjectTag));
1642   __ ret(0);
1643 }
1644 }  // namespace
1645 
Generate_ContinueToCodeStubBuiltin(MacroAssembler * masm)1646 void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
1647   Generate_ContinueToBuiltinHelper(masm, false, false);
1648 }
1649 
Generate_ContinueToCodeStubBuiltinWithResult(MacroAssembler * masm)1650 void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
1651     MacroAssembler* masm) {
1652   Generate_ContinueToBuiltinHelper(masm, false, true);
1653 }
1654 
Generate_ContinueToJavaScriptBuiltin(MacroAssembler * masm)1655 void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
1656   Generate_ContinueToBuiltinHelper(masm, true, false);
1657 }
1658 
Generate_ContinueToJavaScriptBuiltinWithResult(MacroAssembler * masm)1659 void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
1660     MacroAssembler* masm) {
1661   Generate_ContinueToBuiltinHelper(masm, true, true);
1662 }
1663 
Generate_NotifyDeoptimized(MacroAssembler * masm)1664 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1665   {
1666     FrameScope scope(masm, StackFrame::INTERNAL);
1667     __ CallRuntime(Runtime::kNotifyDeoptimized);
1668     // Tear down internal frame.
1669   }
1670 
1671   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), eax.code());
1672   __ mov(eax, Operand(esp, 1 * kPointerSize));
1673   __ ret(1 * kPointerSize);  // Remove eax.
1674 }
1675 
1676 // static
Generate_FunctionPrototypeApply(MacroAssembler * masm)1677 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1678   // ----------- S t a t e -------------
1679   //  -- eax     : argc
1680   //  -- esp[0]  : return address
1681   //  -- esp[4]  : argArray
1682   //  -- esp[8]  : thisArg
1683   //  -- esp[12] : receiver
1684   // -----------------------------------
1685 
1686   // 1. Load receiver into edi, argArray into ebx (if present), remove all
1687   // arguments from the stack (including the receiver), and push thisArg (if
1688   // present) instead.
1689   {
1690     Label no_arg_array, no_this_arg;
1691     __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
1692     __ mov(ebx, edx);
1693     __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
1694     __ test(eax, eax);
1695     __ j(zero, &no_this_arg, Label::kNear);
1696     {
1697       __ mov(edx, Operand(esp, eax, times_pointer_size, 0));
1698       __ cmp(eax, Immediate(1));
1699       __ j(equal, &no_arg_array, Label::kNear);
1700       __ mov(ebx, Operand(esp, eax, times_pointer_size, -kPointerSize));
1701       __ bind(&no_arg_array);
1702     }
1703     __ bind(&no_this_arg);
1704     __ PopReturnAddressTo(ecx);
1705     __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1706     __ Push(edx);
1707     __ PushReturnAddressFrom(ecx);
1708   }
1709 
1710   // ----------- S t a t e -------------
1711   //  -- ebx    : argArray
1712   //  -- edi    : receiver
1713   //  -- esp[0] : return address
1714   //  -- esp[4] : thisArg
1715   // -----------------------------------
1716 
1717   // 2. We don't need to check explicitly for callable receiver here,
1718   // since that's the first thing the Call/CallWithArrayLike builtins
1719   // will do.
1720 
1721   // 3. Tail call with no arguments if argArray is null or undefined.
1722   Label no_arguments;
1723   __ JumpIfRoot(ebx, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
1724   __ JumpIfRoot(ebx, Heap::kUndefinedValueRootIndex, &no_arguments,
1725                 Label::kNear);
1726 
1727   // 4a. Apply the receiver to the given argArray.
1728   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1729           RelocInfo::CODE_TARGET);
1730 
1731   // 4b. The argArray is either null or undefined, so we tail call without any
1732   // arguments to the receiver.
1733   __ bind(&no_arguments);
1734   {
1735     __ Set(eax, 0);
1736     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1737   }
1738 }
1739 
1740 // static
Generate_FunctionPrototypeCall(MacroAssembler * masm)1741 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1742   // Stack Layout:
1743   // esp[0]           : Return address
1744   // esp[8]           : Argument n
1745   // esp[16]          : Argument n-1
1746   //  ...
1747   // esp[8 * n]       : Argument 1
1748   // esp[8 * (n + 1)] : Receiver (callable to call)
1749   //
1750   // eax contains the number of arguments, n, not counting the receiver.
1751   //
1752   // 1. Make sure we have at least one argument.
1753   {
1754     Label done;
1755     __ test(eax, eax);
1756     __ j(not_zero, &done, Label::kNear);
1757     __ PopReturnAddressTo(ebx);
1758     __ PushRoot(Heap::kUndefinedValueRootIndex);
1759     __ PushReturnAddressFrom(ebx);
1760     __ inc(eax);
1761     __ bind(&done);
1762   }
1763 
1764   // 2. Get the callable to call (passed as receiver) from the stack.
1765   __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
1766 
1767   // 3. Shift arguments and return address one slot down on the stack
1768   //    (overwriting the original receiver).  Adjust argument count to make
1769   //    the original first argument the new receiver.
1770   {
1771     Label loop;
1772     __ mov(ecx, eax);
1773     __ bind(&loop);
1774     __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));
1775     __ mov(Operand(esp, ecx, times_pointer_size, kPointerSize), ebx);
1776     __ dec(ecx);
1777     __ j(not_sign, &loop);  // While non-negative (to copy return address).
1778     __ pop(ebx);            // Discard copy of return address.
1779     __ dec(eax);  // One fewer argument (first argument is new receiver).
1780   }
1781 
1782   // 4. Call the callable.
1783   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1784 }
1785 
Generate_ReflectApply(MacroAssembler * masm)1786 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1787   // ----------- S t a t e -------------
1788   //  -- eax     : argc
1789   //  -- esp[0]  : return address
1790   //  -- esp[4]  : argumentsList
1791   //  -- esp[8]  : thisArgument
1792   //  -- esp[12] : target
1793   //  -- esp[16] : receiver
1794   // -----------------------------------
1795 
1796   // 1. Load target into edi (if present), argumentsList into ebx (if present),
1797   // remove all arguments from the stack (including the receiver), and push
1798   // thisArgument (if present) instead.
1799   {
1800     Label done;
1801     __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
1802     __ mov(edx, edi);
1803     __ mov(ebx, edi);
1804     __ cmp(eax, Immediate(1));
1805     __ j(below, &done, Label::kNear);
1806     __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
1807     __ j(equal, &done, Label::kNear);
1808     __ mov(edx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
1809     __ cmp(eax, Immediate(3));
1810     __ j(below, &done, Label::kNear);
1811     __ mov(ebx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
1812     __ bind(&done);
1813     __ PopReturnAddressTo(ecx);
1814     __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1815     __ Push(edx);
1816     __ PushReturnAddressFrom(ecx);
1817   }
1818 
1819   // ----------- S t a t e -------------
1820   //  -- ebx    : argumentsList
1821   //  -- edi    : target
1822   //  -- esp[0] : return address
1823   //  -- esp[4] : thisArgument
1824   // -----------------------------------
1825 
1826   // 2. We don't need to check explicitly for callable target here,
1827   // since that's the first thing the Call/CallWithArrayLike builtins
1828   // will do.
1829 
1830   // 3. Apply the target to the given argumentsList.
1831   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1832           RelocInfo::CODE_TARGET);
1833 }
1834 
Generate_ReflectConstruct(MacroAssembler * masm)1835 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1836   // ----------- S t a t e -------------
1837   //  -- eax     : argc
1838   //  -- esp[0]  : return address
1839   //  -- esp[4]  : new.target (optional)
1840   //  -- esp[8]  : argumentsList
1841   //  -- esp[12] : target
1842   //  -- esp[16] : receiver
1843   // -----------------------------------
1844 
1845   // 1. Load target into edi (if present), argumentsList into ebx (if present),
1846   // new.target into edx (if present, otherwise use target), remove all
1847   // arguments from the stack (including the receiver), and push thisArgument
1848   // (if present) instead.
1849   {
1850     Label done;
1851     __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
1852     __ mov(edx, edi);
1853     __ mov(ebx, edi);
1854     __ cmp(eax, Immediate(1));
1855     __ j(below, &done, Label::kNear);
1856     __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
1857     __ mov(edx, edi);
1858     __ j(equal, &done, Label::kNear);
1859     __ mov(ebx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
1860     __ cmp(eax, Immediate(3));
1861     __ j(below, &done, Label::kNear);
1862     __ mov(edx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
1863     __ bind(&done);
1864     __ PopReturnAddressTo(ecx);
1865     __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1866     __ PushRoot(Heap::kUndefinedValueRootIndex);
1867     __ PushReturnAddressFrom(ecx);
1868   }
1869 
1870   // ----------- S t a t e -------------
1871   //  -- ebx    : argumentsList
1872   //  -- edx    : new.target
1873   //  -- edi    : target
1874   //  -- esp[0] : return address
1875   //  -- esp[4] : receiver (undefined)
1876   // -----------------------------------
1877 
1878   // 2. We don't need to check explicitly for constructor target here,
1879   // since that's the first thing the Construct/ConstructWithArrayLike
1880   // builtins will do.
1881 
1882   // 3. We don't need to check explicitly for constructor new.target here,
1883   // since that's the second thing the Construct/ConstructWithArrayLike
1884   // builtins will do.
1885 
1886   // 4. Construct the target with the given new.target and argumentsList.
1887   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
1888           RelocInfo::CODE_TARGET);
1889 }
1890 
Generate_InternalArrayConstructor(MacroAssembler * masm)1891 void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
1892   // ----------- S t a t e -------------
1893   //  -- eax : argc
1894   //  -- esp[0] : return address
1895   //  -- esp[4] : last argument
1896   // -----------------------------------
1897   Label generic_array_code;
1898 
1899   if (FLAG_debug_code) {
1900     // Initial map for the builtin InternalArray function should be a map.
1901     __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
1902     // Will both indicate a nullptr and a Smi.
1903     __ test(ebx, Immediate(kSmiTagMask));
1904     __ Assert(not_zero,
1905               AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
1906     __ CmpObjectType(ebx, MAP_TYPE, ecx);
1907     __ Assert(equal,
1908               AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
1909   }
1910 
1911   // Run the native code for the InternalArray function called as a normal
1912   // function.
1913   // tail call a stub
1914   __ mov(ebx, masm->isolate()->factory()->undefined_value());
1915   InternalArrayConstructorStub stub(masm->isolate());
1916   __ TailCallStub(&stub);
1917 }
1918 
Generate_ArrayConstructor(MacroAssembler * masm)1919 void Builtins::Generate_ArrayConstructor(MacroAssembler* masm) {
1920   // ----------- S t a t e -------------
1921   //  -- eax : argc
1922   //  -- edi    : array function
1923   //  -- esp[0] : return address
1924   //  -- esp[4] : last argument
1925   // -----------------------------------
1926   Label generic_array_code;
1927 
1928   if (FLAG_debug_code) {
1929     // Initial map for the builtin Array function should be a map.
1930     __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
1931     // Will both indicate a nullptr and a Smi.
1932     __ test(ebx, Immediate(kSmiTagMask));
1933     __ Assert(not_zero, AbortReason::kUnexpectedInitialMapForArrayFunction);
1934     __ CmpObjectType(ebx, MAP_TYPE, ecx);
1935     __ Assert(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
1936   }
1937 
1938   // ebx is the AllocationSite - here undefined.
1939   __ mov(ebx, masm->isolate()->factory()->undefined_value());
1940   // If edx (new target) is undefined, then this is the 'Call' case, so move
1941   // edi (the constructor) to rdx.
1942   Label call;
1943   __ cmp(edx, ebx);
1944   __ j(not_equal, &call);
1945   __ mov(edx, edi);
1946 
1947   // Run the native code for the Array function called as a normal function.
1948   __ bind(&call);
1949   ArrayConstructorStub stub(masm->isolate());
1950   __ TailCallStub(&stub);
1951 }
1952 
EnterArgumentsAdaptorFrame(MacroAssembler * masm)1953 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1954   __ push(ebp);
1955   __ mov(ebp, esp);
1956 
1957   // Store the arguments adaptor context sentinel.
1958   __ push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1959 
1960   // Push the function on the stack.
1961   __ push(edi);
1962 
1963   // Preserve the number of arguments on the stack. Must preserve eax,
1964   // ebx and ecx because these registers are used when copying the
1965   // arguments and the receiver.
1966   STATIC_ASSERT(kSmiTagSize == 1);
1967   __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
1968   __ push(edi);
1969 
1970   __ Push(Immediate(0));  // Padding.
1971 }
1972 
LeaveArgumentsAdaptorFrame(MacroAssembler * masm)1973 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1974   // Retrieve the number of arguments from the stack.
1975   __ mov(ebx, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1976 
1977   // Leave the frame.
1978   __ leave();
1979 
1980   // Remove caller arguments from the stack.
1981   STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
1982   __ pop(ecx);
1983   __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
1984   __ push(ecx);
1985 }
1986 
1987 // static
Generate_CallOrConstructVarargs(MacroAssembler * masm,Handle<Code> code)1988 void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
1989                                                Handle<Code> code) {
1990   // ----------- S t a t e -------------
1991   //  -- edi    : target
1992   //  -- eax    : number of parameters on the stack (not including the receiver)
1993   //  -- ebx    : arguments list (a FixedArray)
1994   //  -- ecx    : len (number of elements to from args)
1995   //  -- edx    : new.target (checked to be constructor or undefined)
1996   //  -- esp[0] : return address.
1997   // -----------------------------------
1998   __ AssertFixedArray(ebx);
1999 
2000   // We need to preserve eax, edi and ebx.
2001   __ movd(xmm0, edx);
2002   __ movd(xmm1, edi);
2003   __ movd(xmm2, eax);
2004 
2005   // Check for stack overflow.
2006   {
2007     // Check the stack for overflow. We are not trying to catch interruptions
2008     // (i.e. debug break and preemption) here, so check the "real stack limit".
2009     Label done;
2010     ExternalReference real_stack_limit =
2011         ExternalReference::address_of_real_stack_limit(masm->isolate());
2012     __ mov(edx, Operand::StaticVariable(real_stack_limit));
2013     // Make edx the space we have left. The stack might already be overflowed
2014     // here which will cause edx to become negative.
2015     __ neg(edx);
2016     __ add(edx, esp);
2017     __ sar(edx, kPointerSizeLog2);
2018     // Check if the arguments will overflow the stack.
2019     __ cmp(edx, ecx);
2020     __ j(greater, &done, Label::kNear);  // Signed comparison.
2021     __ TailCallRuntime(Runtime::kThrowStackOverflow);
2022     __ bind(&done);
2023   }
2024 
2025   // Push additional arguments onto the stack.
2026   {
2027     __ PopReturnAddressTo(edx);
2028     __ Move(eax, Immediate(0));
2029     Label done, push, loop;
2030     __ bind(&loop);
2031     __ cmp(eax, ecx);
2032     __ j(equal, &done, Label::kNear);
2033     // Turn the hole into undefined as we go.
2034     __ mov(edi,
2035            FieldOperand(ebx, eax, times_pointer_size, FixedArray::kHeaderSize));
2036     __ CompareRoot(edi, Heap::kTheHoleValueRootIndex);
2037     __ j(not_equal, &push, Label::kNear);
2038     __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
2039     __ bind(&push);
2040     __ Push(edi);
2041     __ inc(eax);
2042     __ jmp(&loop);
2043     __ bind(&done);
2044     __ PushReturnAddressFrom(edx);
2045   }
2046 
2047   // Restore eax, edi and edx.
2048   __ movd(eax, xmm2);
2049   __ movd(edi, xmm1);
2050   __ movd(edx, xmm0);
2051 
2052   // Compute the actual parameter count.
2053   __ add(eax, ecx);
2054 
2055   // Tail-call to the actual Call or Construct builtin.
2056   __ Jump(code, RelocInfo::CODE_TARGET);
2057 }
2058 
2059 // static
Generate_CallOrConstructForwardVarargs(MacroAssembler * masm,CallOrConstructMode mode,Handle<Code> code)2060 void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
2061                                                       CallOrConstructMode mode,
2062                                                       Handle<Code> code) {
2063   // ----------- S t a t e -------------
2064   //  -- eax : the number of arguments (not including the receiver)
2065   //  -- edi : the target to call (can be any Object)
2066   //  -- edx : the new target (for [[Construct]] calls)
2067   //  -- ecx : start index (to support rest parameters)
2068   // -----------------------------------
2069 
2070   // Check if new.target has a [[Construct]] internal method.
2071   if (mode == CallOrConstructMode::kConstruct) {
2072     Label new_target_constructor, new_target_not_constructor;
2073     __ JumpIfSmi(edx, &new_target_not_constructor, Label::kNear);
2074     __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
2075     __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
2076               Immediate(Map::IsConstructorBit::kMask));
2077     __ j(not_zero, &new_target_constructor, Label::kNear);
2078     __ bind(&new_target_not_constructor);
2079     {
2080       FrameScope scope(masm, StackFrame::MANUAL);
2081       __ EnterFrame(StackFrame::INTERNAL);
2082       __ Push(edx);
2083       __ CallRuntime(Runtime::kThrowNotConstructor);
2084     }
2085     __ bind(&new_target_constructor);
2086   }
2087 
2088   // Preserve new.target (in case of [[Construct]]).
2089   __ movd(xmm0, edx);
2090 
2091   // Check if we have an arguments adaptor frame below the function frame.
2092   Label arguments_adaptor, arguments_done;
2093   __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2094   __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset),
2095          Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
2096   __ j(equal, &arguments_adaptor, Label::kNear);
2097   {
2098     __ mov(edx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2099     __ mov(edx, FieldOperand(edx, JSFunction::kSharedFunctionInfoOffset));
2100     __ mov(edx,
2101            FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2102     __ mov(ebx, ebp);
2103   }
2104   __ jmp(&arguments_done, Label::kNear);
2105   __ bind(&arguments_adaptor);
2106   {
2107     // Just load the length from the ArgumentsAdaptorFrame.
2108     __ mov(edx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2109     __ SmiUntag(edx);
2110   }
2111   __ bind(&arguments_done);
2112 
2113   Label stack_done;
2114   __ sub(edx, ecx);
2115   __ j(less_equal, &stack_done);
2116   {
2117     // Check for stack overflow.
2118     {
2119       // Check the stack for overflow. We are not trying to catch interruptions
2120       // (i.e. debug break and preemption) here, so check the "real stack
2121       // limit".
2122       Label done;
2123       __ LoadRoot(ecx, Heap::kRealStackLimitRootIndex);
2124       // Make ecx the space we have left. The stack might already be
2125       // overflowed here which will cause ecx to become negative.
2126       __ neg(ecx);
2127       __ add(ecx, esp);
2128       __ sar(ecx, kPointerSizeLog2);
2129       // Check if the arguments will overflow the stack.
2130       __ cmp(ecx, edx);
2131       __ j(greater, &done, Label::kNear);  // Signed comparison.
2132       __ TailCallRuntime(Runtime::kThrowStackOverflow);
2133       __ bind(&done);
2134     }
2135 
2136     // Forward the arguments from the caller frame.
2137     {
2138       Label loop;
2139       __ add(eax, edx);
2140       __ PopReturnAddressTo(ecx);
2141       __ bind(&loop);
2142       {
2143         __ Push(Operand(ebx, edx, times_pointer_size, 1 * kPointerSize));
2144         __ dec(edx);
2145         __ j(not_zero, &loop);
2146       }
2147       __ PushReturnAddressFrom(ecx);
2148     }
2149   }
2150   __ bind(&stack_done);
2151 
2152   // Restore new.target (in case of [[Construct]]).
2153   __ movd(edx, xmm0);
2154 
2155   // Tail-call to the {code} handler.
2156   __ Jump(code, RelocInfo::CODE_TARGET);
2157 }
2158 
2159 // static
Generate_CallFunction(MacroAssembler * masm,ConvertReceiverMode mode)2160 void Builtins::Generate_CallFunction(MacroAssembler* masm,
2161                                      ConvertReceiverMode mode) {
2162   // ----------- S t a t e -------------
2163   //  -- eax : the number of arguments (not including the receiver)
2164   //  -- edi : the function to call (checked to be a JSFunction)
2165   // -----------------------------------
2166   __ AssertFunction(edi);
2167 
2168   // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2169   // Check that the function is not a "classConstructor".
2170   Label class_constructor;
2171   __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2172   __ test(FieldOperand(edx, SharedFunctionInfo::kFlagsOffset),
2173           Immediate(SharedFunctionInfo::IsClassConstructorBit::kMask));
2174   __ j(not_zero, &class_constructor);
2175 
2176   // Enter the context of the function; ToObject has to run in the function
2177   // context, and we also need to take the global proxy from the function
2178   // context in case of conversion.
2179   __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2180   // We need to convert the receiver for non-native sloppy mode functions.
2181   Label done_convert;
2182   __ test(FieldOperand(edx, SharedFunctionInfo::kFlagsOffset),
2183           Immediate(SharedFunctionInfo::IsNativeBit::kMask |
2184                     SharedFunctionInfo::IsStrictBit::kMask));
2185   __ j(not_zero, &done_convert);
2186   {
2187     // ----------- S t a t e -------------
2188     //  -- eax : the number of arguments (not including the receiver)
2189     //  -- edx : the shared function info.
2190     //  -- edi : the function to call (checked to be a JSFunction)
2191     //  -- esi : the function context.
2192     // -----------------------------------
2193 
2194     if (mode == ConvertReceiverMode::kNullOrUndefined) {
2195       // Patch receiver to global proxy.
2196       __ LoadGlobalProxy(ecx);
2197     } else {
2198       Label convert_to_object, convert_receiver;
2199       __ mov(ecx, Operand(esp, eax, times_pointer_size, kPointerSize));
2200       __ JumpIfSmi(ecx, &convert_to_object, Label::kNear);
2201       STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2202       __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ebx);
2203       __ j(above_equal, &done_convert);
2204       if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2205         Label convert_global_proxy;
2206         __ JumpIfRoot(ecx, Heap::kUndefinedValueRootIndex,
2207                       &convert_global_proxy, Label::kNear);
2208         __ JumpIfNotRoot(ecx, Heap::kNullValueRootIndex, &convert_to_object,
2209                          Label::kNear);
2210         __ bind(&convert_global_proxy);
2211         {
2212           // Patch receiver to global proxy.
2213           __ LoadGlobalProxy(ecx);
2214         }
2215         __ jmp(&convert_receiver);
2216       }
2217       __ bind(&convert_to_object);
2218       {
2219         // Convert receiver using ToObject.
2220         // TODO(bmeurer): Inline the allocation here to avoid building the frame
2221         // in the fast case? (fall back to AllocateInNewSpace?)
2222         FrameScope scope(masm, StackFrame::INTERNAL);
2223         __ SmiTag(eax);
2224         __ Push(eax);
2225         __ Push(edi);
2226         __ mov(eax, ecx);
2227         __ Push(esi);
2228         __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
2229                 RelocInfo::CODE_TARGET);
2230         __ Pop(esi);
2231         __ mov(ecx, eax);
2232         __ Pop(edi);
2233         __ Pop(eax);
2234         __ SmiUntag(eax);
2235       }
2236       __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2237       __ bind(&convert_receiver);
2238     }
2239     __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ecx);
2240   }
2241   __ bind(&done_convert);
2242 
2243   // ----------- S t a t e -------------
2244   //  -- eax : the number of arguments (not including the receiver)
2245   //  -- edx : the shared function info.
2246   //  -- edi : the function to call (checked to be a JSFunction)
2247   //  -- esi : the function context.
2248   // -----------------------------------
2249 
2250   __ mov(ebx,
2251          FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2252   ParameterCount actual(eax);
2253   ParameterCount expected(ebx);
2254   __ InvokeFunctionCode(edi, no_reg, expected, actual, JUMP_FUNCTION);
2255   // The function is a "classConstructor", need to raise an exception.
2256   __ bind(&class_constructor);
2257   {
2258     FrameScope frame(masm, StackFrame::INTERNAL);
2259     __ push(edi);
2260     __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2261   }
2262 }
2263 
2264 namespace {
2265 
Generate_PushBoundArguments(MacroAssembler * masm)2266 void Generate_PushBoundArguments(MacroAssembler* masm) {
2267   // ----------- S t a t e -------------
2268   //  -- eax : the number of arguments (not including the receiver)
2269   //  -- edx : new.target (only in case of [[Construct]])
2270   //  -- edi : target (checked to be a JSBoundFunction)
2271   // -----------------------------------
2272 
2273   // Load [[BoundArguments]] into ecx and length of that into ebx.
2274   Label no_bound_arguments;
2275   __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2276   __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
2277   __ SmiUntag(ebx);
2278   __ test(ebx, ebx);
2279   __ j(zero, &no_bound_arguments);
2280   {
2281     // ----------- S t a t e -------------
2282     //  -- eax : the number of arguments (not including the receiver)
2283     //  -- edx : new.target (only in case of [[Construct]])
2284     //  -- edi : target (checked to be a JSBoundFunction)
2285     //  -- ecx : the [[BoundArguments]] (implemented as FixedArray)
2286     //  -- ebx : the number of [[BoundArguments]]
2287     // -----------------------------------
2288 
2289     // Reserve stack space for the [[BoundArguments]].
2290     {
2291       Label done;
2292       __ lea(ecx, Operand(ebx, times_pointer_size, 0));
2293       __ sub(esp, ecx);
2294       // Check the stack for overflow. We are not trying to catch interruptions
2295       // (i.e. debug break and preemption) here, so check the "real stack
2296       // limit".
2297       __ CompareRoot(esp, ecx, Heap::kRealStackLimitRootIndex);
2298       __ j(greater, &done, Label::kNear);  // Signed comparison.
2299       // Restore the stack pointer.
2300       __ lea(esp, Operand(esp, ebx, times_pointer_size, 0));
2301       {
2302         FrameScope scope(masm, StackFrame::MANUAL);
2303         __ EnterFrame(StackFrame::INTERNAL);
2304         __ CallRuntime(Runtime::kThrowStackOverflow);
2305       }
2306       __ bind(&done);
2307     }
2308 
2309     // Adjust effective number of arguments to include return address.
2310     __ inc(eax);
2311 
2312     // Relocate arguments and return address down the stack.
2313     {
2314       Label loop;
2315       __ Set(ecx, 0);
2316       __ lea(ebx, Operand(esp, ebx, times_pointer_size, 0));
2317       __ bind(&loop);
2318       __ movd(xmm0, Operand(ebx, ecx, times_pointer_size, 0));
2319       __ movd(Operand(esp, ecx, times_pointer_size, 0), xmm0);
2320       __ inc(ecx);
2321       __ cmp(ecx, eax);
2322       __ j(less, &loop);
2323     }
2324 
2325     // Copy [[BoundArguments]] to the stack (below the arguments).
2326     {
2327       Label loop;
2328       __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2329       __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
2330       __ SmiUntag(ebx);
2331       __ bind(&loop);
2332       __ dec(ebx);
2333       __ movd(xmm0, FieldOperand(ecx, ebx, times_pointer_size,
2334                                  FixedArray::kHeaderSize));
2335       __ movd(Operand(esp, eax, times_pointer_size, 0), xmm0);
2336       __ lea(eax, Operand(eax, 1));
2337       __ j(greater, &loop);
2338     }
2339 
2340     // Adjust effective number of arguments (eax contains the number of
2341     // arguments from the call plus return address plus the number of
2342     // [[BoundArguments]]), so we need to subtract one for the return address.
2343     __ dec(eax);
2344   }
2345   __ bind(&no_bound_arguments);
2346 }
2347 
2348 }  // namespace
2349 
2350 // static
Generate_CallBoundFunctionImpl(MacroAssembler * masm)2351 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
2352   // ----------- S t a t e -------------
2353   //  -- eax : the number of arguments (not including the receiver)
2354   //  -- edi : the function to call (checked to be a JSBoundFunction)
2355   // -----------------------------------
2356   __ AssertBoundFunction(edi);
2357 
2358   // Patch the receiver to [[BoundThis]].
2359   __ mov(ebx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
2360   __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ebx);
2361 
2362   // Push the [[BoundArguments]] onto the stack.
2363   Generate_PushBoundArguments(masm);
2364 
2365   // Call the [[BoundTargetFunction]] via the Call builtin.
2366   __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2367   __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
2368           RelocInfo::CODE_TARGET);
2369 }
2370 
2371 // static
Generate_Call(MacroAssembler * masm,ConvertReceiverMode mode)2372 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
2373   // ----------- S t a t e -------------
2374   //  -- eax : the number of arguments (not including the receiver)
2375   //  -- edi : the target to call (can be any Object).
2376   // -----------------------------------
2377 
2378   Label non_callable, non_function, non_smi;
2379   __ JumpIfSmi(edi, &non_callable);
2380   __ bind(&non_smi);
2381   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2382   __ j(equal, masm->isolate()->builtins()->CallFunction(mode),
2383        RelocInfo::CODE_TARGET);
2384   __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
2385   __ j(equal, BUILTIN_CODE(masm->isolate(), CallBoundFunction),
2386        RelocInfo::CODE_TARGET);
2387 
2388   // Check if target is a proxy and call CallProxy external builtin
2389   __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
2390             Immediate(Map::IsCallableBit::kMask));
2391   __ j(zero, &non_callable);
2392 
2393   // Call CallProxy external builtin
2394   __ CmpInstanceType(ecx, JS_PROXY_TYPE);
2395   __ j(not_equal, &non_function);
2396   __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);
2397 
2398   // 2. Call to something else, which might have a [[Call]] internal method (if
2399   // not we raise an exception).
2400   __ bind(&non_function);
2401   // Overwrite the original receiver with the (original) target.
2402   __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
2403   // Let the "call_as_function_delegate" take care of the rest.
2404   __ LoadGlobalFunction(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, edi);
2405   __ Jump(masm->isolate()->builtins()->CallFunction(
2406               ConvertReceiverMode::kNotNullOrUndefined),
2407           RelocInfo::CODE_TARGET);
2408 
2409   // 3. Call to something that is not callable.
2410   __ bind(&non_callable);
2411   {
2412     FrameScope scope(masm, StackFrame::INTERNAL);
2413     __ Push(edi);
2414     __ CallRuntime(Runtime::kThrowCalledNonCallable);
2415   }
2416 }
2417 
2418 // static
Generate_ConstructFunction(MacroAssembler * masm)2419 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2420   // ----------- S t a t e -------------
2421   //  -- eax : the number of arguments (not including the receiver)
2422   //  -- edx : the new target (checked to be a constructor)
2423   //  -- edi : the constructor to call (checked to be a JSFunction)
2424   // -----------------------------------
2425   __ AssertConstructor(edi);
2426   __ AssertFunction(edi);
2427 
2428   // Calling convention for function specific ConstructStubs require
2429   // ebx to contain either an AllocationSite or undefined.
2430   __ LoadRoot(ebx, Heap::kUndefinedValueRootIndex);
2431 
2432   Label call_generic_stub;
2433 
2434   // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
2435   __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2436   __ test(FieldOperand(ecx, SharedFunctionInfo::kFlagsOffset),
2437           Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2438   __ j(zero, &call_generic_stub, Label::kNear);
2439 
2440   __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
2441           RelocInfo::CODE_TARGET);
2442 
2443   __ bind(&call_generic_stub);
2444   __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
2445           RelocInfo::CODE_TARGET);
2446 }
2447 
2448 // static
Generate_ConstructBoundFunction(MacroAssembler * masm)2449 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2450   // ----------- S t a t e -------------
2451   //  -- eax : the number of arguments (not including the receiver)
2452   //  -- edx : the new target (checked to be a constructor)
2453   //  -- edi : the constructor to call (checked to be a JSBoundFunction)
2454   // -----------------------------------
2455   __ AssertConstructor(edi);
2456   __ AssertBoundFunction(edi);
2457 
2458   // Push the [[BoundArguments]] onto the stack.
2459   Generate_PushBoundArguments(masm);
2460 
2461   // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2462   {
2463     Label done;
2464     __ cmp(edi, edx);
2465     __ j(not_equal, &done, Label::kNear);
2466     __ mov(edx, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2467     __ bind(&done);
2468   }
2469 
2470   // Construct the [[BoundTargetFunction]] via the Construct builtin.
2471   __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2472   __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
2473 }
2474 
2475 // static
Generate_Construct(MacroAssembler * masm)2476 void Builtins::Generate_Construct(MacroAssembler* masm) {
2477   // ----------- S t a t e -------------
2478   //  -- eax : the number of arguments (not including the receiver)
2479   //  -- edx : the new target (either the same as the constructor or
2480   //           the JSFunction on which new was invoked initially)
2481   //  -- edi : the constructor to call (can be any Object)
2482   // -----------------------------------
2483 
2484   // Check if target is a Smi.
2485   Label non_constructor, non_proxy;
2486   __ JumpIfSmi(edi, &non_constructor, Label::kNear);
2487 
2488   // Check if target has a [[Construct]] internal method.
2489   __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
2490   __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
2491             Immediate(Map::IsConstructorBit::kMask));
2492   __ j(zero, &non_constructor, Label::kNear);
2493 
2494   // Dispatch based on instance type.
2495   __ CmpInstanceType(ecx, JS_FUNCTION_TYPE);
2496   __ j(equal, BUILTIN_CODE(masm->isolate(), ConstructFunction),
2497        RelocInfo::CODE_TARGET);
2498 
2499   // Only dispatch to bound functions after checking whether they are
2500   // constructors.
2501   __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
2502   __ j(equal, BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
2503        RelocInfo::CODE_TARGET);
2504 
2505   // Only dispatch to proxies after checking whether they are constructors.
2506   __ CmpInstanceType(ecx, JS_PROXY_TYPE);
2507   __ j(not_equal, &non_proxy);
2508   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
2509           RelocInfo::CODE_TARGET);
2510 
2511   // Called Construct on an exotic Object with a [[Construct]] internal method.
2512   __ bind(&non_proxy);
2513   {
2514     // Overwrite the original receiver with the (original) target.
2515     __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
2516     // Let the "call_as_constructor_delegate" take care of the rest.
2517     __ LoadGlobalFunction(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, edi);
2518     __ Jump(masm->isolate()->builtins()->CallFunction(),
2519             RelocInfo::CODE_TARGET);
2520   }
2521 
2522   // Called Construct on an Object that doesn't have a [[Construct]] internal
2523   // method.
2524   __ bind(&non_constructor);
2525   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
2526           RelocInfo::CODE_TARGET);
2527 }
2528 
2529 // static
Generate_AllocateInNewSpace(MacroAssembler * masm)2530 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2531   // ----------- S t a t e -------------
2532   //  -- edx    : requested object size (untagged)
2533   //  -- esp[0] : return address
2534   // -----------------------------------
2535   __ SmiTag(edx);
2536   __ PopReturnAddressTo(ecx);
2537   __ Push(edx);
2538   __ PushReturnAddressFrom(ecx);
2539   __ Move(esi, Smi::kZero);
2540   __ TailCallRuntime(Runtime::kAllocateInNewSpace);
2541 }
2542 
2543 // static
Generate_AllocateInOldSpace(MacroAssembler * masm)2544 void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
2545   // ----------- S t a t e -------------
2546   //  -- edx    : requested object size (untagged)
2547   //  -- esp[0] : return address
2548   // -----------------------------------
2549   __ SmiTag(edx);
2550   __ PopReturnAddressTo(ecx);
2551   __ Push(edx);
2552   __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
2553   __ PushReturnAddressFrom(ecx);
2554   __ Move(esi, Smi::kZero);
2555   __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
2556 }
2557 
2558 // static
Generate_Abort(MacroAssembler * masm)2559 void Builtins::Generate_Abort(MacroAssembler* masm) {
2560   // ----------- S t a t e -------------
2561   //  -- edx    : message_id as Smi
2562   //  -- esp[0] : return address
2563   // -----------------------------------
2564   __ PopReturnAddressTo(ecx);
2565   __ Push(edx);
2566   __ PushReturnAddressFrom(ecx);
2567   __ Move(esi, Smi::kZero);
2568   __ TailCallRuntime(Runtime::kAbort);
2569 }
2570 
Generate_ArgumentsAdaptorTrampoline(MacroAssembler * masm)2571 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2572   // ----------- S t a t e -------------
2573   //  -- eax : actual number of arguments
2574   //  -- ebx : expected number of arguments
2575   //  -- edx : new target (passed through to callee)
2576   //  -- edi : function (passed through to callee)
2577   // -----------------------------------
2578 
2579   Label invoke, dont_adapt_arguments, stack_overflow;
2580   __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);
2581 
2582   Label enough, too_few;
2583   __ cmp(eax, ebx);
2584   __ j(less, &too_few);
2585   __ cmp(ebx, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
2586   __ j(equal, &dont_adapt_arguments);
2587 
2588   {  // Enough parameters: Actual >= expected.
2589     __ bind(&enough);
2590     EnterArgumentsAdaptorFrame(masm);
2591     // edi is used as a scratch register. It should be restored from the frame
2592     // when needed.
2593     Generate_StackOverflowCheck(masm, ebx, ecx, edi, &stack_overflow);
2594 
2595     // Copy receiver and all expected arguments.
2596     const int offset = StandardFrameConstants::kCallerSPOffset;
2597     __ lea(edi, Operand(ebp, eax, times_4, offset));
2598     __ mov(eax, -1);  // account for receiver
2599 
2600     Label copy;
2601     __ bind(&copy);
2602     __ inc(eax);
2603     __ push(Operand(edi, 0));
2604     __ sub(edi, Immediate(kPointerSize));
2605     __ cmp(eax, ebx);
2606     __ j(less, &copy);
2607     // eax now contains the expected number of arguments.
2608     __ jmp(&invoke);
2609   }
2610 
2611   {  // Too few parameters: Actual < expected.
2612     __ bind(&too_few);
2613     EnterArgumentsAdaptorFrame(masm);
2614     // edi is used as a scratch register. It should be restored from the frame
2615     // when needed.
2616     Generate_StackOverflowCheck(masm, ebx, ecx, edi, &stack_overflow);
2617 
2618     // Remember expected arguments in ecx.
2619     __ mov(ecx, ebx);
2620 
2621     // Copy receiver and all actual arguments.
2622     const int offset = StandardFrameConstants::kCallerSPOffset;
2623     __ lea(edi, Operand(ebp, eax, times_4, offset));
2624     // ebx = expected - actual.
2625     __ sub(ebx, eax);
2626     // eax = -actual - 1
2627     __ neg(eax);
2628     __ sub(eax, Immediate(1));
2629 
2630     Label copy;
2631     __ bind(&copy);
2632     __ inc(eax);
2633     __ push(Operand(edi, 0));
2634     __ sub(edi, Immediate(kPointerSize));
2635     __ test(eax, eax);
2636     __ j(not_zero, &copy);
2637 
2638     // Fill remaining expected arguments with undefined values.
2639     Label fill;
2640     __ bind(&fill);
2641     __ inc(eax);
2642     __ push(Immediate(masm->isolate()->factory()->undefined_value()));
2643     __ cmp(eax, ebx);
2644     __ j(less, &fill);
2645 
2646     // Restore expected arguments.
2647     __ mov(eax, ecx);
2648   }
2649 
2650   // Call the entry point.
2651   __ bind(&invoke);
2652   // Restore function pointer.
2653   __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2654   // eax : expected number of arguments
2655   // edx : new target (passed through to callee)
2656   // edi : function (passed through to callee)
2657   static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
2658   __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
2659   __ add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2660   __ call(ecx);
2661 
2662   // Store offset of return address for deoptimizer.
2663   masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2664 
2665   // Leave frame and return.
2666   LeaveArgumentsAdaptorFrame(masm);
2667   __ ret(0);
2668 
2669   // -------------------------------------------
2670   // Dont adapt arguments.
2671   // -------------------------------------------
2672   __ bind(&dont_adapt_arguments);
2673   static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
2674   __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
2675   __ add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2676   __ jmp(ecx);
2677 
2678   __ bind(&stack_overflow);
2679   {
2680     FrameScope frame(masm, StackFrame::MANUAL);
2681     __ CallRuntime(Runtime::kThrowStackOverflow);
2682     __ int3();
2683   }
2684 }
2685 
Generate_OnStackReplacementHelper(MacroAssembler * masm,bool has_handler_frame)2686 static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
2687                                               bool has_handler_frame) {
2688   // Lookup the function in the JavaScript frame.
2689   if (has_handler_frame) {
2690     __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2691     __ mov(eax, Operand(eax, JavaScriptFrameConstants::kFunctionOffset));
2692   } else {
2693     __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2694   }
2695 
2696   {
2697     FrameScope scope(masm, StackFrame::INTERNAL);
2698     // Pass function as argument.
2699     __ push(eax);
2700     __ CallRuntime(Runtime::kCompileForOnStackReplacement);
2701   }
2702 
2703   Label skip;
2704   // If the code object is null, just return to the caller.
2705   __ cmp(eax, Immediate(0));
2706   __ j(not_equal, &skip, Label::kNear);
2707   __ ret(0);
2708 
2709   __ bind(&skip);
2710 
2711   // Drop any potential handler frame that is be sitting on top of the actual
2712   // JavaScript frame. This is the case then OSR is triggered from bytecode.
2713   if (has_handler_frame) {
2714     __ leave();
2715   }
2716 
2717   // Load deoptimization data from the code object.
2718   __ mov(ebx, Operand(eax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
2719 
2720   // Load the OSR entrypoint offset from the deoptimization data.
2721   __ mov(ebx, Operand(ebx, FixedArray::OffsetOfElementAt(
2722                                DeoptimizationData::kOsrPcOffsetIndex) -
2723                                kHeapObjectTag));
2724   __ SmiUntag(ebx);
2725 
2726   // Compute the target address = code_obj + header_size + osr_offset
2727   __ lea(eax, Operand(eax, ebx, times_1, Code::kHeaderSize - kHeapObjectTag));
2728 
2729   // Overwrite the return address on the stack.
2730   __ mov(Operand(esp, 0), eax);
2731 
2732   // And "return" to the OSR entry point of the function.
2733   __ ret(0);
2734 }
2735 
Generate_OnStackReplacement(MacroAssembler * masm)2736 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
2737   Generate_OnStackReplacementHelper(masm, false);
2738 }
2739 
Generate_InterpreterOnStackReplacement(MacroAssembler * masm)2740 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
2741   Generate_OnStackReplacementHelper(masm, true);
2742 }
2743 
Generate_WasmCompileLazy(MacroAssembler * masm)2744 void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
2745   {
2746     FrameScope scope(masm, StackFrame::INTERNAL);
2747 
2748     // Save all parameter registers (see wasm-linkage.cc). They might be
2749     // overwritten in the runtime call below. We don't have any callee-saved
2750     // registers in wasm, so no need to store anything else.
2751     for (Register reg : wasm::kGpParamRegisters) {
2752       if (reg == kWasmInstanceRegister) continue;
2753       __ Push(reg);
2754     }
2755     __ sub(esp, Immediate(16 * arraysize(wasm::kFpParamRegisters)));
2756     int offset = 0;
2757     for (DoubleRegister reg : wasm::kFpParamRegisters) {
2758       __ movdqu(Operand(esp, offset), reg);
2759       offset += 16;
2760     }
2761 
2762     // Pass the WASM instance as an explicit argument to WasmCompileLazy.
2763     __ Push(kWasmInstanceRegister);
2764     // Initialize the JavaScript context with 0. CEntry will use it to
2765     // set the current context on the isolate.
2766     __ Move(kContextRegister, Smi::kZero);
2767     __ CallRuntime(Runtime::kWasmCompileLazy);
2768     // The entrypoint address is the first return value.
2769     __ mov(edi, kReturnRegister0);
2770     // The WASM instance is the second return value.
2771     __ mov(kWasmInstanceRegister, kReturnRegister1);
2772 
2773     // Restore registers.
2774     for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
2775       offset -= 16;
2776       __ movdqu(reg, Operand(esp, offset));
2777     }
2778     DCHECK_EQ(0, offset);
2779     __ add(esp, Immediate(16 * arraysize(wasm::kFpParamRegisters)));
2780     for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
2781       if (reg == kWasmInstanceRegister) continue;
2782       __ Pop(reg);
2783     }
2784   }
2785   // Finally, jump to the entrypoint.
2786   __ jmp(edi);
2787 }
2788 
Generate_CEntry(MacroAssembler * masm,int result_size,SaveFPRegsMode save_doubles,ArgvMode argv_mode,bool builtin_exit_frame)2789 void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
2790                                SaveFPRegsMode save_doubles, ArgvMode argv_mode,
2791                                bool builtin_exit_frame) {
2792   // eax: number of arguments including receiver
2793   // ebx: pointer to C function  (C callee-saved)
2794   // ebp: frame pointer  (restored after C call)
2795   // esp: stack pointer  (restored after C call)
2796   // esi: current context (C callee-saved)
2797   // edi: JS function of the caller (C callee-saved)
2798   //
2799   // If argv_mode == kArgvInRegister:
2800   // ecx: pointer to the first argument
2801 
2802   ProfileEntryHookStub::MaybeCallEntryHook(masm);
2803 
2804   // Reserve space on the stack for the three arguments passed to the call. If
2805   // result size is greater than can be returned in registers, also reserve
2806   // space for the hidden argument for the result location, and space for the
2807   // result itself.
2808   int arg_stack_space = 3;
2809 
2810   // Enter the exit frame that transitions from JavaScript to C++.
2811   if (argv_mode == kArgvInRegister) {
2812     DCHECK(save_doubles == kDontSaveFPRegs);
2813     DCHECK(!builtin_exit_frame);
2814     __ EnterApiExitFrame(arg_stack_space);
2815 
2816     // Move argc and argv into the correct registers.
2817     __ mov(esi, ecx);
2818     __ mov(edi, eax);
2819   } else {
2820     __ EnterExitFrame(
2821         arg_stack_space, save_doubles == kSaveFPRegs,
2822         builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
2823   }
2824 
2825   // ebx: pointer to C function  (C callee-saved)
2826   // ebp: frame pointer  (restored after C call)
2827   // esp: stack pointer  (restored after C call)
2828   // edi: number of arguments including receiver  (C callee-saved)
2829   // esi: pointer to the first argument (C callee-saved)
2830 
2831   // Result returned in eax, or eax+edx if result size is 2.
2832 
2833   // Check stack alignment.
2834   if (FLAG_debug_code) {
2835     __ CheckStackAlignment();
2836   }
2837   // Call C function.
2838   __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
2839   __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
2840   __ mov(Operand(esp, 2 * kPointerSize),
2841          Immediate(ExternalReference::isolate_address(masm->isolate())));
2842   __ call(ebx);
2843 
2844   // Result is in eax or edx:eax - do not destroy these registers!
2845 
2846   // Check result for exception sentinel.
2847   Label exception_returned;
2848   __ cmp(eax, masm->isolate()->factory()->exception());
2849   __ j(equal, &exception_returned);
2850 
2851   // Check that there is no pending exception, otherwise we
2852   // should have returned the exception sentinel.
2853   if (FLAG_debug_code) {
2854     __ push(edx);
2855     __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
2856     Label okay;
2857     ExternalReference pending_exception_address = ExternalReference::Create(
2858         IsolateAddressId::kPendingExceptionAddress, masm->isolate());
2859     __ cmp(edx, Operand::StaticVariable(pending_exception_address));
2860     // Cannot use check here as it attempts to generate call into runtime.
2861     __ j(equal, &okay, Label::kNear);
2862     __ int3();
2863     __ bind(&okay);
2864     __ pop(edx);
2865   }
2866 
2867   // Exit the JavaScript to C++ exit frame.
2868   __ LeaveExitFrame(save_doubles == kSaveFPRegs, argv_mode == kArgvOnStack);
2869   __ ret(0);
2870 
2871   // Handling of exception.
2872   __ bind(&exception_returned);
2873 
2874   ExternalReference pending_handler_context_address = ExternalReference::Create(
2875       IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
2876   ExternalReference pending_handler_entrypoint_address =
2877       ExternalReference::Create(
2878           IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
2879   ExternalReference pending_handler_fp_address = ExternalReference::Create(
2880       IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
2881   ExternalReference pending_handler_sp_address = ExternalReference::Create(
2882       IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
2883 
2884   // Ask the runtime for help to determine the handler. This will set eax to
2885   // contain the current pending exception, don't clobber it.
2886   ExternalReference find_handler =
2887       ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
2888   {
2889     FrameScope scope(masm, StackFrame::MANUAL);
2890     __ PrepareCallCFunction(3, eax);
2891     __ mov(Operand(esp, 0 * kPointerSize), Immediate(0));  // argc.
2892     __ mov(Operand(esp, 1 * kPointerSize), Immediate(0));  // argv.
2893     __ mov(Operand(esp, 2 * kPointerSize),
2894            Immediate(ExternalReference::isolate_address(masm->isolate())));
2895     __ CallCFunction(find_handler, 3);
2896   }
2897 
2898   // Retrieve the handler context, SP and FP.
2899   __ mov(esi, Operand::StaticVariable(pending_handler_context_address));
2900   __ mov(esp, Operand::StaticVariable(pending_handler_sp_address));
2901   __ mov(ebp, Operand::StaticVariable(pending_handler_fp_address));
2902 
2903   // If the handler is a JS frame, restore the context to the frame. Note that
2904   // the context will be set to (esi == 0) for non-JS frames.
2905   Label skip;
2906   __ test(esi, esi);
2907   __ j(zero, &skip, Label::kNear);
2908   __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
2909   __ bind(&skip);
2910 
2911   // Reset the masking register. This is done independent of the underlying
2912   // feature flag {FLAG_branch_load_poisoning} to make the snapshot work with
2913   // both configurations. It is safe to always do this, because the underlying
2914   // register is caller-saved and can be arbitrarily clobbered.
2915   __ ResetSpeculationPoisonRegister();
2916 
2917   // Compute the handler entry address and jump to it.
2918   __ mov(edi, Operand::StaticVariable(pending_handler_entrypoint_address));
2919   __ jmp(edi);
2920 }
2921 
Generate_DoubleToI(MacroAssembler * masm)2922 void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
2923   Label check_negative, process_64_bits, done;
2924 
2925   // Account for return address and saved regs.
2926   const int kArgumentOffset = 4 * kPointerSize;
2927 
2928   MemOperand mantissa_operand(MemOperand(esp, kArgumentOffset));
2929   MemOperand exponent_operand(
2930       MemOperand(esp, kArgumentOffset + kDoubleSize / 2));
2931 
2932   // The result is returned on the stack.
2933   MemOperand return_operand = mantissa_operand;
2934 
2935   Register scratch1 = ebx;
2936 
2937   // Since we must use ecx for shifts below, use some other register (eax)
2938   // to calculate the result.
2939   Register result_reg = eax;
2940   // Save ecx if it isn't the return register and therefore volatile, or if it
2941   // is the return register, then save the temp register we use in its stead for
2942   // the result.
2943   Register save_reg = eax;
2944   __ push(ecx);
2945   __ push(scratch1);
2946   __ push(save_reg);
2947 
2948   __ mov(scratch1, mantissa_operand);
2949   if (CpuFeatures::IsSupported(SSE3)) {
2950     CpuFeatureScope scope(masm, SSE3);
2951     // Load x87 register with heap number.
2952     __ fld_d(mantissa_operand);
2953   }
2954   __ mov(ecx, exponent_operand);
2955 
2956   __ and_(ecx, HeapNumber::kExponentMask);
2957   __ shr(ecx, HeapNumber::kExponentShift);
2958   __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
2959   __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
2960   __ j(below, &process_64_bits);
2961 
2962   // Result is entirely in lower 32-bits of mantissa
2963   int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
2964   if (CpuFeatures::IsSupported(SSE3)) {
2965     __ fstp(0);
2966   }
2967   __ sub(ecx, Immediate(delta));
2968   __ xor_(result_reg, result_reg);
2969   __ cmp(ecx, Immediate(31));
2970   __ j(above, &done);
2971   __ shl_cl(scratch1);
2972   __ jmp(&check_negative);
2973 
2974   __ bind(&process_64_bits);
2975   if (CpuFeatures::IsSupported(SSE3)) {
2976     CpuFeatureScope scope(masm, SSE3);
2977     // Reserve space for 64 bit answer.
2978     __ sub(esp, Immediate(kDoubleSize));  // Nolint.
2979     // Do conversion, which cannot fail because we checked the exponent.
2980     __ fisttp_d(Operand(esp, 0));
2981     __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result
2982     __ add(esp, Immediate(kDoubleSize));
2983     __ jmp(&done);
2984   } else {
2985     // Result must be extracted from shifted 32-bit mantissa
2986     __ sub(ecx, Immediate(delta));
2987     __ neg(ecx);
2988     __ mov(result_reg, exponent_operand);
2989     __ and_(result_reg,
2990             Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
2991     __ add(result_reg,
2992            Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
2993     __ shrd_cl(scratch1, result_reg);
2994     __ shr_cl(result_reg);
2995     __ test(ecx, Immediate(32));
2996     __ cmov(not_equal, scratch1, result_reg);
2997   }
2998 
2999   // If the double was negative, negate the integer result.
3000   __ bind(&check_negative);
3001   __ mov(result_reg, scratch1);
3002   __ neg(result_reg);
3003   __ cmp(exponent_operand, Immediate(0));
3004   __ cmov(greater, result_reg, scratch1);
3005 
3006   // Restore registers
3007   __ bind(&done);
3008   __ mov(return_operand, result_reg);
3009   __ pop(save_reg);
3010   __ pop(scratch1);
3011   __ pop(ecx);
3012   __ ret(0);
3013 }
3014 
Generate_MathPowInternal(MacroAssembler * masm)3015 void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
3016   const Register exponent = MathPowTaggedDescriptor::exponent();
3017   DCHECK(exponent == eax);
3018   const Register scratch = ecx;
3019   const XMMRegister double_result = xmm3;
3020   const XMMRegister double_base = xmm2;
3021   const XMMRegister double_exponent = xmm1;
3022   const XMMRegister double_scratch = xmm4;
3023 
3024   Label call_runtime, done, exponent_not_smi, int_exponent;
3025 
3026   // Save 1 in double_result - we need this several times later on.
3027   __ mov(scratch, Immediate(1));
3028   __ Cvtsi2sd(double_result, scratch);
3029 
3030   Label fast_power, try_arithmetic_simplification;
3031   __ DoubleToI(exponent, double_exponent, double_scratch,
3032                &try_arithmetic_simplification, &try_arithmetic_simplification);
3033   __ jmp(&int_exponent);
3034 
3035   __ bind(&try_arithmetic_simplification);
3036   // Skip to runtime if possibly NaN (indicated by the indefinite integer).
3037   __ cvttsd2si(exponent, Operand(double_exponent));
3038   __ cmp(exponent, Immediate(0x1));
3039   __ j(overflow, &call_runtime);
3040 
3041   // Using FPU instructions to calculate power.
3042   Label fast_power_failed;
3043   __ bind(&fast_power);
3044   __ fnclex();  // Clear flags to catch exceptions later.
3045   // Transfer (B)ase and (E)xponent onto the FPU register stack.
3046   __ sub(esp, Immediate(kDoubleSize));
3047   __ movsd(Operand(esp, 0), double_exponent);
3048   __ fld_d(Operand(esp, 0));  // E
3049   __ movsd(Operand(esp, 0), double_base);
3050   __ fld_d(Operand(esp, 0));  // B, E
3051 
3052   // Exponent is in st(1) and base is in st(0)
3053   // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
3054   // FYL2X calculates st(1) * log2(st(0))
3055   __ fyl2x();    // X
3056   __ fld(0);     // X, X
3057   __ frndint();  // rnd(X), X
3058   __ fsub(1);    // rnd(X), X-rnd(X)
3059   __ fxch(1);    // X - rnd(X), rnd(X)
3060   // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
3061   __ f2xm1();   // 2^(X-rnd(X)) - 1, rnd(X)
3062   __ fld1();    // 1, 2^(X-rnd(X)) - 1, rnd(X)
3063   __ faddp(1);  // 2^(X-rnd(X)), rnd(X)
3064   // FSCALE calculates st(0) * 2^st(1)
3065   __ fscale();  // 2^X, rnd(X)
3066   __ fstp(1);   // 2^X
3067   // Bail out to runtime in case of exceptions in the status word.
3068   __ fnstsw_ax();
3069   __ test_b(eax, Immediate(0x5F));  // We check for all but precision exception.
3070   __ j(not_zero, &fast_power_failed, Label::kNear);
3071   __ fstp_d(Operand(esp, 0));
3072   __ movsd(double_result, Operand(esp, 0));
3073   __ add(esp, Immediate(kDoubleSize));
3074   __ jmp(&done);
3075 
3076   __ bind(&fast_power_failed);
3077   __ fninit();
3078   __ add(esp, Immediate(kDoubleSize));
3079   __ jmp(&call_runtime);
3080 
3081   // Calculate power with integer exponent.
3082   __ bind(&int_exponent);
3083   const XMMRegister double_scratch2 = double_exponent;
3084   __ mov(scratch, exponent);                 // Back up exponent.
3085   __ movsd(double_scratch, double_base);     // Back up base.
3086   __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.
3087 
3088   // Get absolute value of exponent.
3089   Label no_neg, while_true, while_false;
3090   __ test(scratch, scratch);
3091   __ j(positive, &no_neg, Label::kNear);
3092   __ neg(scratch);
3093   __ bind(&no_neg);
3094 
3095   __ j(zero, &while_false, Label::kNear);
3096   __ shr(scratch, 1);
3097   // Above condition means CF==0 && ZF==0.  This means that the
3098   // bit that has been shifted out is 0 and the result is not 0.
3099   __ j(above, &while_true, Label::kNear);
3100   __ movsd(double_result, double_scratch);
3101   __ j(zero, &while_false, Label::kNear);
3102 
3103   __ bind(&while_true);
3104   __ shr(scratch, 1);
3105   __ mulsd(double_scratch, double_scratch);
3106   __ j(above, &while_true, Label::kNear);
3107   __ mulsd(double_result, double_scratch);
3108   __ j(not_zero, &while_true);
3109 
3110   __ bind(&while_false);
3111   // scratch has the original value of the exponent - if the exponent is
3112   // negative, return 1/result.
3113   __ test(exponent, exponent);
3114   __ j(positive, &done);
3115   __ divsd(double_scratch2, double_result);
3116   __ movsd(double_result, double_scratch2);
3117   // Test whether result is zero.  Bail out to check for subnormal result.
3118   // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
3119   __ xorps(double_scratch2, double_scratch2);
3120   __ ucomisd(double_scratch2, double_result);  // Result cannot be NaN.
3121   // double_exponent aliased as double_scratch2 has already been overwritten
3122   // and may not have contained the exponent value in the first place when the
3123   // exponent is a smi.  We reset it with exponent value before bailing out.
3124   __ j(not_equal, &done);
3125   __ Cvtsi2sd(double_exponent, exponent);
3126 
3127   // Returning or bailing out.
3128   __ bind(&call_runtime);
3129   {
3130     AllowExternalCallThatCantCauseGC scope(masm);
3131     __ PrepareCallCFunction(4, scratch);
3132     __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
3133     __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
3134     __ CallCFunction(ExternalReference::power_double_double_function(), 4);
3135   }
3136   // Return value is in st(0) on ia32.
3137   // Store it into the (fixed) result register.
3138   __ sub(esp, Immediate(kDoubleSize));
3139   __ fstp_d(Operand(esp, 0));
3140   __ movsd(double_result, Operand(esp, 0));
3141   __ add(esp, Immediate(kDoubleSize));
3142 
3143   __ bind(&done);
3144   __ ret(0);
3145 }
3146 
3147 #undef __
3148 
3149 }  // namespace internal
3150 }  // namespace v8
3151 
3152 #endif  // V8_TARGET_ARCH_IA32
3153