// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/base/adapters.h"
#include "src/code-factory.h"
#include "src/counters.h"
#include "src/deoptimizer.h"
#include "src/frame-constants.h"
#include "src/frames.h"
#include "src/objects-inl.h"
#include "src/objects/debug-objects.h"
#include "src/wasm/wasm-linkage.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  __ LoadAddress(rbx, ExternalReference::Create(address));
  if (exit_frame_type == BUILTIN_EXIT) {
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK(exit_frame_type == EXIT);
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
            RelocInfo::CODE_TARGET);
  }
}

namespace {

void AdaptorWithExitFrameType(MacroAssembler* masm,
                              Builtins::ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rbx                 : entry point
  //  -- rdi                 : target
  //  -- rdx                 : new.target
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------
  __ AssertFunction(rdi);

  // The logic contained here is mirrored for TurboFan inlining in
  // JSTypedLowering::ReduceJSCall{Function,Construct}. Keep these in sync.

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // CEntry expects rax to contain the number of arguments including the
  // receiver and the extra arguments.
  __ addp(rax, Immediate(BuiltinExitFrameConstants::kNumExtraArgsWithReceiver));

  // Unconditionally insert argc, target and new target as extra arguments. They
  // will be used by stack frame iterators when constructing the stack trace.
  __ PopReturnAddressTo(kScratchRegister);
  __ Integer32ToSmi(rax, rax);
  __ PushRoot(Heap::kTheHoleValueRootIndex);  // Padding.
  __ Push(rax);
  __ SmiToInteger32(rax, rax);
  __ Push(rdi);
  __ Push(rdx);
  __ PushReturnAddressFrom(kScratchRegister);
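  // Resulting stack layout (a sketch, top of stack first): return address,
  // new.target, target, smi-tagged argc, padding, then the original arguments
  // with the receiver deepest. Note that rax is smi-tagged only for the push
  // and untagged again right away, since CEntry expects an int32 count.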

  // Jump to the C entry runtime stub directly here instead of using
  // JumpToExternalReference because rbx is loaded by Generate_Adaptor.
  Handle<Code> code =
      CodeFactory::CEntry(masm->isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
                          exit_frame_type == Builtins::BUILTIN_EXIT);
  __ Jump(code, RelocInfo::CODE_TARGET);
}
}  // namespace

void Builtins::Generate_AdaptorWithExitFrame(MacroAssembler* masm) {
  AdaptorWithExitFrameType(masm, EXIT);
}

void Builtins::Generate_AdaptorWithBuiltinExitFrame(MacroAssembler* masm) {
  AdaptorWithExitFrameType(masm, BUILTIN_EXIT);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);
    // Function is also the parameter to the runtime call.
    __ Push(rdi);

    __ CallRuntime(function_id, 1);
    __ movp(rcx, rax);

    // Restore target function and new target.
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
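  // rcx now holds the Code object returned by the runtime call; compute the
  // address of its first instruction (just past the Code header) and tail
  // call it.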
  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);
}

namespace {

void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  //  -- rdx: new target
  //  -- rsi: context
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ Integer32ToSmi(rcx, rax);
    __ Push(rsi);
    __ Push(rcx);

    // The receiver for the builtin/api call.
    __ PushRoot(Heap::kTheHoleValueRootIndex);

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    // ----------- S t a t e -------------
    //  --                rax: number of arguments (untagged)
    //  --                rdi: constructor function
    //  --                rdx: new target
    //  --                rbx: pointer to last argument
    //  --                rcx: counter
    //  -- sp[0*kPointerSize]: the hole (receiver)
    //  -- sp[1*kPointerSize]: number of arguments (tagged)
    //  -- sp[2*kPointerSize]: context
    // -----------------------------------
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop, Label::kNear);

    // Call the function.
    // rax: number of arguments (untagged)
    // rdi: constructor function
    // rdx: new target
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
    // Restore smi-tagged arguments count from the frame.
    __ movp(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));

    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
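  // The extra 1 * kPointerSize in the displacement drops the implicit
  // receiver, which sits on the stack below the arguments.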
  __ PushReturnAddressFrom(rcx);

  __ ret(0);
}
}  // namespace

// The construct stub for ES5 constructor functions and ES6 class constructors.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments (untagged)
  //  -- rdi: constructor function
  //  -- rdx: new target
  //  -- rsi: context
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);
    Label post_instantiation_deopt_entry, not_create_implicit_receiver;

    // Preserve the incoming parameters on the stack.
    __ Integer32ToSmi(rcx, rax);
    __ Push(rsi);
    __ Push(rcx);
    __ Push(rdi);
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ Push(rdx);

    // ----------- S t a t e -------------
    //  --         sp[0*kPointerSize]: new target
    //  --         sp[1*kPointerSize]: padding
    //  -- rdi and sp[2*kPointerSize]: constructor function
    //  --         sp[3*kPointerSize]: argument count
    //  --         sp[4*kPointerSize]: context
    // -----------------------------------

    __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testl(FieldOperand(rbx, SharedFunctionInfo::kFlagsOffset),
             Immediate(SharedFunctionInfo::IsDerivedConstructorBit::kMask));
    __ j(not_zero, &not_create_implicit_receiver, Label::kNear);

    // If not derived class constructor: Allocate the new receiver object.
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
    __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
            RelocInfo::CODE_TARGET);
    __ jmp(&post_instantiation_deopt_entry, Label::kNear);

    // Else: use TheHoleValue as receiver for constructor call
    __ bind(&not_create_implicit_receiver);
    __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);

    // ----------- S t a t e -------------
    //  -- rax                          implicit receiver
    //  -- Slot 4 / sp[0*kPointerSize]  new target
    //  -- Slot 3 / sp[1*kPointerSize]  padding
    //  -- Slot 2 / sp[2*kPointerSize]  constructor function
    //  -- Slot 1 / sp[3*kPointerSize]  number of arguments (tagged)
    //  -- Slot 0 / sp[4*kPointerSize]  context
    // -----------------------------------
    // Deoptimizer enters here.
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());
    __ bind(&post_instantiation_deopt_entry);

    // Restore new target.
    __ Pop(rdx);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ Push(rax);
    __ Push(rax);

    // ----------- S t a t e -------------
    //  -- sp[0*kPointerSize]  implicit receiver
    //  -- sp[1*kPointerSize]  implicit receiver
    //  -- sp[2*kPointerSize]  padding
    //  -- sp[3*kPointerSize]  constructor function
    //  -- sp[4*kPointerSize]  number of arguments (tagged)
    //  -- sp[5*kPointerSize]  context
    // -----------------------------------

    // Restore constructor function and argument count.
    __ movp(rdi, Operand(rbp, ConstructFrameConstants::kConstructorOffset));
    __ SmiToInteger32(rax,
                      Operand(rbp, ConstructFrameConstants::kLengthOffset));

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    // ----------- S t a t e -------------
    //  --                        rax: number of arguments (untagged)
    //  --                        rdx: new target
    //  --                        rbx: pointer to last argument
    //  --                        rcx: counter (untagged)
    //  --         sp[0*kPointerSize]: implicit receiver
    //  --         sp[1*kPointerSize]: implicit receiver
    //  --         sp[2*kPointerSize]: padding
    //  -- rdi and sp[3*kPointerSize]: constructor function
    //  --         sp[4*kPointerSize]: number of arguments (tagged)
    //  --         sp[5*kPointerSize]: context
    // -----------------------------------
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop, Label::kNear);

    // Call the function.
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);

    // ----------- S t a t e -------------
    //  -- rax                 constructor result
    //  -- sp[0*kPointerSize]  implicit receiver
    //  -- sp[1*kPointerSize]  padding
    //  -- sp[2*kPointerSize]  constructor function
    //  -- sp[3*kPointerSize]  number of arguments
    //  -- sp[4*kPointerSize]  context
    // -----------------------------------

    // Store offset of return address for deoptimizer.
    masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
        masm->pc_offset());

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, do_throw, leave_frame;

    // If the result is undefined, we jump out to using the implicit receiver.
    __ JumpIfRoot(rax, Heap::kUndefinedValueRootIndex, &use_receiver,
                  Label::kNear);

    // Otherwise we do a smi check and fall through to check if the return value
    // is a valid receiver.

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(rax, &use_receiver, Label::kNear);

    // If the type of the result (stored in its map) is less than
    // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
    __ j(above_equal, &leave_frame, Label::kNear);
    __ jmp(&use_receiver, Label::kNear);

    __ bind(&do_throw);
    __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ movp(rax, Operand(rsp, 0 * kPointerSize));
    __ JumpIfRoot(rax, Heap::kTheHoleValueRootIndex, &do_throw, Label::kNear);

    __ bind(&leave_frame);
    // Restore the arguments count.
    __ movp(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }
  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ ret(0);
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(rdi);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

static void Generate_StackOverflowCheck(
    MacroAssembler* masm, Register num_args, Register scratch,
    Label* stack_overflow,
    Label::Distance stack_overflow_distance = Label::kFar) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movp(scratch, rsp);
  // Make scratch the space we have left. The stack might already be overflowed
  // here which will cause scratch to become negative.
  __ subp(scratch, kScratchRegister);
  __ sarp(scratch, Immediate(kPointerSizeLog2));
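  // Worked example with illustrative numbers: given 0x1000 bytes of headroom,
  // scratch = 0x1000 >> kPointerSizeLog2 = 512 slots, so any num_args below
  // 512 passes the signed check below, while num_args >= 512 branches to
  // stack_overflow.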
  // Check if the arguments will overflow the stack.
  __ cmpp(scratch, num_args);
  // Signed comparison.
  __ j(less_equal, stack_overflow, stack_overflow_distance);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Object* new_target
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
// Platform specific argument handling. After this, the stack contains
// an internal frame and the pushed function and receiver, and
// registers rax and rbx hold the argument count and argument array,
// while rdi holds the function pointer, rsi the context, and rdx the
// new.target.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : new_target
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and the receiver onto the stack.
    __ Push(rdx);
    __ Push(r8);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, r9);
    // Load the previous frame pointer to access C arguments on the stack.
    __ movp(kScratchRegister, Operand(rbp, 0));
    __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movp(rdi, rdx);
    // Load the new.target into rdx.
    __ movp(rdx, rcx);
#else   // _WIN64
    // GCC parameters in:
    // rdi : new_target
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    __ movp(r11, rdi);
    __ movp(rdi, rsi);
    // rdi : function
    // r11 : new_target

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and receiver onto the stack.
    __ Push(rdi);
    __ Push(rdx);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, rcx);
    __ movp(rbx, r8);

    // Load the new.target into rdx.
    __ movp(rdx, r11);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function
    // rdx : new.target

    // Check if we have enough stack space to push all arguments.
    // Argument count in rax. Clobbers rcx.
    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
    __ jmp(&enough_stack_space, Label::kNear);

    __ bind(&stack_overflow);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();

    __ bind(&enough_stack_space);

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ Push(Operand(kScratchRegister, 0));  // dereference handle
    __ addp(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpp(rcx, rax);
    __ j(not_equal, &loop, Label::kNear);

    // Invoke the builtin code.
    Handle<Code> builtin = is_construct
                               ? BUILTIN_CODE(masm->isolate(), Construct)
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  __ ret(0);
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

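// Unwraps the bytecode array from an SFI's function data: if |sfi_data| holds
// an InterpreterData, it is replaced with the BytecodeArray stored inside;
// otherwise |sfi_data| is left unchanged.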
static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
                                          Register sfi_data,
                                          Register scratch1) {
  Label done;

  __ CmpObjectType(sfi_data, INTERPRETER_DATA_TYPE, scratch1);
  __ j(not_equal, &done, Label::kNear);
  __ movp(sfi_data,
          FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));

  __ bind(&done);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : the value to pass to the generator
  //  -- rdx    : the JSGeneratorObject to resume
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertGeneratorObject(rdx);

  // Store input value into generator object.
  __ movp(FieldOperand(rdx, JSGeneratorObject::kInputOrDebugPosOffset), rax);
  __ RecordWriteField(rdx, JSGeneratorObject::kInputOrDebugPosOffset, rax, rcx,
                      kDontSaveFPRegs);

  // Load suspended function and context.
  __ movp(rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  Operand debug_hook_operand = masm->ExternalOperand(debug_hook);
  __ cmpb(debug_hook_operand, Immediate(0));
  __ j(not_equal, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  Operand debug_suspended_generator_operand =
      masm->ExternalOperand(debug_suspended_generator);
  __ cmpp(rdx, debug_suspended_generator_operand);
  __ j(equal, &prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Check the stack for overflow. We are not trying to catch interruptions
  // (i.e. debug break and preemption) here, so check the "real stack limit".
  Label stack_overflow;
  __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
  __ j(below, &stack_overflow);

  // Pop return address.
  __ PopReturnAddressTo(rax);

  // Push receiver.
  __ Push(FieldOperand(rdx, JSGeneratorObject::kReceiverOffset));

  // ----------- S t a t e -------------
  //  -- rax    : return address
  //  -- rdx    : the JSGeneratorObject to resume
  //  -- rdi    : generator function
  //  -- rsi    : generator context
  //  -- rsp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movl(rcx,
          FieldOperand(rcx, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label done_loop, loop;
    __ bind(&loop);
    __ subl(rcx, Immediate(1));
    __ j(carry, &done_loop, Label::kNear);
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kFunctionDataOffset));
    GetSharedFunctionInfoBytecode(masm, rcx, kScratchRegister);
    __ CmpObjectType(rcx, BYTECODE_ARRAY_TYPE, rcx);
    __ Assert(equal, AbortReason::kMissingBytecodeArray);
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    __ PushReturnAddressFrom(rax);
    __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movsxlq(rax, FieldOperand(
                        rax, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
    __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
    __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(rcx);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdx);
    __ Push(rdi);
    // Push hole as receiver since we do not use it for stepping.
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(rdx);
    __ movp(rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdx);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(rdx);
    __ movp(rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();  // This should be unreachable.
  }
}

// TODO(juliana): if we remove the code below then we don't need all
// the parameters.
static void ReplaceClosureCodeWithOptimizedCode(
    MacroAssembler* masm, Register optimized_code, Register closure,
    Register scratch1, Register scratch2, Register scratch3) {
  // Store the optimized code in the closure.
  __ movp(FieldOperand(closure, JSFunction::kCodeOffset), optimized_code);
  __ movp(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                  Register scratch2) {
  Register args_count = scratch1;
  Register return_pc = scratch2;

  // Get the arguments + receiver count.
  __ movp(args_count,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movl(args_count,
          FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments.
  __ PopReturnAddressTo(return_pc);
  __ addp(rsp, args_count);
  __ PushReturnAddressFrom(return_pc);
}

// Tail-call |function_id| if |smi_entry| == |marker|
static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
                                          Register smi_entry,
                                          OptimizationMarker marker,
                                          Runtime::FunctionId function_id) {
  Label no_match;
  __ SmiCompare(smi_entry, Smi::FromEnum(marker));
  __ j(not_equal, &no_match);
  GenerateTailCallToReturnedCode(masm, function_id);
  __ bind(&no_match);
}

static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                           Register feedback_vector,
                                           Register scratch1, Register scratch2,
                                           Register scratch3) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee if needed, and caller)
  //  -- rdx : new target (preserved for callee if needed, and caller)
  //  -- rdi : target function (preserved for callee if needed, and caller)
  //  -- feedback vector (preserved for caller if needed)
  // -----------------------------------
  DCHECK(!AreAliased(feedback_vector, rax, rdx, rdi, scratch1, scratch2,
                     scratch3));

  Label optimized_code_slot_is_weak_ref, fallthrough;

  Register closure = rdi;
  Register optimized_code_entry = scratch1;

  __ movp(optimized_code_entry,
          FieldOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));

  // Check if the code entry is a Smi. If yes, we interpret it as an
  // optimization marker. Otherwise, interpret it as a weak reference to a
  // code object.
  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);

  {
    // Optimized code slot is a Smi optimization marker.

    // Fall through if no optimization trigger.
    __ SmiCompare(optimized_code_entry,
                  Smi::FromEnum(OptimizationMarker::kNone));
    __ j(equal, &fallthrough);

    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kLogFirstExecution,
                                  Runtime::kFunctionFirstExecution);
    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kCompileOptimized,
                                  Runtime::kCompileOptimized_NotConcurrent);
    TailCallRuntimeIfMarkerEquals(
        masm, optimized_code_entry,
        OptimizationMarker::kCompileOptimizedConcurrent,
        Runtime::kCompileOptimized_Concurrent);

    {
      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
      // that an interrupt will eventually update the slot with optimized code.
      if (FLAG_debug_code) {
        __ SmiCompare(optimized_code_entry,
                      Smi::FromEnum(OptimizationMarker::kInOptimizationQueue));
        __ Assert(equal, AbortReason::kExpectedOptimizationSentinel);
      }
      __ jmp(&fallthrough);
    }
  }

  {
    // Optimized code slot is a weak reference.
    __ bind(&optimized_code_slot_is_weak_ref);

    __ LoadWeakValue(optimized_code_entry, &fallthrough);

    // Check if the optimized code is marked for deopt. If it is, call the
    // runtime to clear it.
    Label found_deoptimized_code;
    __ movp(scratch2,
            FieldOperand(optimized_code_entry, Code::kCodeDataContainerOffset));
    __ testl(
        FieldOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset),
        Immediate(1 << Code::kMarkedForDeoptimizationBit));
    __ j(not_zero, &found_deoptimized_code);

    // Optimized code is good, get it into the closure and link the closure into
    // the optimized functions list, then tail call the optimized code.
    // The feedback vector is no longer used, so re-use it as a scratch
    // register.
    ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
                                        scratch2, scratch3, feedback_vector);
    static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
    __ Move(rcx, optimized_code_entry);
    __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(rcx);

    // Optimized code slot contains deoptimized code, evict it and re-enter the
    // closure's code.
    __ bind(&found_deoptimized_code);
    GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
  }

  // Fall-through if the optimized code cell is clear and there is no
  // optimization marker.
  __ bind(&fallthrough);
}

// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation. Will bail out to a
// label if the bytecode (without prefix) is a return bytecode.
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register bytecode, Register scratch1,
                                          Label* if_return) {
  Register bytecode_size_table = scratch1;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode));

  __ Move(bytecode_size_table,
          ExternalReference::bytecode_size_table_address());
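  // The offsets used below assume the size table is laid out as three
  // consecutive arrays of kBytecodeCount int entries, one each for the
  // plain, Wide, and ExtraWide operand scales.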

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  Label process_bytecode, extra_wide;
  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  STATIC_ASSERT(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ cmpb(bytecode, Immediate(0x3));
  __ j(above, &process_bytecode, Label::kNear);
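  // Per the STATIC_ASSERTs above, bit 0 distinguishes the two prefix widths:
  // it is clear for kWide/kDebugBreakWide (0, 2) and set for
  // kExtraWide/kDebugBreakExtraWide (1, 3).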
  __ testb(bytecode, Immediate(0x1));
  __ j(not_equal, &extra_wide, Label::kNear);

  // Load the next bytecode and update table to the wide scaled table.
  __ incl(bytecode_offset);
  __ movzxbp(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ addp(bytecode_size_table,
          Immediate(kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ jmp(&process_bytecode, Label::kNear);

  __ bind(&extra_wide);
  // Load the next bytecode and update table to the extra wide scaled table.
  __ incl(bytecode_offset);
  __ movzxbp(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ addp(bytecode_size_table,
          Immediate(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));

  __ bind(&process_bytecode);

// Bailout to the return label if this is a return bytecode.
#define JUMP_IF_EQUAL(NAME)                                             \
  __ cmpb(bytecode,                                                     \
          Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
  __ j(equal, if_return, Label::kFar);
  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
#undef JUMP_IF_EQUAL

  // Otherwise, load the size of the current bytecode and advance the offset.
  __ addl(bytecode_offset, Operand(bytecode_size_table, bytecode, times_4, 0));
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called
//   o rdx: the incoming new target or generator object
//   o rsi: our context
//   o rbp: the caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  Register closure = rdi;
  Register feedback_vector = rbx;

  // Load the feedback vector from the closure.
  __ movp(feedback_vector,
          FieldOperand(closure, JSFunction::kFeedbackCellOffset));
  __ movp(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
  // Read off the optimized code slot in the feedback vector, and if there
  // is optimized code or an optimization marker, call that instead.
  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, rcx, r14, r15);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS function.

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  Label maybe_load_debug_bytecode_array, bytecode_array_loaded;
  __ movp(rax, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister,
                                kScratchRegister);
  __ JumpIfNotSmi(FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset),
                  &maybe_load_debug_bytecode_array);
  __ bind(&bytecode_array_loaded);

  // Increment invocation count for the function.
  __ incl(
      FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rax);
    __ Assert(
        equal,
        AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Reset code age.
  __ movb(FieldOperand(kInterpreterBytecodeArrayRegister,
                       BytecodeArray::kBytecodeAgeOffset),
          Immediate(BytecodeArray::kNoAgeBytecodeAge));

  // Load initial bytecode offset.
  __ movp(kInterpreterBytecodeOffsetRegister,
          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
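  // The offset is biased by -kHeapObjectTag so it can be added directly to
  // the tagged BytecodeArray pointer in the dispatch Operand below.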

  // Push bytecode array and Smi tagged bytecode offset.
  __ Push(kInterpreterBytecodeArrayRegister);
  __ Integer32ToSmi(rcx, kInterpreterBytecodeOffsetRegister);
  __ Push(rcx);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ movp(rax, rsp);
    __ subp(rax, rcx);
    __ CompareRoot(rax, Heap::kRealStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    __ j(always, &loop_check, Label::kNear);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ Push(rax);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ subp(rcx, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header, Label::kNear);
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with incoming value which was passed in rdx.
  Label no_incoming_new_target_or_generator_register;
  __ movsxlq(
      rax,
      FieldOperand(kInterpreterBytecodeArrayRegister,
                   BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ testl(rax, rax);
  __ j(zero, &no_incoming_new_target_or_generator_register, Label::kNear);
  __ movp(Operand(rbp, rax, times_pointer_size, 0), rdx);
  __ bind(&no_incoming_new_target_or_generator_register);

  // Load accumulator with undefined.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);

  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
  __ movzxbp(r11, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(
      kJavaScriptCallCodeStartRegister,
      Operand(kInterpreterDispatchTableRegister, r11, times_pointer_size, 0));
  __ call(kJavaScriptCallCodeStartRegister);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // Any returns to the entry trampoline are either due to the return bytecode
  // or the interpreter tail calling a builtin and then a dispatch.

  // Get bytecode array and bytecode offset from the stack frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
                    kInterpreterBytecodeOffsetRegister);

  // Either return, or advance to the next bytecode and dispatch.
  Label do_return;
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, rbx, rcx,
                                &do_return);
  __ jmp(&do_dispatch);

  __ bind(&do_return);
  // The return value is in rax.
  LeaveInterpreterFrame(masm, rbx, rcx);
  __ ret(0);

  // Load debug copy of the bytecode array if it exists.
  // kInterpreterBytecodeArrayRegister is already loaded with
  // SharedFunctionInfo::kFunctionDataOffset.
  __ bind(&maybe_load_debug_bytecode_array);
  __ movp(rax, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset));
  __ movp(kScratchRegister,
          FieldOperand(rcx, DebugInfo::kDebugBytecodeArrayOffset));
  __ JumpIfRoot(kScratchRegister, Heap::kUndefinedValueRootIndex,
                &bytecode_array_loaded);

  __ movp(kInterpreterBytecodeArrayRegister, kScratchRegister);
  __ SmiToInteger32(rax, FieldOperand(rcx, DebugInfo::kFlagsOffset));
  __ andb(rax, Immediate(DebugInfo::kDebugExecutionMode));
  STATIC_ASSERT(static_cast<int>(DebugInfo::kDebugExecutionMode) ==
                static_cast<int>(DebugInfo::kSideEffects));
  ExternalReference debug_execution_mode_address =
      ExternalReference::debug_execution_mode_address(masm->isolate());
  Operand debug_execution_mode =
      masm->ExternalOperand(debug_execution_mode_address);
  __ cmpb(rax, debug_execution_mode);
  __ j(equal, &bytecode_array_loaded);

  __ Push(closure);
  __ Push(feedback_vector);
  __ Push(kInterpreterBytecodeArrayRegister);
  __ Push(closure);
  __ CallRuntime(Runtime::kDebugApplyInstrumentation);
  __ Pop(kInterpreterBytecodeArrayRegister);
  __ Pop(feedback_vector);
  __ Pop(closure);
  __ jmp(&bytecode_array_loaded);
}

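// Pushes |num_args| values onto the stack, reading the first from
// |start_address| and subsequent ones from successively lower addresses
// (interpreter register-file order). |scratch| is clobbered.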
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args,
                                         Register start_address,
                                         Register scratch) {
  // Find the address of the last argument.
  __ Move(scratch, num_args);
  __ shlp(scratch, Immediate(kPointerSizeLog2));
  __ negp(scratch);
  __ addp(scratch, start_address);

  // Push the arguments.
  Label loop_header, loop_check;
  __ j(always, &loop_check, Label::kNear);
  __ bind(&loop_header);
  __ Push(Operand(start_address, 0));
  __ subp(start_address, Immediate(kPointerSize));
  __ bind(&loop_check);
  __ cmpp(start_address, scratch);
  __ j(greater, &loop_header, Label::kNear);
}

// static
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    InterpreterPushArgsMode mode) {
  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- rdi : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  // Number of values to be pushed.
  __ leal(rcx, Operand(rax, 1));  // Add one for receiver.

  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, rcx, rdx, &stack_overflow);

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ decl(rcx);  // Subtract one for receiver.
  }

  // rbx and rdx will be modified.
  Generate_InterpreterPushArgs(masm, rcx, rbx, rdx);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(rbx);                 // Pass the spread in a register
    __ decl(rax);                // Subtract one for spread
  }

  // Call the target.
  __ PushReturnAddressFrom(kScratchRegister);  // Re-push return address.

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(receiver_mode),
            RelocInfo::CODE_TARGET);
  }

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();
  }
}

// static
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  //  -- rbx : the allocation site feedback if available, undefined otherwise
  //  -- rcx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------
  Label stack_overflow;

  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, rax, r8, &stack_overflow);

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push slot for the receiver to be constructed.
  __ Push(Immediate(0));

  // rcx and r8 will be modified.
  Generate_InterpreterPushArgs(masm, rax, rcx, r8);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(rbx);                 // Pass the spread in a register
    __ decl(rax);                // Subtract one for spread

    // Push return address in preparation for the tail-call.
    __ PushReturnAddressFrom(kScratchRegister);
  } else {
    __ PushReturnAddressFrom(kScratchRegister);
    __ AssertUndefinedOrAllocationSite(rbx);
  }

  if (mode == InterpreterPushArgsMode::kArrayFunction) {
    // Tail call to the array construct stub (still in the caller
    // context at this point).
    __ AssertFunction(rdi);
    // Jump to the constructor function (rax, rbx, rdx passed on).
    ArrayConstructorStub array_constructor_stub(masm->isolate());
    __ Jump(array_constructor_stub.GetCode(), RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor (rax, rdx, rdi passed on).
    __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor (rax, rdx, rdi passed on).
    __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
  }

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();
  }
}

static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Label builtin_trampoline, trampoline_loaded;
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);

  // If the SFI function_data is an InterpreterData, get the trampoline stored
  // in it, otherwise get the trampoline from the builtins list.
  __ movp(rbx, Operand(rbp, StandardFrameConstants::kFunctionOffset));
  __ movp(rbx, FieldOperand(rbx, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rbx, FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpObjectType(rbx, INTERPRETER_DATA_TYPE, kScratchRegister);
  __ j(not_equal, &builtin_trampoline, Label::kNear);

  __ movp(rbx,
          FieldOperand(rbx, InterpreterData::kInterpreterTrampolineOffset));
  __ jmp(&trampoline_loaded, Label::kNear);

  __ bind(&builtin_trampoline);
  __ Move(rbx, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));

  __ bind(&trampoline_loaded);
  __ addp(rbx, Immediate(interpreter_entry_return_pc_offset->value() +
                         Code::kHeaderSize - kHeapObjectTag));
  __ Push(rbx);

  // Initialize dispatch table register.
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the bytecode array pointer from the frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rbx);
    __ Assert(
        equal,
        AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
                    kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ movzxbp(r11, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(
      kJavaScriptCallCodeStartRegister,
      Operand(kInterpreterDispatchTableRegister, r11, times_pointer_size, 0));
  __ jmp(kJavaScriptCallCodeStartRegister);
}

void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Get bytecode array and bytecode offset from the stack frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
                    kInterpreterBytecodeOffsetRegister);

  // Load the current bytecode.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));

  // Advance to the next bytecode.
  Label if_return;
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, rbx, rcx,
                                &if_return);
1294   // Convert new bytecode offset to a Smi and save in the stackframe.
1295   __ Integer32ToSmi(rbx, kInterpreterBytecodeOffsetRegister);
1296   __ movp(Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp), rbx);
1297 
1298   Generate_InterpreterEnterBytecode(masm);
1299 
1300   // We should never take the if_return path.
1301   __ bind(&if_return);
1302   __ Abort(AbortReason::kInvalidBytecodeAdvance);
1303 }
1304 
Generate_InterpreterEnterBytecodeDispatch(MacroAssembler * masm)1305 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1306   Generate_InterpreterEnterBytecode(masm);
1307 }
1308 
1309 // TODO(jupvfranco): investigate whether there is any case where the CompileLazy
1310 // builtin does not set the code field in the JS function. If there isn't then
1311 // we do not need this builtin and can jump directly to CompileLazy.
Generate_CompileLazyDeoptimizedCode(MacroAssembler * masm)1312 void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
1313   // Set the code slot inside the JSFunction to CompileLazy.
1314   __ Move(rcx, BUILTIN_CODE(masm->isolate(), CompileLazy));
1315   __ movp(FieldOperand(rdi, JSFunction::kCodeOffset), rcx);
1316   __ RecordWriteField(rdi, JSFunction::kCodeOffset, rcx, r15, kDontSaveFPRegs,
1317                       OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
1318   // Jump to compile lazy.
1319   Generate_CompileLazy(masm);
1320 }
1321 
GetSharedFunctionInfoCode(MacroAssembler * masm,Register sfi_data,Register scratch1)1322 static void GetSharedFunctionInfoCode(MacroAssembler* masm, Register sfi_data,
1323                                       Register scratch1) {
1324   // Figure out the SFI's code object.
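  // The mapping implemented below is, roughly:
  //   Smi (builtin id)            -> builtins table entry
  //   BytecodeArray               -> InterpreterEntryTrampoline
  //   WasmExportedFunctionData    -> its wrapper code
  //   FixedArray (AsmWasmData)    -> InstantiateAsmJs
  //   Tuple2 (PreParsedScopeData) -> CompileLazy
  //   FunctionTemplateInfo        -> HandleApiCall
  //   InterpreterData             -> its interpreter trampoline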
  Label done;
  Label check_is_bytecode_array;
  Label check_is_exported_function_data;
  Label check_is_fixed_array;
  Label check_is_pre_parsed_scope_data;
  Label check_is_function_template_info;
  Label check_is_interpreter_data;

  Register data_type = scratch1;

  // IsSmi: Is builtin
  __ JumpIfNotSmi(sfi_data, &check_is_bytecode_array);
  __ Move(scratch1, ExternalReference::builtins_address(masm->isolate()));
  SmiIndex index = masm->SmiToIndex(sfi_data, sfi_data, kPointerSizeLog2);
  __ movp(sfi_data, Operand(scratch1, index.reg, index.scale, 0));
  __ j(always, &done);

  // Get map for subsequent checks.
  __ bind(&check_is_bytecode_array);
  __ movp(data_type, FieldOperand(sfi_data, HeapObject::kMapOffset));
  __ movw(data_type, FieldOperand(data_type, Map::kInstanceTypeOffset));

  // IsBytecodeArray: Interpret bytecode
  __ cmpw(data_type, Immediate(BYTECODE_ARRAY_TYPE));
  __ j(not_equal, &check_is_exported_function_data);
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
  __ j(always, &done);

  // IsWasmExportedFunctionData: Use the wrapper code
  __ bind(&check_is_exported_function_data);
  __ cmpw(data_type, Immediate(WASM_EXPORTED_FUNCTION_DATA_TYPE));
  __ j(not_equal, &check_is_fixed_array);
  __ movp(sfi_data,
          FieldOperand(sfi_data, WasmExportedFunctionData::kWrapperCodeOffset));
  __ j(always, &done);

  // IsFixedArray: Instantiate using AsmWasmData
  __ bind(&check_is_fixed_array);
  __ cmpw(data_type, Immediate(FIXED_ARRAY_TYPE));
  __ j(not_equal, &check_is_pre_parsed_scope_data);
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InstantiateAsmJs));
  __ j(always, &done);

  // IsPreParsedScopeData: Compile lazy
  __ bind(&check_is_pre_parsed_scope_data);
  __ cmpw(data_type, Immediate(TUPLE2_TYPE));
  __ j(not_equal, &check_is_function_template_info);
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), CompileLazy));
  __ j(always, &done);

  // IsFunctionTemplateInfo: API call
  __ bind(&check_is_function_template_info);
  __ cmpw(data_type, Immediate(FUNCTION_TEMPLATE_INFO_TYPE));
  __ j(not_equal, &check_is_interpreter_data);
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), HandleApiCall));
  __ j(always, &done);

  // IsInterpreterData: Interpret bytecode with unique interpreter
  __ bind(&check_is_interpreter_data);
  if (FLAG_debug_code) {
    __ cmpw(data_type, Immediate(INTERPRETER_DATA_TYPE));
    __ Check(equal, AbortReason::kInvalidSharedFunctionInfoData);
  }
  __ movp(
      sfi_data,
      FieldOperand(sfi_data, InterpreterData::kInterpreterTrampolineOffset));

  __ bind(&done);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime;

  Register closure = rdi;
  Register feedback_vector = rbx;

  // Do we have a valid feedback vector?
  __ movp(feedback_vector,
          FieldOperand(closure, JSFunction::kFeedbackCellOffset));
  __ movp(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
  __ JumpIfRoot(feedback_vector, Heap::kUndefinedValueRootIndex,
                &gotta_call_runtime);

  // Is there an optimization marker or optimized code in the feedback vector?
  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, rcx, r14, r15);

  // We found no optimized code. Infer the code object needed for the SFI.
  Register entry = rcx;
  __ movp(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ movp(entry, FieldOperand(entry, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoCode(masm, entry, rbx);

  // If the code entry points to anything other than CompileLazy, install it.
  __ Move(rbx, masm->CodeObject());
  __ cmpp(entry, rbx);
  __ j(equal, &gotta_call_runtime);

  // Install the SFI's code entry.
  __ movp(FieldOperand(closure, JSFunction::kCodeOffset), entry);
  __ movp(r14, entry);  // Write barrier clobbers r14 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, r14, r15,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
  __ jmp(entry);

  __ bind(&gotta_call_runtime);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

// Lazy deserialization design doc: http://goo.gl/dxkYDZ.
void Builtins::Generate_DeserializeLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------

  Label deserialize_in_runtime;

  Register target = rdi;  // Must be preserved.
  Register scratch0 = rbx;
  Register scratch1 = r12;

  CHECK(scratch0 != rax && scratch0 != rdx && scratch0 != rdi);
  CHECK(scratch1 != rax && scratch1 != rdx && scratch1 != rdi);
  CHECK(scratch0 != scratch1);

  // Load the builtin id for lazy deserialization from SharedFunctionInfo.

  __ AssertFunction(target);
  __ movp(scratch0,
          FieldOperand(target, JSFunction::kSharedFunctionInfoOffset));

  __ movp(scratch1,
          FieldOperand(scratch0, SharedFunctionInfo::kFunctionDataOffset));
  __ AssertSmi(scratch1);

  // The builtin may already have been deserialized. If that is the case, it
  // is stored in the builtins table, and we can copy the correct code object
  // to both the shared function info and the function without calling into
  // the runtime.
  //
  // Otherwise, we need to call into the runtime to deserialize.

  {
    // Load the code object at builtins_table[builtin_id] into scratch1.

    __ SmiToInteger32(scratch1, scratch1);
    __ Move(scratch0, ExternalReference::builtins_address(masm->isolate()));
    __ movp(scratch1, Operand(scratch0, scratch1, times_pointer_size, 0));

    // Check if the loaded code object has already been deserialized. This is
    // the case iff it does not equal DeserializeLazy.

    __ Move(scratch0, masm->CodeObject());
    __ cmpp(scratch1, scratch0);
    __ j(equal, &deserialize_in_runtime);
  }

  {
    // If we've reached this spot, the target builtin has been deserialized and
    // we simply need to copy it over to the target function.

    Register target_builtin = scratch1;

    __ movp(FieldOperand(target, JSFunction::kCodeOffset), target_builtin);
    __ movp(r14, target_builtin);  // Write barrier clobbers r14 below.
    __ RecordWriteField(target, JSFunction::kCodeOffset, r14, r15,
                        kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

    // All copying is done. Jump to the deserialized code object.

    __ leap(target_builtin, FieldOperand(target_builtin, Code::kHeaderSize));
    __ jmp(target_builtin);
  }

  __ bind(&deserialize_in_runtime);
  GenerateTailCallToReturnedCode(masm, Runtime::kDeserializeLazy);
}

void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve argument count for later compare.
    __ movp(rcx, rax);
    // Push the number of arguments to the callee.
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);

    // The function.
    __ Push(rdi);
    // Copy arguments from caller (stdlib, foreign, heap).
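    // The loop below is unrolled on the actual argument count: for each
    // possible count j (0..3) it pushes the j caller-provided arguments and
    // pads with undefined up to the three expected ones. E.g. a call that
    // passed only |stdlib| pushes stdlib, undefined, undefined.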
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ cmpp(rcx, Immediate(j));
        __ j(not_equal, &over, Label::kNear);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ Push(Operand(
            rbp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(Heap::kUndefinedValueRootIndex);
      }
      if (j < 3) {
        __ jmp(&args_done, Label::kNear);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call the runtime; on success, unwind this frame and the parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A Smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(rax, &failed, Label::kNear);

    __ Drop(2);
    __ Pop(rcx);
    __ SmiToInteger32(rcx, rcx);
    scope.GenerateLeaveFrame();

    __ PopReturnAddressTo(rbx);
    __ incp(rcx);
    __ leap(rsp, Operand(rsp, rcx, times_pointer_size, 0));
    __ PushReturnAddressFrom(rbx);
    __ ret(0);

    __ bind(&failed);
    // Restore target function and new target.
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  // On failure, tail call back to regular JS by re-calling the function,
  // which has been reset to the compile lazy builtin.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rcx);
}

namespace {
void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                      bool java_script_builtin,
                                      bool with_result) {
  const RegisterConfiguration* config(RegisterConfiguration::Default());
  int allocatable_register_count = config->num_allocatable_general_registers();
  if (with_result) {
    // Overwrite the hole inserted by the deoptimizer with the return value
    // from the LAZY deopt point.
    __ movq(Operand(rsp,
                    config->num_allocatable_general_registers() * kPointerSize +
                        BuiltinContinuationFrameConstants::kFixedFrameSize),
            rax);
  }
  for (int i = allocatable_register_count - 1; i >= 0; --i) {
    int code = config->GetAllocatableGeneralCode(i);
    __ popq(Register::from_code(code));
    if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
      __ SmiToInteger32(Register::from_code(code), Register::from_code(code));
    }
  }
  __ movq(
      rbp,
      Operand(rsp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  const int offsetToPC =
      BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp - kPointerSize;
  __ popq(Operand(rsp, offsetToPC));
  __ Drop(offsetToPC / kPointerSize);
  __ addq(Operand(rsp, 0), Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ Ret();
}
}  // namespace

void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}

void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}

void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}

void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), rax.code());
  __ movp(rax, Operand(rsp, kPCOnStackSize));
  __ ret(1 * kPointerSize);  // Remove rax.
}

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : argArray
  //  -- rsp[16] : thisArg
  //  -- rsp[24] : receiver
  // -----------------------------------
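
  // For example, a call |fn.apply(thisArg, [x, y])| arrives here with the
  // receiver slot holding |fn|, thisArg and the argArray as the two stack
  // arguments, and argc == 2.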

  // 1. Load receiver into rdi, argArray into rbx (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg_array, no_this_arg;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ movp(rbx, rdx);
    __ movp(rdi, args.GetReceiverOperand());
    __ testp(rax, rax);
    __ j(zero, &no_this_arg, Label::kNear);
    {
      __ movp(rdx, args.GetArgumentOperand(1));
      __ cmpp(rax, Immediate(1));
      __ j(equal, &no_arg_array, Label::kNear);
      __ movp(rbx, args.GetArgumentOperand(2));
      __ bind(&no_arg_array);
    }
    __ bind(&no_this_arg);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
  }

  // ----------- S t a t e -------------
  //  -- rbx     : argArray
  //  -- rdi     : receiver
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : thisArg
  // -----------------------------------

  // 2. We don't need to check explicitly for callable receiver here,
  // since that's the first thing the Call/CallWithArrayLike builtins
  // will do.

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(rbx, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
  __ JumpIfRoot(rbx, Heap::kUndefinedValueRootIndex, &no_arguments,
                Label::kNear);

  // 4a. Apply the receiver to the given argArray.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver. Since we did not create a frame for
  // Function.prototype.apply() yet, we use a normal Call builtin here.
  __ bind(&no_arguments);
  {
    __ Set(rax, 0);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }
}

// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // Stack Layout:
  // rsp[0]           : Return address
  // rsp[8]           : Argument n
  // rsp[16]          : Argument n-1
  //  ...
  // rsp[8 * n]       : Argument 1
  // rsp[8 * (n + 1)] : Receiver (callable to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
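  //
  // For example, for |fn.call(thisArg, x, y)| the receiver slot holds |fn|
  // and n is 3; after the shift below, thisArg becomes the receiver and fn
  // is called with the arguments x and y.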
  //
  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ testp(rax, rax);
    __ j(not_zero, &done, Label::kNear);
    __ PopReturnAddressTo(rbx);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(rbx);
    __ incp(rax);
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  {
    StackArgumentsAccessor args(rsp, rax);
    __ movp(rdi, args.GetReceiverOperand());
  }

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  {
    Label loop;
    __ movp(rcx, rax);
    StackArgumentsAccessor args(rsp, rcx);
    __ bind(&loop);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ movp(args.GetArgumentOperand(0), rbx);
    __ decp(rcx);
    __ j(not_zero, &loop);              // While non-zero.
    __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
    __ decp(rax);  // One fewer argument (first argument is new receiver).
  }

  // 4. Call the callable.
  // Since we did not create a frame for Function.prototype.call() yet,
  // we use a normal Call builtin here.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : argumentsList
  //  -- rsp[16] : thisArgument
  //  -- rsp[24] : target
  //  -- rsp[32] : receiver
  // -----------------------------------
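
  // For example, |Reflect.apply(fn, thisArgument, [x, y])| arrives here with
  // argc == 3 and the three stack arguments laid out as shown above.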

  // 1. Load target into rdi (if present), argumentsList into rbx (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
    __ movp(rdx, rdi);
    __ movp(rbx, rdi);
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ j(equal, &done, Label::kNear);
    __ movp(rdx, args.GetArgumentOperand(2));  // thisArgument
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(3));  // argumentsList
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
  }

  // ----------- S t a t e -------------
  //  -- rbx     : argumentsList
  //  -- rdi     : target
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : thisArgument
  // -----------------------------------

  // 2. We don't need to check explicitly for callable target here,
  // since that's the first thing the Call/CallWithArrayLike builtins
  // will do.

  // 3. Apply the target to the given argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : new.target (optional)
  //  -- rsp[16] : argumentsList
  //  -- rsp[24] : target
  //  -- rsp[32] : receiver
  // -----------------------------------
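
  // For example, |Reflect.construct(Base, [x], Derived)| arrives here with
  // argc == 3; new.target is optional and defaults to the target below.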

  // 1. Load target into rdi (if present), argumentsList into rbx (if present),
  // new.target into rdx (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
    __ movp(rdx, rdi);
    __ movp(rbx, rdi);
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ movp(rdx, rdi);                         // new.target defaults to target
    __ j(equal, &done, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(2));  // argumentsList
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movp(rdx, args.GetArgumentOperand(3));  // new.target
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(rcx);
  }

  // ----------- S t a t e -------------
  //  -- rbx     : argumentsList
  //  -- rdx     : new.target
  //  -- rdi     : target
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : receiver (undefined)
  // -----------------------------------

  // 2. We don't need to check explicitly for constructor target here,
  // since that's the first thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 3. We don't need to check explicitly for constructor new.target here,
  // since that's the second thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 4. Construct the target with the given new.target and argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // A Smi check will detect both a nullptr and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi,
             AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function, by tail calling its stub.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : array function
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  if (FLAG_debug_code) {
    // The initial map for the builtin Array functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // A Smi check will detect both a nullptr and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, AbortReason::kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
  }

  // rbx is the AllocationSite - here undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  // If rdx (new target) is undefined, then this is the 'Call' case, so move
  // rdi (the constructor) to rdx.
  Label call;
  __ cmpp(rdx, rbx);
  __ j(not_equal, &call);
  __ movp(rdx, rdi);

  // Run the native code for the Array function called as a normal function.
  __ bind(&call);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ pushq(rbp);
  __ movp(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));

  // Push the function on the stack.
  __ Push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ Push(r8);

  __ Push(Immediate(0));  // Padding.
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. The number is a Smi.
  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movp(rsp, rbp);
  __ popq(rbp);

  // Remove caller arguments from the stack.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
}

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : requested object size (untagged)
  //  -- rsp[0] : return address
  // -----------------------------------
  __ Integer32ToSmi(rdx, rdx);
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ PushReturnAddressFrom(rcx);
  __ Move(rsi, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : requested object size (untagged)
  //  -- rsp[0] : return address
  // -----------------------------------
  __ Integer32ToSmi(rdx, rdx);
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ PushReturnAddressFrom(rcx);
  __ Move(rsi, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : message_id as Smi
  //  -- rsp[0] : return address
  // -----------------------------------
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ PushReturnAddressFrom(rcx);
  __ Move(rsi, Smi::kZero);
  __ TailCallRuntime(Runtime::kAbort);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdx : new target (passed through to callee)
  //  -- rdi : function (passed through to callee)
  // -----------------------------------
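
  // For example, calling a function declared with three parameters as |f(1)|
  // enters this trampoline with rax == 1 and rbx == 3; the "too few" path
  // below copies the receiver and the one actual argument, then pads the two
  // missing parameters with undefined.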

  Label invoke, dont_adapt_arguments, stack_overflow;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label enough, too_few;
  __ cmpp(rax, rbx);
  __ j(less, &too_few);
  __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    // The registers rcx and r8 will be modified. The register rbx is only
    // read.
    Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rax, 0));
    __ subp(rax, Immediate(kPointerSize));
    __ cmpp(r8, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);

    EnterArgumentsAdaptorFrame(masm);
    // The registers rcx and r8 will be modified. The register rbx is only
    // read.
    Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rdi, 0));
    __ subp(rdi, Immediate(kPointerSize));
    __ cmpp(r8, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incp(r8);
    __ Push(kScratchRegister);
    __ cmpp(r8, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movp(rdi, Operand(rbp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ movp(rax, rbx);
  // rax : expected number of arguments
  // rdx : new target (passed through to callee)
  // rdi : function (passed through to callee)
  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(rcx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rcx);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();
  }
}

// static
void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
                                               Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- rdi    : target
  //  -- rax    : number of parameters on the stack (not including the
  //              receiver)
  //  -- rbx    : arguments list (a FixedArray)
  //  -- rcx    : len (number of elements to push from args)
  //  -- rdx    : new.target (for [[Construct]])
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFixedArray(rbx);

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
    __ movp(r8, rsp);
    // Make r8 the space we have left. The stack might already be overflowed
    // here which will cause r8 to become negative.
    __ subp(r8, kScratchRegister);
    __ sarp(r8, Immediate(kPointerSizeLog2));
    // Check if the arguments will overflow the stack.
    __ cmpp(r8, rcx);
    __ j(greater, &done, Label::kNear);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // Push additional arguments onto the stack.
  {
    __ PopReturnAddressTo(r8);
    __ Set(r9, 0);
    Label done, push, loop;
    __ bind(&loop);
    __ cmpl(r9, rcx);
    __ j(equal, &done, Label::kNear);
    // Turn the hole into undefined as we go.
    __ movp(r11,
            FieldOperand(rbx, r9, times_pointer_size, FixedArray::kHeaderSize));
    __ CompareRoot(r11, Heap::kTheHoleValueRootIndex);
    __ j(not_equal, &push, Label::kNear);
    __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
    __ bind(&push);
    __ Push(r11);
    __ incl(r9);
    __ jmp(&loop);
    __ bind(&done);
    __ PushReturnAddressFrom(r8);
    __ addq(rax, r9);
  }

  // Tail-call to the actual Call or Construct builtin.
  __ Jump(code, RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
                                                      CallOrConstructMode mode,
                                                      Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (for [[Construct]] calls)
  //  -- rdi : the target to call (can be any Object)
  //  -- rcx : start index (to support rest parameters)
  // -----------------------------------

  // Check if new.target has a [[Construct]] internal method.
  if (mode == CallOrConstructMode::kConstruct) {
    Label new_target_constructor, new_target_not_constructor;
    __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
    __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
    __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
             Immediate(Map::IsConstructorBit::kMask));
    __ j(not_zero, &new_target_constructor, Label::kNear);
    __ bind(&new_target_not_constructor);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ Push(rdx);
      __ CallRuntime(Runtime::kThrowNotConstructor);
    }
    __ bind(&new_target_constructor);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ cmpp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
          Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    __ movp(r8, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ movp(r8, FieldOperand(r8, JSFunction::kSharedFunctionInfoOffset));
    __ movl(r8,
            FieldOperand(r8, SharedFunctionInfo::kFormalParameterCountOffset));
    __ movp(rbx, rbp);
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    __ SmiToInteger32(
        r8, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  }
  __ bind(&arguments_done);

  Label stack_done, stack_overflow;
  __ subl(r8, rcx);
  __ j(less_equal, &stack_done);
  {
    // Check for stack overflow.
    Generate_StackOverflowCheck(masm, r8, rcx, &stack_overflow, Label::kNear);

    // Forward the arguments from the caller frame.
    {
      Label loop;
      __ addl(rax, r8);
      __ PopReturnAddressTo(rcx);
      __ bind(&loop);
      {
        StackArgumentsAccessor args(rbx, r8, ARGUMENTS_DONT_CONTAIN_RECEIVER);
        __ Push(args.GetArgumentOperand(0));
        __ decl(r8);
        __ j(not_zero, &loop);
      }
      __ PushReturnAddressFrom(rcx);
    }
  }
  __ jmp(&stack_done, Label::kNear);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ bind(&stack_done);

  // Tail-call to the {code} handler.
  __ Jump(code, RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);
  __ AssertFunction(rdi);

  // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList )
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
           Immediate(SharedFunctionInfo::IsClassConstructorBit::kMask));
  __ j(not_zero, &class_constructor);

  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the shared function info.
  //  -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
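  // E.g. for a sloppy-mode function f, |f.call(null)| observes the global
  // proxy as |this|, and a primitive receiver such as a string is wrapped
  // via ToObject.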
  Label done_convert;
  __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
           Immediate(SharedFunctionInfo::IsNativeBit::kMask |
                     SharedFunctionInfo::IsStrictBit::kMask));
  __ j(not_zero, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- rax : the number of arguments (not including the receiver)
    //  -- rdx : the shared function info.
    //  -- rdi : the function to call (checked to be a JSFunction)
    //  -- rsi : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(rcx);
    } else {
      Label convert_to_object, convert_receiver;
      __ movp(rcx, args.GetReceiverOperand());
      __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
      __ j(above_equal, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy, Label::kNear);
        __ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object,
                         Label::kNear);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(rcx);
        }
        __ jmp(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ Integer32ToSmi(rax, rax);
        __ Push(rax);
        __ Push(rdi);
        __ movp(rax, rcx);
        __ Push(rsi);
        __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
                RelocInfo::CODE_TARGET);
        __ Pop(rsi);
        __ movp(rcx, rax);
        __ Pop(rdi);
        __ Pop(rax);
        __ SmiToInteger32(rax, rax);
      }
      __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ movp(args.GetReceiverOperand(), rcx);
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the shared function info.
  //  -- rdi : the function to call (checked to be a JSFunction)
  //  -- rsi : the function context.
  // -----------------------------------

  __ movsxlq(
      rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(rax);
  ParameterCount expected(rbx);

  __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION);

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}

namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : new.target (only in case of [[Construct]])
  //  -- rdi : target (checked to be a JSBoundFunction)
  // -----------------------------------
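
  // For example, given |g = f.bind(thisArg, 1, 2)|, a call |g(3)| arrives
  // here with one stack argument; the two [[BoundArguments]] are spliced in
  // ahead of it below, so that f is finally invoked with the arguments
  // 1, 2, 3.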

  // Load [[BoundArguments]] into rcx and the length of that into rbx.
  Label no_bound_arguments;
  __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
  __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
  __ testl(rbx, rbx);
  __ j(zero, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- rax : the number of arguments (not including the receiver)
    //  -- rdx : new.target (only in case of [[Construct]])
    //  -- rdi : target (checked to be a JSBoundFunction)
    //  -- rcx : the [[BoundArguments]] (implemented as FixedArray)
    //  -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ leap(kScratchRegister, Operand(rbx, times_pointer_size, 0));
      __ subp(rsp, kScratchRegister);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
      __ j(greater, &done, Label::kNear);  // Signed comparison.
      // Restore the stack pointer.
      __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Adjust effective number of arguments to include return address.
    __ incl(rax);

    // Relocate arguments and return address down the stack.
    {
      Label loop;
      __ Set(rcx, 0);
      __ leap(rbx, Operand(rsp, rbx, times_pointer_size, 0));
      __ bind(&loop);
      __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
      __ movp(Operand(rsp, rcx, times_pointer_size, 0), kScratchRegister);
      __ incl(rcx);
      __ cmpl(rcx, rax);
      __ j(less, &loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
      __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
      __ bind(&loop);
      __ decl(rbx);
      __ movp(kScratchRegister, FieldOperand(rcx, rbx, times_pointer_size,
                                             FixedArray::kHeaderSize));
      __ movp(Operand(rsp, rax, times_pointer_size, 0), kScratchRegister);
      __ leal(rax, Operand(rax, 1));
      __ j(greater, &loop);
    }

    // Adjust effective number of arguments (rax contains the number of
    // arguments from the call plus return address plus the number of
    // [[BoundArguments]]), so we need to subtract one for the return address.
    __ decl(rax);
  }
  __ bind(&no_bound_arguments);
}

}  // namespace

// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(rdi);

  // Patch the receiver to [[BoundThis]].
  StackArgumentsAccessor args(rsp, rax);
  __ movp(rbx, FieldOperand(rdi, JSBoundFunction::kBoundThisOffset));
  __ movp(args.GetReceiverOperand(), rbx);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the target to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);

  Label non_callable;
  __ JumpIfSmi(rdi, &non_callable);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET, equal);

  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
          RelocInfo::CODE_TARGET, equal);

  // Check if target has a [[Call]] internal method.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(Map::IsCallableBit::kMask));
  __ j(zero, &non_callable, Label::kNear);

  // Check if the target is a proxy, and call the CallProxy external builtin.
  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET,
          equal);

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).

  // Overwrite the original receiver with the (original) target.
  __ movp(args.GetReceiverOperand(), rdi);
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (checked to be a constructor)
  //  -- rdi : the constructor to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertConstructor(rdi);
  __ AssertFunction(rdi);

  // The calling convention for function-specific ConstructStubs requires
  // rbx to contain either an AllocationSite or undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);

  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testl(FieldOperand(rcx, SharedFunctionInfo::kFlagsOffset),
           Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
          RelocInfo::CODE_TARGET, not_zero);

  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (checked to be a constructor)
  //  -- rdi : the constructor to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertConstructor(rdi);
  __ AssertBoundFunction(rdi);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ cmpp(rdi, rdx);
    __ j(not_equal, &done, Label::kNear);
    __ movp(rdx,
            FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  // -----------------------------------
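
  // For example, |new f()| arrives here with rdx == rdi == f, whereas a
  // super() call in a derived class constructor re-enters here with rdi set
  // to the base class and rdx still holding the original new.target.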
2585   StackArgumentsAccessor args(rsp, rax);
2586 
2587   // Check if target is a Smi.
2588   Label non_constructor;
2589   __ JumpIfSmi(rdi, &non_constructor);
2590 
2591   // Check if target has a [[Construct]] internal method.
2592   __ movq(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
2593   __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2594            Immediate(Map::IsConstructorBit::kMask));
2595   __ j(zero, &non_constructor);
2596 
2597   // Dispatch based on instance type.
2598   __ CmpInstanceType(rcx, JS_FUNCTION_TYPE);
2599   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
2600           RelocInfo::CODE_TARGET, equal);
2601 
2602   // Only dispatch to bound functions after checking whether they are
2603   // constructors.
2604   __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
2605   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
2606           RelocInfo::CODE_TARGET, equal);
2607 
2608   // Only dispatch to proxies after checking whether they are constructors.
2609   __ CmpInstanceType(rcx, JS_PROXY_TYPE);
2610   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy), RelocInfo::CODE_TARGET,
2611           equal);
2612 
2613   // Called Construct on an exotic Object with a [[Construct]] internal method.
2614   {
2615     // Overwrite the original receiver with the (original) target.
2616     __ movp(args.GetReceiverOperand(), rdi);
2617     // Let the "call_as_constructor_delegate" take care of the rest.
2618     __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
2619     __ Jump(masm->isolate()->builtins()->CallFunction(),
2620             RelocInfo::CODE_TARGET);
2621   }
2622 
2623   // Called Construct on an Object that doesn't have a [[Construct]] internal
2624   // method.
2625   __ bind(&non_constructor);
2626   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
2627           RelocInfo::CODE_TARGET);
2628 }
2629 
Generate_OnStackReplacementHelper(MacroAssembler * masm,bool has_handler_frame)2630 static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
2631                                               bool has_handler_frame) {
2632   // Lookup the function in the JavaScript frame.
2633   if (has_handler_frame) {
2634     __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2635     __ movp(rax, Operand(rax, JavaScriptFrameConstants::kFunctionOffset));
2636   } else {
2637     __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
2638   }
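  // (With a handler frame on top, the JavaScript frame's function slot sits
  // one frame below, so the saved caller FP is followed first before the
  // function is read.)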

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the caller.
  __ testp(rax, rax);
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Drop any potential handler frame that may be sitting on top of the
  // actual JavaScript frame. This is the case when OSR is triggered from
  // bytecode.
  if (has_handler_frame) {
    __ leave();
  }

  // Load deoptimization data from the code object.
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(rbx,
                    Operand(rbx, FixedArray::OffsetOfElementAt(
                                     DeoptimizationData::kOsrPcOffsetIndex) -
                                     kHeapObjectTag));
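  // (DeoptimizationData is a FixedArray; the element at kOsrPcOffsetIndex
  // holds, as a Smi, the byte offset of the OSR entry point within the code
  // object's instruction stream.)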

  // Compute the target address = code_obj + header_size + osr_offset
  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, false);
}

void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, true);
}

void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save all parameter registers (see wasm-linkage.cc). They might be
    // overwritten in the runtime call below. We don't have any callee-saved
    // registers in wasm, so no need to store anything else.
    for (Register reg : wasm::kGpParamRegisters) {
      if (reg == kWasmInstanceRegister) continue;
      __ Push(reg);
    }
    __ subp(rsp, Immediate(16 * arraysize(wasm::kFpParamRegisters)));
    int offset = 0;
    for (DoubleRegister reg : wasm::kFpParamRegisters) {
      __ movdqu(Operand(rsp, offset), reg);
      offset += 16;
    }
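    // (Each slot in the slab reserved above is 16 bytes, the width of an XMM
    // register; movdqu performs an unaligned 128-bit move, so rsp needs no
    // special alignment here.)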

    // Pass the WASM instance as an explicit argument to WasmCompileLazy.
    __ Push(kWasmInstanceRegister);
    // Initialize the JavaScript context with 0. CEntry will use it to
    // set the current context on the isolate.
    __ Move(kContextRegister, Smi::kZero);
    __ CallRuntime(Runtime::kWasmCompileLazy);
    // The entrypoint address is the first return value.
    __ movq(r11, kReturnRegister0);
    // The WASM instance is the second return value.
    __ movq(kWasmInstanceRegister, kReturnRegister1);

    // Restore registers.
    for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
      offset -= 16;
      __ movdqu(reg, Operand(rsp, offset));
    }
    DCHECK_EQ(0, offset);
    __ addp(rsp, Immediate(16 * arraysize(wasm::kFpParamRegisters)));
    for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
      if (reg == kWasmInstanceRegister) continue;
      __ Pop(reg);
    }
  }
  // Finally, jump to the entrypoint.
  __ jmp(r11);
}

void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
                               SaveFPRegsMode save_doubles, ArgvMode argv_mode,
                               bool builtin_exit_frame) {
  // rax: number of arguments including receiver
  // rbx: pointer to C function  (C callee-saved)
  // rbp: frame pointer of calling JS frame (restored after C call)
  // rsp: stack pointer  (restored after C call)
  // rsi: current context (restored)
  //
  // If argv_mode == kArgvInRegister:
  // r15: pointer to the first argument

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

#ifdef _WIN64
  // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires the
  // stack to be aligned to 16 bytes. It only allows a single word to be
  // returned in register rax. Larger return sizes must be written to an
  // address passed as a hidden first argument.
  const Register kCCallArg0 = rcx;
  const Register kCCallArg1 = rdx;
  const Register kCCallArg2 = r8;
  const Register kCCallArg3 = r9;
  const int kArgExtraStackSpace = 2;
  const int kMaxRegisterResultSize = 1;
#else
  // GCC / Clang passes arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results
  // are returned in rax, and a struct of two pointers is returned in rax+rdx.
  // Larger return sizes must be written to an address passed as a hidden first
  // argument.
  const Register kCCallArg0 = rdi;
  const Register kCCallArg1 = rsi;
  const Register kCCallArg2 = rdx;
  const Register kCCallArg3 = rcx;
  const int kArgExtraStackSpace = 0;
  const int kMaxRegisterResultSize = 2;
#endif  // _WIN64

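  // (E.g. on Win64 with result_size == 2 the result does not fit in rax
  // alone, so kArgExtraStackSpace (2) plus result_size (2) pointer-sized
  // slots are reserved below; on System V the same result comes back in
  // rax+rdx and no extra space is needed.)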
  // Enter the exit frame that transitions from JavaScript to C++.
  int arg_stack_space =
      kArgExtraStackSpace +
      (result_size <= kMaxRegisterResultSize ? 0 : result_size);
  if (argv_mode == kArgvInRegister) {
    DCHECK(save_doubles == kDontSaveFPRegs);
    DCHECK(!builtin_exit_frame);
    __ EnterApiExitFrame(arg_stack_space);
    // Move argc into r14 (argv is already in r15).
    __ movp(r14, rax);
  } else {
    __ EnterExitFrame(
        arg_stack_space, save_doubles == kSaveFPRegs,
        builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
  }

  // rbx: pointer to builtin function  (C callee-saved).
  // rbp: frame pointer of exit frame  (restored after C call).
  // rsp: stack pointer (restored after C call).
  // r14: number of arguments including receiver (C callee-saved).
  // r15: argv pointer (C callee-saved).

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }

  // Call C function. The arguments object will be created by stubs declared by
  // DECLARE_RUNTIME_FUNCTION().
  if (result_size <= kMaxRegisterResultSize) {
    // Pass a pointer to the Arguments object as the first argument.
    // Return result in single register (rax), or a register pair (rax, rdx).
    __ movp(kCCallArg0, r14);  // argc.
    __ movp(kCCallArg1, r15);  // argv.
    __ Move(kCCallArg2, ExternalReference::isolate_address(masm->isolate()));
  } else {
    DCHECK_LE(result_size, 2);
    // Pass a pointer to the result location as the first argument.
    __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
    // Pass a pointer to the Arguments object as the second argument.
    __ movp(kCCallArg1, r14);  // argc.
    __ movp(kCCallArg2, r15);  // argv.
    __ Move(kCCallArg3, ExternalReference::isolate_address(masm->isolate()));
  }
  __ call(rbx);

  if (result_size > kMaxRegisterResultSize) {
    // Read result values stored on stack. The result is stored
    // above the two Arguments object slots on Win64.
    DCHECK_LE(result_size, 2);
    __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
    __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
  }
  // Result is in rax or rdx:rax - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ CompareRoot(rax, Heap::kExceptionRootIndex);
  __ j(equal, &exception_returned);

  // Check that there is no pending exception; otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
    ExternalReference pending_exception_address = ExternalReference::Create(
        IsolateAddressId::kPendingExceptionAddress, masm->isolate());
    Operand pending_exception_operand =
        masm->ExternalOperand(pending_exception_address);
    __ cmpp(r14, pending_exception_operand);
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles == kSaveFPRegs, argv_mode == kArgvOnStack);
  __ ret(0);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ExternalReference pending_handler_entrypoint_address =
      ExternalReference::Create(
          IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ExternalReference pending_handler_fp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ExternalReference pending_handler_sp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());

  // Ask the runtime for help to determine the handler. This will set rax to
  // contain the current pending exception; don't clobber it.
  ExternalReference find_handler =
      ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ movp(arg_reg_1, Immediate(0));  // argc.
    __ movp(arg_reg_2, Immediate(0));  // argv.
    __ Move(arg_reg_3, ExternalReference::isolate_address(masm->isolate()));
    __ PrepareCallCFunction(3);
    __ CallCFunction(find_handler, 3);
  }
  // Retrieve the handler context, SP and FP.
  __ movp(rsi, masm->ExternalOperand(pending_handler_context_address));
  __ movp(rsp, masm->ExternalOperand(pending_handler_sp_address));
  __ movp(rbp, masm->ExternalOperand(pending_handler_fp_address));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be zero (rsi == 0) for non-JS frames.
  Label skip;
  __ testp(rsi, rsi);
  __ j(zero, &skip, Label::kNear);
  __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  __ bind(&skip);

  // Reset the masking register. This is done independent of the underlying
  // feature flag {FLAG_branch_load_poisoning} to make the snapshot work with
  // both configurations. It is safe to always do this, because the underlying
  // register is caller-saved and can be arbitrarily clobbered.
  __ ResetSpeculationPoisonRegister();

  // Compute the handler entry address and jump to it.
  __ movp(rdi, masm->ExternalOperand(pending_handler_entrypoint_address));
  __ jmp(rdi);
}

void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  Label check_negative, process_64_bits, done;

  // Account for return address and saved regs.
  const int kArgumentOffset = 4 * kRegisterSize;

  MemOperand mantissa_operand(MemOperand(rsp, kArgumentOffset));
  MemOperand exponent_operand(
      MemOperand(rsp, kArgumentOffset + kDoubleSize / 2));
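  // (On little-endian x64 the low 32 bits of the IEEE-754 double hold the low
  // mantissa bits, while the high 32 bits hold the sign, the 11 exponent bits
  // and the top 20 mantissa bits; hence the kDoubleSize / 2 offset for the
  // exponent word.)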

  // The result is returned on the stack.
  MemOperand return_operand = mantissa_operand;

  Register scratch1 = rbx;

  // Since we must use rcx for shifts below, use some other register (rax)
  // to calculate the result if rcx is the requested return register.
  Register result_reg = rax;
  // Save rcx if it isn't the return register and therefore volatile, or, if it
  // is the return register, save the temp register (rax) we use in its stead
  // for the result.
  Register save_reg = rax;
  __ pushq(rcx);
  __ pushq(scratch1);
  __ pushq(save_reg);

  __ movl(scratch1, mantissa_operand);
  __ Movsd(kScratchDoubleReg, mantissa_operand);
  __ movl(rcx, exponent_operand);

  __ andl(rcx, Immediate(HeapNumber::kExponentMask));
  __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
  __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
  __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits, Label::kNear);
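  // (We fall through here when the unbiased exponent is at least
  // kMantissaBits, i.e. the double is a large integer; its low 32 bits can be
  // produced by shifting the mantissa word directly, which is all the
  // truncation needs since only the value modulo 2^32 matters.)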

  // Result is entirely in the lower 32 bits of the mantissa.
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ subl(rcx, Immediate(delta));
  __ xorl(result_reg, result_reg);
  __ cmpl(rcx, Immediate(31));
  __ j(above, &done, Label::kNear);
  __ shll_cl(scratch1);
  __ jmp(&check_negative, Label::kNear);

  __ bind(&process_64_bits);
  __ Cvttsd2siq(result_reg, kScratchDoubleReg);
  __ jmp(&done, Label::kNear);

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ movl(result_reg, scratch1);
  __ negl(result_reg);
  __ cmpl(exponent_operand, Immediate(0));
  __ cmovl(greater, result_reg, scratch1);

  // Restore registers.
  __ bind(&done);
  __ movl(return_operand, result_reg);
  __ popq(save_reg);
  __ popq(scratch1);
  __ popq(rcx);
  __ ret(0);
}

void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent == rdx);
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ movp(scratch, Immediate(1));
  __ Cvtlsi2sd(double_result, scratch);

  Label fast_power, try_arithmetic_simplification;
  // Detect integer exponents stored as double.
  __ DoubleToI(exponent, double_exponent, double_scratch,
               &try_arithmetic_simplification, &try_arithmetic_simplification);
  __ jmp(&int_exponent);

  __ bind(&try_arithmetic_simplification);
  __ Cvttsd2si(exponent, double_exponent);
  // Skip to runtime if possibly NaN (indicated by the indefinite integer).
  __ cmpl(exponent, Immediate(0x1));
  __ j(overflow, &call_runtime);

  // Using FPU instructions to calculate power.
  Label fast_power_failed;
  __ bind(&fast_power);
  __ fnclex();  // Clear flags to catch exceptions later.
  // Transfer (B)ase and (E)xponent onto the FPU register stack.
  __ subp(rsp, Immediate(kDoubleSize));
  __ Movsd(Operand(rsp, 0), double_exponent);
  __ fld_d(Operand(rsp, 0));  // E
  __ Movsd(Operand(rsp, 0), double_base);
  __ fld_d(Operand(rsp, 0));  // B, E

  // Exponent is in st(1) and base is in st(0)
  // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
  // FYL2X calculates st(1) * log2(st(0))
  __ fyl2x();    // X
  __ fld(0);     // X, X
  __ frndint();  // rnd(X), X
  __ fsub(1);    // rnd(X), X-rnd(X)
  __ fxch(1);    // X - rnd(X), rnd(X)
  // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
  __ f2xm1();   // 2^(X-rnd(X)) - 1, rnd(X)
  __ fld1();    // 1, 2^(X-rnd(X)) - 1, rnd(X)
  __ faddp(1);  // 2^(X-rnd(X)), rnd(X)
  // FSCALE calculates st(0) * 2^st(1)
  __ fscale();  // 2^X, rnd(X)
  __ fstp(1);
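  // (Worked example: B = 2, E = 10 gives X = 10 * log2(2) = 10 and
  // rnd(X) = 10, so f2xm1 sees 0 and yields 0; adding 1 gives 1, and fscale
  // produces 1 * 2^10 = 1024.)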
  // Bail out to runtime in case of exceptions in the status word.
  __ fnstsw_ax();
  __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
  __ j(not_zero, &fast_power_failed, Label::kNear);
  __ fstp_d(Operand(rsp, 0));
  __ Movsd(double_result, Operand(rsp, 0));
  __ addp(rsp, Immediate(kDoubleSize));
  __ jmp(&done);

  __ bind(&fast_power_failed);
  __ fninit();
  __ addp(rsp, Immediate(kDoubleSize));
  __ jmp(&call_runtime);

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  // Back up exponent as we need to check if exponent is negative later.
  __ movp(scratch, exponent);                // Back up exponent.
  __ Movsd(double_scratch, double_base);     // Back up base.
  __ Movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ negl(scratch);
  __ bind(&no_neg);

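  // Square-and-multiply: consume |exponent| bit by bit, squaring the base
  // each round and multiplying it into the result whenever the shifted-out
  // bit is 1. E.g. x^5 (binary 101) multiplies in x for bit 0 and x^4 for
  // bit 2, skipping the squared term for bit 1.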
  __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
  // Above condition means CF==0 && ZF==0.  This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ Movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ Mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ Mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
  __ Divsd(double_scratch2, double_result);
  __ Movsd(double_result, double_scratch2);
  // Test whether result is zero.  Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ Xorpd(double_scratch2, double_scratch2);
  __ Ucomisd(double_scratch2, double_result);
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // input was a smi.  We reset it with the exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtlsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  __ bind(&call_runtime);
  // Move base to the correct argument register.  Exponent is already in xmm1.
  __ Movsd(xmm0, double_base);
  DCHECK(double_exponent == xmm1);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(2);
    __ CallCFunction(ExternalReference::power_double_double_function(), 2);
  }
  // Return value is in xmm0.
  __ Movsd(double_result, xmm0);

  __ bind(&done);
  __ ret(0);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64