1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/codegen/register-arch.h"
6 #if V8_TARGET_ARCH_IA32
7 
8 #include "src/api/api-arguments.h"
9 #include "src/base/bits-iterator.h"
10 #include "src/base/iterator.h"
11 #include "src/codegen/code-factory.h"
12 #include "src/codegen/interface-descriptors-inl.h"
13 // For interpreter_entry_return_pc_offset. TODO(jkummerow): Drop.
14 #include "src/codegen/macro-assembler-inl.h"
15 #include "src/codegen/register-configuration.h"
16 #include "src/debug/debug.h"
17 #include "src/deoptimizer/deoptimizer.h"
18 #include "src/execution/frame-constants.h"
19 #include "src/execution/frames.h"
20 #include "src/heap/heap-inl.h"
21 #include "src/logging/counters.h"
22 #include "src/objects/cell.h"
23 #include "src/objects/foreign.h"
24 #include "src/objects/heap-number.h"
25 #include "src/objects/js-generator.h"
26 #include "src/objects/objects-inl.h"
27 #include "src/objects/smi.h"
28 
29 #if V8_ENABLE_WEBASSEMBLY
30 #include "src/wasm/wasm-linkage.h"
31 #include "src/wasm/wasm-objects.h"
32 #endif  // V8_ENABLE_WEBASSEMBLY
33 
34 namespace v8 {
35 namespace internal {
36 
37 #define __ ACCESS_MASM(masm)
38 
39 void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address) {
40   __ Move(kJavaScriptCallExtraArg1Register,
41           Immediate(ExternalReference::Create(address)));
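  // The C entry point travels in kJavaScriptCallExtraArg1Register; the
  // AdaptorWithBuiltinExitFrame builtin sets up a builtin exit frame before
  // calling it.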
42   __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
43           RelocInfo::CODE_TARGET);
44 }
45 
46 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
47                                            Runtime::FunctionId function_id) {
48   // ----------- S t a t e -------------
49   //  -- eax : actual argument count
50   //  -- edx : new target (preserved for callee)
51   //  -- edi : target function (preserved for callee)
52   // -----------------------------------
53   ASM_CODE_COMMENT(masm);
54   {
55     FrameScope scope(masm, StackFrame::INTERNAL);
56     // Push a copy of the target function, the new target and the actual
57     // argument count.
58     __ push(kJavaScriptCallTargetRegister);
59     __ push(kJavaScriptCallNewTargetRegister);
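    // Smi-tag the argument count before pushing it so the stack slot holds a
    // properly tagged value if the runtime call triggers a GC.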
60     __ SmiTag(kJavaScriptCallArgCountRegister);
61     __ push(kJavaScriptCallArgCountRegister);
62     // Function is also the parameter to the runtime call.
63     __ push(kJavaScriptCallTargetRegister);
64 
65     __ CallRuntime(function_id, 1);
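    // The runtime call returns the Code object in eax; stash it in ecx so eax
    // can be restored to the argument count below.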
66     __ mov(ecx, eax);
67 
68     // Restore target function, new target and actual argument count.
69     __ pop(kJavaScriptCallArgCountRegister);
70     __ SmiUntag(kJavaScriptCallArgCountRegister);
71     __ pop(kJavaScriptCallNewTargetRegister);
72     __ pop(kJavaScriptCallTargetRegister);
73   }
74 
75   static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
76   __ JumpCodeObject(ecx);
77 }
78 
79 namespace {
80 
81 enum class ArgumentsElementType {
82   kRaw,    // Push arguments as they are.
83   kHandle  // Dereference arguments before pushing.
84 };
85 
86 void Generate_PushArguments(MacroAssembler* masm, Register array, Register argc,
87                             Register scratch1, Register scratch2,
88                             ArgumentsElementType element_type) {
89   DCHECK(!AreAliased(array, argc, scratch1, scratch2));
90   Register counter = scratch1;
91   Label loop, entry;
92   if (kJSArgcIncludesReceiver) {
93     __ lea(counter, Operand(argc, -kJSArgcReceiverSlots));
94   } else {
95     __ mov(counter, argc);
96   }
97   __ jmp(&entry);
98   __ bind(&loop);
99   Operand value(array, counter, times_system_pointer_size, 0);
100   if (element_type == ArgumentsElementType::kHandle) {
101     DCHECK(scratch2 != no_reg);
102     __ mov(scratch2, value);
103     value = Operand(scratch2, 0);
104   }
105   __ Push(value);
106   __ bind(&entry);
107   __ dec(counter);
108   __ j(greater_equal, &loop, Label::kNear);
109 }
110 
111 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
112   // ----------- S t a t e -------------
113   //  -- eax: number of arguments
114   //  -- edi: constructor function
115   //  -- edx: new target
116   //  -- esi: context
117   // -----------------------------------
118 
119   Label stack_overflow;
120 
121   __ StackOverflowCheck(eax, ecx, &stack_overflow);
122 
123   // Enter a construct frame.
124   {
125     FrameScope scope(masm, StackFrame::CONSTRUCT);
126 
127     // Preserve the incoming parameters on the stack.
128     __ SmiTag(eax);
129     __ push(esi);
130     __ push(eax);
131     __ SmiUntag(eax);
132 
133     // TODO(victorgomes): When the arguments adaptor is completely removed, we
134     // should get the formal parameter count and copy the arguments in its
135     // correct position (including any undefined), instead of delaying this to
136     // InvokeFunction.
137 
138     // Set up pointer to first argument (skip receiver).
139     __ lea(esi, Operand(ebp, StandardFrameConstants::kCallerSPOffset +
140                                  kSystemPointerSize));
141     // Copy arguments to the expression stack.
142     // esi: Pointer to start of arguments.
143     // eax: Number of arguments.
144     Generate_PushArguments(masm, esi, eax, ecx, no_reg,
145                            ArgumentsElementType::kRaw);
146     // The receiver for the builtin/api call.
147     __ PushRoot(RootIndex::kTheHoleValue);
148 
149     // Call the function.
150     // eax: number of arguments (untagged)
151     // edi: constructor function
152     // edx: new target
153     // Reload context from the frame.
154     __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
155     __ InvokeFunction(edi, edx, eax, InvokeType::kCall);
156 
157     // Restore context from the frame.
158     __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
159     // Restore smi-tagged arguments count from the frame.
160     __ mov(edx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
161     // Leave construct frame.
162   }
163 
164   // Remove caller arguments from the stack and return.
165   __ DropArguments(edx, ecx, TurboAssembler::kCountIsSmi,
166                    kJSArgcIncludesReceiver
167                        ? TurboAssembler::kCountIncludesReceiver
168                        : TurboAssembler::kCountExcludesReceiver);
169   __ ret(0);
170 
171   __ bind(&stack_overflow);
172   {
173     FrameScope scope(masm, StackFrame::INTERNAL);
174     __ CallRuntime(Runtime::kThrowStackOverflow);
175     __ int3();  // This should be unreachable.
176   }
177 }
178 
179 }  // namespace
180 
181 // The construct stub for ES5 constructor functions and ES6 class constructors.
182 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
183   // ----------- S t a t e -------------
184   //  -- eax: number of arguments (untagged)
185   //  -- edi: constructor function
186   //  -- edx: new target
187   //  -- esi: context
188   //  -- sp[...]: constructor arguments
189   // -----------------------------------
190 
191   FrameScope scope(masm, StackFrame::MANUAL);
192   // Enter a construct frame.
193   __ EnterFrame(StackFrame::CONSTRUCT);
194 
195   Label post_instantiation_deopt_entry, not_create_implicit_receiver;
196 
197   // Preserve the incoming parameters on the stack.
198   __ mov(ecx, eax);
199   __ SmiTag(ecx);
200   __ Push(esi);
201   __ Push(ecx);
202   __ Push(edi);
203   __ PushRoot(RootIndex::kTheHoleValue);
204   __ Push(edx);
205 
206   // ----------- S t a t e -------------
207   //  --         sp[0*kSystemPointerSize]: new target
208   //  --         sp[1*kSystemPointerSize]: padding
209   //  -- edi and sp[2*kSystemPointerSize]: constructor function
210   //  --         sp[3*kSystemPointerSize]: argument count
211   //  --         sp[4*kSystemPointerSize]: context
212   // -----------------------------------
213 
214   __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
215   __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kFlagsOffset));
216   __ DecodeField<SharedFunctionInfo::FunctionKindBits>(eax);
217   __ JumpIfIsInRange(eax, kDefaultDerivedConstructor, kDerivedConstructor, ecx,
218                      &not_create_implicit_receiver, Label::kNear);
219 
220   // If not derived class constructor: Allocate the new receiver object.
221   __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
222                       eax);
223   __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject), RelocInfo::CODE_TARGET);
224   __ jmp(&post_instantiation_deopt_entry, Label::kNear);
225 
226   // Else: use TheHoleValue as receiver for constructor call
227   __ bind(&not_create_implicit_receiver);
228   __ LoadRoot(eax, RootIndex::kTheHoleValue);
229 
230   // ----------- S t a t e -------------
231   //  --                         eax: implicit receiver
232   //  -- Slot 4 / sp[0*kSystemPointerSize]: new target
233   //  -- Slot 3 / sp[1*kSystemPointerSize]: padding
234   //  -- Slot 2 / sp[2*kSystemPointerSize]: constructor function
235   //  -- Slot 1 / sp[3*kSystemPointerSize]: number of arguments (tagged)
236   //  -- Slot 0 / sp[4*kSystemPointerSize]: context
237   // -----------------------------------
238   // Deoptimizer enters here.
239   masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
240       masm->pc_offset());
241   __ bind(&post_instantiation_deopt_entry);
242 
243   // Restore new target.
244   __ Pop(edx);
245 
246   // Push the allocated receiver to the stack.
247   __ Push(eax);
248 
249   // We need two copies because we may have to return the original one
250   // and the calling conventions dictate that the called function pops the
251   // receiver. The second copy is pushed after the arguments; we stash it in
252   // xmm0 because eax has to hold the argument count when we invoke the
253   // function below.
254   __ movd(xmm0, eax);
255 
256   // Set up pointer to first argument (skip receiver).
257   __ lea(edi, Operand(ebp, StandardFrameConstants::kCallerSPOffset +
258                                kSystemPointerSize));
259 
260   // Restore argument count.
261   __ mov(eax, Operand(ebp, ConstructFrameConstants::kLengthOffset));
262   __ SmiUntag(eax);
263 
264   // Check if we have enough stack space to push all arguments.
265   // Argument count in eax. Clobbers ecx.
266   Label stack_overflow;
267   __ StackOverflowCheck(eax, ecx, &stack_overflow);
268 
269   // TODO(victorgomes): When the arguments adaptor is completely removed, we
270   // should get the formal parameter count and copy the arguments in its
271   // correct position (including any undefined), instead of delaying this to
272   // InvokeFunction.
273 
274   // Copy arguments to the expression stack.
275   // edi: Pointer to start of arguments.
276   // eax: Number of arguments.
277   Generate_PushArguments(masm, edi, eax, ecx, no_reg,
278                          ArgumentsElementType::kRaw);
279 
280   // Push implicit receiver.
281   __ movd(ecx, xmm0);
282   __ Push(ecx);
283 
284   // Restore and call the constructor function.
285   __ mov(edi, Operand(ebp, ConstructFrameConstants::kConstructorOffset));
286   __ InvokeFunction(edi, edx, eax, InvokeType::kCall);
287 
288   // ----------- S t a t e -------------
289   //  --                eax: constructor result
290   //  -- sp[0*kSystemPointerSize]: implicit receiver
291   //  -- sp[1*kSystemPointerSize]: padding
292   //  -- sp[2*kSystemPointerSize]: constructor function
293   //  -- sp[3*kSystemPointerSize]: number of arguments
294   //  -- sp[4*kSystemPointerSize]: context
295   // -----------------------------------
296 
297   // Store offset of return address for deoptimizer.
298   masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
299       masm->pc_offset());
300 
301   // If the result is an object (in the ECMA sense), we should get rid
302   // of the receiver and use the result; see ECMA-262 section 13.2.2-7
303   // on page 74.
304 
305   Label check_result, use_receiver, do_throw, leave_and_return;
306   // If the result is undefined, we jump out to using the implicit receiver.
307   __ JumpIfNotRoot(eax, RootIndex::kUndefinedValue, &check_result,
308                    Label::kNear);
309 
310   // Throw away the result of the constructor invocation and use the
311   // on-stack receiver as the result.
312   __ bind(&use_receiver);
313   __ mov(eax, Operand(esp, 0 * kSystemPointerSize));
314   __ JumpIfRoot(eax, RootIndex::kTheHoleValue, &do_throw);
315 
316   __ bind(&leave_and_return);
317   // Restore smi-tagged arguments count from the frame.
318   __ mov(edx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
319   __ LeaveFrame(StackFrame::CONSTRUCT);
320 
321   // Remove caller arguments from the stack and return.
322   __ DropArguments(edx, ecx, TurboAssembler::kCountIsSmi,
323                    kJSArgcIncludesReceiver
324                        ? TurboAssembler::kCountIncludesReceiver
325                        : TurboAssembler::kCountExcludesReceiver);
326   __ ret(0);
327 
328   // Otherwise we do a smi check and fall through to check if the return value
329   // is a valid receiver.
330   __ bind(&check_result);
331 
332   // If the result is a smi, it is *not* an object in the ECMA sense.
333   __ JumpIfSmi(eax, &use_receiver, Label::kNear);
334 
335   // If the type of the result (stored in its map) is less than
336   // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
337   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
338   __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
339   __ j(above_equal, &leave_and_return, Label::kNear);
340   __ jmp(&use_receiver, Label::kNear);
341 
342   __ bind(&do_throw);
343   // Restore context from the frame.
344   __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
345   __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
346   // This should be unreachable.
347   __ int3();
348 
349   __ bind(&stack_overflow);
350   // Restore context from the frame.
351   __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
352   __ CallRuntime(Runtime::kThrowStackOverflow);
353   // This should be unreachable.
354   __ int3();
355 }
356 
357 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
358   Generate_JSBuiltinsConstructStubHelper(masm);
359 }
360 
361 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
362   FrameScope scope(masm, StackFrame::INTERNAL);
363   __ push(edi);
364   __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
365 }
366 
367 namespace {
368 
369 // Called with the native C calling convention. The corresponding function
370 // signature is either:
371 //
372 //   using JSEntryFunction = GeneratedCode<Address(
373 //       Address root_register_value, Address new_target, Address target,
374 //       Address receiver, intptr_t argc, Address** argv)>;
375 // or
376 //   using JSEntryFunction = GeneratedCode<Address(
377 //       Address root_register_value, MicrotaskQueue* microtask_queue)>;
378 void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
379                              Builtin entry_trampoline) {
380   Label invoke, handler_entry, exit;
381   Label not_outermost_js, not_outermost_js_2;
382 
383   {
384     NoRootArrayScope uninitialized_root_register(masm);
385 
386     // Set up frame.
387     __ push(ebp);
388     __ mov(ebp, esp);
389 
390     // Push marker in two places.
391     __ push(Immediate(StackFrame::TypeToMarker(type)));
392     // Reserve a slot for the context. It is filled after the root register has
393     // been set up.
394     __ AllocateStackSpace(kSystemPointerSize);
395     // Save callee-saved registers (C calling conventions).
396     __ push(edi);
397     __ push(esi);
398     __ push(ebx);
399 
400     // Initialize the root register based on the given Isolate* argument.
401     // C calling convention. The first argument is passed on the stack.
402     __ mov(kRootRegister,
403            Operand(ebp, EntryFrameConstants::kRootRegisterValueOffset));
404   }
405 
406   // Save copies of the top frame descriptor on the stack.
407   ExternalReference c_entry_fp = ExternalReference::Create(
408       IsolateAddressId::kCEntryFPAddress, masm->isolate());
409   __ push(__ ExternalReferenceAsOperand(c_entry_fp, edi));
410 
411   // Clear c_entry_fp now that we've pushed its previous value to the stack.
412   // If the c_entry_fp is not already zero and we don't clear it, the
413   // SafeStackFrameIterator will assume we are executing C++ and miss the JS
414   // frames on top.
415   __ mov(__ ExternalReferenceAsOperand(c_entry_fp, edi), Immediate(0));
416 
417   // Store the context address in the previously-reserved slot.
418   ExternalReference context_address = ExternalReference::Create(
419       IsolateAddressId::kContextAddress, masm->isolate());
420   __ mov(edi, __ ExternalReferenceAsOperand(context_address, edi));
421   static constexpr int kOffsetToContextSlot = -2 * kSystemPointerSize;
422   __ mov(Operand(ebp, kOffsetToContextSlot), edi);
423 
424   // If this is the outermost JS call, set js_entry_sp value.
425   ExternalReference js_entry_sp = ExternalReference::Create(
426       IsolateAddressId::kJSEntrySPAddress, masm->isolate());
427   __ cmp(__ ExternalReferenceAsOperand(js_entry_sp, edi), Immediate(0));
428   __ j(not_equal, &not_outermost_js, Label::kNear);
429   __ mov(__ ExternalReferenceAsOperand(js_entry_sp, edi), ebp);
430   __ push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
431   __ jmp(&invoke, Label::kNear);
432   __ bind(&not_outermost_js);
433   __ push(Immediate(StackFrame::INNER_JSENTRY_FRAME));
434 
435   // Jump to a faked try block that does the invoke, with a faked catch
436   // block that sets the pending exception.
437   __ jmp(&invoke);
438   __ bind(&handler_entry);
439 
440   // Store the current pc as the handler offset. It's used later to create the
441   // handler table.
442   masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());
443 
444   // Caught exception: Store result (exception) in the pending exception
445   // field in the JSEnv and return a failure sentinel.
446   ExternalReference pending_exception = ExternalReference::Create(
447       IsolateAddressId::kPendingExceptionAddress, masm->isolate());
448   __ mov(__ ExternalReferenceAsOperand(pending_exception, edi), eax);
449   __ Move(eax, masm->isolate()->factory()->exception());
450   __ jmp(&exit);
451 
452   // Invoke: Link this frame into the handler chain.
453   __ bind(&invoke);
454   __ PushStackHandler(edi);
455 
456   // Invoke the function by calling through JS entry trampoline builtin and
457   // pop the faked function when we return.
458   Handle<Code> trampoline_code =
459       masm->isolate()->builtins()->code_handle(entry_trampoline);
460   __ Call(trampoline_code, RelocInfo::CODE_TARGET);
461 
462   // Unlink this frame from the handler chain.
463   __ PopStackHandler(edi);
464 
465   __ bind(&exit);
466 
467   // Check if the current stack frame is marked as the outermost JS frame.
468   __ pop(edi);
469   __ cmp(edi, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
470   __ j(not_equal, &not_outermost_js_2);
471   __ mov(__ ExternalReferenceAsOperand(js_entry_sp, edi), Immediate(0));
472   __ bind(&not_outermost_js_2);
473 
474   // Restore the top frame descriptor from the stack.
475   __ pop(__ ExternalReferenceAsOperand(c_entry_fp, edi));
476 
477   // Restore callee-saved registers (C calling conventions).
478   __ pop(ebx);
479   __ pop(esi);
480   __ pop(edi);
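  // Two slots remain from frame setup: the frame-type marker and the reserved
  // context slot.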
481   __ add(esp, Immediate(2 * kSystemPointerSize));  // remove markers
482 
483   // Restore frame pointer and return.
484   __ pop(ebp);
485   __ ret(0);
486 }
487 
488 }  // namespace
489 
490 void Builtins::Generate_JSEntry(MacroAssembler* masm) {
491   Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
492 }
493 
494 void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
495   Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
496                           Builtin::kJSConstructEntryTrampoline);
497 }
498 
499 void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
500   Generate_JSEntryVariant(masm, StackFrame::ENTRY,
501                           Builtin::kRunMicrotasksTrampoline);
502 }
503 
504 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
505                                              bool is_construct) {
506   {
507     FrameScope scope(masm, StackFrame::INTERNAL);
508 
509     const Register scratch1 = edx;
510     const Register scratch2 = edi;
511 
512     // Set up the context (we need to use the caller context from the isolate).
513     ExternalReference context_address = ExternalReference::Create(
514         IsolateAddressId::kContextAddress, masm->isolate());
515     __ mov(esi, __ ExternalReferenceAsOperand(context_address, scratch1));
516 
517     // Load the previous frame pointer (edx) to access C arguments
518     __ mov(scratch1, Operand(ebp, 0));
519 
520     // Push the function.
521     __ push(Operand(scratch1, EntryFrameConstants::kFunctionArgOffset));
522 
523     // Load the number of arguments and set up the pointer to the arguments.
524     __ mov(eax, Operand(scratch1, EntryFrameConstants::kArgcOffset));
525     __ mov(scratch1, Operand(scratch1, EntryFrameConstants::kArgvOffset));
526 
527     // Check if we have enough stack space to push all arguments.
528     // Argument count in eax. Clobbers ecx.
529     Label enough_stack_space, stack_overflow;
530     __ StackOverflowCheck(eax, ecx, &stack_overflow);
531     __ jmp(&enough_stack_space);
532 
533     __ bind(&stack_overflow);
534     __ CallRuntime(Runtime::kThrowStackOverflow);
535     // This should be unreachable.
536     __ int3();
537 
538     __ bind(&enough_stack_space);
539 
540     // Copy arguments to the stack.
541     // scratch1 (edx): Pointer to start of arguments.
542     // eax: Number of arguments.
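    // Each argv slot holds the address of an argument value (a handle
    // location), so kHandle makes the helper dereference the slot before
    // pushing.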
543     Generate_PushArguments(masm, scratch1, eax, ecx, scratch2,
544                            ArgumentsElementType::kHandle);
545 
546     // Load the previous frame pointer to access C arguments
547     __ mov(scratch2, Operand(ebp, 0));
548 
549     // Push the receiver onto the stack.
550     __ push(Operand(scratch2, EntryFrameConstants::kReceiverArgOffset));
551 
552     // Get the new.target and function from the frame.
553     __ mov(edx, Operand(scratch2, EntryFrameConstants::kNewTargetArgOffset));
554     __ mov(edi, Operand(scratch2, EntryFrameConstants::kFunctionArgOffset));
555 
556     // Invoke the code.
557     Handle<Code> builtin = is_construct
558                                ? BUILTIN_CODE(masm->isolate(), Construct)
559                                : masm->isolate()->builtins()->Call();
560     __ Call(builtin, RelocInfo::CODE_TARGET);
561 
562     // Exit the internal frame. Notice that this also removes the empty
563     // context and the function left on the stack by the code
564     // invocation.
565   }
566   __ ret(0);
567 }
568 
569 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
570   Generate_JSEntryTrampolineHelper(masm, false);
571 }
572 
573 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
574   Generate_JSEntryTrampolineHelper(masm, true);
575 }
576 
577 void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
578   // This expects two C++ function parameters passed by Invoke() in
579   // execution.cc.
580   //   The second parameter is the microtask_queue, read from the caller's stack frame below.
581   __ mov(RunMicrotasksDescriptor::MicrotaskQueueRegister(),
582          Operand(ebp, EntryFrameConstants::kMicrotaskQueueArgOffset));
583   __ Jump(BUILTIN_CODE(masm->isolate(), RunMicrotasks), RelocInfo::CODE_TARGET);
584 }
585 
586 static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
587                                           Register sfi_data,
588                                           Register scratch1) {
589   Label done;
590 
591   __ CmpObjectType(sfi_data, INTERPRETER_DATA_TYPE, scratch1);
592   __ j(not_equal, &done, Label::kNear);
593   __ mov(sfi_data,
594          FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
595 
596   __ bind(&done);
597 }
598 
599 static void AssertCodeIsBaseline(MacroAssembler* masm, Register code,
600                                  Register scratch) {
601   DCHECK(!AreAliased(code, scratch));
602   // Verify that the code kind is baseline code via the CodeKind.
603   __ mov(scratch, FieldOperand(code, Code::kFlagsOffset));
604   __ DecodeField<Code::KindField>(scratch);
605   __ cmp(scratch, Immediate(static_cast<int>(CodeKind::BASELINE)));
606   __ Assert(equal, AbortReason::kExpectedBaselineData);
607 }
608 
609 static void GetSharedFunctionInfoBytecodeOrBaseline(MacroAssembler* masm,
610                                                     Register sfi_data,
611                                                     Register scratch1,
612                                                     Label* is_baseline) {
613   ASM_CODE_COMMENT(masm);
614   Label done;
615   __ LoadMap(scratch1, sfi_data);
616 
617   __ CmpInstanceType(scratch1, CODET_TYPE);
618   if (FLAG_debug_code) {
619     Label not_baseline;
620     __ j(not_equal, &not_baseline);
621     AssertCodeIsBaseline(masm, sfi_data, scratch1);
622     __ j(equal, is_baseline);
623     __ bind(&not_baseline);
624   } else {
625     __ j(equal, is_baseline);
626   }
627 
628   __ CmpInstanceType(scratch1, INTERPRETER_DATA_TYPE);
629   __ j(not_equal, &done, Label::kNear);
630 
631   __ mov(sfi_data,
632          FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
633 
634   __ bind(&done);
635 }
636 
637 // static
638 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
639   // ----------- S t a t e -------------
640   //  -- eax    : the value to pass to the generator
641   //  -- edx    : the JSGeneratorObject to resume
642   //  -- esp[0] : return address
643   // -----------------------------------
644   // Store input value into generator object.
645   __ mov(FieldOperand(edx, JSGeneratorObject::kInputOrDebugPosOffset), eax);
646   Register object = WriteBarrierDescriptor::ObjectRegister();
647   __ mov(object, edx);
648   __ RecordWriteField(object, JSGeneratorObject::kInputOrDebugPosOffset, eax,
649                       WriteBarrierDescriptor::SlotAddressRegister(),
650                       SaveFPRegsMode::kIgnore);
651   // Check that edx is still valid; RecordWrite might have clobbered it.
652   __ AssertGeneratorObject(edx);
653 
654   // Load suspended function and context.
655   __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
656   __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
657 
658   // Flood function if we are stepping.
659   Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
660   Label stepping_prepared;
661   ExternalReference debug_hook =
662       ExternalReference::debug_hook_on_function_call_address(masm->isolate());
663   __ cmpb(__ ExternalReferenceAsOperand(debug_hook, ecx), Immediate(0));
664   __ j(not_equal, &prepare_step_in_if_stepping);
665 
666   // Flood function if we need to continue stepping in the suspended generator.
667   ExternalReference debug_suspended_generator =
668       ExternalReference::debug_suspended_generator_address(masm->isolate());
669   __ cmp(edx, __ ExternalReferenceAsOperand(debug_suspended_generator, ecx));
670   __ j(equal, &prepare_step_in_suspended_generator);
671   __ bind(&stepping_prepared);
672 
673   // Check the stack for overflow. We are not trying to catch interruptions
674   // (i.e. debug break and preemption) here, so check the "real stack limit".
675   Label stack_overflow;
676   __ CompareStackLimit(esp, StackLimitKind::kRealStackLimit);
677   __ j(below, &stack_overflow);
678 
679   // Pop return address.
680   __ PopReturnAddressTo(eax);
681 
682   // ----------- S t a t e -------------
683   //  -- eax    : return address
684   //  -- edx    : the JSGeneratorObject to resume
685   //  -- edi    : generator function
686   //  -- esi    : generator context
687   // -----------------------------------
688 
689   {
690     __ movd(xmm0, ebx);
691 
692     // Copy the function arguments from the generator object's register file.
693     __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
694     __ movzx_w(ecx, FieldOperand(
695                         ecx, SharedFunctionInfo::kFormalParameterCountOffset));
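    // The receiver is pushed separately below, so when the argument count
    // convention includes it, drop one from the formal parameter count.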
696     if (kJSArgcIncludesReceiver) {
697       __ dec(ecx);
698     }
699     __ mov(ebx,
700            FieldOperand(edx, JSGeneratorObject::kParametersAndRegistersOffset));
701     {
702       Label done_loop, loop;
703       __ bind(&loop);
704       __ dec(ecx);
705       __ j(less, &done_loop);
706       __ Push(
707           FieldOperand(ebx, ecx, times_tagged_size, FixedArray::kHeaderSize));
708       __ jmp(&loop);
709       __ bind(&done_loop);
710     }
711 
712     // Push receiver.
713     __ Push(FieldOperand(edx, JSGeneratorObject::kReceiverOffset));
714 
715     // Restore registers.
716     __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
717     __ movd(ebx, xmm0);
718   }
719 
720   // Underlying function needs to have bytecode available.
721   if (FLAG_debug_code) {
722     Label is_baseline, ok;
723     __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
724     __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFunctionDataOffset));
725     __ Push(eax);
726     GetSharedFunctionInfoBytecodeOrBaseline(masm, ecx, eax, &is_baseline);
727     __ Pop(eax);
728 
729     __ CmpObjectType(ecx, BYTECODE_ARRAY_TYPE, ecx);
730     __ Assert(equal, AbortReason::kMissingBytecodeArray);
731     __ jmp(&ok);
732 
733     __ bind(&is_baseline);
734     __ Pop(eax);
735     __ CmpObjectType(ecx, CODET_TYPE, ecx);
736     __ Assert(equal, AbortReason::kMissingBytecodeArray);
737 
738     __ bind(&ok);
739   }
740 
741   // Resume (Ignition/TurboFan) generator object.
742   {
743     __ PushReturnAddressFrom(eax);
744     __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
745     __ movzx_w(eax, FieldOperand(
746                         eax, SharedFunctionInfo::kFormalParameterCountOffset));
747     // We abuse new.target both to indicate that this is a resume call and to
748     // pass in the generator object.  In ordinary calls, new.target is always
749     // undefined because generator functions are non-constructable.
750     static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
751     __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
752     __ JumpCodeObject(ecx);
753   }
754 
755   __ bind(&prepare_step_in_if_stepping);
756   {
757     FrameScope scope(masm, StackFrame::INTERNAL);
758     __ Push(edx);
759     __ Push(edi);
760     // Push hole as receiver since we do not use it for stepping.
761     __ PushRoot(RootIndex::kTheHoleValue);
762     __ CallRuntime(Runtime::kDebugOnFunctionCall);
763     __ Pop(edx);
764     __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
765   }
766   __ jmp(&stepping_prepared);
767 
768   __ bind(&prepare_step_in_suspended_generator);
769   {
770     FrameScope scope(masm, StackFrame::INTERNAL);
771     __ Push(edx);
772     __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
773     __ Pop(edx);
774     __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
775   }
776   __ jmp(&stepping_prepared);
777 
778   __ bind(&stack_overflow);
779   {
780     FrameScope scope(masm, StackFrame::INTERNAL);
781     __ CallRuntime(Runtime::kThrowStackOverflow);
782     __ int3();  // This should be unreachable.
783   }
784 }
785 
786 static void ReplaceClosureCodeWithOptimizedCode(MacroAssembler* masm,
787                                                 Register optimized_code,
788                                                 Register closure,
789                                                 Register value,
790                                                 Register slot_address) {
791   ASM_CODE_COMMENT(masm);
792   // Store the optimized code in the closure.
793   __ mov(FieldOperand(closure, JSFunction::kCodeOffset), optimized_code);
794   __ mov(value, optimized_code);  // Write barrier clobbers slot_address below.
795   __ RecordWriteField(closure, JSFunction::kCodeOffset, value, slot_address,
796                       SaveFPRegsMode::kIgnore, RememberedSetAction::kOmit,
797                       SmiCheck::kOmit);
798 }
799 
800 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
801                                   Register scratch2) {
802   ASM_CODE_COMMENT(masm);
803   Register params_size = scratch1;
804   // Get the size of the formal parameters + receiver (in bytes).
805   __ mov(params_size,
806          Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
807   __ mov(params_size,
808          FieldOperand(params_size, BytecodeArray::kParameterSizeOffset));
809 
810   Register actual_params_size = scratch2;
811   // Compute the size of the actual parameters + receiver (in bytes).
812   __ mov(actual_params_size, Operand(ebp, StandardFrameConstants::kArgCOffset));
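  // The lea converts the slot count into bytes, adding one extra slot for the
  // receiver when the argument count convention excludes it.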
813   __ lea(actual_params_size,
814          Operand(actual_params_size, times_system_pointer_size,
815                  kJSArgcIncludesReceiver ? 0 : kSystemPointerSize));
816 
817   // If actual is bigger than formal, then we should use it to free up the stack
818   // arguments.
819   Label corrected_args_count;
820   __ cmp(params_size, actual_params_size);
821   __ j(greater_equal, &corrected_args_count, Label::kNear);
822   __ mov(params_size, actual_params_size);
823   __ bind(&corrected_args_count);
824 
825   // Leave the frame (also dropping the register file).
826   __ leave();
827 
828   // Drop receiver + arguments.
829   __ DropArguments(params_size, scratch2, TurboAssembler::kCountIsBytes,
830                    TurboAssembler::kCountIncludesReceiver);
831 }
832 
833 // Tail-call |function_id| if |actual_marker| == |expected_marker|.
834 static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
835                                           Register actual_marker,
836                                           OptimizationMarker expected_marker,
837                                           Runtime::FunctionId function_id) {
838   ASM_CODE_COMMENT(masm);
839   Label no_match;
840   __ cmp(actual_marker, expected_marker);
841   __ j(not_equal, &no_match, Label::kNear);
842   GenerateTailCallToReturnedCode(masm, function_id);
843   __ bind(&no_match);
844 }
845 
846 static void TailCallOptimizedCodeSlot(MacroAssembler* masm,
847                                       Register optimized_code_entry) {
848   // ----------- S t a t e -------------
849   //  -- eax : actual argument count
850   //  -- edx : new target (preserved for callee if needed, and caller)
851   //  -- edi : target function (preserved for callee if needed, and caller)
852   // -----------------------------------
853   ASM_CODE_COMMENT(masm);
854   DCHECK(!AreAliased(edx, edi, optimized_code_entry));
855 
856   Register closure = edi;
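  // Preserve the caller's eax (argument count) and edx (new target); both are
  // clobbered below and restored before jumping to the optimized code.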
857   __ Push(eax);
858   __ Push(edx);
859 
860   Label heal_optimized_code_slot;
861 
862   // If the optimized code is cleared, go to runtime to update the optimization
863   // marker field.
864   __ LoadWeakValue(optimized_code_entry, &heal_optimized_code_slot);
865 
866   // Check if the optimized code is marked for deopt. If it is, bailout to a
867   // given label.
868   __ mov(eax,
869          FieldOperand(optimized_code_entry, Code::kCodeDataContainerOffset));
870   __ test(FieldOperand(eax, CodeDataContainer::kKindSpecificFlagsOffset),
871           Immediate(1 << Code::kMarkedForDeoptimizationBit));
872   __ j(not_zero, &heal_optimized_code_slot);
873 
874   // Optimized code is good, get it into the closure and link the closure
875   // into the optimized functions list, then tail call the optimized code.
876   __ Push(optimized_code_entry);
877   ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure, edx,
878                                       ecx);
879   static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
880   __ Pop(optimized_code_entry);
881   __ LoadCodeObjectEntry(ecx, optimized_code_entry);
882   __ Pop(edx);
883   __ Pop(eax);
884   __ jmp(ecx);
885 
886   // Optimized code slot contains deoptimized code or code is cleared and
887   // optimized code marker isn't updated. Evict the code, update the marker
888   // and re-enter the closure's code.
889   __ bind(&heal_optimized_code_slot);
890   __ Pop(edx);
891   __ Pop(eax);
892   GenerateTailCallToReturnedCode(masm, Runtime::kHealOptimizedCodeSlot);
893 }
894 
895 static void MaybeOptimizeCode(MacroAssembler* masm,
896                               Register optimization_marker) {
897   // ----------- S t a t e -------------
898   //  -- eax : actual argument count
899   //  -- edx : new target (preserved for callee if needed, and caller)
900   //  -- edi : target function (preserved for callee if needed, and caller)
901   //  -- optimization_marker : a Smi containing a non-zero optimization marker.
902   // -----------------------------------
903   ASM_CODE_COMMENT(masm);
904   DCHECK(!AreAliased(edx, edi, optimization_marker));
905 
906   // TODO(v8:8394): The logging of first execution will break if
907   // feedback vectors are not allocated. We need to find a different way of
908   // logging these events if required.
909   TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
910                                 OptimizationMarker::kLogFirstExecution,
911                                 Runtime::kFunctionFirstExecution);
912   TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
913                                 OptimizationMarker::kCompileOptimized,
914                                 Runtime::kCompileOptimized_NotConcurrent);
915   TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
916                                 OptimizationMarker::kCompileOptimizedConcurrent,
917                                 Runtime::kCompileOptimized_Concurrent);
918 
919   // Marker should be one of LogFirstExecution / CompileOptimized /
920   // CompileOptimizedConcurrent. InOptimizationQueue and None shouldn't reach
921   // here.
922   if (FLAG_debug_code) {
923     __ int3();
924   }
925 }
926 
927 // Advance the current bytecode offset. This simulates what all bytecode
928 // handlers do upon completion of the underlying operation. Will bail out to a
929 // label if the bytecode (without prefix) is a return bytecode. Will not advance
930 // the bytecode offset if the current bytecode is a JumpLoop, instead just
931 // re-executing the JumpLoop to jump to the correct bytecode.
932 static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
933                                           Register bytecode_array,
934                                           Register bytecode_offset,
935                                           Register scratch1, Register scratch2,
936                                           Register scratch3, Label* if_return) {
937   ASM_CODE_COMMENT(masm);
938   Register bytecode_size_table = scratch1;
939   Register bytecode = scratch2;
940 
941   // The bytecode offset value will be increased by one in wide and extra wide
942   // cases. In the case of having a wide or extra wide JumpLoop bytecode, we
943   // will restore the original bytecode offset. To simplify the code, we keep
944   // a backup of it in original_bytecode_offset.
945   Register original_bytecode_offset = scratch3;
946   DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
947                      bytecode, original_bytecode_offset));
948   __ Move(bytecode_size_table,
949           Immediate(ExternalReference::bytecode_size_table_address()));
950 
951   // Load the current bytecode.
952   __ movzx_b(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
953   __ Move(original_bytecode_offset, bytecode_offset);
954 
955   // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
956   Label process_bytecode, extra_wide;
957   STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
958   STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
959   STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
960   STATIC_ASSERT(3 ==
961                 static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
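  // The four prefix bytecodes occupy opcodes 0 through 3 (see the asserts
  // above), so a single unsigned compare filters all of them.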
962   __ cmp(bytecode, Immediate(0x3));
963   __ j(above, &process_bytecode, Label::kNear);
964   // The code to load the next bytecode is common to both wide and extra wide.
965   // We can hoist them up here. inc has to happen before test since it
966   // modifies the ZF flag.
967   __ inc(bytecode_offset);
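  // Bit 0 distinguishes Wide (even opcodes) from ExtraWide (odd); the movzx
  // below does not modify the flags the branch consumes.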
968   __ test(bytecode, Immediate(0x1));
969   __ movzx_b(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
970   __ j(not_equal, &extra_wide, Label::kNear);
971 
972   // Load the next bytecode and update table to the wide scaled table.
973   __ add(bytecode_size_table,
974          Immediate(kByteSize * interpreter::Bytecodes::kBytecodeCount));
975   __ jmp(&process_bytecode, Label::kNear);
976 
977   __ bind(&extra_wide);
978   // Update table to the extra wide scaled table.
979   __ add(bytecode_size_table,
980          Immediate(2 * kByteSize * interpreter::Bytecodes::kBytecodeCount));
981 
982   __ bind(&process_bytecode);
983 
984 // Bailout to the return label if this is a return bytecode.
985 #define JUMP_IF_EQUAL(NAME)                                            \
986   __ cmp(bytecode,                                                     \
987          Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
988   __ j(equal, if_return);
989   RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
990 #undef JUMP_IF_EQUAL
991 
992   // If this is a JumpLoop, re-execute it to perform the jump to the beginning
993   // of the loop.
994   Label end, not_jump_loop;
995   __ cmp(bytecode,
996          Immediate(static_cast<int>(interpreter::Bytecode::kJumpLoop)));
997   __ j(not_equal, &not_jump_loop, Label::kNear);
998   // If this is a wide or extra wide JumpLoop, we need to restore the original
999   // bytecode_offset since we might have increased it to skip the wide /
1000   // extra-wide prefix bytecode.
1001   __ Move(bytecode_offset, original_bytecode_offset);
1002   __ jmp(&end, Label::kNear);
1003 
1004   __ bind(&not_jump_loop);
1005   // Otherwise, load the size of the current bytecode and advance the offset.
1006   __ movzx_b(bytecode_size_table,
1007              Operand(bytecode_size_table, bytecode, times_1, 0));
1008   __ add(bytecode_offset, bytecode_size_table);
1009 
1010   __ bind(&end);
1011 }
1012 
1013 // Read off the optimization state in the feedback vector and check if there
1014 // is optimized code or an optimization marker that needs to be processed.
1015 // Registers optimization_state and feedback_vector must be aliased.
1016 static void LoadOptimizationStateAndJumpIfNeedsProcessing(
1017     MacroAssembler* masm, Register optimization_state,
1018     XMMRegister saved_feedback_vector, Label* has_optimized_code_or_marker) {
1019   ASM_CODE_COMMENT(masm);
1020   Register feedback_vector = optimization_state;
1021 
1022   // Store feedback_vector. We may need it if we need to load the optimized
1023   // code slot entry.
1024   __ movd(saved_feedback_vector, feedback_vector);
1025   __ mov(optimization_state,
1026          FieldOperand(feedback_vector, FeedbackVector::kFlagsOffset));
1027 
1028   // Check if there is optimized code or an optimization marker that needs to
1029   // be processed.
1030   __ test(
1031       optimization_state,
1032       Immediate(FeedbackVector::kHasOptimizedCodeOrCompileOptimizedMarkerMask));
1033   __ j(not_zero, has_optimized_code_or_marker);
1034 }
1035 
1036 static void MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(
1037     MacroAssembler* masm, Register optimization_state,
1038     XMMRegister saved_feedback_vector) {
1039   ASM_CODE_COMMENT(masm);
1040   Label maybe_has_optimized_code;
1041   // Check if optimized code is available.
1042   __ test(
1043       optimization_state,
1044       Immediate(FeedbackVector::kHasCompileOptimizedOrLogFirstExecutionMarker));
1045   __ j(zero, &maybe_has_optimized_code);
1046 
1047   Register optimization_marker = optimization_state;
1048   __ DecodeField<FeedbackVector::OptimizationMarkerBits>(optimization_marker);
1049   MaybeOptimizeCode(masm, optimization_marker);
1050 
1051   __ bind(&maybe_has_optimized_code);
1052   Register optimized_code_entry = optimization_marker;
1053   Register feedback_vector = optimization_marker;
1054   __ movd(feedback_vector, saved_feedback_vector);  // Restore feedback vector.
1055   __ mov(
1056       optimized_code_entry,
1057       FieldOperand(feedback_vector, FeedbackVector::kMaybeOptimizedCodeOffset));
1058   TailCallOptimizedCodeSlot(masm, optimized_code_entry);
1059 }
1060 
1061 // Generate code for entering a JS function with the interpreter.
1062 // On entry to the function the receiver and arguments have been pushed on the
1063 // stack left to right.
1064 //
1065 // The live registers are:
1066 //   o eax: actual argument count
1067 //   o edi: the JS function object being called
1068 //   o edx: the incoming new target or generator object
1069 //   o esi: our context
1070 //   o ebp: the caller's frame pointer
1071 //   o esp: stack pointer (pointing to return address)
1072 //
1073 // The function builds an interpreter frame. See InterpreterFrameConstants in
1074 // frame-constants.h for its layout.
1075 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
1076   Register closure = edi;
1077 
1078   __ movd(xmm0, eax);  // Spill actual argument count.
1079 
1080   // The bytecode array could have been flushed from the shared function info;
1081   // if so, call into CompileLazy.
1082   __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1083   __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFunctionDataOffset));
1084 
1085   Label is_baseline;
1086   GetSharedFunctionInfoBytecodeOrBaseline(masm, ecx, eax, &is_baseline);
1087 
1088   Label compile_lazy;
1089   __ CmpObjectType(ecx, BYTECODE_ARRAY_TYPE, eax);
1090   __ j(not_equal, &compile_lazy);
1091 
1092   Register feedback_vector = ecx;
1093   Label push_stack_frame;
1094   // Load feedback vector and check if it is valid. If valid, check for
1095   // optimized code and update the invocation count. Otherwise, set up the
1096   // stack frame.
1097   __ mov(feedback_vector,
1098          FieldOperand(closure, JSFunction::kFeedbackCellOffset));
1099   __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
1100   __ mov(eax, FieldOperand(feedback_vector, HeapObject::kMapOffset));
1101   __ CmpInstanceType(eax, FEEDBACK_VECTOR_TYPE);
1102   __ j(not_equal, &push_stack_frame);
1103 
1104   // Load the optimization state from the feedback vector and re-use the
1105   // register.
1106   Label has_optimized_code_or_marker;
1107   Register optimization_state = ecx;
1108   LoadOptimizationStateAndJumpIfNeedsProcessing(masm, optimization_state, xmm1,
1109                                                 &has_optimized_code_or_marker);
1110 
1111   Label not_optimized;
1112   __ bind(&not_optimized);
1113 
1114   // Load the feedback vector and increment the invocation count.
1115   __ mov(feedback_vector,
1116          FieldOperand(closure, JSFunction::kFeedbackCellOffset));
1117   __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
1118   __ inc(FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
1119 
1120   // Open a frame scope to indicate that there is a frame on the stack.  The
1121   // MANUAL indicates that the scope shouldn't actually generate code to set
1122   // up the frame (that is done below).
1123   __ bind(&push_stack_frame);
1124   FrameScope frame_scope(masm, StackFrame::MANUAL);
1125   __ push(ebp);  // Caller's frame pointer.
1126   __ mov(ebp, esp);
1127   __ push(kContextRegister);               // Callee's context.
1128   __ push(kJavaScriptCallTargetRegister);  // Callee's JS function.
1129   __ movd(kJavaScriptCallArgCountRegister, xmm0);
1130   __ push(kJavaScriptCallArgCountRegister);  // Actual argument count.
1131 
1132   // Get the bytecode array from the function object and load it into
1133   // kInterpreterBytecodeArrayRegister.
1134   __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1135   __ mov(kInterpreterBytecodeArrayRegister,
1136          FieldOperand(eax, SharedFunctionInfo::kFunctionDataOffset));
1137   GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, eax);
1138 
1139   // Check function data field is actually a BytecodeArray object.
1140   if (FLAG_debug_code) {
1141     __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
1142     __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
1143                      eax);
1144     __ Assert(
1145         equal,
1146         AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1147   }
1148 
1149   // Reset code age and the OSR arming. The OSR field and BytecodeAgeOffset are
1150   // 8-bit fields next to each other, so we can optimize by writing a single
1151   // 16-bit value. These static asserts guard that our assumption is valid.
1152   STATIC_ASSERT(BytecodeArray::kBytecodeAgeOffset ==
1153                 BytecodeArray::kOsrLoopNestingLevelOffset + kCharSize);
1154   STATIC_ASSERT(BytecodeArray::kNoAgeBytecodeAge == 0);
1155   __ mov_w(FieldOperand(kInterpreterBytecodeArrayRegister,
1156                         BytecodeArray::kOsrLoopNestingLevelOffset),
1157            Immediate(0));
1158 
1159   // Push bytecode array.
1160   __ push(kInterpreterBytecodeArrayRegister);
1161   // Push Smi tagged initial bytecode array offset.
1162   __ push(Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag)));
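  // kHeaderSize - kHeapObjectTag is the offset of the first bytecode relative
  // to the tagged BytecodeArray pointer; the dispatch loop below indexes with
  // the same convention.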
1163 
1164   // Allocate the local and temporary register file on the stack.
1165   Label stack_overflow;
1166   {
1167     // Load frame size from the BytecodeArray object.
1168     Register frame_size = ecx;
1169     __ mov(frame_size, FieldOperand(kInterpreterBytecodeArrayRegister,
1170                                     BytecodeArray::kFrameSizeOffset));
1171 
1172     // Do a stack check to ensure we don't go over the limit.
1173     __ mov(eax, esp);
1174     __ sub(eax, frame_size);
1175     __ CompareStackLimit(eax, StackLimitKind::kRealStackLimit);
1176     __ j(below, &stack_overflow);
1177 
1178     // If ok, push undefined as the initial value for all register file entries.
1179     Label loop_header;
1180     Label loop_check;
1181     __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1182     __ jmp(&loop_check);
1183     __ bind(&loop_header);
1184     // TODO(rmcilroy): Consider doing more than one push per loop iteration.
1185     __ push(kInterpreterAccumulatorRegister);
1186     // Continue loop if not done.
1187     __ bind(&loop_check);
1188     __ sub(frame_size, Immediate(kSystemPointerSize));
1189     __ j(greater_equal, &loop_header);
1190   }
1191 
1192   // If the bytecode array has a valid incoming new target or generator object
1193   // register, initialize it with the incoming value, which was passed in edx.
1194   Label no_incoming_new_target_or_generator_register;
1195   __ mov(ecx, FieldOperand(
1196                   kInterpreterBytecodeArrayRegister,
1197                   BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
1198   __ test(ecx, ecx);
1199   __ j(zero, &no_incoming_new_target_or_generator_register);
1200   __ mov(Operand(ebp, ecx, times_system_pointer_size, 0), edx);
1201   __ bind(&no_incoming_new_target_or_generator_register);
1202 
1203   // Perform interrupt stack check.
1204   // TODO(solanes): Merge with the real stack limit check above.
1205   Label stack_check_interrupt, after_stack_check_interrupt;
1206   __ CompareStackLimit(esp, StackLimitKind::kInterruptStackLimit);
1207   __ j(below, &stack_check_interrupt);
1208   __ bind(&after_stack_check_interrupt);
1209 
1210   // The accumulator is already loaded with undefined.
1211 
1212   __ mov(kInterpreterBytecodeOffsetRegister,
1213          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
1214 
1215   // Load the dispatch table into a register and dispatch to the bytecode
1216   // handler at the current bytecode offset.
1217   Label do_dispatch;
1218   __ bind(&do_dispatch);
1219   __ Move(kInterpreterDispatchTableRegister,
1220           Immediate(ExternalReference::interpreter_dispatch_table_address(
1221               masm->isolate())));
1222   __ movzx_b(ecx, Operand(kInterpreterBytecodeArrayRegister,
1223                           kInterpreterBytecodeOffsetRegister, times_1, 0));
1224   __ mov(kJavaScriptCallCodeStartRegister,
1225          Operand(kInterpreterDispatchTableRegister, ecx,
1226                  times_system_pointer_size, 0));
1227   __ call(kJavaScriptCallCodeStartRegister);
1228   masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
1229 
1230   // Any returns to the entry trampoline are either due to the return bytecode
1231   // or the interpreter tail calling a builtin and then a dispatch.
1232 
1233   // Get bytecode array and bytecode offset from the stack frame.
1234   __ mov(kInterpreterBytecodeArrayRegister,
1235          Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1236   __ mov(kInterpreterBytecodeOffsetRegister,
1237          Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1238   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1239 
1240   // Either return, or advance to the next bytecode and dispatch.
1241   Label do_return;
1242   __ Push(eax);
1243   AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1244                                 kInterpreterBytecodeOffsetRegister, ecx,
1245                                 kInterpreterDispatchTableRegister, eax,
1246                                 &do_return);
1247   __ Pop(eax);
1248   __ jmp(&do_dispatch);
1249 
1250   __ bind(&do_return);
1251   __ Pop(eax);
1252   // The return value is in eax.
1253   LeaveInterpreterFrame(masm, edx, ecx);
1254   __ ret(0);
1255 
1256   __ bind(&stack_check_interrupt);
1257   // Modify the bytecode offset in the stack to be kFunctionEntryBytecodeOffset
1258   // for the call to the StackGuard.
1259   __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp),
1260          Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag +
1261                                 kFunctionEntryBytecodeOffset)));
1262   __ CallRuntime(Runtime::kStackGuard);
1263 
1264   // After the call, restore the bytecode array, bytecode offset and accumulator
1265   // registers again. Also, restore the bytecode offset in the stack to its
1266   // previous value.
1267   __ mov(kInterpreterBytecodeArrayRegister,
1268          Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1269   __ mov(kInterpreterBytecodeOffsetRegister,
1270          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
1271   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1272 
1273   // It's ok to clobber kInterpreterBytecodeOffsetRegister since we are setting
1274   // it again after continuing.
1275   __ SmiTag(kInterpreterBytecodeOffsetRegister);
1276   __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp),
1277          kInterpreterBytecodeOffsetRegister);
1278 
1279   __ jmp(&after_stack_check_interrupt);
1280 
1281   __ bind(&has_optimized_code_or_marker);
1282   {
1283     // Restore actual argument count.
1284     __ movd(eax, xmm0);
1285     MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(masm, optimization_state,
1286                                                  xmm1);
1287   }
1288 
1289   __ bind(&compile_lazy);
1290   // Restore actual argument count.
1291   __ movd(eax, xmm0);
1292   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1293 
1294   __ bind(&is_baseline);
1295   {
1296     __ movd(xmm2, ecx);  // Save baseline data.
1297     // Load the feedback vector from the closure.
1298     __ mov(feedback_vector,
1299            FieldOperand(closure, JSFunction::kFeedbackCellOffset));
1300     __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
1301 
1302     Label install_baseline_code;
1303     // Check if feedback vector is valid. If not, call prepare for baseline to
1304     // allocate it.
1305     __ LoadMap(eax, feedback_vector);
1306     __ CmpInstanceType(eax, FEEDBACK_VECTOR_TYPE);
1307     __ j(not_equal, &install_baseline_code);
1308 
1309     // Check for an optimization marker.
1310     LoadOptimizationStateAndJumpIfNeedsProcessing(
1311         masm, optimization_state, xmm1, &has_optimized_code_or_marker);
1312 
1313     // Load the baseline code into the closure.
1314     __ movd(ecx, xmm2);
1315     static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
1316     __ push(edx);  // Spill.
1317     __ push(ecx);
1318     __ Push(xmm0, eax);  // Save the argument count (currently in xmm0).
1319     ReplaceClosureCodeWithOptimizedCode(masm, ecx, closure, eax, ecx);
1320     __ pop(eax);  // Restore the argument count.
1321     __ pop(ecx);
1322     __ pop(edx);
1323     __ JumpCodeObject(ecx);
1324 
1325     __ bind(&install_baseline_code);
1326     __ movd(eax, xmm0);  // Recover argument count.
1327     GenerateTailCallToReturnedCode(masm, Runtime::kInstallBaselineCode);
1328   }
1329 
1330   __ bind(&stack_overflow);
1331   __ CallRuntime(Runtime::kThrowStackOverflow);
1332   __ int3();  // Should not return.
1333 }
1334 
1335 static void GenerateInterpreterPushArgs(MacroAssembler* masm,
1336                                         Register array_limit,
1337                                         Register start_address) {
1338   // ----------- S t a t e -------------
1339   //  -- start_address : Pointer to the last argument in the args array.
1340   //  -- array_limit : Pointer to one before the first argument in the
1341   //                   args array.
1342   // -----------------------------------
1343   ASM_CODE_COMMENT(masm);
1344   Label loop_header, loop_check;
1345   __ jmp(&loop_check);
1346   __ bind(&loop_header);
1347   __ Push(Operand(array_limit, 0));
1348   __ bind(&loop_check);
1349   __ add(array_limit, Immediate(kSystemPointerSize));
1350   __ cmp(array_limit, start_address);
1351   __ j(below_equal, &loop_header, Label::kNear);
1352 }
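// Roughly equivalent C-style sketch of the loop above (illustrative only; the
// pointer arithmetic is spelled out from the code, not quoted from it):
//   for (Address p = array_limit + kSystemPointerSize; p <= start_address;
//        p += kSystemPointerSize) {
//     Push(*reinterpret_cast<intptr_t*>(p));
//   }
// i.e. every slot in (array_limit, start_address] is pushed, walking upwards
// through memory.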
1353 
1354 // static
1355 void Builtins::Generate_InterpreterPushArgsThenCallImpl(
1356     MacroAssembler* masm, ConvertReceiverMode receiver_mode,
1357     InterpreterPushArgsMode mode) {
1358   DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
1359   // ----------- S t a t e -------------
1360   //  -- eax : the number of arguments
1361   //  -- ecx : the address of the first argument to be pushed. Subsequent
1362   //           arguments should be consecutive above this, in the same order as
1363   //           they are to be pushed onto the stack.
1364   //  -- edi : the target to call (can be any Object).
1365   // -----------------------------------
1366 
1367   const Register scratch = edx;
1368   const Register argv = ecx;
1369 
1370   Label stack_overflow;
1371   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1372     // The spread argument should not be pushed.
1373     __ dec(eax);
1374   }
1375 
1376   // Add a stack check before pushing the arguments.
1377   __ StackOverflowCheck(eax, scratch, &stack_overflow, true);
1378   __ movd(xmm0, eax);  // Spill number of arguments.
1379 
1380   // Compute the expected number of arguments.
1381   int argc_modification = kJSArgcIncludesReceiver ? 0 : 1;
1382   if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1383     argc_modification -= 1;
1384   }
1385   if (argc_modification != 0) {
1386     __ lea(scratch, Operand(eax, argc_modification));
1387   } else {
1388     __ mov(scratch, eax);
1389   }
1390 
1391   // Pop return address to allow tail-call after pushing arguments.
1392   __ PopReturnAddressTo(eax);
1393 
1394   // Find the address of the last argument.
1395   __ shl(scratch, kSystemPointerSizeLog2);
1396   __ neg(scratch);
1397   __ add(scratch, argv);
1398 
1399   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1400     __ movd(xmm1, scratch);
1401     GenerateInterpreterPushArgs(masm, scratch, argv);
1402     // Pass the spread in the register ecx.
1403     __ movd(ecx, xmm1);
1404     __ mov(ecx, Operand(ecx, 0));
1405   } else {
1406     GenerateInterpreterPushArgs(masm, scratch, argv);
1407   }
1408 
1409   // Push "undefined" as the receiver arg if we need to.
1410   if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1411     __ PushRoot(RootIndex::kUndefinedValue);
1412   }
1413 
1414   __ PushReturnAddressFrom(eax);
1415   __ movd(eax, xmm0);  // Restore number of arguments.
1416 
1417   // Call the target.
1418   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1419     __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
1420             RelocInfo::CODE_TARGET);
1421   } else {
1422     __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
1423             RelocInfo::CODE_TARGET);
1424   }
1425 
1426   __ bind(&stack_overflow);
1427   {
1428     __ TailCallRuntime(Runtime::kThrowStackOverflow);
1429 
1430     // This should be unreachable.
1431     __ int3();
1432   }
1433 }
1434 
1435 namespace {
1436 
1437 // This function modifies start_addr, and only reads the contents of num_args
1438 // register. scratch1 and scratch2 are used as temporary registers.
1439 void Generate_InterpreterPushZeroAndArgsAndReturnAddress(
1440     MacroAssembler* masm, Register num_args, Register start_addr,
1441     Register scratch1, Register scratch2, int num_slots_to_move,
1442     Label* stack_overflow) {
1443   // We have to move the return address and the temporary registers above it
1444   // before we can copy arguments onto the stack. To achieve this:
1445   // Step 1: Grow the stack by num_args slots (+ 1 for the receiver if it is
1446   // not already included in argc). Step 2: Move the return address and values
1447   // around it to the top of stack. Step 3: Copy the arguments into the correct
1448   // locations.
1449   //  current stack    =====>    required stack layout
1450   // |             |            | return addr   | (2) <-- esp (1)
1451   // |             |            | addtl. slot   |
1452   // |             |            | arg N         | (3)
1453   // |             |            | ....          |
1454   // |             |            | arg 1         |
1455   // | return addr | <-- esp    | arg 0         |
1456   // | addtl. slot |            | receiver slot |
1457 
1458   // Check for stack overflow before we increment the stack pointer.
1459   __ StackOverflowCheck(num_args, scratch1, stack_overflow, true);
1460 
1461   // Step 1 - Update the stack pointer.
1462 
1463   constexpr int receiver_offset =
1464       kJSArgcIncludesReceiver ? 0 : kSystemPointerSize;
1465   __ lea(scratch1,
1466          Operand(num_args, times_system_pointer_size, receiver_offset));
1467   __ AllocateStackSpace(scratch1);
1468 
1469   // Step 2: Move the return address and the slots around it to their new
1470   // locations. Move from top to bottom, otherwise we may overwrite data when
1471   // num_args = 0 or 1, i.e. when the source and destination overlap. At least
1472   // one extra slot (for the receiver) always exists, so the copy stays safe.
1473   for (int i = 0; i < num_slots_to_move + 1; i++) {
1474     __ mov(scratch1, Operand(esp, num_args, times_system_pointer_size,
1475                              i * kSystemPointerSize + receiver_offset));
1476     __ mov(Operand(esp, i * kSystemPointerSize), scratch1);
1477   }
1478 
1479   // Step 3: Copy the arguments to their correct locations.
1480   // The slot meant for the receiver still contains the return address. Reset it
1481   // so that we will not incorrectly interpret the return address as an object.
1482   __ mov(Operand(esp, (num_slots_to_move + 1) * kSystemPointerSize),
1483          Immediate(0));
1484   __ mov(scratch1, Immediate(0));
1485 
1486   Label loop_header, loop_check;
1487   __ jmp(&loop_check);
1488   __ bind(&loop_header);
1489   __ mov(scratch2, Operand(start_addr, 0));
1490   __ mov(Operand(esp, scratch1, times_system_pointer_size,
1491                  (num_slots_to_move + 1) * kSystemPointerSize),
1492          scratch2);
1493   __ sub(start_addr, Immediate(kSystemPointerSize));
1494   __ bind(&loop_check);
1495   __ inc(scratch1);
1496   __ cmp(scratch1, eax);
1497   if (kJSArgcIncludesReceiver) {
1498     __ j(less, &loop_header, Label::kNear);
1499   } else {
1500     __ j(less_equal, &loop_header, Label::kNear);
1501   }
1502 }
1503 
1504 }  // anonymous namespace
1505 
1506 // static
1507 void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
1508     MacroAssembler* masm, InterpreterPushArgsMode mode) {
1509   // ----------- S t a t e -------------
1510   //  -- eax     : the number of arguments
1511   //  -- ecx     : the address of the first argument to be pushed. Subsequent
1512   //               arguments should be consecutive above this, in the same order
1513   //               as they are to be pushed onto the stack.
1514   //  -- esp[0]  : return address
1515   //  -- esp[4]  : allocation site feedback (if available or undefined)
1516   //  -- esp[8]  : the new target
1517   //  -- esp[12] : the constructor
1518   // -----------------------------------
1519   Label stack_overflow;
1520 
1521   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1522     // The spread argument should not be pushed.
1523     __ dec(eax);
1524   }
1525 
1526   // Push the arguments and move the return address and stack spill slots to the
1527   // top of the stack. The eax register is read-only; ecx will be modified. edx
1528   // and edi are used as scratch registers.
1529   Generate_InterpreterPushZeroAndArgsAndReturnAddress(
1530       masm, eax, ecx, edx, edi,
1531       InterpreterPushArgsThenConstructDescriptor::GetStackParameterCount(),
1532       &stack_overflow);
1533 
1534   // Call the appropriate constructor. eax and ecx already contain the intended
1535   // values; the remaining registers still need to be initialized from the stack.
1536 
1537   if (mode == InterpreterPushArgsMode::kArrayFunction) {
1538     // Tail call to the array construct stub (still in the caller context at
1539     // this point).
1540 
1541     __ movd(xmm0, eax);  // Spill number of arguments.
1542     __ PopReturnAddressTo(eax);
1543     __ Pop(kJavaScriptCallExtraArg1Register);
1544     __ Pop(kJavaScriptCallNewTargetRegister);
1545     __ Pop(kJavaScriptCallTargetRegister);
1546     __ PushReturnAddressFrom(eax);
1547 
1548     __ AssertFunction(kJavaScriptCallTargetRegister, eax);
1549     __ AssertUndefinedOrAllocationSite(kJavaScriptCallExtraArg1Register, eax);
1550 
1551     __ movd(eax, xmm0);  // Reload number of arguments.
1552     __ Jump(BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl),
1553             RelocInfo::CODE_TARGET);
1554   } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1555     __ movd(xmm0, eax);  // Spill number of arguments.
1556     __ PopReturnAddressTo(eax);
1557     __ Drop(1);  // The allocation site is unused.
1558     __ Pop(kJavaScriptCallNewTargetRegister);
1559     __ Pop(kJavaScriptCallTargetRegister);
1560     // Pass the spread in the register ecx, overwriting ecx.
1561     __ mov(ecx, Operand(ecx, 0));
1562     __ PushReturnAddressFrom(eax);
1563     __ movd(eax, xmm0);  // Reload number of arguments.
1564     __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
1565             RelocInfo::CODE_TARGET);
1566   } else {
1567     DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1568     __ PopReturnAddressTo(ecx);
1569     __ Drop(1);  // The allocation site is unused.
1570     __ Pop(kJavaScriptCallNewTargetRegister);
1571     __ Pop(kJavaScriptCallTargetRegister);
1572     __ PushReturnAddressFrom(ecx);
1573 
1574     __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
1575   }
1576 
1577   __ bind(&stack_overflow);
1578   __ TailCallRuntime(Runtime::kThrowStackOverflow);
1579   __ int3();
1580 }
1581 
1582 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1583   // Set the return address to the correct point in the interpreter entry
1584   // trampoline.
1585   Label builtin_trampoline, trampoline_loaded;
1586   Smi interpreter_entry_return_pc_offset(
1587       masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1588   DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());
1589 
1590   static constexpr Register scratch = ecx;
1591 
1592   // If the SFI function_data is an InterpreterData, the function will have a
1593   // custom copy of the interpreter entry trampoline for profiling. If so,
1594   // get the custom trampoline, otherwise grab the entry address of the global
1595   // trampoline.
1596   __ mov(scratch, Operand(ebp, StandardFrameConstants::kFunctionOffset));
1597   __ mov(scratch, FieldOperand(scratch, JSFunction::kSharedFunctionInfoOffset));
1598   __ mov(scratch,
1599          FieldOperand(scratch, SharedFunctionInfo::kFunctionDataOffset));
1600   __ Push(eax);
1601   __ CmpObjectType(scratch, INTERPRETER_DATA_TYPE, eax);
1602   __ j(not_equal, &builtin_trampoline, Label::kNear);
1603 
1604   __ mov(scratch,
1605          FieldOperand(scratch, InterpreterData::kInterpreterTrampolineOffset));
1606   __ add(scratch, Immediate(Code::kHeaderSize - kHeapObjectTag));
1607   __ jmp(&trampoline_loaded, Label::kNear);
1608 
1609   __ bind(&builtin_trampoline);
1610   __ mov(scratch,
1611          __ ExternalReferenceAsOperand(
1612              ExternalReference::
1613                  address_of_interpreter_entry_trampoline_instruction_start(
1614                      masm->isolate()),
1615              scratch));
1616 
1617   __ bind(&trampoline_loaded);
1618   __ Pop(eax);
1619   __ add(scratch, Immediate(interpreter_entry_return_pc_offset.value()));
1620   __ push(scratch);
1621 
1622   // Initialize the dispatch table register.
1623   __ Move(kInterpreterDispatchTableRegister,
1624           Immediate(ExternalReference::interpreter_dispatch_table_address(
1625               masm->isolate())));
1626 
1627   // Get the bytecode array pointer from the frame.
1628   __ mov(kInterpreterBytecodeArrayRegister,
1629          Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1630 
1631   if (FLAG_debug_code) {
1632     // Check that the function data field is actually a BytecodeArray object.
1633     __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
1634     __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
1635                      scratch);
1636     __ Assert(
1637         equal,
1638         AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1639   }
1640 
1641   // Get the target bytecode offset from the frame.
1642   __ mov(kInterpreterBytecodeOffsetRegister,
1643          Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1644   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1645 
1646   if (FLAG_debug_code) {
1647     Label okay;
1648     __ cmp(kInterpreterBytecodeOffsetRegister,
1649            Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
1650     __ j(greater_equal, &okay, Label::kNear);
1651     __ int3();
1652     __ bind(&okay);
1653   }
1654 
1655   // Dispatch to the target bytecode.
1656   __ movzx_b(scratch, Operand(kInterpreterBytecodeArrayRegister,
1657                               kInterpreterBytecodeOffsetRegister, times_1, 0));
1658   __ mov(kJavaScriptCallCodeStartRegister,
1659          Operand(kInterpreterDispatchTableRegister, scratch,
1660                  times_system_pointer_size, 0));
1661   __ jmp(kJavaScriptCallCodeStartRegister);
1662 }
1663 
1664 void Builtins::Generate_InterpreterEnterAtNextBytecode(MacroAssembler* masm) {
1665   // Get bytecode array and bytecode offset from the stack frame.
1666   __ mov(kInterpreterBytecodeArrayRegister,
1667          Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1668   __ mov(kInterpreterBytecodeOffsetRegister,
1669          Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1670   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1671 
1672   Label enter_bytecode, function_entry_bytecode;
1673   __ cmp(kInterpreterBytecodeOffsetRegister,
1674          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag +
1675                    kFunctionEntryBytecodeOffset));
1676   __ j(equal, &function_entry_bytecode);
1677 
1678   // Advance to the next bytecode.
1679   Label if_return;
1680   __ Push(eax);
1681   AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1682                                 kInterpreterBytecodeOffsetRegister, ecx, esi,
1683                                 eax, &if_return);
1684   __ Pop(eax);
1685 
1686   __ bind(&enter_bytecode);
1687   // Convert new bytecode offset to a Smi and save in the stackframe.
1688   __ mov(ecx, kInterpreterBytecodeOffsetRegister);
1689   __ SmiTag(ecx);
1690   __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp), ecx);
1691 
1692   Generate_InterpreterEnterBytecode(masm);
1693 
1694   __ bind(&function_entry_bytecode);
1695   // If the code deoptimizes during the implicit function entry stack interrupt
1696   // check, it will have a bailout ID of kFunctionEntryBytecodeOffset, which is
1697   // not a valid bytecode offset. Detect this case and advance to the first
1698   // actual bytecode.
1699   __ mov(kInterpreterBytecodeOffsetRegister,
1700          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
1701   __ jmp(&enter_bytecode);
1702 
1703   // We should never take the if_return path.
1704   __ bind(&if_return);
1705   // No need to pop eax here since we will be aborting anyway.
1706   __ Abort(AbortReason::kInvalidBytecodeAdvance);
1707 }
1708 
1709 void Builtins::Generate_InterpreterEnterAtBytecode(MacroAssembler* masm) {
1710   Generate_InterpreterEnterBytecode(masm);
1711 }
1712 // static
1713 void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
1714   auto descriptor =
1715       Builtins::CallInterfaceDescriptorFor(Builtin::kBaselineOutOfLinePrologue);
1716   Register arg_count = descriptor.GetRegisterParameter(
1717       BaselineOutOfLinePrologueDescriptor::kJavaScriptCallArgCount);
1718   Register frame_size = descriptor.GetRegisterParameter(
1719       BaselineOutOfLinePrologueDescriptor::kStackFrameSize);
1720 
1721   // Save argument count and bytecode array.
1722   XMMRegister saved_arg_count = xmm0;
1723   XMMRegister saved_bytecode_array = xmm1;
1724   XMMRegister saved_frame_size = xmm2;
1725   XMMRegister saved_feedback_vector = xmm3;
1726   __ movd(saved_arg_count, arg_count);
1727   __ movd(saved_frame_size, frame_size);
1728 
1729   // Use the arg count (eax) as the scratch register.
1730   Register scratch = arg_count;
1731 
1732   // Load the feedback vector from the closure.
1733   Register feedback_vector = ecx;
1734   Register closure = descriptor.GetRegisterParameter(
1735       BaselineOutOfLinePrologueDescriptor::kClosure);
1736   __ mov(feedback_vector,
1737          FieldOperand(closure, JSFunction::kFeedbackCellOffset));
1738   __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
1739   if (FLAG_debug_code) {
1740     __ CmpObjectType(feedback_vector, FEEDBACK_VECTOR_TYPE, scratch);
1741     __ Assert(equal, AbortReason::kExpectedFeedbackVector);
1742   }
1743 
1744   // Load the optimization state from the feedback vector and re-use the
1745   // register.
1746   Label has_optimized_code_or_marker;
1747   Register optimization_state = ecx;
1748   LoadOptimizationStateAndJumpIfNeedsProcessing(masm, optimization_state,
1749                                                 saved_feedback_vector,
1750                                                 &has_optimized_code_or_marker);
1751 
1752   // Load the feedback vector and increment the invocation count.
1753   __ movd(feedback_vector, saved_feedback_vector);
1754   __ inc(FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
1755 
1756   XMMRegister return_address = xmm4;
1757   // Save the return address, so that we can push it to the end of the newly
1758   // set-up frame once we're done setting it up.
1759   __ PopReturnAddressTo(return_address, scratch);
1760   // The bytecode array was pushed to the stack by the caller.
1761   __ Pop(saved_bytecode_array, scratch);
1762   FrameScope frame_scope(masm, StackFrame::MANUAL);
1763   {
1764     ASM_CODE_COMMENT_STRING(masm, "Frame Setup");
1765     __ EnterFrame(StackFrame::BASELINE);
1766 
1767     __ Push(descriptor.GetRegisterParameter(
1768         BaselineOutOfLinePrologueDescriptor::kCalleeContext));  // Callee's
1769                                                                 // context.
1770     Register callee_js_function = descriptor.GetRegisterParameter(
1771         BaselineOutOfLinePrologueDescriptor::kClosure);
1772     DCHECK_EQ(callee_js_function, kJavaScriptCallTargetRegister);
1773     DCHECK_EQ(callee_js_function, kJSFunctionRegister);
1774     __ Push(callee_js_function);        // Callee's JS function.
1775     __ Push(saved_arg_count, scratch);  // Push actual argument count.
1776 
1777     // We'll use the bytecode array both for resetting the code age/OSR state and
1778     // for pushing it onto the frame, so load it into a register.
1779     Register bytecode_array = scratch;
1780     __ movd(bytecode_array, saved_bytecode_array);
1781     // Reset code age and the OSR arming. The OSR field and BytecodeAgeOffset
1782     // are 8-bit fields next to each other, so we can reset both with a single
1783     // 16-bit write. These static asserts guard that this assumption is valid.
1784     STATIC_ASSERT(BytecodeArray::kBytecodeAgeOffset ==
1785                   BytecodeArray::kOsrLoopNestingLevelOffset + kCharSize);
1786     STATIC_ASSERT(BytecodeArray::kNoAgeBytecodeAge == 0);
1787     __ mov_w(
1788         FieldOperand(bytecode_array, BytecodeArray::kOsrLoopNestingLevelOffset),
1789         Immediate(0));
1790     __ Push(bytecode_array);
1791 
1792     // Baseline code frames store the feedback vector where the interpreter would
1793     // store the bytecode offset.
1794     __ Push(saved_feedback_vector, scratch);
1795   }
1796 
1797   Label call_stack_guard;
1798   {
1799     ASM_CODE_COMMENT_STRING(masm, "Stack/interrupt check");
1800     // Stack check. This folds the checks against both the interrupt stack
1801     // limit and the real stack limit into one by just checking for the
1802     // interrupt limit. The interrupt limit is either equal to the real stack
1803     // limit or tighter. By ensuring we have space until that limit after
1804     // building the frame we can quickly precheck both at once.
1805     //
1806     // TODO(v8:11429): Backport this folded check to the
1807     // InterpreterEntryTrampoline.
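    // In effect (illustrative sketch only):
    //   if (esp - frame_size < interrupt_stack_limit) goto call_stack_guard;
    // Since the interrupt limit is at or above the real limit, passing this
    // single check also guarantees the frame fits above the real stack limit.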
1808     __ movd(frame_size, saved_frame_size);
1809     __ Move(scratch, esp);
1810     DCHECK_NE(frame_size, kJavaScriptCallNewTargetRegister);
1811     __ sub(scratch, frame_size);
1812     __ CompareStackLimit(scratch, StackLimitKind::kInterruptStackLimit);
1813     __ j(below, &call_stack_guard);
1814   }
1815 
1816   // Push the return address back onto the stack for return.
1817   __ PushReturnAddressFrom(return_address, scratch);
1818   // Return to caller pushed pc, without any frame teardown.
1819   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1820   __ Ret();
1821 
1822   __ bind(&has_optimized_code_or_marker);
1823   {
1824     ASM_CODE_COMMENT_STRING(masm, "Optimized marker check");
1825     // Drop the return address and bytecode array, rebalancing the return stack
1826     // buffer by using JumpMode::kPushAndReturn. We can't leave the slot and
1827     // overwrite it on return since we may do a runtime call along the way that
1828     // requires the stack to only contain valid frames.
1829     __ Drop(2);
1830     __ movd(arg_count, saved_arg_count);  // Restore actual argument count.
1831     MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(masm, optimization_state,
1832                                                  saved_feedback_vector);
1833     __ Trap();
1834   }
1835 
1836   __ bind(&call_stack_guard);
1837   {
1838     ASM_CODE_COMMENT_STRING(masm, "Stack/interrupt call");
1839     {
1840       // Push the baseline code return address now, as if it had been pushed by
1841       // the call to this builtin.
1842       __ PushReturnAddressFrom(return_address, scratch);
1843       FrameScope frame_scope(masm, StackFrame::INTERNAL);
1844       // Save the incoming new target or generator object.
1845       __ Push(kJavaScriptCallNewTargetRegister);
1846       __ SmiTag(frame_size);
1847       __ Push(frame_size);
1848       __ CallRuntime(Runtime::kStackGuardWithGap, 1);
1849       __ Pop(kJavaScriptCallNewTargetRegister);
1850     }
1851 
1852     // Return to caller pushed pc, without any frame teardown.
1853     __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1854     __ Ret();
1855   }
1856 }
1857 
1858 namespace {
1859 void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
1860                                       bool java_script_builtin,
1861                                       bool with_result) {
1862   const RegisterConfiguration* config(RegisterConfiguration::Default());
1863   int allocatable_register_count = config->num_allocatable_general_registers();
1864   if (with_result) {
1865     if (java_script_builtin) {
1866       // xmm0 is not included in the allocatable registers.
1867       __ movd(xmm0, eax);
1868     } else {
1869       // Overwrite the hole inserted by the deoptimizer with the return value
1870       // from the LAZY deopt point.
1871       __ mov(
1872           Operand(esp, config->num_allocatable_general_registers() *
1873                                kSystemPointerSize +
1874                            BuiltinContinuationFrameConstants::kFixedFrameSize),
1875           eax);
1876     }
1877   }
1878 
1879   // Replace the builtin index Smi on the stack with the start address of the
1880   // builtin loaded from the builtins table. The ret below will return to this
1881   // address.
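  // Conceptually (illustrative sketch, not literal code):
  //   stack[index_slot] = builtin_entry_table[Smi::ToInt(stack[index_slot])];
  // so that the final ret below transfers control to the continuation
  // builtin's entry point now stored in that slot.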
1882   int offset_to_builtin_index = allocatable_register_count * kSystemPointerSize;
1883   __ mov(eax, Operand(esp, offset_to_builtin_index));
1884   __ LoadEntryFromBuiltinIndex(eax);
1885   __ mov(Operand(esp, offset_to_builtin_index), eax);
1886 
1887   for (int i = allocatable_register_count - 1; i >= 0; --i) {
1888     int code = config->GetAllocatableGeneralCode(i);
1889     __ pop(Register::from_code(code));
1890     if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
1891       __ SmiUntag(Register::from_code(code));
1892     }
1893   }
1894   if (with_result && java_script_builtin) {
1895     // Overwrite the hole inserted by the deoptimizer with the return value from
1896     // the LAZY deopt point. eax contains the arguments count, the return value
1897     // from LAZY is always the last argument.
1898     __ movd(Operand(esp, eax, times_system_pointer_size,
1899                     BuiltinContinuationFrameConstants::kFixedFrameSize -
1900                         (kJSArgcIncludesReceiver ? kSystemPointerSize : 0)),
1901             xmm0);
1902   }
1903   __ mov(
1904       ebp,
1905       Operand(esp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1906   const int offsetToPC =
1907       BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp -
1908       kSystemPointerSize;
1909   __ pop(Operand(esp, offsetToPC));
1910   __ Drop(offsetToPC / kSystemPointerSize);
1911   __ ret(0);
1912 }
1913 }  // namespace
1914 
1915 void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
1916   Generate_ContinueToBuiltinHelper(masm, false, false);
1917 }
1918 
1919 void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
1920     MacroAssembler* masm) {
1921   Generate_ContinueToBuiltinHelper(masm, false, true);
1922 }
1923 
1924 void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
1925   Generate_ContinueToBuiltinHelper(masm, true, false);
1926 }
1927 
1928 void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
1929     MacroAssembler* masm) {
1930   Generate_ContinueToBuiltinHelper(masm, true, true);
1931 }
1932 
1933 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1934   {
1935     FrameScope scope(masm, StackFrame::INTERNAL);
1936     __ CallRuntime(Runtime::kNotifyDeoptimized);
1937     // Tear down internal frame.
1938   }
1939 
1940   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), eax.code());
1941   __ mov(eax, Operand(esp, 1 * kSystemPointerSize));
1942   __ ret(1 * kSystemPointerSize);  // Remove eax.
1943 }
1944 
1945 // static
1946 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1947   // ----------- S t a t e -------------
1948   //  -- eax     : argc
1949   //  -- esp[0]  : return address
1950   //  -- esp[1]  : receiver
1951   //  -- esp[2]  : thisArg
1952   //  -- esp[3]  : argArray
1953   // -----------------------------------
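  // JS-level behavior implemented here (illustrative sketch):
  //   receiver.apply(thisArg, argArray)
  // calls the receiver with `this` set to thisArg and the elements of argArray
  // as arguments; if argArray is null or undefined, the receiver is called
  // with no arguments at all (the no_arguments path below).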
1954 
1955   // 1. Load receiver into xmm0, argArray into edx (if present), remove all
1956   // arguments from the stack (including the receiver), and push thisArg (if
1957   // present) instead.
1958   {
1959     Label no_arg_array, no_this_arg;
1960     StackArgumentsAccessor args(eax);
1961     // Spill receiver to allow the usage of edi as a scratch register.
1962     __ movd(xmm0, args.GetReceiverOperand());
1963 
1964     __ LoadRoot(edx, RootIndex::kUndefinedValue);
1965     __ mov(edi, edx);
1966     if (kJSArgcIncludesReceiver) {
1967       __ cmp(eax, Immediate(JSParameterCount(0)));
1968       __ j(equal, &no_this_arg, Label::kNear);
1969     } else {
1970       __ test(eax, eax);
1971       __ j(zero, &no_this_arg, Label::kNear);
1972     }
1973     {
1974       __ mov(edi, args[1]);
1975       __ cmp(eax, Immediate(JSParameterCount(1)));
1976       __ j(equal, &no_arg_array, Label::kNear);
1977       __ mov(edx, args[2]);
1978       __ bind(&no_arg_array);
1979     }
1980     __ bind(&no_this_arg);
1981     __ DropArgumentsAndPushNewReceiver(
1982         eax, edi, ecx, TurboAssembler::kCountIsInteger,
1983         kJSArgcIncludesReceiver ? TurboAssembler::kCountIncludesReceiver
1984                                 : TurboAssembler::kCountExcludesReceiver);
1985 
1986     // Restore receiver to edi.
1987     __ movd(edi, xmm0);
1988   }
1989 
1990   // ----------- S t a t e -------------
1991   //  -- edx    : argArray
1992   //  -- edi    : receiver
1993   //  -- esp[0] : return address
1994   //  -- esp[4] : thisArg
1995   // -----------------------------------
1996 
1997   // 2. We don't need to check explicitly for callable receiver here,
1998   // since that's the first thing the Call/CallWithArrayLike builtins
1999   // will do.
2000 
2001   // 3. Tail call with no arguments if argArray is null or undefined.
2002   Label no_arguments;
2003   __ JumpIfRoot(edx, RootIndex::kNullValue, &no_arguments, Label::kNear);
2004   __ JumpIfRoot(edx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);
2005 
2006   // 4a. Apply the receiver to the given argArray.
2007   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
2008           RelocInfo::CODE_TARGET);
2009 
2010   // 4b. The argArray is either null or undefined, so we tail call without any
2011   // arguments to the receiver.
2012   __ bind(&no_arguments);
2013   {
2014     __ Move(eax, JSParameterCount(0));
2015     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2016   }
2017 }
2018 
2019 // static
2020 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
2021   // Stack Layout:
2022   // esp[0]           : Return address
2023   // esp[4]           : Argument 0 (receiver: callable to call)
2024   // esp[8]           : Argument 1
2025   //  ...
2026   // esp[4 * n]       : Argument n-1
2027   // esp[4 * (n + 1)] : Argument n
2028   // eax contains the number of arguments, n.
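  // JS-level behavior implemented here (illustrative sketch):
  //   func.call(thisArg, arg1, ..., argN)
  // calls func (the receiver of this builtin) with `this` set to thisArg and
  // arg1..argN as arguments; if thisArg is absent, undefined is used instead.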
2029 
2030   // 1. Get the callable to call (passed as receiver) from the stack.
2031   {
2032     StackArgumentsAccessor args(eax);
2033     __ mov(edi, args.GetReceiverOperand());
2034   }
2035 
2036   // 2. Save the return address and drop the callable.
2037   __ PopReturnAddressTo(edx);
2038   __ Pop(ecx);
2039 
2040   // 3. Make sure we have at least one argument.
2041   {
2042     Label done;
2043     if (kJSArgcIncludesReceiver) {
2044       __ cmp(eax, Immediate(JSParameterCount(0)));
2045       __ j(greater, &done, Label::kNear);
2046     } else {
2047       __ test(eax, eax);
2048       __ j(not_zero, &done, Label::kNear);
2049     }
2050     __ PushRoot(RootIndex::kUndefinedValue);
2051     __ inc(eax);
2052     __ bind(&done);
2053   }
2054 
2055   // 4. Push back the return address one slot down on the stack (overwriting the
2056   // original callable), making the original first argument the new receiver.
2057   __ PushReturnAddressFrom(edx);
2058   __ dec(eax);  // One fewer argument (first argument is new receiver).
2059 
2060   // 5. Call the callable.
2061   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2062 }
2063 
2064 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
2065   // ----------- S t a t e -------------
2066   //  -- eax     : argc
2067   //  -- esp[0]  : return address
2068   //  -- esp[4]  : receiver
2069   //  -- esp[8]  : target         (if argc >= 1)
2070   //  -- esp[12] : thisArgument   (if argc >= 2)
2071   //  -- esp[16] : argumentsList  (if argc == 3)
2072   // -----------------------------------
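  // JS-level behavior implemented here (illustrative sketch):
  //   Reflect.apply(target, thisArgument, argumentsList)
  // calls target with `this` set to thisArgument and the elements of
  // argumentsList as arguments.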
2073 
2074   // 1. Load target into edi (if present), argumentsList into edx (if present),
2075   // remove all arguments from the stack (including the receiver), and push
2076   // thisArgument (if present) instead.
2077   {
2078     Label done;
2079     StackArgumentsAccessor args(eax);
2080     __ LoadRoot(edi, RootIndex::kUndefinedValue);
2081     __ mov(edx, edi);
2082     __ mov(ecx, edi);
2083     __ cmp(eax, Immediate(JSParameterCount(1)));
2084     __ j(below, &done, Label::kNear);
2085     __ mov(edi, args[1]);  // target
2086     __ j(equal, &done, Label::kNear);
2087     __ mov(ecx, args[2]);  // thisArgument
2088     __ cmp(eax, Immediate(JSParameterCount(3)));
2089     __ j(below, &done, Label::kNear);
2090     __ mov(edx, args[3]);  // argumentsList
2091     __ bind(&done);
2092 
2093     // Spill argumentsList to use edx as a scratch register.
2094     __ movd(xmm0, edx);
2095 
2096     __ DropArgumentsAndPushNewReceiver(
2097         eax, ecx, edx, TurboAssembler::kCountIsInteger,
2098         kJSArgcIncludesReceiver ? TurboAssembler::kCountIncludesReceiver
2099                                 : TurboAssembler::kCountExcludesReceiver);
2100 
2101     // Restore argumentsList.
2102     __ movd(edx, xmm0);
2103   }
2104 
2105   // ----------- S t a t e -------------
2106   //  -- edx    : argumentsList
2107   //  -- edi    : target
2108   //  -- esp[0] : return address
2109   //  -- esp[4] : thisArgument
2110   // -----------------------------------
2111 
2112   // 2. We don't need to check explicitly for callable target here,
2113   // since that's the first thing the Call/CallWithArrayLike builtins
2114   // will do.
2115 
2116   // 3. Apply the target to the given argumentsList.
2117   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
2118           RelocInfo::CODE_TARGET);
2119 }
2120 
2121 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2122   // ----------- S t a t e -------------
2123   //  -- eax     : argc
2124   //  -- esp[0]  : return address
2125   //  -- esp[4]  : receiver
2126   //  -- esp[8]  : target
2127   //  -- esp[12] : argumentsList
2128   //  -- esp[16] : new.target (optional)
2129   // -----------------------------------
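  // JS-level behavior implemented here (illustrative sketch):
  //   Reflect.construct(target, argumentsList, newTarget = target)
  // constructs target with the elements of argumentsList as arguments and with
  // new.target set to newTarget (defaulting to target when omitted).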
2130 
2131   // 1. Load target into edi (if present), argumentsList into ecx (if present),
2132   // new.target into edx (if present, otherwise use target), remove all
2133   // arguments from the stack (including the receiver), and push thisArgument
2134   // (if present) instead.
2135   {
2136     Label done;
2137     StackArgumentsAccessor args(eax);
2138     __ LoadRoot(edi, RootIndex::kUndefinedValue);
2139     __ mov(edx, edi);
2140     __ mov(ecx, edi);
2141     __ cmp(eax, Immediate(JSParameterCount(1)));
2142     __ j(below, &done, Label::kNear);
2143     __ mov(edi, args[1]);  // target
2144     __ mov(edx, edi);
2145     __ j(equal, &done, Label::kNear);
2146     __ mov(ecx, args[2]);  // argumentsList
2147     __ cmp(eax, Immediate(JSParameterCount(3)));
2148     __ j(below, &done, Label::kNear);
2149     __ mov(edx, args[3]);  // new.target
2150     __ bind(&done);
2151 
2152     // Spill argumentsList to use ecx as a scratch register.
2153     __ movd(xmm0, ecx);
2154 
2155     __ DropArgumentsAndPushNewReceiver(
2156         eax, masm->RootAsOperand(RootIndex::kUndefinedValue), ecx,
2157         TurboAssembler::kCountIsInteger,
2158         kJSArgcIncludesReceiver ? TurboAssembler::kCountIncludesReceiver
2159                                 : TurboAssembler::kCountExcludesReceiver);
2160 
2161     // Restore argumentsList.
2162     __ movd(ecx, xmm0);
2163   }
2164 
2165   // ----------- S t a t e -------------
2166   //  -- ecx    : argumentsList
2167   //  -- edx    : new.target
2168   //  -- edi    : target
2169   //  -- esp[0] : return address
2170   //  -- esp[4] : receiver (undefined)
2171   // -----------------------------------
2172 
2173   // 2. We don't need to check explicitly for constructor target here,
2174   // since that's the first thing the Construct/ConstructWithArrayLike
2175   // builtins will do.
2176 
2177   // 3. We don't need to check explicitly for constructor new.target here,
2178   // since that's the second thing the Construct/ConstructWithArrayLike
2179   // builtins will do.
2180 
2181   // 4. Construct the target with the given new.target and argumentsList.
2182   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
2183           RelocInfo::CODE_TARGET);
2184 }
2185 
2186 namespace {
2187 
2188 // Allocate new stack space for |count| arguments and shift all arguments
2189 // already on the stack. |pointer_to_new_space_out| points to the
2190 // first free slot on the stack to copy additional arguments to, and
2191 // |argc_in_out| is updated to include |count|.
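// Illustrative sketch of the net effect, assuming kJSArgcIncludesReceiver
// (not literal code):
//   old_esp = esp;
//   esp -= count * kSystemPointerSize;
//   // Return address plus the argc_in_out existing slots move down.
//   memmove(esp, old_esp, (argc_in_out + 1) * kSystemPointerSize);
//   pointer_to_new_space_out = esp + (argc_in_out + 1) * kSystemPointerSize;
//   argc_in_out += count;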
2192 void Generate_AllocateSpaceAndShiftExistingArguments(
2193     MacroAssembler* masm, Register count, Register argc_in_out,
2194     Register pointer_to_new_space_out, Register scratch1, Register scratch2) {
2195   DCHECK(!AreAliased(count, argc_in_out, pointer_to_new_space_out, scratch1,
2196                      scratch2));
2197   // Use pointer_to_new_space_out as scratch until we set it to the correct
2198   // value at the end.
2199   Register old_esp = pointer_to_new_space_out;
2200   Register new_space = scratch1;
2201   __ mov(old_esp, esp);
2202 
2203   __ lea(new_space, Operand(count, times_system_pointer_size, 0));
2204   __ AllocateStackSpace(new_space);
2205 
2206   if (!kJSArgcIncludesReceiver) {
2207     __ inc(argc_in_out);
2208   }
2209   Register current = scratch1;
2210   Register value = scratch2;
2211 
2212   Label loop, entry;
2213   __ mov(current, 0);
2214   __ jmp(&entry);
2215   __ bind(&loop);
2216   __ mov(value, Operand(old_esp, current, times_system_pointer_size, 0));
2217   __ mov(Operand(esp, current, times_system_pointer_size, 0), value);
2218   __ inc(current);
2219   __ bind(&entry);
2220   __ cmp(current, argc_in_out);
2221   __ j(less_equal, &loop, Label::kNear);
2222 
2223   // Point to the next free slot above the shifted arguments (argc + 1 slot for
2224   // the return address).
2225   __ lea(
2226       pointer_to_new_space_out,
2227       Operand(esp, argc_in_out, times_system_pointer_size, kSystemPointerSize));
2228   // Update the total number of arguments.
2229   if (kJSArgcIncludesReceiver) {
2230     __ add(argc_in_out, count);
2231   } else {
2232     // Also subtract the receiver again.
2233     __ lea(argc_in_out, Operand(argc_in_out, count, times_1, -1));
2234   }
2235 }
2236 
2237 }  // namespace
2238 
2239 // static
2240 // TODO(v8:11615): Observe Code::kMaxArguments in CallOrConstructVarargs
2241 void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
2242                                                Handle<Code> code) {
2243   // ----------- S t a t e -------------
2244   //  -- edi    : target
2245   //  -- esi    : context for the Call / Construct builtin
2246   //  -- eax    : number of parameters on the stack
2247   //  -- ecx    : len (number of elements to push from args)
2248   //  -- edx    : new.target (checked to be constructor or undefined)
2249   //  -- esp[4] : arguments list (a FixedArray)
2250   //  -- esp[0] : return address.
2251   // -----------------------------------
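  // Illustrative summary (sketch): the arguments list (a FixedArray) is popped
  // off the stack, its len elements are appended to the arguments already
  // pushed (holes are replaced with undefined on the way), and control then
  // tail-calls the Call or Construct builtin passed in as {code}.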
2252 
2253   __ movd(xmm0, edx);  // Spill new.target.
2254   __ movd(xmm1, edi);  // Spill target.
2255   __ movd(xmm3, esi);  // Spill the context.
2256 
2257   const Register kArgumentsList = esi;
2258   const Register kArgumentsLength = ecx;
2259 
2260   __ PopReturnAddressTo(edx);
2261   __ pop(kArgumentsList);
2262   __ PushReturnAddressFrom(edx);
2263 
2264   if (FLAG_debug_code) {
2265     // Allow kArgumentsList to be a FixedArray, or a FixedDoubleArray if
2266     // kArgumentsLength == 0.
2267     Label ok, fail;
2268     __ AssertNotSmi(kArgumentsList);
2269     __ mov(edx, FieldOperand(kArgumentsList, HeapObject::kMapOffset));
2270     __ CmpInstanceType(edx, FIXED_ARRAY_TYPE);
2271     __ j(equal, &ok);
2272     __ CmpInstanceType(edx, FIXED_DOUBLE_ARRAY_TYPE);
2273     __ j(not_equal, &fail);
2274     __ cmp(kArgumentsLength, 0);
2275     __ j(equal, &ok);
2276     // Fall through.
2277     __ bind(&fail);
2278     __ Abort(AbortReason::kOperandIsNotAFixedArray);
2279 
2280     __ bind(&ok);
2281   }
2282 
2283   // Check the stack for overflow. We are not trying to catch interruptions
2284   // (i.e. debug break and preemption) here, so check the "real stack limit".
2285   Label stack_overflow;
2286   __ StackOverflowCheck(kArgumentsLength, edx, &stack_overflow);
2287 
2288   __ movd(xmm4, kArgumentsList);  // Spill the arguments list.
2289   // Move the arguments already in the stack,
2290   // including the receiver and the return address.
2291   // kArgumentsLength (ecx): Number of arguments to make room for.
2292   // eax: Number of arguments already on the stack.
2293   // edx: Points to first free slot on the stack after arguments were shifted.
2294   Generate_AllocateSpaceAndShiftExistingArguments(masm, kArgumentsLength, eax,
2295                                                   edx, edi, esi);
2296   __ movd(kArgumentsList, xmm4);  // Recover arguments list.
2297   __ movd(xmm2, eax);             // Spill argument count.
2298 
2299   // Push additional arguments onto the stack.
2300   {
2301     __ Move(eax, Immediate(0));
2302     Label done, push, loop;
2303     __ bind(&loop);
2304     __ cmp(eax, kArgumentsLength);
2305     __ j(equal, &done, Label::kNear);
2306     // Turn the hole into undefined as we go.
2307     __ mov(edi, FieldOperand(kArgumentsList, eax, times_tagged_size,
2308                              FixedArray::kHeaderSize));
2309     __ CompareRoot(edi, RootIndex::kTheHoleValue);
2310     __ j(not_equal, &push, Label::kNear);
2311     __ LoadRoot(edi, RootIndex::kUndefinedValue);
2312     __ bind(&push);
2313     __ mov(Operand(edx, 0), edi);
2314     __ add(edx, Immediate(kSystemPointerSize));
2315     __ inc(eax);
2316     __ jmp(&loop);
2317     __ bind(&done);
2318   }
2319 
2320   // Restore eax, edi and edx.
2321   __ movd(esi, xmm3);  // Restore the context.
2322   __ movd(eax, xmm2);  // Restore argument count.
2323   __ movd(edi, xmm1);  // Restore target.
2324   __ movd(edx, xmm0);  // Restore new.target.
2325 
2326   // Tail-call to the actual Call or Construct builtin.
2327   __ Jump(code, RelocInfo::CODE_TARGET);
2328 
2329   __ bind(&stack_overflow);
2330   __ movd(esi, xmm3);  // Restore the context.
2331   __ TailCallRuntime(Runtime::kThrowStackOverflow);
2332 }
2333 
2334 // static
2335 void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
2336                                                       CallOrConstructMode mode,
2337                                                       Handle<Code> code) {
2338   // ----------- S t a t e -------------
2339   //  -- eax : the number of arguments
2340   //  -- edi : the target to call (can be any Object)
2341   //  -- esi : context for the Call / Construct builtin
2342   //  -- edx : the new target (for [[Construct]] calls)
2343   //  -- ecx : start index (to support rest parameters)
2344   // -----------------------------------
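  // Illustrative summary (sketch): this forwards the caller frame's actual
  // arguments, starting at the given start index (to support rest parameters),
  // by appending them to the arguments already pushed for this call, and then
  // tail-calls the {code} handler.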
2345 
2346   __ movd(xmm0, esi);  // Spill the context.
2347 
2348   Register scratch = esi;
2349 
2350   // Check if new.target has a [[Construct]] internal method.
2351   if (mode == CallOrConstructMode::kConstruct) {
2352     Label new_target_constructor, new_target_not_constructor;
2353     __ JumpIfSmi(edx, &new_target_not_constructor, Label::kNear);
2354     __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
2355     __ test_b(FieldOperand(scratch, Map::kBitFieldOffset),
2356               Immediate(Map::Bits1::IsConstructorBit::kMask));
2357     __ j(not_zero, &new_target_constructor, Label::kNear);
2358     __ bind(&new_target_not_constructor);
2359     {
2360       FrameScope scope(masm, StackFrame::MANUAL);
2361       __ EnterFrame(StackFrame::INTERNAL);
2362       __ Push(edx);
2363       __ movd(esi, xmm0);  // Restore the context.
2364       __ CallRuntime(Runtime::kThrowNotConstructor);
2365     }
2366     __ bind(&new_target_constructor);
2367   }
2368 
2369   __ movd(xmm1, edx);  // Preserve new.target (in case of [[Construct]]).
2370 
2371   Label stack_done, stack_overflow;
2372   __ mov(edx, Operand(ebp, StandardFrameConstants::kArgCOffset));
2373   if (kJSArgcIncludesReceiver) {
2374     __ dec(edx);
2375   }
2376   __ sub(edx, ecx);
2377   __ j(less_equal, &stack_done);
2378   {
2379     // ----------- S t a t e -------------
2380     //  -- eax : the number of arguments already in the stack
2381     //  -- ecx : start index (to support rest parameters)
2382     //  -- edx : number of arguments to copy, i.e. arguments count - start index
2383     //  -- edi : the target to call (can be any Object)
2384     //  -- ebp : point to the caller stack frame
2385     //  -- xmm0 : context for the Call / Construct builtin
2386     //  -- xmm1 : the new target (for [[Construct]] calls)
2387     // -----------------------------------
2388 
2389     // Forward the arguments from the caller frame.
2390     __ movd(xmm2, edi);  // Preserve the target to call.
2391     __ StackOverflowCheck(edx, edi, &stack_overflow);
2392     __ movd(xmm3, ebx);  // Preserve root register.
2393 
2394     Register scratch = ebx;
2395 
2396     // Move the arguments already in the stack,
2397     // including the receiver and the return address.
2398     // edx: Number of arguments to make room for.
2399     // eax: Number of arguments already on the stack.
2400     // esi: Points to first free slot on the stack after arguments were shifted.
2401     Generate_AllocateSpaceAndShiftExistingArguments(masm, edx, eax, esi, ebx,
2402                                                     edi);
2403 
2404     // Point to the first argument to copy (skipping receiver).
2405     __ lea(ecx, Operand(ecx, times_system_pointer_size,
2406                         CommonFrameConstants::kFixedFrameSizeAboveFp +
2407                             kSystemPointerSize));
2408     __ add(ecx, ebp);
2409 
2410     // Copy the additional caller arguments onto the stack.
2411     // TODO(victorgomes): Consider using forward order as potentially more cache
2412     // friendly.
2413     {
2414       Register src = ecx, dest = esi, num = edx;
2415       Label loop;
2416       __ bind(&loop);
2417       __ dec(num);
2418       __ mov(scratch, Operand(src, num, times_system_pointer_size, 0));
2419       __ mov(Operand(dest, num, times_system_pointer_size, 0), scratch);
2420       __ j(not_zero, &loop);
2421     }
2422 
2423     __ movd(ebx, xmm3);  // Restore root register.
2424     __ movd(edi, xmm2);  // Restore the target to call.
2425   }
2426   __ bind(&stack_done);
2427 
2428   __ movd(edx, xmm1);  // Restore new.target (in case of [[Construct]]).
2429   __ movd(esi, xmm0);  // Restore the context.
2430 
2431   // Tail-call to the {code} handler.
2432   __ Jump(code, RelocInfo::CODE_TARGET);
2433 
2434   __ bind(&stack_overflow);
2435   __ movd(edi, xmm2);  // Restore the target to call.
2436   __ movd(esi, xmm0);  // Restore the context.
2437   __ TailCallRuntime(Runtime::kThrowStackOverflow);
2438 }
2439 
2440 // static
2441 void Builtins::Generate_CallFunction(MacroAssembler* masm,
2442                                      ConvertReceiverMode mode) {
2443   // ----------- S t a t e -------------
2444   //  -- eax : the number of arguments
2445   //  -- edi : the function to call (checked to be a JSFunction)
2446   // -----------------------------------
2447   StackArgumentsAccessor args(eax);
2448   __ AssertFunction(edi, edx);
2449 
2450   __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2451 
2452   // Enter the context of the function; ToObject has to run in the function
2453   // context, and we also need to take the global proxy from the function
2454   // context in case of conversion.
2455   __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2456   // We need to convert the receiver for non-native sloppy mode functions.
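  // The conversion follows OrdinaryCallBindThis (illustrative sketch):
  //   if (receiver == null || receiver == undefined) receiver = global proxy;
  //   else if (!IsJSReceiver(receiver)) receiver = ToObject(receiver);
  // Native and strict-mode functions skip this and see the value unchanged.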
2457   Label done_convert;
2458   __ test(FieldOperand(edx, SharedFunctionInfo::kFlagsOffset),
2459           Immediate(SharedFunctionInfo::IsNativeBit::kMask |
2460                     SharedFunctionInfo::IsStrictBit::kMask));
2461   __ j(not_zero, &done_convert);
2462   {
2463     // ----------- S t a t e -------------
2464     //  -- eax : the number of arguments
2465     //  -- edx : the shared function info.
2466     //  -- edi : the function to call (checked to be a JSFunction)
2467     //  -- esi : the function context.
2468     // -----------------------------------
2469 
2470     if (mode == ConvertReceiverMode::kNullOrUndefined) {
2471       // Patch receiver to global proxy.
2472       __ LoadGlobalProxy(ecx);
2473     } else {
2474       Label convert_to_object, convert_receiver;
2475       __ mov(ecx, args.GetReceiverOperand());
2476       __ JumpIfSmi(ecx, &convert_to_object, Label::kNear);
2477       STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2478       __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);  // Clobbers ecx.
2479       __ j(above_equal, &done_convert);
2480       // Reload the receiver (it was clobbered by CmpObjectType).
2481       __ mov(ecx, args.GetReceiverOperand());
2482       if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2483         Label convert_global_proxy;
2484         __ JumpIfRoot(ecx, RootIndex::kUndefinedValue, &convert_global_proxy,
2485                       Label::kNear);
2486         __ JumpIfNotRoot(ecx, RootIndex::kNullValue, &convert_to_object,
2487                          Label::kNear);
2488         __ bind(&convert_global_proxy);
2489         {
2490           // Patch receiver to global proxy.
2491           __ LoadGlobalProxy(ecx);
2492         }
2493         __ jmp(&convert_receiver);
2494       }
2495       __ bind(&convert_to_object);
2496       {
2497         // Convert receiver using ToObject.
2498         // TODO(bmeurer): Inline the allocation here to avoid building the frame
2499         // in the fast case? (fall back to AllocateInNewSpace?)
2500         FrameScope scope(masm, StackFrame::INTERNAL);
2501         __ SmiTag(eax);
2502         __ Push(eax);
2503         __ Push(edi);
2504         __ mov(eax, ecx);
2505         __ Push(esi);
2506         __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
2507                 RelocInfo::CODE_TARGET);
2508         __ Pop(esi);
2509         __ mov(ecx, eax);
2510         __ Pop(edi);
2511         __ Pop(eax);
2512         __ SmiUntag(eax);
2513       }
2514       __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2515       __ bind(&convert_receiver);
2516     }
2517     __ mov(args.GetReceiverOperand(), ecx);
2518   }
2519   __ bind(&done_convert);
2520 
2521   // ----------- S t a t e -------------
2522   //  -- eax : the number of arguments
2523   //  -- edx : the shared function info.
2524   //  -- edi : the function to call (checked to be a JSFunction)
2525   //  -- esi : the function context.
2526   // -----------------------------------
2527 
2528   __ movzx_w(
2529       ecx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2530   __ InvokeFunctionCode(edi, no_reg, ecx, eax, InvokeType::kJump);
2531 }
2532 
2533 namespace {
2534 
2535 void Generate_PushBoundArguments(MacroAssembler* masm) {
2536   // ----------- S t a t e -------------
2537   //  -- eax : the number of arguments
2538   //  -- edx : new.target (only in case of [[Construct]])
2539   //  -- edi : target (checked to be a JSBoundFunction)
2540   // -----------------------------------
2541   __ movd(xmm0, edx);  // Spill edx.
2542 
2543   // Load [[BoundArguments]] into ecx and length of that into edx.
2544   Label no_bound_arguments;
2545   __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2546   __ mov(edx, FieldOperand(ecx, FixedArray::kLengthOffset));
2547   __ SmiUntag(edx);
2548   __ test(edx, edx);
2549   __ j(zero, &no_bound_arguments);
2550   {
2551     // ----------- S t a t e -------------
2552     //  -- eax  : the number of arguments
2553     //  -- xmm0 : new.target (only in case of [[Construct]])
2554     //  -- edi  : target (checked to be a JSBoundFunction)
2555     //  -- ecx  : the [[BoundArguments]] (implemented as FixedArray)
2556     //  -- edx  : the number of [[BoundArguments]]
2557     // -----------------------------------
2558 
2559     // Check the stack for overflow.
2560     {
2561       Label done, stack_overflow;
2562       __ StackOverflowCheck(edx, ecx, &stack_overflow);
2563       __ jmp(&done);
2564       __ bind(&stack_overflow);
2565       {
2566         FrameScope frame(masm, StackFrame::MANUAL);
2567         __ EnterFrame(StackFrame::INTERNAL);
2568         __ CallRuntime(Runtime::kThrowStackOverflow);
2569         __ int3();
2570       }
2571       __ bind(&done);
2572     }
2573 
2574     // Spill context.
2575     __ movd(xmm3, esi);
2576 
2577     // Save Return Address and Receiver into registers.
2578     __ pop(esi);
2579     __ movd(xmm1, esi);
2580     __ pop(esi);
2581     __ movd(xmm2, esi);
2582 
2583     // Push [[BoundArguments]] to the stack.
2584     {
2585       Label loop;
2586       __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2587       __ mov(edx, FieldOperand(ecx, FixedArray::kLengthOffset));
2588       __ SmiUntag(edx);
2589       // Adjust the effective number of arguments: after the add below, eax
2590       // holds the number of arguments from the call (not including the
2591       // receiver) plus the number of [[BoundArguments]].
2592       __ add(eax, edx);
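      // The loop below walks [[BoundArguments]] from the last element down to
      // index 0, so after the receiver is pushed back, bound argument 0 ends
      // up directly next to the receiver, followed by the remaining bound
      // arguments and then the original call-site arguments.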
2593       __ bind(&loop);
2594       __ dec(edx);
2595       __ mov(esi, FieldOperand(ecx, edx, times_tagged_size,
2596                                FixedArray::kHeaderSize));
2597       __ push(esi);
2598       __ j(greater, &loop);
2599     }
2600 
2601     // Restore Receiver and Return Address.
2602     __ movd(esi, xmm2);
2603     __ push(esi);
2604     __ movd(esi, xmm1);
2605     __ push(esi);
2606 
2607     // Restore context.
2608     __ movd(esi, xmm3);
2609   }
2610 
2611   __ bind(&no_bound_arguments);
2612   __ movd(edx, xmm0);  // Reload edx.
2613 }
2614 
2615 }  // namespace
2616 
2617 // static
2618 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
2619   // ----------- S t a t e -------------
2620   //  -- eax : the number of arguments
2621   //  -- edi : the function to call (checked to be a JSBoundFunction)
2622   // -----------------------------------
2623   __ AssertBoundFunction(edi);
2624 
2625   // Patch the receiver to [[BoundThis]].
2626   StackArgumentsAccessor args(eax);
2627   __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
2628   __ mov(args.GetReceiverOperand(), ecx);
2629 
2630   // Push the [[BoundArguments]] onto the stack.
2631   Generate_PushBoundArguments(masm);
2632 
2633   // Call the [[BoundTargetFunction]] via the Call builtin.
2634   __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2635   __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
2636           RelocInfo::CODE_TARGET);
2637 }
2638 
2639 // static
2640 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
2641   // ----------- S t a t e -------------
2642   //  -- eax : the number of arguments
2643   //  -- edi : the target to call (can be any Object).
2644   // -----------------------------------
2645   Register argc = eax;
2646   Register target = edi;
2647   Register map = ecx;
2648   Register instance_type = edx;
2649   DCHECK(!AreAliased(argc, target, map, instance_type));
2650 
2651   StackArgumentsAccessor args(argc);
2652 
2653   Label non_callable, non_smi, non_callable_jsfunction, non_jsboundfunction,
2654       non_proxy, class_constructor;
2655   __ JumpIfSmi(target, &non_callable);
2656   __ bind(&non_smi);
2657   __ LoadMap(map, target);
2658   __ CmpInstanceTypeRange(map, instance_type, map,
2659                           FIRST_CALLABLE_JS_FUNCTION_TYPE,
2660                           LAST_CALLABLE_JS_FUNCTION_TYPE);
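  // Class constructors are assumed to fall outside the callable JSFunction
  // instance-type range; they are rejected further down via the
  // JS_CLASS_CONSTRUCTOR_TYPE check.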
2661   __ j(above, &non_callable_jsfunction);
2662   __ Jump(masm->isolate()->builtins()->CallFunction(mode),
2663           RelocInfo::CODE_TARGET);
2664 
2665   __ bind(&non_callable_jsfunction);
2666   __ cmpw(instance_type, Immediate(JS_BOUND_FUNCTION_TYPE));
2667   __ j(not_equal, &non_jsboundfunction);
2668   __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
2669           RelocInfo::CODE_TARGET);
2670 
2671   // Check if target is a proxy and call CallProxy external builtin
2672   __ bind(&non_jsboundfunction);
2673   __ LoadMap(map, target);
2674   __ test_b(FieldOperand(map, Map::kBitFieldOffset),
2675             Immediate(Map::Bits1::IsCallableBit::kMask));
2676   __ j(zero, &non_callable);
2677 
2678   // Call CallProxy external builtin
2679   __ cmpw(instance_type, Immediate(JS_PROXY_TYPE));
2680   __ j(not_equal, &non_proxy);
2681   __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);
2682 
2683   // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2684   // Check that the function is not a "classConstructor".
2685   __ bind(&non_proxy);
2686   __ cmpw(instance_type, Immediate(JS_CLASS_CONSTRUCTOR_TYPE));
2687   __ j(equal, &class_constructor);
2688 
2689   // 2. Call to something else, which might have a [[Call]] internal method (if
2690   // not we raise an exception).
2691   // Overwrite the original receiver with the (original) target.
2692   __ mov(args.GetReceiverOperand(), target);
2693   // Let the "call_as_function_delegate" take care of the rest.
2694   __ LoadNativeContextSlot(target, Context::CALL_AS_FUNCTION_DELEGATE_INDEX);
2695   __ Jump(masm->isolate()->builtins()->CallFunction(
2696               ConvertReceiverMode::kNotNullOrUndefined),
2697           RelocInfo::CODE_TARGET);
2698 
2699   // 3. Call to something that is not callable.
2700   __ bind(&non_callable);
2701   {
2702     FrameScope scope(masm, StackFrame::INTERNAL);
2703     __ Push(target);
2704     __ CallRuntime(Runtime::kThrowCalledNonCallable);
2705     __ Trap();  // Unreachable.
2706   }
2707 
2708   // 4. The function is a "classConstructor", need to raise an exception.
2709   __ bind(&class_constructor);
2710   {
2711     FrameScope frame(masm, StackFrame::INTERNAL);
2712     __ Push(target);
2713     __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2714     __ Trap();  // Unreachable.
2715   }
2716 }
2717 
2718 // static
2719 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2720   // ----------- S t a t e -------------
2721   //  -- eax : the number of arguments
2722   //  -- edx : the new target (checked to be a constructor)
2723   //  -- edi : the constructor to call (checked to be a JSFunction)
2724   // -----------------------------------
2725   __ AssertConstructor(edi);
2726   __ AssertFunction(edi, ecx);
2727 
2728   Label call_generic_stub;
2729 
2730   // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
2731   __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2732   __ test(FieldOperand(ecx, SharedFunctionInfo::kFlagsOffset),
2733           Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2734   __ j(zero, &call_generic_stub, Label::kNear);
2735 
2736   // The calling convention for function-specific ConstructStubs requires
2737   // ecx to contain either an AllocationSite or undefined.
2738   __ LoadRoot(ecx, RootIndex::kUndefinedValue);
2739   __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
2740           RelocInfo::CODE_TARGET);
2741 
2742   __ bind(&call_generic_stub);
2743   // The calling convention for function-specific ConstructStubs requires
2744   // ecx to contain either an AllocationSite or undefined.
2745   __ LoadRoot(ecx, RootIndex::kUndefinedValue);
2746   __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
2747           RelocInfo::CODE_TARGET);
2748 }
2749 
2750 // static
2751 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2752   // ----------- S t a t e -------------
2753   //  -- eax : the number of arguments
2754   //  -- edx : the new target (checked to be a constructor)
2755   //  -- edi : the constructor to call (checked to be a JSBoundFunction)
2756   // -----------------------------------
2757   __ AssertConstructor(edi);
2758   __ AssertBoundFunction(edi);
2759 
2760   // Push the [[BoundArguments]] onto the stack.
2761   Generate_PushBoundArguments(masm);
2762 
2763   // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2764   {
2765     Label done;
2766     __ cmp(edi, edx);
2767     __ j(not_equal, &done, Label::kNear);
2768     __ mov(edx, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2769     __ bind(&done);
2770   }
2771 
2772   // Construct the [[BoundTargetFunction]] via the Construct builtin.
2773   __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2774   __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
2775 }
2776 
2777 // static
2778 void Builtins::Generate_Construct(MacroAssembler* masm) {
2779   // ----------- S t a t e -------------
2780   //  -- eax : the number of arguments
2781   //  -- edx : the new target (either the same as the constructor or
2782   //           the JSFunction on which new was invoked initially)
2783   //  -- edi : the constructor to call (can be any Object)
2784   // -----------------------------------
2785   Register argc = eax;
2786   Register target = edi;
2787   Register map = ecx;
2788   DCHECK(!AreAliased(argc, target, map));
2789 
2790   StackArgumentsAccessor args(argc);
2791 
2792   // Check if target is a Smi.
2793   Label non_constructor, non_proxy, non_jsfunction, non_jsboundfunction;
2794   __ JumpIfSmi(target, &non_constructor);
2795 
2796   // Check if target has a [[Construct]] internal method.
2797   __ mov(map, FieldOperand(target, HeapObject::kMapOffset));
2798   __ test_b(FieldOperand(map, Map::kBitFieldOffset),
2799             Immediate(Map::Bits1::IsConstructorBit::kMask));
2800   __ j(zero, &non_constructor);
2801 
2802   // Dispatch based on instance type.
2803   __ CmpInstanceTypeRange(map, map, map, FIRST_JS_FUNCTION_TYPE,
2804                           LAST_JS_FUNCTION_TYPE);
2805   __ j(above, &non_jsfunction);
2806   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
2807           RelocInfo::CODE_TARGET);
2808 
2809   // Only dispatch to bound functions after checking whether they are
2810   // constructors.
2811   __ bind(&non_jsfunction);
2812   __ mov(map, FieldOperand(target, HeapObject::kMapOffset));
2813   __ CmpInstanceType(map, JS_BOUND_FUNCTION_TYPE);
2814   __ j(not_equal, &non_jsboundfunction);
2815   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
2816           RelocInfo::CODE_TARGET);
2817 
2818   // Only dispatch to proxies after checking whether they are constructors.
2819   __ bind(&non_jsboundfunction);
2820   __ CmpInstanceType(map, JS_PROXY_TYPE);
2821   __ j(not_equal, &non_proxy);
2822   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
2823           RelocInfo::CODE_TARGET);
2824 
2825   // Called Construct on an exotic Object with a [[Construct]] internal method.
2826   __ bind(&non_proxy);
2827   {
2828     // Overwrite the original receiver with the (original) target.
2829     __ mov(args.GetReceiverOperand(), target);
2830     // Let the "call_as_constructor_delegate" take care of the rest.
2831     __ LoadNativeContextSlot(target,
2832                              Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX);
2833     __ Jump(masm->isolate()->builtins()->CallFunction(),
2834             RelocInfo::CODE_TARGET);
2835   }
2836 
2837   // Called Construct on an Object that doesn't have a [[Construct]] internal
2838   // method.
2839   __ bind(&non_constructor);
2840   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
2841           RelocInfo::CODE_TARGET);
2842 }
2843 
2844 namespace {
2845 
2846 void Generate_OSREntry(MacroAssembler* masm, Register entry_address) {
2847   ASM_CODE_COMMENT(masm);
2848   // Overwrite the return address on the stack.
2849   __ mov(Operand(esp, 0), entry_address);
2850 
2851   // And "return" to the OSR entry point of the function.
2852   __ ret(0);
2853 }
2854 
2855 void OnStackReplacement(MacroAssembler* masm, bool is_interpreter) {
2856   ASM_CODE_COMMENT(masm);
2857   {
2858     FrameScope scope(masm, StackFrame::INTERNAL);
2859     __ CallRuntime(Runtime::kCompileForOnStackReplacement);
2860   }
2861 
2862   Label skip;
2863   // If the code object is null, just return to the caller.
2864   __ cmp(eax, Immediate(0));
2865   __ j(not_equal, &skip, Label::kNear);
2866   __ ret(0);
2867 
2868   __ bind(&skip);
2869 
2870   if (is_interpreter) {
2871     // Drop the handler frame that is sitting on top of the actual
2872     // JavaScript frame. This is the case when OSR is triggered from bytecode.
2873     __ leave();
2874   }
2875 
2876   // Load deoptimization data from the code object.
2877   __ mov(ecx, Operand(eax, Code::kDeoptimizationDataOrInterpreterDataOffset -
2878                                kHeapObjectTag));
2879 
2880   // Load the OSR entrypoint offset from the deoptimization data.
2881   __ mov(ecx, Operand(ecx, FixedArray::OffsetOfElementAt(
2882                                DeoptimizationData::kOsrPcOffsetIndex) -
2883                                kHeapObjectTag));
2884   __ SmiUntag(ecx);
2885 
2886   // Compute the target address = code_obj + header_size + osr_offset
2887   __ lea(eax, Operand(eax, ecx, times_1, Code::kHeaderSize - kHeapObjectTag));
2888 
2889   Generate_OSREntry(masm, eax);
2890 }
2891 
2892 }  // namespace
2893 
2894 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
2895   return OnStackReplacement(masm, true);
2896 }
2897 
2898 void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
2899   __ mov(kContextRegister,
2900          MemOperand(ebp, BaselineFrameConstants::kContextOffset));
2901   return OnStackReplacement(masm, false);
2902 }
2903 
2904 #if V8_ENABLE_WEBASSEMBLY
2905 void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
2906   // The function index was put in edi by the jump table trampoline.
2907   // Convert to Smi for the runtime call.
2908   __ SmiTag(kWasmCompileLazyFuncIndexRegister);
2909   {
2910     HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
2911     FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
2912 
2913     // Save all parameter registers (see wasm-linkage.h). They might be
2914     // overwritten in the runtime call below. We don't have any callee-saved
2915     // registers in wasm, so no need to store anything else.
2916     static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs ==
2917                       arraysize(wasm::kGpParamRegisters),
2918                   "frame size mismatch");
2919     for (Register reg : wasm::kGpParamRegisters) {
2920       __ Push(reg);
2921     }
2922     static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedFpParamRegs ==
2923                       arraysize(wasm::kFpParamRegisters),
2924                   "frame size mismatch");
2925     __ AllocateStackSpace(kSimd128Size * arraysize(wasm::kFpParamRegisters));
2926     int offset = 0;
2927     for (DoubleRegister reg : wasm::kFpParamRegisters) {
2928       __ movdqu(Operand(esp, offset), reg);
2929       offset += kSimd128Size;
2930     }
2931 
2932     // Push the Wasm instance as an explicit argument to WasmCompileLazy.
2933     __ Push(kWasmInstanceRegister);
2934     // Push the function index as second argument.
2935     __ Push(kWasmCompileLazyFuncIndexRegister);
2936     // Initialize the JavaScript context with 0. CEntry will use it to
2937     // set the current context on the isolate.
2938     __ Move(kContextRegister, Smi::zero());
2939     {
2940       // At this point, ebx has been spilled to the stack but is not yet
2941       // overwritten with another value. We can still use it as kRootRegister.
2942       __ CallRuntime(Runtime::kWasmCompileLazy, 2);
2943     }
2944     // The entrypoint address is the return value.
2945     __ mov(edi, kReturnRegister0);
2946 
2947     // Restore registers.
2948     for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
2949       offset -= kSimd128Size;
2950       __ movdqu(reg, Operand(esp, offset));
2951     }
2952     DCHECK_EQ(0, offset);
2953     __ add(esp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
2954     for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
2955       __ Pop(reg);
2956     }
2957   }
2958   // Finally, jump to the entrypoint.
2959   __ jmp(edi);
2960 }
2961 
2962 void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
2963   HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
2964   {
2965     FrameScope scope(masm, StackFrame::WASM_DEBUG_BREAK);
2966 
2967     // Save all parameter registers. They might hold live values, we restore
2968     // them after the runtime call.
2969     for (int reg_code : base::bits::IterateBitsBackwards(
2970              WasmDebugBreakFrameConstants::kPushedGpRegs)) {
2971       __ Push(Register::from_code(reg_code));
2972     }
2973 
2974     constexpr int kFpStackSize =
2975         kSimd128Size * WasmDebugBreakFrameConstants::kNumPushedFpRegisters;
2976     __ AllocateStackSpace(kFpStackSize);
2977     int offset = kFpStackSize;
2978     for (int reg_code : base::bits::IterateBitsBackwards(
2979              WasmDebugBreakFrameConstants::kPushedFpRegs)) {
2980       offset -= kSimd128Size;
2981       __ movdqu(Operand(esp, offset), DoubleRegister::from_code(reg_code));
2982     }
2983 
2984     // Initialize the JavaScript context with 0. CEntry will use it to
2985     // set the current context on the isolate.
2986     __ Move(kContextRegister, Smi::zero());
2987     __ CallRuntime(Runtime::kWasmDebugBreak, 0);
2988 
2989     // Restore registers.
2990     for (int reg_code :
2991          base::bits::IterateBits(WasmDebugBreakFrameConstants::kPushedFpRegs)) {
2992       __ movdqu(DoubleRegister::from_code(reg_code), Operand(esp, offset));
2993       offset += kSimd128Size;
2994     }
2995     __ add(esp, Immediate(kFpStackSize));
2996     for (int reg_code :
2997          base::bits::IterateBits(WasmDebugBreakFrameConstants::kPushedGpRegs)) {
2998       __ Pop(Register::from_code(reg_code));
2999     }
3000   }
3001 
3002   __ ret(0);
3003 }
3004 
3005 void Builtins::Generate_GenericJSToWasmWrapper(MacroAssembler* masm) {
3006   // TODO(v8:10701): Implement for this platform.
3007   __ Trap();
3008 }
3009 
3010 void Builtins::Generate_WasmOnStackReplace(MacroAssembler* masm) {
3011   // Only needed on x64.
3012   __ Trap();
3013 }
3014 #endif  // V8_ENABLE_WEBASSEMBLY
3015 
3016 void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
3017                                SaveFPRegsMode save_doubles, ArgvMode argv_mode,
3018                                bool builtin_exit_frame) {
3019   // eax: number of arguments including receiver
3020   // edx: pointer to C function
3021   // ebp: frame pointer  (restored after C call)
3022   // esp: stack pointer  (restored after C call)
3023   // esi: current context (C callee-saved)
3024   // edi: JS function of the caller (C callee-saved)
3025   //
3026   // If argv_mode == ArgvMode::kRegister:
3027   // ecx: pointer to the first argument
3028 
3029   STATIC_ASSERT(eax == kRuntimeCallArgCountRegister);
3030   STATIC_ASSERT(ecx == kRuntimeCallArgvRegister);
3031   STATIC_ASSERT(edx == kRuntimeCallFunctionRegister);
3032   STATIC_ASSERT(esi == kContextRegister);
3033   STATIC_ASSERT(edi == kJSFunctionRegister);
3034 
3035   DCHECK(!AreAliased(kRuntimeCallArgCountRegister, kRuntimeCallArgvRegister,
3036                      kRuntimeCallFunctionRegister, kContextRegister,
3037                      kJSFunctionRegister, kRootRegister));
3038 
3039   // Reserve space on the stack for the three arguments passed to the call. If
3040   // result size is greater than can be returned in registers, also reserve
3041   // space for the hidden argument for the result location, and space for the
3042   // result itself.
3043   int arg_stack_space = 3;
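  // The C entry point is assumed to take three arguments (argc, argv, and the
  // isolate address), which is why three slots are reserved here; they are
  // filled in right before the call below.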
3044 
3045   // Enter the exit frame that transitions from JavaScript to C++.
3046   if (argv_mode == ArgvMode::kRegister) {
3047     DCHECK(save_doubles == SaveFPRegsMode::kIgnore);
3048     DCHECK(!builtin_exit_frame);
3049     __ EnterApiExitFrame(arg_stack_space, edi);
3050 
3051     // Move argc and argv into the correct registers.
3052     __ mov(esi, ecx);
3053     __ mov(edi, eax);
3054   } else {
3055     __ EnterExitFrame(
3056         arg_stack_space, save_doubles == SaveFPRegsMode::kSave,
3057         builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
3058   }
3059 
3060   // edx: pointer to C function
3061   // ebp: frame pointer  (restored after C call)
3062   // esp: stack pointer  (restored after C call)
3063   // edi: number of arguments including receiver  (C callee-saved)
3064   // esi: pointer to the first argument (C callee-saved)
3065 
3066   // Result returned in eax, or eax+edx if result size is 2.
3067 
3068   // Check stack alignment.
3069   if (FLAG_debug_code) {
3070     __ CheckStackAlignment();
3071   }
3072   // Call C function.
3073   __ mov(Operand(esp, 0 * kSystemPointerSize), edi);  // argc.
3074   __ mov(Operand(esp, 1 * kSystemPointerSize), esi);  // argv.
3075   __ Move(ecx, Immediate(ExternalReference::isolate_address(masm->isolate())));
3076   __ mov(Operand(esp, 2 * kSystemPointerSize), ecx);
3077   __ call(kRuntimeCallFunctionRegister);
3078 
3079   // Result is in eax or edx:eax - do not destroy these registers!
3080 
3081   // Check result for exception sentinel.
3082   Label exception_returned;
3083   __ CompareRoot(eax, RootIndex::kException);
3084   __ j(equal, &exception_returned);
3085 
3086   // Check that there is no pending exception, otherwise we
3087   // should have returned the exception sentinel.
3088   if (FLAG_debug_code) {
3089     __ push(edx);
3090     __ LoadRoot(edx, RootIndex::kTheHoleValue);
3091     Label okay;
3092     ExternalReference pending_exception_address = ExternalReference::Create(
3093         IsolateAddressId::kPendingExceptionAddress, masm->isolate());
3094     __ cmp(edx, __ ExternalReferenceAsOperand(pending_exception_address, ecx));
3095     // Cannot use Check here as it attempts to generate a call into the runtime.
3096     __ j(equal, &okay, Label::kNear);
3097     __ int3();
3098     __ bind(&okay);
3099     __ pop(edx);
3100   }
3101 
3102   // Exit the JavaScript to C++ exit frame.
3103   __ LeaveExitFrame(save_doubles == SaveFPRegsMode::kSave,
3104                     argv_mode == ArgvMode::kStack);
3105   __ ret(0);
3106 
3107   // Handling of exception.
3108   __ bind(&exception_returned);
3109 
3110   ExternalReference pending_handler_context_address = ExternalReference::Create(
3111       IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
3112   ExternalReference pending_handler_entrypoint_address =
3113       ExternalReference::Create(
3114           IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
3115   ExternalReference pending_handler_fp_address = ExternalReference::Create(
3116       IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
3117   ExternalReference pending_handler_sp_address = ExternalReference::Create(
3118       IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
3119 
3120   // Ask the runtime for help to determine the handler. This will set eax to
3121   // contain the current pending exception; don't clobber it.
3122   ExternalReference find_handler =
3123       ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
3124   {
3125     FrameScope scope(masm, StackFrame::MANUAL);
3126     __ PrepareCallCFunction(3, eax);
3127     __ mov(Operand(esp, 0 * kSystemPointerSize), Immediate(0));  // argc.
3128     __ mov(Operand(esp, 1 * kSystemPointerSize), Immediate(0));  // argv.
3129     __ Move(esi,
3130             Immediate(ExternalReference::isolate_address(masm->isolate())));
3131     __ mov(Operand(esp, 2 * kSystemPointerSize), esi);
3132     __ CallCFunction(find_handler, 3);
3133   }
3134 
3135   // Retrieve the handler context, SP and FP.
3136   __ mov(esp, __ ExternalReferenceAsOperand(pending_handler_sp_address, esi));
3137   __ mov(ebp, __ ExternalReferenceAsOperand(pending_handler_fp_address, esi));
3138   __ mov(esi,
3139          __ ExternalReferenceAsOperand(pending_handler_context_address, esi));
3140 
3141   // If the handler is a JS frame, restore the context to the frame. Note that
3142   // the context will be set to (esi == 0) for non-JS frames.
3143   Label skip;
3144   __ test(esi, esi);
3145   __ j(zero, &skip, Label::kNear);
3146   __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
3147   __ bind(&skip);
3148 
3149   // Clear c_entry_fp, like we do in `LeaveExitFrame`.
3150   ExternalReference c_entry_fp_address = ExternalReference::Create(
3151       IsolateAddressId::kCEntryFPAddress, masm->isolate());
3152   __ mov(__ ExternalReferenceAsOperand(c_entry_fp_address, esi), Immediate(0));
3153 
3154   // Compute the handler entry address and jump to it.
3155   __ mov(edi, __ ExternalReferenceAsOperand(pending_handler_entrypoint_address,
3156                                             edi));
3157   __ jmp(edi);
3158 }
3159 
3160 void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
3161   Label check_negative, process_64_bits, done;
3162 
3163   // Account for return address and saved regs.
3164   const int kArgumentOffset = 4 * kSystemPointerSize;
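  // One slot for the return address plus the three registers (ecx, ebx, eax)
  // pushed below.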
3165 
3166   MemOperand mantissa_operand(MemOperand(esp, kArgumentOffset));
3167   MemOperand exponent_operand(
3168       MemOperand(esp, kArgumentOffset + kDoubleSize / 2));
3169 
3170   // The result is returned on the stack.
3171   MemOperand return_operand = mantissa_operand;
3172 
3173   Register scratch1 = ebx;
3174 
3175   // Since we must use ecx for shifts below, use some other register (eax)
3176   // to calculate the result.
3177   Register result_reg = eax;
3178   // Save ecx if it isn't the return register (and therefore volatile), or, if
3179   // it is the return register, save the temp register we use in its stead for
3180   // the result.
3181   Register save_reg = eax;
3182   __ push(ecx);
3183   __ push(scratch1);
3184   __ push(save_reg);
3185 
3186   __ mov(scratch1, mantissa_operand);
3187   if (CpuFeatures::IsSupported(SSE3)) {
3188     CpuFeatureScope scope(masm, SSE3);
3189     // Load x87 register with heap number.
3190     __ fld_d(mantissa_operand);
3191   }
3192   __ mov(ecx, exponent_operand);
3193 
3194   __ and_(ecx, HeapNumber::kExponentMask);
3195   __ shr(ecx, HeapNumber::kExponentShift);
3196   __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
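  // result_reg now holds the unbiased exponent.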
3197   __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
3198   __ j(below, &process_64_bits);
3199 
3200   // Result is entirely in lower 32-bits of mantissa
3201   int delta =
3202       HeapNumber::kExponentBias + base::Double::kPhysicalSignificandSize;
3203   if (CpuFeatures::IsSupported(SSE3)) {
3204     __ fstp(0);
3205   }
3206   __ sub(ecx, Immediate(delta));
3207   __ xor_(result_reg, result_reg);
3208   __ cmp(ecx, Immediate(31));
3209   __ j(above, &done);
3210   __ shl_cl(scratch1);
3211   __ jmp(&check_negative);
3212 
3213   __ bind(&process_64_bits);
3214   if (CpuFeatures::IsSupported(SSE3)) {
3215     CpuFeatureScope scope(masm, SSE3);
3216     // Reserve space for 64 bit answer.
3217     __ AllocateStackSpace(kDoubleSize);  // Nolint.
3218     // Do conversion, which cannot fail because we checked the exponent.
3219     __ fisttp_d(Operand(esp, 0));
3220     __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result
3221     __ add(esp, Immediate(kDoubleSize));
3222     __ jmp(&done);
3223   } else {
3224     // Result must be extracted from shifted 32-bit mantissa
3225     __ sub(ecx, Immediate(delta));
3226     __ neg(ecx);
3227     __ mov(result_reg, exponent_operand);
3228     __ and_(
3229         result_reg,
3230         Immediate(static_cast<uint32_t>(base::Double::kSignificandMask >> 32)));
3231     __ add(result_reg,
3232            Immediate(static_cast<uint32_t>(base::Double::kHiddenBit >> 32)));
3233     __ shrd_cl(scratch1, result_reg);
3234     __ shr_cl(result_reg);
3235     __ test(ecx, Immediate(32));
3236     __ cmov(not_equal, scratch1, result_reg);
3237   }
3238 
3239   // If the double was negative, negate the integer result.
3240   __ bind(&check_negative);
3241   __ mov(result_reg, scratch1);
3242   __ neg(result_reg);
3243   __ cmp(exponent_operand, Immediate(0));
3244   __ cmov(greater, result_reg, scratch1);
3245 
3246   // Restore registers
3247   __ bind(&done);
3248   __ mov(return_operand, result_reg);
3249   __ pop(save_reg);
3250   __ pop(scratch1);
3251   __ pop(ecx);
3252   __ ret(0);
3253 }
3254 
3255 namespace {
3256 
3257 // Generates an Operand for saving parameters after PrepareCallApiFunction.
3258 Operand ApiParameterOperand(int index) {
3259   return Operand(esp, index * kSystemPointerSize);
3260 }
3261 
3262 // Prepares stack to put arguments (aligns and so on). Reserves
3263 // space for return value if needed (assumes the return value is a handle).
3264 // Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
3265 // etc. Saves context (esi). If space was reserved for return value then
3266 // stores the pointer to the reserved slot into esi.
3267 void PrepareCallApiFunction(MacroAssembler* masm, int argc, Register scratch) {
3268   ASM_CODE_COMMENT(masm);
3269   __ EnterApiExitFrame(argc, scratch);
3270   if (FLAG_debug_code) {
3271     __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
3272   }
3273 }
3274 
3275 // Calls an API function.  Allocates HandleScope, extracts returned value
3276 // from handle and propagates exceptions.  Clobbers esi, edi and
3277 // caller-save registers.  Restores context.  On return removes
3278 // stack_space * kSystemPointerSize (GCed).
3279 void CallApiFunctionAndReturn(MacroAssembler* masm, Register function_address,
3280                               ExternalReference thunk_ref,
3281                               Operand thunk_last_arg, int stack_space,
3282                               Operand* stack_space_operand,
3283                               Operand return_value_operand) {
3284   Isolate* isolate = masm->isolate();
3285 
3286   ExternalReference next_address =
3287       ExternalReference::handle_scope_next_address(isolate);
3288   ExternalReference limit_address =
3289       ExternalReference::handle_scope_limit_address(isolate);
3290   ExternalReference level_address =
3291       ExternalReference::handle_scope_level_address(isolate);
3292 
3293   DCHECK(edx == function_address);
3294   // Allocate HandleScope in callee-save registers.
3295   __ add(__ ExternalReferenceAsOperand(level_address, esi), Immediate(1));
3296   __ mov(esi, __ ExternalReferenceAsOperand(next_address, esi));
3297   __ mov(edi, __ ExternalReferenceAsOperand(limit_address, edi));
3298 
3299   Label profiler_enabled, end_profiler_check;
3300   __ Move(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
3301   __ cmpb(Operand(eax, 0), Immediate(0));
3302   __ j(not_zero, &profiler_enabled);
3303   __ Move(eax, Immediate(ExternalReference::address_of_runtime_stats_flag()));
3304   __ cmp(Operand(eax, 0), Immediate(0));
3305   __ j(not_zero, &profiler_enabled);
3306   {
3307     // Call the api function directly.
3308     __ mov(eax, function_address);
3309     __ jmp(&end_profiler_check);
3310   }
3311   __ bind(&profiler_enabled);
3312   {
3313     // Additional parameter is the address of the actual getter function.
3314     __ mov(thunk_last_arg, function_address);
3315     __ Move(eax, Immediate(thunk_ref));
3316   }
3317   __ bind(&end_profiler_check);
3318 
3319   // Call the api function.
3320   __ call(eax);
3321 
3322   Label prologue;
3323   // Load the value from ReturnValue
3324   __ mov(eax, return_value_operand);
3325 
3326   Label promote_scheduled_exception;
3327   Label delete_allocated_handles;
3328   Label leave_exit_frame;
3329 
3330   __ bind(&prologue);
3331   // No more valid handles (the result handle was the last one). Restore
3332   // previous handle scope.
3333   __ mov(__ ExternalReferenceAsOperand(next_address, ecx), esi);
3334   __ sub(__ ExternalReferenceAsOperand(level_address, ecx), Immediate(1));
3335   __ Assert(above_equal, AbortReason::kInvalidHandleScopeLevel);
3336   __ cmp(edi, __ ExternalReferenceAsOperand(limit_address, ecx));
3337   __ j(not_equal, &delete_allocated_handles);
3338 
3339   // Leave the API exit frame.
3340   __ bind(&leave_exit_frame);
3341   if (stack_space_operand != nullptr) {
3342     DCHECK_EQ(stack_space, 0);
3343     __ mov(edx, *stack_space_operand);
3344   }
3345   __ LeaveApiExitFrame();
3346 
3347   // Check if the function scheduled an exception.
3348   ExternalReference scheduled_exception_address =
3349       ExternalReference::scheduled_exception_address(isolate);
3350   __ mov(ecx, __ ExternalReferenceAsOperand(scheduled_exception_address, ecx));
3351   __ CompareRoot(ecx, RootIndex::kTheHoleValue);
3352   __ j(not_equal, &promote_scheduled_exception);
3353 
3354 #if DEBUG
3355   // Check if the function returned a valid JavaScript value.
3356   Label ok;
3357   Register return_value = eax;
3358   Register map = ecx;
3359 
3360   __ JumpIfSmi(return_value, &ok, Label::kNear);
3361   __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
3362 
3363   __ CmpInstanceType(map, LAST_NAME_TYPE);
3364   __ j(below_equal, &ok, Label::kNear);
3365 
3366   __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
3367   __ j(above_equal, &ok, Label::kNear);
3368 
3369   __ CompareRoot(map, RootIndex::kHeapNumberMap);
3370   __ j(equal, &ok, Label::kNear);
3371 
3372   __ CompareRoot(map, RootIndex::kBigIntMap);
3373   __ j(equal, &ok, Label::kNear);
3374 
3375   __ CompareRoot(return_value, RootIndex::kUndefinedValue);
3376   __ j(equal, &ok, Label::kNear);
3377 
3378   __ CompareRoot(return_value, RootIndex::kTrueValue);
3379   __ j(equal, &ok, Label::kNear);
3380 
3381   __ CompareRoot(return_value, RootIndex::kFalseValue);
3382   __ j(equal, &ok, Label::kNear);
3383 
3384   __ CompareRoot(return_value, RootIndex::kNullValue);
3385   __ j(equal, &ok, Label::kNear);
3386 
3387   __ Abort(AbortReason::kAPICallReturnedInvalidObject);
3388 
3389   __ bind(&ok);
3390 #endif
3391 
3392   if (stack_space_operand == nullptr) {
3393     DCHECK_NE(stack_space, 0);
3394     __ ret(stack_space * kSystemPointerSize);
3395   } else {
3396     DCHECK_EQ(0, stack_space);
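    // Pop the return address, drop the dynamic amount of stack space (edx was
    // loaded from *stack_space_operand before leaving the exit frame), and
    // return.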
3397     __ pop(ecx);
3398     __ add(esp, edx);
3399     __ jmp(ecx);
3400   }
3401 
3402   // Re-throw by promoting a scheduled exception.
3403   __ bind(&promote_scheduled_exception);
3404   __ TailCallRuntime(Runtime::kPromoteScheduledException);
3405 
3406   // HandleScope limit has changed. Delete allocated extensions.
3407   ExternalReference delete_extensions =
3408       ExternalReference::delete_handle_scope_extensions();
3409   __ bind(&delete_allocated_handles);
3410   __ mov(__ ExternalReferenceAsOperand(limit_address, ecx), edi);
3411   __ mov(edi, eax);
3412   __ Move(eax, Immediate(ExternalReference::isolate_address(isolate)));
3413   __ mov(Operand(esp, 0), eax);
3414   __ Move(eax, Immediate(delete_extensions));
3415   __ call(eax);
3416   __ mov(eax, edi);
3417   __ jmp(&leave_exit_frame);
3418 }
3419 
3420 }  // namespace
3421 
3422 void Builtins::Generate_CallApiCallback(MacroAssembler* masm) {
3423   // ----------- S t a t e -------------
3424   //  -- esi                 : context
3425   //  -- edx                 : api function address
3426   //  -- ecx                 : arguments count (not including the receiver)
3427   //  -- eax                 : call data
3428   //  -- edi                 : holder
3429   //  -- esp[0]              : return address
3430   //  -- esp[8]              : argument 0 (receiver)
3431   //  -- esp[16]             : argument 1
3432   //  -- ...
3433   //  -- esp[argc * 8]       : argument (argc - 1)
3434   //  -- esp[(argc + 1) * 8] : argument argc
3435   // -----------------------------------
3436 
3437   Register api_function_address = edx;
3438   Register argc = ecx;
3439   Register call_data = eax;
3440   Register holder = edi;
3441 
3442   // Park argc in xmm0.
3443   __ movd(xmm0, argc);
3444 
3445   DCHECK(!AreAliased(api_function_address, argc, holder));
3446 
3447   using FCA = FunctionCallbackArguments;
3448 
3449   STATIC_ASSERT(FCA::kArgsLength == 6);
3450   STATIC_ASSERT(FCA::kNewTargetIndex == 5);
3451   STATIC_ASSERT(FCA::kDataIndex == 4);
3452   STATIC_ASSERT(FCA::kReturnValueOffset == 3);
3453   STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
3454   STATIC_ASSERT(FCA::kIsolateIndex == 1);
3455   STATIC_ASSERT(FCA::kHolderIndex == 0);
3456 
3457   // Set up FunctionCallbackInfo's implicit_args on the stack as follows:
3458   //
3459   // Current state:
3460   //   esp[0]: return address
3461   //
3462   // Target state:
3463   //   esp[0 * kSystemPointerSize]: return address
3464   //   esp[1 * kSystemPointerSize]: kHolder
3465   //   esp[2 * kSystemPointerSize]: kIsolate
3466   //   esp[3 * kSystemPointerSize]: undefined (kReturnValueDefaultValue)
3467   //   esp[4 * kSystemPointerSize]: undefined (kReturnValue)
3468   //   esp[5 * kSystemPointerSize]: kData
3469   //   esp[6 * kSystemPointerSize]: undefined (kNewTarget)
3470 
3471   __ PopReturnAddressTo(ecx);
3472   __ PushRoot(RootIndex::kUndefinedValue);
3473   __ Push(call_data);
3474   __ PushRoot(RootIndex::kUndefinedValue);
3475   __ PushRoot(RootIndex::kUndefinedValue);
3476   __ Push(Immediate(ExternalReference::isolate_address(masm->isolate())));
3477   __ Push(holder);
3478   __ PushReturnAddressFrom(ecx);
3479 
3480   // Reload argc from xmm0.
3481   __ movd(argc, xmm0);
3482 
3483   // Keep a pointer to kHolder (= implicit_args) in a scratch register.
3484   // We use it below to set up the FunctionCallbackInfo object.
3485   Register scratch = eax;
3486   __ lea(scratch, Operand(esp, 1 * kSystemPointerSize));
3487 
3488   // The API function takes a reference to v8::FunctionCallbackInfo. If the
3489   // CPU profiler is enabled, a wrapper function will be called instead and we
3490   // need to pass the address of the callback as an additional parameter.
3491   // Always allocate space for it.
3492   static constexpr int kApiArgc = 1 + 1;
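  // One slot for the FunctionCallbackInfo reference passed to the API
  // function, plus one for the callback address used by the profiler thunk.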
3493 
3494   // Allocate the v8::FunctionCallbackInfo structure in the arguments' space,
3495   // since it's not controlled by the GC.
3496   static constexpr int kApiStackSpace = 4;
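  // Slots for implicit_args_, values_ and length_ of the FunctionCallbackInfo,
  // plus one extra slot used below to stash the number of bytes to drop on
  // return.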
3497 
3498   PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace, edi);
3499 
3500   // FunctionCallbackInfo::implicit_args_ (points at kHolder as set up above).
3501   __ mov(ApiParameterOperand(kApiArgc + 0), scratch);
3502 
3503   // FunctionCallbackInfo::values_ (points at the first varargs argument passed
3504   // on the stack).
3505   __ lea(scratch,
3506          Operand(scratch, (FCA::kArgsLength + 1) * kSystemPointerSize));
3507   __ mov(ApiParameterOperand(kApiArgc + 1), scratch);
3508 
3509   // FunctionCallbackInfo::length_.
3510   __ mov(ApiParameterOperand(kApiArgc + 2), argc);
3511 
3512   // We also store the number of bytes to drop from the stack after returning
3513   // from the API function here.
3514   __ lea(scratch,
3515          Operand(argc, times_system_pointer_size,
3516                  (FCA::kArgsLength + 1 /* receiver */) * kSystemPointerSize));
3517   __ mov(ApiParameterOperand(kApiArgc + 3), scratch);
3518 
3519   // v8::FunctionCallback's argument.
3520   __ lea(scratch, ApiParameterOperand(kApiArgc + 0));
3521   __ mov(ApiParameterOperand(0), scratch);
3522 
3523   ExternalReference thunk_ref = ExternalReference::invoke_function_callback();
3524 
3525   // There are two stack slots above the arguments we constructed on the stack:
3526   // the stored ebp (pushed by EnterApiExitFrame), and the return address.
3527   static constexpr int kStackSlotsAboveFCA = 2;
3528   Operand return_value_operand(
3529       ebp,
3530       (kStackSlotsAboveFCA + FCA::kReturnValueOffset) * kSystemPointerSize);
3531 
3532   static constexpr int kUseStackSpaceOperand = 0;
3533   Operand stack_space_operand = ApiParameterOperand(kApiArgc + 3);
3534   CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
3535                            ApiParameterOperand(1), kUseStackSpaceOperand,
3536                            &stack_space_operand, return_value_operand);
3537 }
3538 
3539 void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
3540   // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
3541   // name below the exit frame to make GC aware of them.
3542   STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
3543   STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
3544   STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
3545   STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
3546   STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
3547   STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
3548   STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
3549   STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);
3550 
3551   Register receiver = ApiGetterDescriptor::ReceiverRegister();
3552   Register holder = ApiGetterDescriptor::HolderRegister();
3553   Register callback = ApiGetterDescriptor::CallbackRegister();
3554   Register scratch = edi;
3555   DCHECK(!AreAliased(receiver, holder, callback, scratch));
3556 
3557   __ pop(scratch);  // Pop return address to extend the frame.
3558   __ push(receiver);
3559   __ push(FieldOperand(callback, AccessorInfo::kDataOffset));
3560   __ PushRoot(RootIndex::kUndefinedValue);  // ReturnValue
3561   // ReturnValue default value
3562   __ PushRoot(RootIndex::kUndefinedValue);
3563   __ Push(Immediate(ExternalReference::isolate_address(masm->isolate())));
3564   __ push(holder);
3565   __ push(Immediate(Smi::zero()));  // should_throw_on_error -> false
3566   __ push(FieldOperand(callback, AccessorInfo::kNameOffset));
3567   __ push(scratch);  // Restore return address.
3568 
3569   // v8::PropertyCallbackInfo::args_ array and name handle.
3570   const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
3571 
3572   // Allocate v8::PropertyCallbackInfo object, arguments for callback and
3573   // space for optional callback address parameter (in case CPU profiler is
3574   // active) in non-GCed stack space.
3575   const int kApiArgc = 3 + 1;
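  // Three C arguments (the name handle, the PropertyCallbackInfo pointer, and
  // a reserved slot for the callback address used when profiling), plus one
  // slot holding the PropertyCallbackInfo's args_ field.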
3576 
3577   PrepareCallApiFunction(masm, kApiArgc, scratch);
3578 
3579   // Load address of v8::PropertyAccessorInfo::args_ array. The value in ebp
3580   // here corresponds to esp + kSystemPointerSize before PrepareCallApiFunction.
3581   __ lea(scratch, Operand(ebp, kSystemPointerSize + 2 * kSystemPointerSize));
3582   // Create v8::PropertyCallbackInfo object on the stack and initialize
3583   // its args_ field.
3584   Operand info_object = ApiParameterOperand(3);
3585   __ mov(info_object, scratch);
3586 
3587   // Name as handle.
3588   __ sub(scratch, Immediate(kSystemPointerSize));
3589   __ mov(ApiParameterOperand(0), scratch);
3590   // Arguments pointer.
3591   __ lea(scratch, info_object);
3592   __ mov(ApiParameterOperand(1), scratch);
3593   // Reserve space for optional callback address parameter.
3594   Operand thunk_last_arg = ApiParameterOperand(2);
3595 
3596   ExternalReference thunk_ref =
3597       ExternalReference::invoke_accessor_getter_callback();
3598 
3599   __ mov(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
3600   Register function_address = edx;
3601   __ mov(function_address,
3602          FieldOperand(scratch, Foreign::kForeignAddressOffset));
3603   // +3 is to skip the prologue, the return address and the name handle.
3604   Operand return_value_operand(
3605       ebp,
3606       (PropertyCallbackArguments::kReturnValueOffset + 3) * kSystemPointerSize);
3607   Operand* const kUseStackSpaceConstant = nullptr;
3608   CallApiFunctionAndReturn(masm, function_address, thunk_ref, thunk_last_arg,
3609                            kStackUnwindSpace, kUseStackSpaceConstant,
3610                            return_value_operand);
3611 }
3612 
3613 void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
3614   __ int3();  // Unused on this architecture.
3615 }
3616 
3617 namespace {
3618 
3619 enum Direction { FORWARD, BACKWARD };
3620 enum Alignment { MOVE_ALIGNED, MOVE_UNALIGNED };
3621 
3622 // Expects registers:
3623 // esi - source, aligned if alignment == ALIGNED
3624 // edi - destination, always aligned
3625 // ecx - count (copy size in bytes)
3626 // edx - loop count (number of 64 byte chunks)
3627 void MemMoveEmitMainLoop(MacroAssembler* masm, Label* move_last_15,
3628                          Direction direction, Alignment alignment) {
3629   ASM_CODE_COMMENT(masm);
3630   Register src = esi;
3631   Register dst = edi;
3632   Register count = ecx;
3633   Register loop_count = edx;
3634   Label loop, move_last_31, move_last_63;
3635   __ cmp(loop_count, 0);
3636   __ j(equal, &move_last_63);
3637   __ bind(&loop);
3638   // Main loop. Copy in 64 byte chunks.
3639   if (direction == BACKWARD) __ sub(src, Immediate(0x40));
3640   __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
3641   __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
3642   __ movdq(alignment == MOVE_ALIGNED, xmm2, Operand(src, 0x20));
3643   __ movdq(alignment == MOVE_ALIGNED, xmm3, Operand(src, 0x30));
3644   if (direction == FORWARD) __ add(src, Immediate(0x40));
3645   if (direction == BACKWARD) __ sub(dst, Immediate(0x40));
3646   __ movdqa(Operand(dst, 0x00), xmm0);
3647   __ movdqa(Operand(dst, 0x10), xmm1);
3648   __ movdqa(Operand(dst, 0x20), xmm2);
3649   __ movdqa(Operand(dst, 0x30), xmm3);
3650   if (direction == FORWARD) __ add(dst, Immediate(0x40));
3651   __ dec(loop_count);
3652   __ j(not_zero, &loop);
3653   // At most 63 bytes left to copy.
3654   __ bind(&move_last_63);
3655   __ test(count, Immediate(0x20));
3656   __ j(zero, &move_last_31);
3657   if (direction == BACKWARD) __ sub(src, Immediate(0x20));
3658   __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
3659   __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
3660   if (direction == FORWARD) __ add(src, Immediate(0x20));
3661   if (direction == BACKWARD) __ sub(dst, Immediate(0x20));
3662   __ movdqa(Operand(dst, 0x00), xmm0);
3663   __ movdqa(Operand(dst, 0x10), xmm1);
3664   if (direction == FORWARD) __ add(dst, Immediate(0x20));
3665   // At most 31 bytes left to copy.
3666   __ bind(&move_last_31);
3667   __ test(count, Immediate(0x10));
3668   __ j(zero, move_last_15);
3669   if (direction == BACKWARD) __ sub(src, Immediate(0x10));
3670   __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0));
3671   if (direction == FORWARD) __ add(src, Immediate(0x10));
3672   if (direction == BACKWARD) __ sub(dst, Immediate(0x10));
3673   __ movdqa(Operand(dst, 0), xmm0);
3674   if (direction == FORWARD) __ add(dst, Immediate(0x10));
3675 }
3676 
3677 void MemMoveEmitPopAndReturn(MacroAssembler* masm) {
3678   __ pop(esi);
3679   __ pop(edi);
3680   __ ret(0);
3681 }
3682 
3683 }  // namespace
3684 
3685 void Builtins::Generate_MemMove(MacroAssembler* masm) {
3686   // Generated code is put into a fixed, unmovable buffer, and not into
3687   // the V8 heap. We can't, and don't, refer to any relocatable addresses
3688   // (e.g. the JavaScript nan-object).
3689 
3690   // 32-bit C declaration function calls pass arguments on stack.
3691 
3692   // Stack layout:
3693   // esp[12]: Third argument, size.
3694   // esp[8]: Second argument, source pointer.
3695   // esp[4]: First argument, destination pointer.
3696   // esp[0]: return address
3697 
3698   const int kDestinationOffset = 1 * kSystemPointerSize;
3699   const int kSourceOffset = 2 * kSystemPointerSize;
3700   const int kSizeOffset = 3 * kSystemPointerSize;
3701 
3702   // When copying up to this many bytes, use special "small" handlers.
3703   const size_t kSmallCopySize = 8;
3704   // When copying up to this many bytes, use special "medium" handlers.
3705   const size_t kMediumCopySize = 63;
3706   // When non-overlapping region of src and dst is less than this,
3707   // use a more careful implementation (slightly slower).
3708   const size_t kMinMoveDistance = 16;
3709   // Note that these values are dictated by the implementation below,
3710   // do not just change them and hope things will work!
3711 
3712   int stack_offset = 0;  // Update if we change the stack height.
3713 
3714   Label backward, backward_much_overlap;
3715   Label forward_much_overlap, small_size, medium_size, pop_and_return;
3716   __ push(edi);
3717   __ push(esi);
3718   stack_offset += 2 * kSystemPointerSize;
3719   Register dst = edi;
3720   Register src = esi;
3721   Register count = ecx;
3722   Register loop_count = edx;
3723   __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
3724   __ mov(src, Operand(esp, stack_offset + kSourceOffset));
3725   __ mov(count, Operand(esp, stack_offset + kSizeOffset));
3726 
3727   __ cmp(dst, src);
3728   __ j(equal, &pop_and_return);
3729 
3730   __ prefetch(Operand(src, 0), 1);
3731   __ cmp(count, kSmallCopySize);
3732   __ j(below_equal, &small_size);
3733   __ cmp(count, kMediumCopySize);
3734   __ j(below_equal, &medium_size);
3735   __ cmp(dst, src);
3736   __ j(above, &backward);
3737 
3738   {
3739     // |dst| is a lower address than |src|. Copy front-to-back.
3740     Label unaligned_source, move_last_15, skip_last_move;
3741     __ mov(eax, src);
3742     __ sub(eax, dst);
3743     __ cmp(eax, kMinMoveDistance);
3744     __ j(below, &forward_much_overlap);
3745     // Copy first 16 bytes.
3746     __ movdqu(xmm0, Operand(src, 0));
3747     __ movdqu(Operand(dst, 0), xmm0);
3748     // Determine distance to alignment: 16 - (dst & 0xF).
3749     __ mov(edx, dst);
3750     __ and_(edx, 0xF);
3751     __ neg(edx);
3752     __ add(edx, Immediate(16));
3753     __ add(dst, edx);
3754     __ add(src, edx);
3755     __ sub(count, edx);
3756     // dst is now aligned. Main copy loop.
3757     __ mov(loop_count, count);
3758     __ shr(loop_count, 6);
3759     // Check if src is also aligned.
3760     __ test(src, Immediate(0xF));
3761     __ j(not_zero, &unaligned_source);
3762     // Copy loop for aligned source and destination.
3763     MemMoveEmitMainLoop(masm, &move_last_15, FORWARD, MOVE_ALIGNED);
3764     // At most 15 bytes to copy. Copy 16 bytes at end of string.
3765     __ bind(&move_last_15);
3766     __ and_(count, 0xF);
3767     __ j(zero, &skip_last_move, Label::kNear);
3768     __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
3769     __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
3770     __ bind(&skip_last_move);
3771     MemMoveEmitPopAndReturn(masm);
3772 
3773     // Copy loop for unaligned source and aligned destination.
3774     __ bind(&unaligned_source);
3775     MemMoveEmitMainLoop(masm, &move_last_15, FORWARD, MOVE_UNALIGNED);
3776     __ jmp(&move_last_15);
3777 
3778     // Less than kMinMoveDistance offset between dst and src.
3779     Label loop_until_aligned, last_15_much_overlap;
3780     __ bind(&loop_until_aligned);
3781     __ mov_b(eax, Operand(src, 0));
3782     __ inc(src);
3783     __ mov_b(Operand(dst, 0), eax);
3784     __ inc(dst);
3785     __ dec(count);
3786     __ bind(&forward_much_overlap);  // Entry point into this block.
3787     __ test(dst, Immediate(0xF));
3788     __ j(not_zero, &loop_until_aligned);
3789     // dst is now aligned, src can't be. Main copy loop.
3790     __ mov(loop_count, count);
3791     __ shr(loop_count, 6);
3792     MemMoveEmitMainLoop(masm, &last_15_much_overlap, FORWARD, MOVE_UNALIGNED);
3793     __ bind(&last_15_much_overlap);
3794     __ and_(count, 0xF);
3795     __ j(zero, &pop_and_return);
3796     __ cmp(count, kSmallCopySize);
3797     __ j(below_equal, &small_size);
3798     __ jmp(&medium_size);
3799   }
3800 
3801   {
3802     // |dst| is a higher address than |src|. Copy backwards.
3803     Label unaligned_source, move_first_15, skip_last_move;
3804     __ bind(&backward);
3805     // |dst| and |src| always point to the end of what's left to copy.
3806     __ add(dst, count);
3807     __ add(src, count);
3808     __ mov(eax, dst);
3809     __ sub(eax, src);
3810     __ cmp(eax, kMinMoveDistance);
3811     __ j(below, &backward_much_overlap);
3812     // Copy last 16 bytes.
3813     __ movdqu(xmm0, Operand(src, -0x10));
3814     __ movdqu(Operand(dst, -0x10), xmm0);
3815     // Find distance to alignment: dst & 0xF
3816     __ mov(edx, dst);
3817     __ and_(edx, 0xF);
3818     __ sub(dst, edx);
3819     __ sub(src, edx);
3820     __ sub(count, edx);
3821     // dst is now aligned. Main copy loop.
3822     __ mov(loop_count, count);
3823     __ shr(loop_count, 6);
3824     // Check if src is also aligned.
3825     __ test(src, Immediate(0xF));
3826     __ j(not_zero, &unaligned_source);
3827     // Copy loop for aligned source and destination.
3828     MemMoveEmitMainLoop(masm, &move_first_15, BACKWARD, MOVE_ALIGNED);
3829     // At most 15 bytes to copy. Copy 16 bytes at beginning of string.
3830     __ bind(&move_first_15);
3831     __ and_(count, 0xF);
3832     __ j(zero, &skip_last_move, Label::kNear);
3833     __ sub(src, count);
3834     __ sub(dst, count);
3835     __ movdqu(xmm0, Operand(src, 0));
3836     __ movdqu(Operand(dst, 0), xmm0);
3837     __ bind(&skip_last_move);
3838     MemMoveEmitPopAndReturn(masm);
3839 
3840     // Copy loop for unaligned source and aligned destination.
3841     __ bind(&unaligned_source);
3842     MemMoveEmitMainLoop(masm, &move_first_15, BACKWARD, MOVE_UNALIGNED);
3843     __ jmp(&move_first_15);
3844 
3845     // Less than kMinMoveDistance offset between dst and src.
3846     Label loop_until_aligned, first_15_much_overlap;
3847     __ bind(&loop_until_aligned);
3848     __ dec(src);
3849     __ dec(dst);
3850     __ mov_b(eax, Operand(src, 0));
3851     __ mov_b(Operand(dst, 0), eax);
3852     __ dec(count);
3853     __ bind(&backward_much_overlap);  // Entry point into this block.
3854     __ test(dst, Immediate(0xF));
3855     __ j(not_zero, &loop_until_aligned);
3856     // dst is now aligned, src can't be. Main copy loop.
3857     __ mov(loop_count, count);
3858     __ shr(loop_count, 6);
3859     MemMoveEmitMainLoop(masm, &first_15_much_overlap, BACKWARD, MOVE_UNALIGNED);
3860     __ bind(&first_15_much_overlap);
3861     __ and_(count, 0xF);
3862     __ j(zero, &pop_and_return);
3863     // Small/medium handlers expect dst/src to point to the beginning.
3864     __ sub(dst, count);
3865     __ sub(src, count);
3866     __ cmp(count, kSmallCopySize);
3867     __ j(below_equal, &small_size);
3868     __ jmp(&medium_size);
3869   }
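  // Note (illustrative): the much-overlap sub-paths above dispatch their
  // residual bytes to the shared small/medium handlers below (or to
  // pop_and_return when nothing is left); the well-separated paths return
  // directly.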
3870   {
3871     // Special handlers for 9 <= copy_size < 64. No assumptions about
3872     // alignment or move distance, so all reads must be unaligned and
3873     // must happen before any writes.
3874     Label f9_16, f17_32, f33_48, f49_63;
3875 
3876     __ bind(&f9_16);
3877     __ movsd(xmm0, Operand(src, 0));
3878     __ movsd(xmm1, Operand(src, count, times_1, -8));
3879     __ movsd(Operand(dst, 0), xmm0);
3880     __ movsd(Operand(dst, count, times_1, -8), xmm1);
3881     MemMoveEmitPopAndReturn(masm);
3882 
3883     __ bind(&f17_32);
3884     __ movdqu(xmm0, Operand(src, 0));
3885     __ movdqu(xmm1, Operand(src, count, times_1, -0x10));
3886     __ movdqu(Operand(dst, 0x00), xmm0);
3887     __ movdqu(Operand(dst, count, times_1, -0x10), xmm1);
3888     MemMoveEmitPopAndReturn(masm);
3889 
3890     __ bind(&f33_48);
3891     __ movdqu(xmm0, Operand(src, 0x00));
3892     __ movdqu(xmm1, Operand(src, 0x10));
3893     __ movdqu(xmm2, Operand(src, count, times_1, -0x10));
3894     __ movdqu(Operand(dst, 0x00), xmm0);
3895     __ movdqu(Operand(dst, 0x10), xmm1);
3896     __ movdqu(Operand(dst, count, times_1, -0x10), xmm2);
3897     MemMoveEmitPopAndReturn(masm);
3898 
3899     __ bind(&f49_63);
3900     __ movdqu(xmm0, Operand(src, 0x00));
3901     __ movdqu(xmm1, Operand(src, 0x10));
3902     __ movdqu(xmm2, Operand(src, 0x20));
3903     __ movdqu(xmm3, Operand(src, count, times_1, -0x10));
3904     __ movdqu(Operand(dst, 0x00), xmm0);
3905     __ movdqu(Operand(dst, 0x10), xmm1);
3906     __ movdqu(Operand(dst, 0x20), xmm2);
3907     __ movdqu(Operand(dst, count, times_1, -0x10), xmm3);
3908     MemMoveEmitPopAndReturn(masm);
3909 
3910     __ bind(&medium_size);  // Entry point into this block.
3911     __ mov(eax, count);
3912     __ dec(eax);
3913     __ shr(eax, 4);
3914     if (FLAG_debug_code) {
3915       Label ok;
3916       __ cmp(eax, 3);
3917       __ j(below_equal, &ok);
3918       __ int3();
3919       __ bind(&ok);
3920     }
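    // Illustrative note: eax = (count - 1) >> 4 indexes the handlers above:
    // count 9..16 -> 0 (f9_16), 17..32 -> 1 (f17_32), 33..48 -> 2 (f33_48),
    // 49..64 -> 3 (f49_63), matching the bound enforced by the debug check.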
3921 
3922     // Dispatch to handlers.
3923     Label eax_is_2_or_3;
3924 
3925     __ cmp(eax, 1);
3926     __ j(greater, &eax_is_2_or_3);
3927     __ j(less, &f9_16);  // eax == 0.
3928     __ jmp(&f17_32);     // eax == 1.
3929 
3930     __ bind(&eax_is_2_or_3);
3931     __ cmp(eax, 3);
3932     __ j(less, &f33_48);  // eax == 2.
3933     __ jmp(&f49_63);      // eax == 3.
3934   }
3935   {
3936     // Specialized copiers for copy_size <= 8 bytes.
3937     Label f0, f1, f2, f3, f4, f5_8;
3938     __ bind(&f0);
3939     MemMoveEmitPopAndReturn(masm);
3940 
3941     __ bind(&f1);
3942     __ mov_b(eax, Operand(src, 0));
3943     __ mov_b(Operand(dst, 0), eax);
3944     MemMoveEmitPopAndReturn(masm);
3945 
3946     __ bind(&f2);
3947     __ mov_w(eax, Operand(src, 0));
3948     __ mov_w(Operand(dst, 0), eax);
3949     MemMoveEmitPopAndReturn(masm);
3950 
3951     __ bind(&f3);
3952     __ mov_w(eax, Operand(src, 0));
3953     __ mov_b(edx, Operand(src, 2));
3954     __ mov_w(Operand(dst, 0), eax);
3955     __ mov_b(Operand(dst, 2), edx);
3956     MemMoveEmitPopAndReturn(masm);
3957 
3958     __ bind(&f4);
3959     __ mov(eax, Operand(src, 0));
3960     __ mov(Operand(dst, 0), eax);
3961     MemMoveEmitPopAndReturn(masm);
3962 
3963     __ bind(&f5_8);
3964     __ mov(eax, Operand(src, 0));
3965     __ mov(edx, Operand(src, count, times_1, -4));
3966     __ mov(Operand(dst, 0), eax);
3967     __ mov(Operand(dst, count, times_1, -4), edx);
3968     MemMoveEmitPopAndReturn(masm);
3969 
3970     __ bind(&small_size);  // Entry point into this block.
3971     if (FLAG_debug_code) {
3972       Label ok;
3973       __ cmp(count, 8);
3974       __ j(below_equal, &ok);
3975       __ int3();
3976       __ bind(&ok);
3977     }
3978 
3979     // Dispatch to handlers.
3980     Label count_is_above_3, count_is_2_or_3;
3981 
3982     __ cmp(count, 3);
3983     __ j(greater, &count_is_above_3);
3984 
3985     __ cmp(count, 1);
3986     __ j(greater, &count_is_2_or_3);
3987     __ j(less, &f0);  // count == 0.
3988     __ jmp(&f1);      // count == 1.
3989 
3990     __ bind(&count_is_2_or_3);
3991     __ cmp(count, 3);
3992     __ j(less, &f2);  // count == 2.
3993     __ jmp(&f3);      // count == 3.
3994 
3995     __ bind(&count_is_above_3);
3996     __ cmp(count, 5);
3997     __ j(less, &f4);  // count == 4.
3998     __ jmp(&f5_8);    // count in [5, 8].
3999   }
4000 
4001   __ bind(&pop_and_return);
4002   MemMoveEmitPopAndReturn(masm);
4003 }
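
// Illustrative only: a rough C sketch of the copy strategy emitted above. The
// entry dispatch and the constants kMinMoveDistance/kSmallCopySize are defined
// earlier in this builtin, and the sketch glosses over the overlap handling.
//
//   void MemMoveSketch(uint8_t* dst, const uint8_t* src, size_t count) {
//     if (count <= 8)       { /* f0..f5_8: 1/2/4/8-byte moves */ }
//     else if (count <= 64) { /* f9_16..f49_63: overlapping SSE loads, all
//                                reads issued before any write */ }
//     else if (dst < src)   { /* forward: align dst to 16, 64-byte main loop,
//                                then one trailing unaligned 16-byte move */ }
//     else                  { /* backward: same idea, walking down from
//                                dst + count and src + count */ }
//   }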
4004 
4005 namespace {
4006 
4007 void Generate_DeoptimizationEntry(MacroAssembler* masm,
4008                                   DeoptimizeKind deopt_kind) {
4009   Isolate* isolate = masm->isolate();
4010 
4011   // Save all general purpose registers before messing with them.
4012   const int kNumberOfRegisters = Register::kNumRegisters;
4013 
4014   const int kDoubleRegsSize = kDoubleSize * XMMRegister::kNumRegisters;
4015   __ AllocateStackSpace(kDoubleRegsSize);
4016   const RegisterConfiguration* config = RegisterConfiguration::Default();
4017   for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
4018     int code = config->GetAllocatableDoubleCode(i);
4019     XMMRegister xmm_reg = XMMRegister::from_code(code);
4020     int offset = code * kDoubleSize;
4021     __ movsd(Operand(esp, offset), xmm_reg);
4022   }
4023 
4024   __ pushad();
4025 
4026   ExternalReference c_entry_fp_address =
4027       ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate);
4028   __ mov(masm->ExternalReferenceAsOperand(c_entry_fp_address, esi), ebp);
4029 
4030   const int kSavedRegistersAreaSize =
4031       kNumberOfRegisters * kSystemPointerSize + kDoubleRegsSize;
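  // At this point the stack holds, from esp upwards: the eight general-purpose
  // registers pushed by pushad, the saved XMM double registers, and then the
  // return address of the deoptimization exit that called into this entry.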
4032 
4033   // Get the return address into ecx (the address of the deoptimization exit
4034   // in the code object) and compute the fp-to-sp delta in register edx.
4035   __ mov(ecx, Operand(esp, kSavedRegistersAreaSize));
4036   __ lea(edx, Operand(esp, kSavedRegistersAreaSize + 1 * kSystemPointerSize));
4037 
4038   __ sub(edx, ebp);
4039   __ neg(edx);
4040 
4041   // Allocate a new deoptimizer object.
4042   __ PrepareCallCFunction(6, eax);
4043   __ mov(eax, Immediate(0));
4044   Label context_check;
4045   __ mov(edi, Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset));
4046   __ JumpIfSmi(edi, &context_check);
4047   __ mov(eax, Operand(ebp, StandardFrameConstants::kFunctionOffset));
4048   __ bind(&context_check);
4049   __ mov(Operand(esp, 0 * kSystemPointerSize), eax);  // Function.
4050   __ mov(Operand(esp, 1 * kSystemPointerSize),
4051          Immediate(static_cast<int>(deopt_kind)));
4052   __ mov(Operand(esp, 2 * kSystemPointerSize),
4053          Immediate(Deoptimizer::kFixedExitSizeMarker));  // Bailout id.
4054   __ mov(Operand(esp, 3 * kSystemPointerSize), ecx);     // Code address or 0.
4055   __ mov(Operand(esp, 4 * kSystemPointerSize), edx);     // Fp-to-sp delta.
4056   __ Move(Operand(esp, 5 * kSystemPointerSize),
4057           Immediate(ExternalReference::isolate_address(masm->isolate())));
4058   {
4059     AllowExternalCallThatCantCauseGC scope(masm);
4060     __ CallCFunction(ExternalReference::new_deoptimizer_function(), 6);
4061   }
4062 
4063   // Preserve deoptimizer object in register eax and get the input
4064   // frame descriptor pointer.
4065   __ mov(esi, Operand(eax, Deoptimizer::input_offset()));
4066 
4067   // Fill in the input registers.
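  // (pushad stored them in ascending register-code order, eax first and edi on
  // top of the stack, so popping from i = kNumberOfRegisters - 1 down to 0
  // lands each register in its matching FrameDescription slot.)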
4068   for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
4069     int offset =
4070         (i * kSystemPointerSize) + FrameDescription::registers_offset();
4071     __ pop(Operand(esi, offset));
4072   }
4073 
4074   int double_regs_offset = FrameDescription::double_registers_offset();
4075   // Fill in the double input registers.
4076   for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
4077     int code = config->GetAllocatableDoubleCode(i);
4078     int dst_offset = code * kDoubleSize + double_regs_offset;
4079     int src_offset = code * kDoubleSize;
4080     __ movsd(xmm0, Operand(esp, src_offset));
4081     __ movsd(Operand(esi, dst_offset), xmm0);
4082   }
4083 
4084   // Clear all FPU exceptions.
4085   // TODO(ulan): Find out why the TOP register is not zero here in some cases,
4086   // and check that the generated code never deoptimizes with unbalanced stack.
4087   __ fnclex();
4088 
4089   // Mark the stack as not iterable for the CPU profiler, which won't be able
4090   // to walk the stack without the return address.
4091   __ mov_b(__ ExternalReferenceAsOperand(
4092                ExternalReference::stack_is_iterable_address(isolate), edx),
4093            Immediate(0));
4094 
4095   // Remove the return address and the double registers.
4096   __ add(esp, Immediate(kDoubleRegsSize + 1 * kSystemPointerSize));
4097 
4098   // Compute a pointer to the unwinding limit in register ecx; that is
4099   // the first stack slot not part of the input frame.
4100   __ mov(ecx, Operand(esi, FrameDescription::frame_size_offset()));
4101   __ add(ecx, esp);
4102 
4103   // Unwind the stack down to - but not including - the unwinding
4104   // limit and copy the contents of the activation frame to the input
4105   // frame description.
4106   __ lea(edx, Operand(esi, FrameDescription::frame_content_offset()));
4107   Label pop_loop_header;
4108   __ jmp(&pop_loop_header);
4109   Label pop_loop;
4110   __ bind(&pop_loop);
4111   __ pop(Operand(edx, 0));
4112   __ add(edx, Immediate(sizeof(uint32_t)));
4113   __ bind(&pop_loop_header);
4114   __ cmp(ecx, esp);
4115   __ j(not_equal, &pop_loop);
4116 
4117   // Compute the output frame in the deoptimizer.
4118   __ push(eax);
4119   __ PrepareCallCFunction(1, esi);
4120   __ mov(Operand(esp, 0 * kSystemPointerSize), eax);
4121   {
4122     AllowExternalCallThatCantCauseGC scope(masm);
4123     __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);
4124   }
4125   __ pop(eax);
4126 
4127   __ mov(esp, Operand(eax, Deoptimizer::caller_frame_top_offset()));
4128 
4129   // Replace the current (input) frame with the output frames.
4130   Label outer_push_loop, inner_push_loop, outer_loop_header, inner_loop_header;
4131   // Outer loop state: eax = current FrameDescription**, edx = one
4132   // past the last FrameDescription**.
4133   __ mov(edx, Operand(eax, Deoptimizer::output_count_offset()));
4134   __ mov(eax, Operand(eax, Deoptimizer::output_offset()));
4135   __ lea(edx, Operand(eax, edx, times_system_pointer_size, 0));
4136   __ jmp(&outer_loop_header);
4137   __ bind(&outer_push_loop);
4138   // Inner loop state: esi = current FrameDescription*, ecx = loop
4139   // index.
4140   __ mov(esi, Operand(eax, 0));
4141   __ mov(ecx, Operand(esi, FrameDescription::frame_size_offset()));
4142   __ jmp(&inner_loop_header);
4143   __ bind(&inner_push_loop);
4144   __ sub(ecx, Immediate(sizeof(uint32_t)));
4145   __ push(Operand(esi, ecx, times_1, FrameDescription::frame_content_offset()));
4146   __ bind(&inner_loop_header);
4147   __ test(ecx, ecx);
4148   __ j(not_zero, &inner_push_loop);
4149   __ add(eax, Immediate(kSystemPointerSize));
4150   __ bind(&outer_loop_header);
4151   __ cmp(eax, edx);
4152   __ j(below, &outer_push_loop);
4153 
4154   // In case of a failed STUB, we have to restore the XMM registers.
4155   for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
4156     int code = config->GetAllocatableDoubleCode(i);
4157     XMMRegister xmm_reg = XMMRegister::from_code(code);
4158     int src_offset = code * kDoubleSize + double_regs_offset;
4159     __ movsd(xmm_reg, Operand(esi, src_offset));
4160   }
4161 
4162   // Push pc and continuation from the last output frame.
4163   __ push(Operand(esi, FrameDescription::pc_offset()));
4164   __ push(Operand(esi, FrameDescription::continuation_offset()));
4165 
4166   // Push the registers from the last output frame.
4167   for (int i = 0; i < kNumberOfRegisters; i++) {
4168     int offset =
4169         (i * kSystemPointerSize) + FrameDescription::registers_offset();
4170     __ push(Operand(esi, offset));
4171   }
4172 
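  // The output frames are in place again; flag the stack as iterable for the
  // CPU profiler (it was cleared above before unwinding).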
4173   __ mov_b(__ ExternalReferenceAsOperand(
4174                ExternalReference::stack_is_iterable_address(isolate), edx),
4175            Immediate(1));
4176 
4177   // Restore the registers from the stack.
4178   __ popad();
4179 
4180   __ InitializeRootRegister();
4181 
4182   // Return to the continuation point.
4183   __ ret(0);
4184 }
4185 
4186 }  // namespace
4187 
4188 void Builtins::Generate_DeoptimizationEntry_Eager(MacroAssembler* masm) {
4189   Generate_DeoptimizationEntry(masm, DeoptimizeKind::kEager);
4190 }
4191 
4192 void Builtins::Generate_DeoptimizationEntry_Soft(MacroAssembler* masm) {
4193   Generate_DeoptimizationEntry(masm, DeoptimizeKind::kSoft);
4194 }
4195 
4196 void Builtins::Generate_DeoptimizationEntry_Bailout(MacroAssembler* masm) {
4197   Generate_DeoptimizationEntry(masm, DeoptimizeKind::kBailout);
4198 }
4199 
4200 void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
4201   Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
4202 }
4203 
4204 namespace {
4205 
4206 // Restarts execution either at the current or next (in execution order)
4207 // bytecode. If there is baseline code on the shared function info, converts an
4208 // interpreter frame into a baseline frame and continues execution in baseline
4209 // code. Otherwise execution continues with bytecode.
4210 void Generate_BaselineOrInterpreterEntry(MacroAssembler* masm,
4211                                          bool next_bytecode,
4212                                          bool is_osr = false) {
4213   Label start;
4214   __ bind(&start);
4215 
4216   // Spill the accumulator register; note that we're not within a frame, so we
4217   // have to make sure to pop it before doing any GC-visible calls.
4218   __ push(kInterpreterAccumulatorRegister);
4219 
4220   // Get function from the frame.
4221   Register closure = eax;
4222   __ mov(closure, MemOperand(ebp, StandardFrameConstants::kFunctionOffset));
4223 
4224   // Get the Code object from the shared function info.
4225   Register code_obj = esi;
4226   __ mov(code_obj,
4227          FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
4228   __ mov(code_obj,
4229          FieldOperand(code_obj, SharedFunctionInfo::kFunctionDataOffset));
4230 
4231   // Check if we have baseline code. For OSR entry it is safe to assume we
4232   // always have baseline code.
4233   if (!is_osr) {
4234     Label start_with_baseline;
4235     __ CmpObjectType(code_obj, CODET_TYPE, kInterpreterBytecodeOffsetRegister);
4236     __ j(equal, &start_with_baseline);
4237 
4238     // Start with bytecode as there is no baseline code.
4239     __ pop(kInterpreterAccumulatorRegister);
4240     Builtin builtin_id = next_bytecode
4241                              ? Builtin::kInterpreterEnterAtNextBytecode
4242                              : Builtin::kInterpreterEnterAtBytecode;
4243     __ Jump(masm->isolate()->builtins()->code_handle(builtin_id),
4244             RelocInfo::CODE_TARGET);
4245 
4246     __ bind(&start_with_baseline);
4247   } else if (FLAG_debug_code) {
4248     __ CmpObjectType(code_obj, CODET_TYPE, kInterpreterBytecodeOffsetRegister);
4249     __ Assert(equal, AbortReason::kExpectedBaselineData);
4250   }
4251 
4252   if (FLAG_debug_code) {
4253     AssertCodeIsBaseline(masm, code_obj, ecx);
4254   }
4255 
4256   // Load the feedback vector.
4257   Register feedback_vector = ecx;
4258   __ mov(feedback_vector,
4259          FieldOperand(closure, JSFunction::kFeedbackCellOffset));
4260   __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
4261 
4262   Label install_baseline_code;
4263   // Check if the feedback vector is valid. If not, call the runtime to
4264   // install baseline code, which allocates the feedback vector.
4265   __ CmpObjectType(feedback_vector, FEEDBACK_VECTOR_TYPE,
4266                    kInterpreterBytecodeOffsetRegister);
4267   __ j(not_equal, &install_baseline_code);
4268 
4269   // Save BytecodeOffset from the stack frame.
4270   __ mov(kInterpreterBytecodeOffsetRegister,
4271          MemOperand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
4272   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
4273   // Replace BytecodeOffset with the feedback vector.
4274   __ mov(MemOperand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp),
4275          feedback_vector);
4276   feedback_vector = no_reg;
4277 
4278   // Compute baseline pc for bytecode offset.
4279   ExternalReference get_baseline_pc_extref;
4280   if (next_bytecode || is_osr) {
4281     get_baseline_pc_extref =
4282         ExternalReference::baseline_pc_for_next_executed_bytecode();
4283   } else {
4284     get_baseline_pc_extref =
4285         ExternalReference::baseline_pc_for_bytecode_offset();
4286   }
4287   Register get_baseline_pc = ecx;
4288   __ LoadAddress(get_baseline_pc, get_baseline_pc_extref);
4289 
4290   // If the code deoptimizes during the implicit function entry stack interrupt
4291   // check, it will have a bailout ID of kFunctionEntryBytecodeOffset, which is
4292   // not a valid bytecode offset.
4293   // TODO(pthier): Investigate if it is feasible to handle this special case
4294   // in TurboFan instead of here.
4295   Label valid_bytecode_offset, function_entry_bytecode;
4296   if (!is_osr) {
4297     __ cmp(kInterpreterBytecodeOffsetRegister,
4298            Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag +
4299                      kFunctionEntryBytecodeOffset));
4300     __ j(equal, &function_entry_bytecode);
4301   }
4302 
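  // The offset stored in the frame is relative to the start of the
  // BytecodeArray object; the C helper called below expects an offset into the
  // bytecodes themselves, so strip the header here.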
4303   __ sub(kInterpreterBytecodeOffsetRegister,
4304          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
4305 
4306   __ bind(&valid_bytecode_offset);
4307   // Get bytecode array from the stack frame.
4308   __ mov(kInterpreterBytecodeArrayRegister,
4309          MemOperand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
4310   {
4311     FrameScope scope(masm, StackFrame::INTERNAL);
4312     __ PrepareCallCFunction(3, eax);
4313     __ mov(Operand(esp, 0 * kSystemPointerSize), code_obj);
4314     __ mov(Operand(esp, 1 * kSystemPointerSize),
4315            kInterpreterBytecodeOffsetRegister);
4316     __ mov(Operand(esp, 2 * kSystemPointerSize),
4317            kInterpreterBytecodeArrayRegister);
4318     __ CallCFunction(get_baseline_pc, 3);
4319   }
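  // The helper returns (in kReturnRegister0) the PC offset of the baseline code
  // for the given bytecode offset; turn it into an entry address inside the
  // Code object before jumping there.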
4320   __ lea(code_obj,
4321          FieldOperand(code_obj, kReturnRegister0, times_1, Code::kHeaderSize));
4322   __ pop(kInterpreterAccumulatorRegister);
4323 
4324   if (is_osr) {
4325     // Reset the OSR loop nesting depth to disarm back edges.
4326     // TODO(pthier): Separate baseline Sparkplug from TF arming and don't disarm
4327     // Sparkplug here.
4328     __ mov_w(FieldOperand(kInterpreterBytecodeArrayRegister,
4329                           BytecodeArray::kOsrLoopNestingLevelOffset),
4330              Immediate(0));
4331     Generate_OSREntry(masm, code_obj);
4332   } else {
4333     __ jmp(code_obj);
4334   }
4335   __ Trap();  // Unreachable.
4336 
4337   if (!is_osr) {
4338     __ bind(&function_entry_bytecode);
4339     // If the bytecode offset is kFunctionEntryBytecodeOffset, get the start
4340     // address of the first bytecode.
4341     __ mov(kInterpreterBytecodeOffsetRegister, Immediate(0));
4342     if (next_bytecode) {
4343       __ LoadAddress(get_baseline_pc,
4344                      ExternalReference::baseline_pc_for_bytecode_offset());
4345     }
4346     __ jmp(&valid_bytecode_offset);
4347   }
4348 
4349   __ bind(&install_baseline_code);
4350   // Pop/re-push the accumulator so that it's spilled within the below frame
4351   // scope, to keep the stack valid. Use ecx for this -- we can't save it in
4352   // kInterpreterAccumulatorRegister because that aliases with closure.
4353   DCHECK(!AreAliased(ecx, kContextRegister, closure));
4354   __ pop(ecx);
4355   // Restore the clobbered context register.
4356   __ mov(kContextRegister,
4357          Operand(ebp, StandardFrameConstants::kContextOffset));
4358   {
4359     FrameScope scope(masm, StackFrame::INTERNAL);
4360     __ Push(ecx);
4361     __ Push(closure);
4362     __ CallRuntime(Runtime::kInstallBaselineCode, 1);
4363     // Now that we're restarting, we don't have to worry about closure and
4364     // accumulator aliasing, so pop the spilled accumulator directly back into
4365     // the right register.
4366     __ Pop(kInterpreterAccumulatorRegister);
4367   }
4368   // Retry from the start after installing baseline code.
4369   __ jmp(&start);
4370 }
4371 
4372 }  // namespace
4373 
4374 void Builtins::Generate_BaselineOrInterpreterEnterAtBytecode(
4375     MacroAssembler* masm) {
4376   Generate_BaselineOrInterpreterEntry(masm, false);
4377 }
4378 
4379 void Builtins::Generate_BaselineOrInterpreterEnterAtNextBytecode(
4380     MacroAssembler* masm) {
4381   Generate_BaselineOrInterpreterEntry(masm, true);
4382 }
4383 
4384 void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
4385     MacroAssembler* masm) {
4386   Generate_BaselineOrInterpreterEntry(masm, false, true);
4387 }
4388 
4389 void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
4390   Generate_DynamicCheckMapsTrampoline<DynamicCheckMapsDescriptor>(
4391       masm, BUILTIN_CODE(masm->isolate(), DynamicCheckMaps));
4392 }
4393 
4394 void Builtins::Generate_DynamicCheckMapsWithFeedbackVectorTrampoline(
4395     MacroAssembler* masm) {
4396   Generate_DynamicCheckMapsTrampoline<
4397       DynamicCheckMapsWithFeedbackVectorDescriptor>(
4398       masm, BUILTIN_CODE(masm->isolate(), DynamicCheckMapsWithFeedbackVector));
4399 }
4400 
4401 template <class Descriptor>
4402 void Builtins::Generate_DynamicCheckMapsTrampoline(
4403     MacroAssembler* masm, Handle<Code> builtin_target) {
4404   FrameScope scope(masm, StackFrame::MANUAL);
4405   __ EnterFrame(StackFrame::INTERNAL);
4406 
4407   // Only save the registers that the DynamicCheckMaps builtin can clobber.
4408   Descriptor descriptor;
4409   RegList registers = descriptor.allocatable_registers();
4410   // If FLAG_debug_code is enabled, CSA checks will call a C function, so we
4411   // need to save all CallerSaved registers too.
4412   if (FLAG_debug_code) registers |= kJSCallerSaved;
4413   __ MaybeSaveRegisters(registers);
4414 
4415   // Load the immediate arguments from the deopt exit to pass to the builtin.
4416   Register slot_arg = descriptor.GetRegisterParameter(Descriptor::kSlot);
4417   Register handler_arg = descriptor.GetRegisterParameter(Descriptor::kHandler);
4418   __ mov(handler_arg, Operand(ebp, CommonFrameConstants::kCallerPCOffset));
4419   __ mov(slot_arg,
4420          Operand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs1PcOffset));
4421   __ mov(handler_arg,
4422          Operand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs2PcOffset));
4423 
4424   __ Call(builtin_target, RelocInfo::CODE_TARGET);
4425 
4426   Label deopt, bailout;
4427   __ cmp(eax, Immediate(static_cast<int>(DynamicCheckMapsStatus::kSuccess)));
4428   __ j(not_equal, &deopt);
4429 
4430   __ MaybeRestoreRegisters(registers);
4431   __ LeaveFrame(StackFrame::INTERNAL);
4432   __ Ret();
4433 
4434   __ bind(&deopt);
4435   __ cmp(eax, Immediate(static_cast<int>(DynamicCheckMapsStatus::kBailout)));
4436   __ j(equal, &bailout);
4437 
4438   if (FLAG_debug_code) {
4439     __ cmp(eax, Immediate(static_cast<int>(DynamicCheckMapsStatus::kDeopt)));
4440     __ Assert(equal, AbortReason::kUnexpectedDynamicCheckMapsStatus);
4441   }
4442   __ MaybeRestoreRegisters(registers);
4443   __ LeaveFrame(StackFrame::INTERNAL);
4444   Handle<Code> deopt_eager = masm->isolate()->builtins()->code_handle(
4445       Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kEager));
4446   __ Jump(deopt_eager, RelocInfo::CODE_TARGET);
4447 
4448   __ bind(&bailout);
4449   __ MaybeRestoreRegisters(registers);
4450   __ LeaveFrame(StackFrame::INTERNAL);
4451   Handle<Code> deopt_bailout = masm->isolate()->builtins()->code_handle(
4452       Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kBailout));
4453   __ Jump(deopt_bailout, RelocInfo::CODE_TARGET);
4454 }
4455 
4456 #undef __
4457 
4458 }  // namespace internal
4459 }  // namespace v8
4460 
4461 #endif  // V8_TARGET_ARCH_IA32
4462