// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_S390

#include "src/api/api-arguments.h"
#include "src/codegen/code-factory.h"
// For interpreter_entry_return_pc_offset. TODO(jkummerow): Drop.
#include "src/codegen/macro-assembler-inl.h"
#include "src/codegen/register-configuration.h"
#include "src/debug/debug.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/frame-constants.h"
#include "src/execution/frames.h"
#include "src/heap/heap-inl.h"
#include "src/logging/counters.h"
#include "src/objects/cell.h"
#include "src/objects/foreign.h"
#include "src/objects/heap-number.h"
#include "src/objects/js-generator.h"
#include "src/objects/smi.h"
#include "src/runtime/runtime.h"
#include "src/wasm/wasm-linkage.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address) {
  __ Move(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));
  __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
          RelocInfo::CODE_TARGET);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r3 : target function (preserved for callee)
  //  -- r5 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ Push(r3, r5, r3);

    __ CallRuntime(function_id, 1);
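    // The runtime call leaves the returned Code object in r2; move it into
    // r4, the register JumpCodeObject below expects
    // (kJavaScriptCallCodeStartRegister, per the static_assert following
    // this scope).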
    __ LoadRR(r4, r2);

    // Restore target function and new target.
    __ Pop(r3, r5);
  }
  static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
  __ JumpCodeObject(r4);
}

namespace {

enum StackLimitKind { kInterruptStackLimit, kRealStackLimit };

MemOperand StackLimitAsMemOperand(MacroAssembler* masm, StackLimitKind kind) {
  DCHECK(masm->root_array_available());
  Isolate* isolate = masm->isolate();
  ExternalReference limit =
      kind == StackLimitKind::kRealStackLimit
          ? ExternalReference::address_of_real_jslimit(isolate)
          : ExternalReference::address_of_jslimit(isolate);
  DCHECK(TurboAssembler::IsAddressableThroughRootRegister(isolate, limit));

  intptr_t offset =
      TurboAssembler::RootRegisterOffsetForExternalReference(isolate, limit);
  CHECK(is_int32(offset));
  return MemOperand(kRootRegister, offset);
}

void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                 Register scratch, Label* stack_overflow) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadP(scratch,
           StackLimitAsMemOperand(masm, StackLimitKind::kRealStackLimit));
  // Make scratch the space we have left. The stack might already have
  // overflowed here, in which case scratch becomes negative.
  __ SubP(scratch, sp, scratch);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftP(r0, num_args, Operand(kSystemPointerSizeLog2));
  __ CmpP(scratch, r0);
  __ ble(stack_overflow);  // Signed comparison.
}

void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2     : number of arguments
  //  -- r3     : constructor function
  //  -- r5     : new target
  //  -- cp     : context
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Register scratch = r4;
  Label stack_overflow;

  Generate_StackOverflowCheck(masm, r2, r7, &stack_overflow);

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(r2);
    __ Push(cp, r2);
    __ SmiUntag(r2);
    // The receiver for the builtin/api call.
    __ PushRoot(RootIndex::kTheHoleValue);
    // Set up pointer to last argument.
    __ la(r6, MemOperand(fp, StandardFrameConstants::kCallerSPOffset));
    // Copy arguments and receiver to the expression stack.
    // r2: number of arguments
    // r3: constructor function
    // r5: new target
    // r6: address of last argument (caller sp)
    // condition code: set by SmiUntag above, eq if r2 is zero
    // sp[0]: receiver (the hole)
    // sp[1]: number of arguments (smi-tagged)
    // sp[2]: context
    Label loop, no_args;
    __ beq(&no_args);
    __ ShiftLeftP(scratch, r2, Operand(kSystemPointerSizeLog2));
    __ SubP(sp, sp, scratch);
    __ LoadRR(r1, r2);
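    // scratch counts down from argc * kSystemPointerSize to 0; each iteration
    // copies the argument at [r6 + scratch] in the caller's frame to the
    // matching slot at [sp + scratch] on the new expression stack.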
    __ bind(&loop);
    __ lay(scratch, MemOperand(scratch, -kSystemPointerSize));
    __ LoadP(r0, MemOperand(scratch, r6));
    __ StoreP(r0, MemOperand(scratch, sp));
    __ BranchOnCount(r1, &loop);
    __ bind(&no_args);

    // Call the function.
    // r2: number of arguments
    // r3: constructor function
    // r5: new target

    __ InvokeFunctionWithNewTarget(r3, r5, r2, CALL_FUNCTION);

    // Restore context from the frame.
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    // Restore smi-tagged arguments count from the frame.
    __ LoadP(scratch, MemOperand(fp, ConstructFrameConstants::kLengthOffset));

    // Leave construct frame.
  }
  // Remove caller arguments from the stack and return.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);

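  // scratch still holds the smi-tagged argument count; SmiToPtrArrayOffset
  // converts it to a byte offset (count * kSystemPointerSize), and the extra
  // kSystemPointerSize added afterwards drops the receiver as well.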
  __ SmiToPtrArrayOffset(scratch, scratch);
  __ AddP(sp, sp, scratch);
  __ AddP(sp, sp, Operand(kSystemPointerSize));
  __ Ret();

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);  // Unreachable code.
  }
}

}  // namespace

// The construct stub for ES5 constructor functions and ES6 class constructors.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  --      r2: number of arguments (untagged)
  //  --      r3: constructor function
  //  --      r5: new target
  //  --      cp: context
  //  --      lr: return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
    Label post_instantiation_deopt_entry, not_create_implicit_receiver;

    // Preserve the incoming parameters on the stack.
    __ SmiTag(r2);
    __ Push(cp, r2, r3);
    __ PushRoot(RootIndex::kUndefinedValue);
    __ Push(r5);

    // ----------- S t a t e -------------
    //  --        sp[0*kSystemPointerSize]: new target
    //  --        sp[1*kSystemPointerSize]: padding
    //  -- r3 and sp[2*kSystemPointerSize]: constructor function
    //  --        sp[3*kSystemPointerSize]: number of arguments (tagged)
    //  --        sp[4*kSystemPointerSize]: context
    // -----------------------------------

    __ LoadTaggedPointerField(
        r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
    __ LoadlW(r6, FieldMemOperand(r6, SharedFunctionInfo::kFlagsOffset));
    __ DecodeField<SharedFunctionInfo::FunctionKindBits>(r6);
    __ JumpIfIsInRange(r6, kDefaultDerivedConstructor, kDerivedConstructor,
                       &not_create_implicit_receiver);

    // If not derived class constructor: Allocate the new receiver object.
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
                        r6, r7);
    __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
            RelocInfo::CODE_TARGET);
    __ b(&post_instantiation_deopt_entry);

    // Else: use TheHoleValue as receiver for constructor call
    __ bind(&not_create_implicit_receiver);
    __ LoadRoot(r2, RootIndex::kTheHoleValue);

    // ----------- S t a t e -------------
    //  --                          r2: receiver
    //  -- Slot 4 / sp[0*kSystemPointerSize]: new target
    //  -- Slot 3 / sp[1*kSystemPointerSize]: padding
    //  -- Slot 2 / sp[2*kSystemPointerSize]: constructor function
    //  -- Slot 1 / sp[3*kSystemPointerSize]: number of arguments (tagged)
    //  -- Slot 0 / sp[4*kSystemPointerSize]: context
    // -----------------------------------
    // Deoptimizer enters here.
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());
    __ bind(&post_instantiation_deopt_entry);

    // Restore new target.
    __ Pop(r5);
    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ Push(r2, r2);

    // ----------- S t a t e -------------
    //  --                 r5: new target
    //  -- sp[0*kSystemPointerSize]: implicit receiver
    //  -- sp[1*kSystemPointerSize]: implicit receiver
    //  -- sp[2*kSystemPointerSize]: padding
    //  -- sp[3*kSystemPointerSize]: constructor function
    //  -- sp[4*kSystemPointerSize]: number of arguments (tagged)
    //  -- sp[5*kSystemPointerSize]: context
    // -----------------------------------

    // Restore constructor function and argument count.
    __ LoadP(r3, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
    __ LoadP(r2, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    __ SmiUntag(r2);

    // Set up pointer to last argument.
    __ la(r6, MemOperand(fp, StandardFrameConstants::kCallerSPOffset));

    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, r2, r7, &stack_overflow);
    __ b(&enough_stack_space);

    __ bind(&stack_overflow);
    // Restore the context from the frame.
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);

    __ bind(&enough_stack_space);

    // Copy arguments and receiver to the expression stack.
    Label loop, no_args;
    // ----------- S t a t e -------------
    //  --                        r2: number of arguments (untagged)
    //  --                        r5: new target
    //  --                        r6: pointer to last argument
    //  --                        cc: condition indicating whether r2 is zero
    //  --        sp[0*kSystemPointerSize]: implicit receiver
    //  --        sp[1*kSystemPointerSize]: implicit receiver
    //  --        sp[2*kSystemPointerSize]: padding
    //  -- r3 and sp[3*kSystemPointerSize]: constructor function
    //  --        sp[4*kSystemPointerSize]: number of arguments (tagged)
    //  --        sp[5*kSystemPointerSize]: context
    // -----------------------------------

    __ ltgr(r2, r2);
    __ beq(&no_args);
    __ ShiftLeftP(r8, r2, Operand(kSystemPointerSizeLog2));
    __ SubP(sp, sp, r8);
    __ LoadRR(r1, r2);
    __ bind(&loop);
    __ lay(r8, MemOperand(r8, -kSystemPointerSize));
    __ LoadP(r0, MemOperand(r8, r6));
    __ StoreP(r0, MemOperand(r8, sp));
    __ BranchOnCount(r1, &loop);
    __ bind(&no_args);

    // Call the function.
    __ InvokeFunctionWithNewTarget(r3, r5, r2, CALL_FUNCTION);

    // ----------- S t a t e -------------
    //  --                 r2: constructor result
    //  -- sp[0*kSystemPointerSize]: implicit receiver
    //  -- sp[1*kSystemPointerSize]: padding
    //  -- sp[2*kSystemPointerSize]: constructor function
    //  -- sp[3*kSystemPointerSize]: number of arguments
    //  -- sp[4*kSystemPointerSize]: context
    // -----------------------------------

    // Store offset of return address for deoptimizer.
    masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
        masm->pc_offset());

    // Restore the context from the frame.
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, do_throw, leave_frame;

    // If the result is undefined, we jump out to using the implicit receiver.
    __ JumpIfRoot(r2, RootIndex::kUndefinedValue, &use_receiver);

    // Otherwise we do a smi check and fall through to check if the return value
    // is a valid receiver.

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(r2, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CompareObjectType(r2, r6, r6, FIRST_JS_RECEIVER_TYPE);
    __ bge(&leave_frame);
    __ b(&use_receiver);

    __ bind(&do_throw);
    __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ LoadP(r2, MemOperand(sp));
    __ JumpIfRoot(r2, RootIndex::kTheHoleValue, &do_throw);

    __ bind(&leave_frame);
    // Restore smi-tagged arguments count from the frame.
    __ LoadP(r3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);

  __ SmiToPtrArrayOffset(r3, r3);
  __ AddP(sp, sp, r3);
  __ AddP(sp, sp, Operand(kSystemPointerSize));
  __ Ret();
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}

static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
                                          Register sfi_data,
                                          Register scratch1) {
  Label done;

  __ CompareObjectType(sfi_data, scratch1, scratch1, INTERPRETER_DATA_TYPE);
  __ bne(&done, Label::kNear);
  __ LoadTaggedPointerField(
      sfi_data,
      FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
  __ bind(&done);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the value to pass to the generator
  //  -- r3 : the JSGeneratorObject to resume
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(r3);

  // Store input value into generator object.
  __ StoreTaggedField(
      r2, FieldMemOperand(r3, JSGeneratorObject::kInputOrDebugPosOffset), r0);
  __ RecordWriteField(r3, JSGeneratorObject::kInputOrDebugPosOffset, r2, r5,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Load suspended function and context.
  __ LoadTaggedPointerField(
      r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
  __ LoadTaggedPointerField(cp,
                            FieldMemOperand(r6, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  Register scratch = r7;

  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ Move(scratch, debug_hook);
  __ LoadB(scratch, MemOperand(scratch));
  __ CmpSmiLiteral(scratch, Smi::zero(), r0);
  __ bne(&prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.

  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());

  __ Move(scratch, debug_suspended_generator);
  __ LoadP(scratch, MemOperand(scratch));
  __ CmpP(scratch, r3);
  __ beq(&prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Check the stack for overflow. We are not trying to catch interruptions
  // (i.e. debug break and preemption) here, so check the "real stack limit".
  Label stack_overflow;
  __ LoadP(scratch,
           StackLimitAsMemOperand(masm, StackLimitKind::kRealStackLimit));
  __ CmpLogicalP(sp, scratch);
  __ blt(&stack_overflow);

  // Push receiver.
  __ LoadTaggedPointerField(
      scratch, FieldMemOperand(r3, JSGeneratorObject::kReceiverOffset));
  __ Push(scratch);

  // ----------- S t a t e -------------
  //  -- r3    : the JSGeneratorObject to resume
  //  -- r6    : generator function
  //  -- cp    : generator context
  //  -- lr    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Copy the function arguments from the generator object's register file.
  __ LoadTaggedPointerField(
      r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
  __ LoadLogicalHalfWordP(
      r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadTaggedPointerField(
      r4,
      FieldMemOperand(r3, JSGeneratorObject::kParametersAndRegistersOffset));
  {
    Label loop, done_loop;
    __ ShiftLeftP(r1, r5, Operand(kSystemPointerSizeLog2));
    __ SubP(sp, r1);

    __ ShiftLeftP(r5, r5, Operand(kTaggedSizeLog2));

    // ip = stack offset
    // r5 = parameter array offset
    __ LoadImmP(ip, Operand::Zero());
    __ SubP(r5, Operand(kTaggedSize));
    __ blt(&done_loop);

    __ lghi(r1, Operand(-kTaggedSize));
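    // r1 is the (negative) step for BranchRelativeOnIdxHighP below: each
    // iteration moves r5 down one tagged slot, and the loop continues while
    // the array offset is still non-negative.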

    __ bind(&loop);

    // parameter copy loop
    __ LoadAnyTaggedField(r0, FieldMemOperand(r4, r5, FixedArray::kHeaderSize));
    __ StoreP(r0, MemOperand(sp, ip));

    // update offsets
    __ lay(ip, MemOperand(ip, kSystemPointerSize));

    __ BranchRelativeOnIdxHighP(r5, r1, &loop);

    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ LoadTaggedPointerField(
        r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
    __ LoadTaggedPointerField(
        r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));
    GetSharedFunctionInfoBytecode(masm, r5, ip);
    __ CompareObjectType(r5, r5, r5, BYTECODE_ARRAY_TYPE);
    __ Assert(eq, AbortReason::kMissingBytecodeArray);
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ LoadRR(r5, r3);
    __ LoadRR(r3, r6);
    static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
    __ LoadTaggedPointerField(r4, FieldMemOperand(r3, JSFunction::kCodeOffset));
    __ JumpCodeObject(r4);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r3, r6);
    // Push hole as receiver since we do not use it for stepping.
    __ PushRoot(RootIndex::kTheHoleValue);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(r3);
    __ LoadTaggedPointerField(
        r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r3);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(r3);
    __ LoadTaggedPointerField(
        r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);  // This should be unreachable.
  }
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ push(r3);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

namespace {

constexpr int kPushedStackSpace =
    (kNumCalleeSaved + 2) * kSystemPointerSize +
    kNumCalleeSavedDoubles * kDoubleSize + 5 * kSystemPointerSize +
    EntryFrameConstants::kCallerFPOffset - kSystemPointerSize;
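// The terms above correspond to the pushed_stack_space increments performed
// in Generate_JSEntryVariant below: the callee-saved GPRs plus r14/sp, the
// callee-saved doubles, the five entry-frame slots, and the fp adjustment.
// A DCHECK_EQ before the trampoline call keeps the two tallies in sync.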

// Called with the native C calling convention. The corresponding function
// signature is either:
//
//   using JSEntryFunction = GeneratedCode<Address(
//       Address root_register_value, Address new_target, Address target,
//       Address receiver, intptr_t argc, Address** args)>;
// or
//   using JSEntryFunction = GeneratedCode<Address(
//       Address root_register_value, MicrotaskQueue* microtask_queue)>;
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
                             Builtins::Name entry_trampoline) {
  // The register state is either:
  //   r2:                             root register value
  //   r3:                             code entry
  //   r4:                             function
  //   r5:                             receiver
  //   r6:                             argc
  //   [sp + 20 * kSystemPointerSize]: argv
  // or
  //   r2: root_register_value
  //   r3: microtask_queue

  Label invoke, handler_entry, exit;

  int pushed_stack_space = 0;
  {
    NoRootArrayScope no_root_array(masm);

    // saving floating point registers
    // 64bit ABI requires f8 to f15 be saved
    // http://refspecs.linuxbase.org/ELF/zSeries/lzsabi0_zSeries.html
    __ lay(sp, MemOperand(sp, -8 * kDoubleSize));
    __ std(d8, MemOperand(sp));
    __ std(d9, MemOperand(sp, 1 * kDoubleSize));
    __ std(d10, MemOperand(sp, 2 * kDoubleSize));
    __ std(d11, MemOperand(sp, 3 * kDoubleSize));
    __ std(d12, MemOperand(sp, 4 * kDoubleSize));
    __ std(d13, MemOperand(sp, 5 * kDoubleSize));
    __ std(d14, MemOperand(sp, 6 * kDoubleSize));
    __ std(d15, MemOperand(sp, 7 * kDoubleSize));
    pushed_stack_space += kNumCalleeSavedDoubles * kDoubleSize;

    // zLinux ABI
    //    Incoming parameters:
    //          r2: root register value
    //          r3: code entry
    //          r4: function
    //          r5: receiver
    //          r6: argc
    // [sp + 20 * kSystemPointerSize]: argv
    //    Requires us to save the callee-preserved registers r6-r13.
    //    General convention is to also save r14 (return addr) and
    //    sp/r15 in a single STM/STMG.
    __ lay(sp, MemOperand(sp, -10 * kSystemPointerSize));
    __ StoreMultipleP(r6, sp, MemOperand(sp, 0));
    pushed_stack_space += (kNumCalleeSaved + 2) * kSystemPointerSize;

    // Initialize the root register.
    // C calling convention. The first argument is passed in r2.
    __ LoadRR(kRootRegister, r2);
  }

  // Save r6 (argc) in r1; it is restored after the frame setup below.
  __ LoadRR(r1, r6);

  // Push a frame with special values setup to mark it as an entry frame.
  //   Bad FP (-1)
  //   SMI Marker
  //   SMI Marker
  //   kCEntryFPAddress
  //   Frame type
  __ lay(sp, MemOperand(sp, -5 * kSystemPointerSize));
  pushed_stack_space += 5 * kSystemPointerSize;

  // Push a bad frame pointer to fail if it is used.
  __ LoadImmP(r9, Operand(-1));

  __ mov(r8, Operand(StackFrame::TypeToMarker(type)));
  __ mov(r7, Operand(StackFrame::TypeToMarker(type)));
  // Save copies of the top frame descriptor on the stack.
  __ Move(r6, ExternalReference::Create(
                 IsolateAddressId::kCEntryFPAddress, masm->isolate()));
  __ LoadP(r6, MemOperand(r6));
  __ StoreMultipleP(r6, r9, MemOperand(sp, kSystemPointerSize));

  Register scratch = r8;

  // Set up frame pointer for the frame to be pushed.
  // Need to add kSystemPointerSize, because sp has one extra
  // frame already for the frame type being pushed later.
  __ lay(fp, MemOperand(sp, -EntryFrameConstants::kCallerFPOffset +
                                kSystemPointerSize));
  pushed_stack_space +=
      EntryFrameConstants::kCallerFPOffset - kSystemPointerSize;

  // Restore r6.
  __ LoadRR(r6, r1);

  // If this is the outermost JS call, set js_entry_sp value.
  Label non_outermost_js;
  ExternalReference js_entry_sp =
      ExternalReference::Create(IsolateAddressId::kJSEntrySPAddress,
                                masm->isolate());
  __ Move(r7, js_entry_sp);
  __ LoadAndTestP(scratch, MemOperand(r7));
  __ bne(&non_outermost_js, Label::kNear);
  __ StoreP(fp, MemOperand(r7));
  __ Load(scratch, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
  Label cont;
  __ b(&cont, Label::kNear);
  __ bind(&non_outermost_js);
  __ Load(scratch, Operand(StackFrame::INNER_JSENTRY_FRAME));

  __ bind(&cont);
  __ StoreP(scratch, MemOperand(sp));  // frame-type

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ b(&invoke, Label::kNear);

  __ bind(&handler_entry);

  // Store the current pc as the handler offset. It's used later to create the
  // handler table.
  masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());

  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.  Coming in here the
  // fp will be invalid because the PushStackHandler below sets it to 0 to
  // signal the existence of the JSEntry frame.
  __ Move(scratch,
          ExternalReference::Create(IsolateAddressId::kPendingExceptionAddress,
                                    masm->isolate()));

  __ StoreP(r2, MemOperand(scratch));
  __ LoadRoot(r2, RootIndex::kException);
  __ b(&exit, Label::kNear);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  // Must preserve r2-r6.
  __ PushStackHandler();
  // If an exception not caught by another handler occurs, this handler
  // returns control to the code after the b(&invoke) above, which
  // restores all kCalleeSaved registers (including cp and fp) to their
  // saved values before returning a failure to C.

  // Invoke the function by calling through JS entry trampoline builtin.
  // Notice that we cannot store a reference to the trampoline code directly in
  // this stub, because runtime stubs are not traversed when doing GC.

  // Invoke the function by calling through JS entry trampoline builtin and
  // pop the faked function when we return.
  Handle<Code> trampoline_code =
      masm->isolate()->builtins()->builtin_handle(entry_trampoline);
  DCHECK_EQ(kPushedStackSpace, pushed_stack_space);
  __ Call(trampoline_code, RelocInfo::CODE_TARGET);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();
  __ bind(&exit);  // r2 holds result

  // Check if the current stack frame is marked as the outermost JS frame.
  Label non_outermost_js_2;
  __ pop(r7);
  __ CmpP(r7, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ bne(&non_outermost_js_2, Label::kNear);
  __ mov(scratch, Operand::Zero());
  __ Move(r7, js_entry_sp);
  __ StoreP(scratch, MemOperand(r7));
  __ bind(&non_outermost_js_2);

  // Restore the top frame descriptors from the stack.
  __ pop(r5);
  __ Move(scratch, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
                                             masm->isolate()));
  __ StoreP(r5, MemOperand(scratch));

  // Reset the stack to the callee saved registers.
  __ lay(sp, MemOperand(sp, -EntryFrameConstants::kCallerFPOffset));

  // Reload callee-saved preserved regs, return address reg (r14) and sp
  __ LoadMultipleP(r6, sp, MemOperand(sp, 0));
  __ la(sp, MemOperand(sp, 10 * kSystemPointerSize));

// restoring floating point registers
#if V8_TARGET_ARCH_S390X
  // 64bit ABI requires f8 to f15 be saved
  __ ld(d8, MemOperand(sp));
  __ ld(d9, MemOperand(sp, 1 * kDoubleSize));
  __ ld(d10, MemOperand(sp, 2 * kDoubleSize));
  __ ld(d11, MemOperand(sp, 3 * kDoubleSize));
  __ ld(d12, MemOperand(sp, 4 * kDoubleSize));
  __ ld(d13, MemOperand(sp, 5 * kDoubleSize));
  __ ld(d14, MemOperand(sp, 6 * kDoubleSize));
  __ ld(d15, MemOperand(sp, 7 * kDoubleSize));
  __ la(sp, MemOperand(sp, 8 * kDoubleSize));
#else
  // 31bit ABI requires you to store f4 and f6:
  // http://refspecs.linuxbase.org/ELF/zSeries/lzsabi0_s390.html#AEN417
  __ ld(d4, MemOperand(sp));
  __ ld(d6, MemOperand(sp, kDoubleSize));
  __ la(sp, MemOperand(sp, 2 * kDoubleSize));
#endif

  __ b(r14);
}

}  // namespace

void Builtins::Generate_JSEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtins::kJSEntryTrampoline);
}

void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
                          Builtins::kJSConstructEntryTrampoline);
}

void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtins::kRunMicrotasksTrampoline);
}

// Clobbers scratch1 and scratch2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        Register scratch1, Register scratch2) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadP(scratch1,
           StackLimitAsMemOperand(masm, StackLimitKind::kRealStackLimit));
  // Make scratch1 the space we have left. The stack might already have
  // overflowed here, in which case scratch1 becomes negative.
  __ SubP(scratch1, sp, scratch1);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftP(scratch2, argc, Operand(kSystemPointerSizeLog2));
  __ CmpP(scratch1, scratch2);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r3: new.target
  // r4: function
  // r5: receiver
  // r6: argc
  // [fp + kPushedStackSpace + 20 * kSystemPointerSize]: argv
  // r0,r2,r7-r9, cp may be clobbered

  // Enter an internal frame.
  {
    // FrameScope ends up calling MacroAssembler::EnterFrame here
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ Move(cp, context_address);
    __ LoadP(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(r4, r5);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r5 and r0.
    Generate_CheckStackOverflow(masm, r6, r5, r0);

    // r3: new.target
    // r4: function
    // r6: argc
    // [fp + kPushedStackSpace + 20 * kSystemPointerSize]: argv
    // r0,r2,r5,r7-r9, cp may be clobbered

    // Setup new.target, argc and function.
    __ LoadRR(r2, r6);
    __ LoadRR(r5, r3);
    __ LoadRR(r3, r4);

    // Load argv from the stack.
    __ LoadP(r6, MemOperand(fp));
    __ LoadP(r6, MemOperand(
                     r6, kPushedStackSpace + EntryFrameConstants::kArgvOffset));

    // r2: argc
    // r3: function
    // r5: new.target
    // r6: argv
    // r0,r4,r7-r9, cp may be clobbered

    // Copy arguments to the stack in a loop from argv to sp.
    // The arguments end up in reverse order on sp compared to argv
    // (i.e. arg1 ends up at the highest stack address).
    // r2: argc
    // r3: function
    // r5: new.target
    // r6: argv, i.e. points to first arg
    // r7: scratch reg to hold scaled argc
    // r8: scratch reg to hold arg handle
    // r9: scratch reg to hold index into argv
    Label argLoop, argExit;
    intptr_t zero = 0;
    __ ShiftLeftP(r7, r2, Operand(kSystemPointerSizeLog2));
    __ SubRR(sp, r7);                // Buy the stack frame to fit args
    __ LoadImmP(r9, Operand(zero));  // Initialize argv index
    __ bind(&argLoop);
    __ CmpPH(r7, Operand(zero));
    __ beq(&argExit, Label::kNear);
    __ lay(r7, MemOperand(r7, -kSystemPointerSize));
    __ LoadP(r8, MemOperand(r9, r6));         // read next parameter
    __ la(r9, MemOperand(r9, kSystemPointerSize));  // r9++;
    __ LoadP(r0, MemOperand(r8));             // dereference handle
    __ StoreP(r0, MemOperand(r7, sp));        // push parameter
    __ b(&argLoop);
    __ bind(&argExit);

    // r2: argc
    // r3: function
    // r5: new.target

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r4, RootIndex::kUndefinedValue);
    __ LoadRR(r6, r4);
    __ LoadRR(r7, r6);
    __ LoadRR(r8, r6);
    __ LoadRR(r9, r6);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? BUILTIN_CODE(masm->isolate(), Construct)
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ b(r14);

  // r2: result
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
  // This expects two C++ function parameters passed by Invoke() in
  // execution.cc.
  //   r2: root_register_value
  //   r3: microtask_queue

  __ LoadRR(RunMicrotasksDescriptor::MicrotaskQueueRegister(), r3);
  __ Jump(BUILTIN_CODE(masm->isolate(), RunMicrotasks), RelocInfo::CODE_TARGET);
}

static void ReplaceClosureCodeWithOptimizedCode(MacroAssembler* masm,
                                                Register optimized_code,
                                                Register closure,
                                                Register scratch1,
                                                Register scratch2) {
  // Store code entry in the closure.
  __ StoreTaggedField(optimized_code,
                      FieldMemOperand(closure, JSFunction::kCodeOffset), r0);
  __ LoadRR(scratch1,
            optimized_code);  // Write barrier clobbers scratch1 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ LoadP(args_count,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ LoadlW(args_count,
            FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::INTERPRETED);

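  // The parameter size is a byte count that already includes the receiver,
  // so a single AddP drops the whole argument area at once.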
  __ AddP(sp, sp, args_count);
}

// Tail-call |function_id| if |smi_entry| == |marker|
static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
                                          Register smi_entry,
                                          OptimizationMarker marker,
                                          Runtime::FunctionId function_id) {
  Label no_match;
  __ CmpSmiLiteral(smi_entry, Smi::FromEnum(marker), r0);
  __ bne(&no_match);
  GenerateTailCallToReturnedCode(masm, function_id);
  __ bind(&no_match);
}

static void TailCallOptimizedCodeSlot(MacroAssembler* masm,
                                      Register optimized_code_entry,
                                      Register scratch) {
  // ----------- S t a t e -------------
  //  -- r5 : new target (preserved for callee if needed, and caller)
  //  -- r3 : target function (preserved for callee if needed, and caller)
  // -----------------------------------
  DCHECK(!AreAliased(r3, r5, optimized_code_entry, scratch));

  Register closure = r3;

  // Check if the optimized code is marked for deopt. If it is, call the
  // runtime to clear it.
  Label found_deoptimized_code;
  __ LoadTaggedPointerField(
      scratch,
      FieldMemOperand(optimized_code_entry, Code::kCodeDataContainerOffset));
  __ LoadW(scratch, FieldMemOperand(
                        scratch, CodeDataContainer::kKindSpecificFlagsOffset));
  __ TestBit(scratch, Code::kMarkedForDeoptimizationBit, r0);
  __ bne(&found_deoptimized_code);

  // Optimized code is good, get it into the closure and link the closure
  // into the optimized functions list, then tail call the optimized code.
  ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
                                      scratch, r7);
  static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
  __ LoadCodeObjectEntry(r4, optimized_code_entry);
  __ Jump(r4);

  // Optimized code slot contains deoptimized code, evict it and re-enter
  // the closure's code.
  __ bind(&found_deoptimized_code);
  GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
}

static void MaybeOptimizeCode(MacroAssembler* masm, Register feedback_vector,
                              Register optimization_marker) {
  // ----------- S t a t e -------------
  //  -- r5 : new target (preserved for callee if needed, and caller)
  //  -- r3 : target function (preserved for callee if needed, and caller)
  //  -- feedback vector (preserved for caller if needed)
  //  -- optimization_marker : a Smi containing a non-zero optimization marker.
  // -----------------------------------
  DCHECK(!AreAliased(feedback_vector, r3, r5, optimization_marker));

  // TODO(v8:8394): The logging of first execution will break if
  // feedback vectors are not allocated. We need to find a different way of
  // logging these events if required.
  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
                                OptimizationMarker::kLogFirstExecution,
                                Runtime::kFunctionFirstExecution);
  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
                                OptimizationMarker::kCompileOptimized,
                                Runtime::kCompileOptimized_NotConcurrent);
  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
                                OptimizationMarker::kCompileOptimizedConcurrent,
                                Runtime::kCompileOptimized_Concurrent);

  // Otherwise, the marker is InOptimizationQueue, so fall through hoping
  // that an interrupt will eventually update the slot with optimized code.
  if (FLAG_debug_code) {
    __ CmpSmiLiteral(optimization_marker,
                     Smi::FromEnum(OptimizationMarker::kInOptimizationQueue),
                     r0);
    __ Assert(eq, AbortReason::kExpectedOptimizationSentinel);
  }
}

// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation. Will bail out to a
// label if the bytecode (without prefix) is a return bytecode. Will not advance
// the bytecode offset if the current bytecode is a JumpLoop, instead just
// re-executing the JumpLoop to jump to the correct bytecode.
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register bytecode, Register scratch1,
                                          Register scratch2, Label* if_return) {
  Register bytecode_size_table = scratch1;
  Register scratch3 = bytecode;

  // The bytecode offset value will be increased by one in wide and extra wide
  // cases. In the case of having a wide or extra wide JumpLoop bytecode, we
  // will restore the original bytecode. In order to simplify the code, we have
  // a backup of it.
  Register original_bytecode_offset = scratch2;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode, original_bytecode_offset));
  __ Move(bytecode_size_table,
          ExternalReference::bytecode_size_table_address());
  __ Move(original_bytecode_offset, bytecode_offset);

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  Label process_bytecode, extra_wide;
  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  STATIC_ASSERT(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
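  // Values 0-3 are exactly the prefix bytecodes, and bit 0 distinguishes the
  // Wide variants (even) from the ExtraWide variants (odd); the CmpP/bgt and
  // tmll tests below rely on this encoding. The size table is laid out as
  // three consecutive sections of kBytecodeCount ints (single, wide, extra
  // wide), which is why the wide paths advance the table pointer by whole
  // section multiples.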
  __ CmpP(bytecode, Operand(0x3));
  __ bgt(&process_bytecode);
  __ tmll(bytecode, Operand(0x1));
  __ bne(&extra_wide);

  // Load the next bytecode and update table to the wide scaled table.
  __ AddP(bytecode_offset, bytecode_offset, Operand(1));
  __ LoadlB(bytecode, MemOperand(bytecode_array, bytecode_offset));
  __ AddP(bytecode_size_table, bytecode_size_table,
          Operand(kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ b(&process_bytecode);

  __ bind(&extra_wide);
  // Load the next bytecode and update table to the extra wide scaled table.
  __ AddP(bytecode_offset, bytecode_offset, Operand(1));
  __ LoadlB(bytecode, MemOperand(bytecode_array, bytecode_offset));
  __ AddP(bytecode_size_table, bytecode_size_table,
          Operand(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));

  // Load the size of the current bytecode.
  __ bind(&process_bytecode);

  // Bailout to the return label if this is a return bytecode.
#define JUMP_IF_EQUAL(NAME)                                           \
  __ CmpP(bytecode,                                                   \
          Operand(static_cast<int>(interpreter::Bytecode::k##NAME))); \
  __ beq(if_return);
  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
#undef JUMP_IF_EQUAL

  // If this is a JumpLoop, re-execute it to perform the jump to the beginning
  // of the loop.
  Label end, not_jump_loop;
  __ CmpP(bytecode,
          Operand(static_cast<int>(interpreter::Bytecode::kJumpLoop)));
  __ bne(&not_jump_loop);
  // We need to restore the original bytecode_offset since we might have
  // increased it to skip the wide / extra-wide prefix bytecode.
  __ Move(bytecode_offset, original_bytecode_offset);
  __ b(&end);

  __ bind(&not_jump_loop);
  // Otherwise, load the size of the current bytecode and advance the offset.
  __ ShiftLeftP(scratch3, bytecode, Operand(2));
  __ LoadlW(scratch3, MemOperand(bytecode_size_table, scratch3));
  __ AddP(bytecode_offset, bytecode_offset, scratch3);

  __ bind(&end);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r3: the JS function object being called.
//   o r5: the incoming new target or generator object
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  Register closure = r3;
  Register feedback_vector = r4;

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ LoadTaggedPointerField(
      r2, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  // Load original bytecode array or the debug copy.
  __ LoadTaggedPointerField(
      kInterpreterBytecodeArrayRegister,
      FieldMemOperand(r2, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, r6);

  // The bytecode array could have been flushed from the shared function info,
  // if so, call into CompileLazy.
  Label compile_lazy;
  __ CompareObjectType(kInterpreterBytecodeArrayRegister, r2, no_reg,
                       BYTECODE_ARRAY_TYPE);
  __ bne(&compile_lazy);

  // Load the feedback vector from the closure.
  __ LoadTaggedPointerField(
      feedback_vector,
      FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
  __ LoadTaggedPointerField(
      feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset));

  Label push_stack_frame;
  // Check if feedback vector is valid. If valid, check for optimized code
  // and update invocation count. Otherwise, setup the stack frame.
  __ LoadTaggedPointerField(
      r6, FieldMemOperand(feedback_vector, HeapObject::kMapOffset));
  __ LoadLogicalHalfWordP(r6, FieldMemOperand(r6, Map::kInstanceTypeOffset));
  __ CmpP(r6, Operand(FEEDBACK_VECTOR_TYPE));
  __ bne(&push_stack_frame);

  Register optimized_code_entry = r6;

  // Read off the optimized code slot in the feedback vector.
  __ LoadAnyTaggedField(
      optimized_code_entry,
      FieldMemOperand(feedback_vector,
                      FeedbackVector::kOptimizedCodeWeakOrSmiOffset));

  // Check if the optimized code slot is not empty.
  Label optimized_code_slot_not_empty;
  __ CmpSmiLiteral(optimized_code_entry,
                   Smi::FromEnum(OptimizationMarker::kNone), r0);
  __ bne(&optimized_code_slot_not_empty);

  Label not_optimized;
  __ bind(&not_optimized);

  // Increment invocation count for the function.
  __ LoadW(r1, FieldMemOperand(feedback_vector,
                               FeedbackVector::kInvocationCountOffset));
  __ AddP(r1, r1, Operand(1));
  __ StoreW(r1, FieldMemOperand(feedback_vector,
                                FeedbackVector::kInvocationCountOffset));

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  __ bind(&push_stack_frame);
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(closure);

  // Reset code age and the OSR arming. The OSR field and BytecodeAgeOffset are
  // 8-bit fields next to each other, so we can clear both with a single 16-bit
  // store. These static asserts guard that assumption.
  STATIC_ASSERT(BytecodeArray::kBytecodeAgeOffset ==
                BytecodeArray::kOsrNestingLevelOffset + kCharSize);
  STATIC_ASSERT(BytecodeArray::kNoAgeBytecodeAge == 0);
  __ lghi(r1, Operand(0));
  __ StoreHalfWord(r1,
                   FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                   BytecodeArray::kOsrNestingLevelOffset),
                   r0);

  // Load the initial bytecode offset.
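  // The initial offset, kHeaderSize - kHeapObjectTag, compensates for the
  // heap-object tag: adding it to the tagged BytecodeArray pointer yields the
  // address of the first bytecode in the stream.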
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r4, kInterpreterBytecodeOffsetRegister);
  __ Push(kInterpreterBytecodeArrayRegister, r4);

  // Allocate the local and temporary register file on the stack.
  Label stack_overflow;
  {
    // Load frame size (word) from the BytecodeArray object.
    __ LoadlW(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                  BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    __ SubP(r8, sp, r4);
    __ CmpLogicalP(
        r8, StackLimitAsMemOperand(masm, StackLimitKind::kRealStackLimit));
    __ blt(&stack_overflow);

    // If ok, push undefined as the initial value for all register file entries.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop, no_args;
    __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
    __ ShiftRightP(r4, r4, Operand(kSystemPointerSizeLog2));
    __ LoadAndTestP(r4, r4);
    __ beq(&no_args);
    __ LoadRR(r1, r4);
    __ bind(&loop);
    __ push(kInterpreterAccumulatorRegister);
    __ SubP(r1, Operand(1));
    __ bne(&loop);
    __ bind(&no_args);
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with the incoming value that was passed in r5.
  Label no_incoming_new_target_or_generator_register;
  __ LoadW(r8, FieldMemOperand(
                   kInterpreterBytecodeArrayRegister,
                   BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ CmpP(r8, Operand::Zero());
  __ beq(&no_incoming_new_target_or_generator_register);
  __ ShiftLeftP(r8, r8, Operand(kSystemPointerSizeLog2));
  __ StoreP(r5, MemOperand(fp, r8));
  __ bind(&no_incoming_new_target_or_generator_register);

  // Perform interrupt stack check.
  // TODO(solanes): Merge with the real stack limit check above.
  Label stack_check_interrupt, after_stack_check_interrupt;
  __ LoadP(r5,
           StackLimitAsMemOperand(masm, StackLimitKind::kInterruptStackLimit));
  __ CmpLogicalP(sp, r5);
  __ blt(&stack_check_interrupt);
  __ bind(&after_stack_check_interrupt);

  // The accumulator is already loaded with undefined.

  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  __ LoadlB(r5, MemOperand(kInterpreterBytecodeArrayRegister,
                           kInterpreterBytecodeOffsetRegister));
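  // Each dispatch-table entry is a code entry address, kSystemPointerSize
  // wide, hence the scaling of the bytecode value before indexing the table.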
1278   __ ShiftLeftP(r5, r5, Operand(kSystemPointerSizeLog2));
1279   __ LoadP(kJavaScriptCallCodeStartRegister,
1280            MemOperand(kInterpreterDispatchTableRegister, r5));
1281   __ Call(kJavaScriptCallCodeStartRegister);
1282 
1283   masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
1284 
1285   // Any returns to the entry trampoline are either due to the return bytecode
1286   // or the interpreter tail calling a builtin and then a dispatch.
1287 
1288   // Get bytecode array and bytecode offset from the stack frame.
1289   __ LoadP(kInterpreterBytecodeArrayRegister,
1290            MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1291   __ LoadP(kInterpreterBytecodeOffsetRegister,
1292            MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1293   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1294 
1295   // Either return, or advance to the next bytecode and dispatch.
1296   Label do_return;
1297   __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
1298                            kInterpreterBytecodeOffsetRegister));
1299   AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1300                                 kInterpreterBytecodeOffsetRegister, r3, r4, r5,
1301                                 &do_return);
1302   __ b(&do_dispatch);
1303 
1304   __ bind(&do_return);
1305   // The return value is in r2.
1306   LeaveInterpreterFrame(masm, r4);
1307   __ Ret();
1308 
1309   __ bind(&optimized_code_slot_not_empty);
1310   Label maybe_has_optimized_code;
1311   // Check if optimized code marker is actually a weak reference to the
1312   // optimized code.
1313   __ JumpIfNotSmi(optimized_code_entry, &maybe_has_optimized_code);
1314   MaybeOptimizeCode(masm, feedback_vector, optimized_code_entry);
1315   // Fall through if there's no runnable optimized code.
1316   __ jmp(&not_optimized);
1317 
1318   __ bind(&stack_check_interrupt);
1319   // Modify the bytecode offset in the stack to be kFunctionEntryBytecodeOffset
1320   // for the call to the StackGuard.
1321   __ mov(kInterpreterBytecodeOffsetRegister,
1322          Operand(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag +
1323                               kFunctionEntryBytecodeOffset)));
1324   __ StoreP(kInterpreterBytecodeOffsetRegister,
1325             MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1326   __ CallRuntime(Runtime::kStackGuard);
1327 
1328   // After the call, restore the bytecode array, bytecode offset and accumulator
1329   // registers again. Also, restore the bytecode offset in the stack to its
1330   // previous value.
1331   __ LoadP(kInterpreterBytecodeArrayRegister,
1332            MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1333   __ mov(kInterpreterBytecodeOffsetRegister,
1334          Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
1335   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1336 
1337   __ SmiTag(r5, kInterpreterBytecodeOffsetRegister);
1338   __ StoreP(r5,
1339             MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1340 
1341   __ jmp(&after_stack_check_interrupt);
1342 
1343   __ bind(&maybe_has_optimized_code);
1344   // Load the code entry from the weak reference. If it was cleared, resume
1345   // execution of the unoptimized code.
1346   __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &not_optimized);
1347   TailCallOptimizedCodeSlot(masm, optimized_code_entry, r8);
1348 
1349   __ bind(&compile_lazy);
1350   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1351 
1352   __ bind(&stack_overflow);
1353   __ CallRuntime(Runtime::kThrowStackOverflow);
1354   __ bkpt(0);  // Should not return.
1355 }
1356 
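// Roughly, the helper below copies |count| pointer-sized argument slots,
// starting at |index| and walking down in memory, onto the stack. A minimal
// C-style sketch of the intended effect (illustrative only; PushArgsSketch
// is not part of this file):
//
//   void PushArgsSketch(intptr_t** sp, intptr_t* index, int count) {
//     for (int i = 0; i < count; ++i) *--(*sp) = index[-i];
//   }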
1357 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
1358                                          Register num_args, Register index,
1359                                          Register count, Register scratch) {
1360   Label loop, skip;
1361   __ CmpP(count, Operand::Zero());
1362   __ beq(&skip);
1363   __ AddP(index, index, Operand(kSystemPointerSize));  // Bias up for the pre-decrement loads below.
1364   __ LoadRR(r0, count);
1365   __ bind(&loop);
1366   __ LoadP(scratch, MemOperand(index, -kSystemPointerSize));
1367   __ lay(index, MemOperand(index, -kSystemPointerSize));
1368   __ push(scratch);
1369   __ SubP(r0, Operand(1));
1370   __ bne(&loop);
1371   __ bind(&skip);
1372 }
1373 
1374 // static
1375 void Builtins::Generate_InterpreterPushArgsThenCallImpl(
1376     MacroAssembler* masm, ConvertReceiverMode receiver_mode,
1377     InterpreterPushArgsMode mode) {
1378   DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
1379   // ----------- S t a t e -------------
1380   //  -- r2 : the number of arguments (not including the receiver)
1381   //  -- r4 : the address of the first argument to be pushed. Subsequent
1382   //          arguments should be consecutive above this, in the same order as
1383   //          they are to be pushed onto the stack.
1384   //  -- r3 : the target to call (can be any Object).
1385   // -----------------------------------
1386   Label stack_overflow;
1387 
1388   // Calculate the number of arguments (add one for the receiver).
1389   __ AddP(r5, r2, Operand(1));
1390   Generate_StackOverflowCheck(masm, r5, ip, &stack_overflow);
1391 
1392   // Push "undefined" as the receiver arg if we need to.
1393   if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1394     __ PushRoot(RootIndex::kUndefinedValue);
1395     __ LoadRR(r5, r2);  // Argument count is correct.
1396   }
1397 
1398   // Push the arguments.
1399   Generate_InterpreterPushArgs(masm, r5, r4, r5, r6);
1400   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1401     __ Pop(r4);                   // Pass the spread in a register
1402     __ SubP(r2, r2, Operand(1));  // Subtract one for spread
1403   }
1404 
1405   // Call the target.
1406   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1407     __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
1408             RelocInfo::CODE_TARGET);
1409   } else {
1410     __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
1411             RelocInfo::CODE_TARGET);
1412   }
1413 
1414   __ bind(&stack_overflow);
1415   {
1416     __ TailCallRuntime(Runtime::kThrowStackOverflow);
1417     // Unreachable code.
1418     __ bkpt(0);
1419   }
1420 }
1421 
1422 // static
1423 void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
1424     MacroAssembler* masm, InterpreterPushArgsMode mode) {
1425   // ----------- S t a t e -------------
1426   // -- r2 : argument count (not including receiver)
1427   // -- r5 : new target
1428   // -- r3 : constructor to call
1429   // -- r4 : allocation site feedback if available, undefined otherwise.
1430   // -- r6 : address of the first argument
1431   // -----------------------------------
1432   Label stack_overflow;
1433 
1434   // Push a slot for the receiver to be constructed.
1435   __ LoadImmP(r0, Operand::Zero());
1436   __ push(r0);
1437 
1438   // Push the arguments (skip if none).
1439   Label skip;
1440   __ CmpP(r2, Operand::Zero());
1441   __ beq(&skip);
1442   Generate_StackOverflowCheck(masm, r2, ip, &stack_overflow);
1443   Generate_InterpreterPushArgs(masm, r2, r6, r2, r7);
1444   __ bind(&skip);
1445 
1446   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1447     __ Pop(r4);                   // Pass the spread in a register
1448     __ SubP(r2, r2, Operand(1));  // Subtract one for spread
1449   } else {
1450     __ AssertUndefinedOrAllocationSite(r4, r7);
1451   }
1452   if (mode == InterpreterPushArgsMode::kArrayFunction) {
1453     __ AssertFunction(r3);
1454 
1455     // Tail call to the array construct stub (still in the caller
1456     // context at this point).
1457     Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
1458     __ Jump(code, RelocInfo::CODE_TARGET);
1459   } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1460     // Call the constructor with r2, r3, and r5 unmodified.
1461     __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
1462             RelocInfo::CODE_TARGET);
1463   } else {
1464     DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1465     // Call the constructor with r2, r3, and r5 unmodified.
1466     __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
1467   }
1468 
1469   __ bind(&stack_overflow);
1470   {
1471     __ TailCallRuntime(Runtime::kThrowStackOverflow);
1472     // Unreachable code.
1473     __ bkpt(0);
1474   }
1475 }
1476 
1477 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1478   // Set the return address to the correct point in the interpreter entry
1479   // trampoline.
1480   Label builtin_trampoline, trampoline_loaded;
1481   Smi interpreter_entry_return_pc_offset(
1482       masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1483   DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());
1484 
1485   // If the SFI function_data is an InterpreterData, the function will have a
1486   // custom copy of the interpreter entry trampoline for profiling. If so,
1487   // get the custom trampoline, otherwise grab the entry address of the global
1488   // trampoline.
1489   __ LoadP(r4, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
1490   __ LoadTaggedPointerField(
1491       r4, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1492   __ LoadTaggedPointerField(
1493       r4, FieldMemOperand(r4, SharedFunctionInfo::kFunctionDataOffset));
1494   __ CompareObjectType(r4, kInterpreterDispatchTableRegister,
1495                        kInterpreterDispatchTableRegister,
1496                        INTERPRETER_DATA_TYPE);
1497   __ bne(&builtin_trampoline);
1498 
1499   __ LoadTaggedPointerField(
1500       r4, FieldMemOperand(r4, InterpreterData::kInterpreterTrampolineOffset));
1501   __ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
1502   __ b(&trampoline_loaded);
1503 
1504   __ bind(&builtin_trampoline);
1505   __ Move(r4, ExternalReference::
1506                   address_of_interpreter_entry_trampoline_instruction_start(
1507                       masm->isolate()));
1508   __ LoadP(r4, MemOperand(r4));
1509 
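  // Whichever path was taken, r4 now holds the start of the interpreter
  // entry trampoline. The return address is that start plus the recorded pc
  // offset, so returning from a bytecode handler resumes in the trampoline's
  // post-call dispatch code:
  //   r14 = trampoline_start + interpreter_entry_return_pc_offset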
1510   __ bind(&trampoline_loaded);
1511   __ AddP(r14, r4, Operand(interpreter_entry_return_pc_offset.value()));
1512 
1513   // Initialize the dispatch table register.
1514   __ Move(
1515       kInterpreterDispatchTableRegister,
1516       ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1517 
1518   // Get the bytecode array pointer from the frame.
1519   __ LoadP(kInterpreterBytecodeArrayRegister,
1520            MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1521 
1522   if (FLAG_debug_code) {
1523     // Check that the function data field is actually a BytecodeArray object.
1524     __ TestIfSmi(kInterpreterBytecodeArrayRegister);
1525     __ Assert(
1526         ne, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1527     __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
1528                          BYTECODE_ARRAY_TYPE);
1529     __ Assert(
1530         eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1531   }
1532 
1533   // Get the target bytecode offset from the frame.
1534   __ LoadP(kInterpreterBytecodeOffsetRegister,
1535            MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1536   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1537 
1538   if (FLAG_debug_code) {
1539     Label okay;
1540     __ CmpP(kInterpreterBytecodeOffsetRegister,
1541             Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
1542     __ bge(&okay);
1543     __ bkpt(0);
1544     __ bind(&okay);
1545   }
1546 
1547   // Dispatch to the target bytecode.
1548   UseScratchRegisterScope temps(masm);
1549   Register scratch = temps.Acquire();
1550   __ LoadlB(scratch, MemOperand(kInterpreterBytecodeArrayRegister,
1551                                 kInterpreterBytecodeOffsetRegister));
1552   __ ShiftLeftP(scratch, scratch, Operand(kSystemPointerSizeLog2));
1553   __ LoadP(kJavaScriptCallCodeStartRegister,
1554            MemOperand(kInterpreterDispatchTableRegister, scratch));
1555   __ Jump(kJavaScriptCallCodeStartRegister);
1556 }
1557 
1558 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1559   // Get bytecode array and bytecode offset from the stack frame.
1560   __ LoadP(kInterpreterBytecodeArrayRegister,
1561            MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1562   __ LoadP(kInterpreterBytecodeOffsetRegister,
1563            MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1564   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1565 
1566   Label enter_bytecode, function_entry_bytecode;
1567   __ CmpP(kInterpreterBytecodeOffsetRegister,
1568           Operand(BytecodeArray::kHeaderSize - kHeapObjectTag +
1569                   kFunctionEntryBytecodeOffset));
1570   __ beq(&function_entry_bytecode);
1571 
1572   // Load the current bytecode.
1573   __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
1574                            kInterpreterBytecodeOffsetRegister));
1575 
1576   // Advance to the next bytecode.
1577   Label if_return;
1578   AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1579                                 kInterpreterBytecodeOffsetRegister, r3, r4, r5,
1580                                 &if_return);
1581 
1582   __ bind(&enter_bytecode);
1583   // Convert the new bytecode offset to a Smi and save it in the stack frame.
1584   __ SmiTag(r4, kInterpreterBytecodeOffsetRegister);
1585   __ StoreP(r4,
1586             MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1587 
1588   Generate_InterpreterEnterBytecode(masm);
1589 
1590   __ bind(&function_entry_bytecode);
1591   // If the code deoptimizes during the implicit function entry stack interrupt
1592   // check, it will have a bailout ID of kFunctionEntryBytecodeOffset, which is
1593   // not a valid bytecode offset. Detect this case and advance to the first
1594   // actual bytecode.
1595   __ mov(kInterpreterBytecodeOffsetRegister,
1596          Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
1597   __ b(&enter_bytecode);
1598 
1599   // We should never take the if_return path.
1600   __ bind(&if_return);
1601   __ Abort(AbortReason::kInvalidBytecodeAdvance);
1602 }
1603 
1604 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1605   Generate_InterpreterEnterBytecode(masm);
1606 }
1607 
1608 namespace {
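// Roughly, the deoptimizer materializes a continuation frame holding, from
// the top of the stack down: the saved allocatable registers, the builtin
// index (as a Smi), the fixed frame header, and the return address, with an
// optional result slot beyond that. The helper below unwinds this layout in
// order and tail-calls the builtin through its index.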
1609 void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
1610                                       bool java_script_builtin,
1611                                       bool with_result) {
1612   const RegisterConfiguration* config(RegisterConfiguration::Default());
1613   int allocatable_register_count = config->num_allocatable_general_registers();
1614   if (with_result) {
1615     // Overwrite the hole inserted by the deoptimizer with the return value from
1616     // the LAZY deopt point.
1617     __ StoreP(
1618         r2,
1619         MemOperand(sp,
1620                    allocatable_register_count * kSystemPointerSize +
1621                        BuiltinContinuationFrameConstants::kFixedFrameSize));
1622   }
1623   for (int i = allocatable_register_count - 1; i >= 0; --i) {
1624     int code = config->GetAllocatableGeneralCode(i);
1625     __ Pop(Register::from_code(code));
1626     if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
1627       __ SmiUntag(Register::from_code(code));
1628     }
1629   }
1630   __ LoadP(
1631       fp,
1632       MemOperand(sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1633   // Load builtin index (stored as a Smi) and use it to get the builtin start
1634   // address from the builtins table.
1635   UseScratchRegisterScope temps(masm);
1636   Register builtin = temps.Acquire();
1637   __ Pop(builtin);
1638   __ AddP(sp, sp,
1639           Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1640   __ Pop(r0);
1641   __ LoadRR(r14, r0);
1642   __ LoadEntryFromBuiltinIndex(builtin);
1643   __ Jump(builtin);
1644 }
1645 }  // namespace
1646 
1647 void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
1648   Generate_ContinueToBuiltinHelper(masm, false, false);
1649 }
1650 
1651 void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
1652     MacroAssembler* masm) {
1653   Generate_ContinueToBuiltinHelper(masm, false, true);
1654 }
1655 
1656 void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
1657   Generate_ContinueToBuiltinHelper(masm, true, false);
1658 }
1659 
1660 void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
1661     MacroAssembler* masm) {
1662   Generate_ContinueToBuiltinHelper(masm, true, true);
1663 }
1664 
1665 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1666   {
1667     FrameScope scope(masm, StackFrame::INTERNAL);
1668     __ CallRuntime(Runtime::kNotifyDeoptimized);
1669   }
1670 
1671   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r2.code());
1672   __ pop(r2);
1673   __ Ret();
1674 }
1675 
1676 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
1677   {
1678     FrameScope scope(masm, StackFrame::INTERNAL);
1679     __ CallRuntime(Runtime::kCompileForOnStackReplacement);
1680   }
1681 
1682   // If the code object is null, just return to the caller.
1683   Label skip;
1684   __ CmpSmiLiteral(r2, Smi::zero(), r0);
1685   __ bne(&skip);
1686   __ Ret();
1687 
1688   __ bind(&skip);
1689 
1690   // Drop the handler frame that is sitting on top of the actual
1691   // JavaScript frame. This is the case when OSR is triggered from bytecode.
1692   __ LeaveFrame(StackFrame::STUB);
1693 
1694   // Load deoptimization data from the code object.
1695   // <deopt_data> = <code>[#deoptimization_data_offset]
1696   __ LoadTaggedPointerField(
1697       r3, FieldMemOperand(r2, Code::kDeoptimizationDataOffset));
1698 
1699   // Load the OSR entrypoint offset from the deoptimization data.
1700   // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1701   __ SmiUntagField(
1702       r3, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(
1703                                   DeoptimizationData::kOsrPcOffsetIndex)));
1704 
1705   // Compute the target address = code_obj + header_size + osr_offset
1706   // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1707   __ AddP(r2, r3);
1708   __ AddP(r0, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
1709   __ LoadRR(r14, r0);
1710 
1711   // And "return" to the OSR entry point of the function.
1712   __ Ret();
1713 }
1714 
1715 // static
1716 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1717   // ----------- S t a t e -------------
1718   //  -- r2    : argc
1719   //  -- sp[0] : argArray
1720   //  -- sp[4] : thisArg
1721   //  -- sp[8] : receiver
1722   // -----------------------------------
1723 
1724   // 1. Load receiver into r3, argArray into r4 (if present), remove all
1725   // arguments from the stack (including the receiver), and push thisArg (if
1726   // present) instead.
1727   {
1728     Label skip;
1729     Register arg_size = r7;
1730     Register new_sp = r5;
1731     Register scratch = r6;
1732     __ ShiftLeftP(arg_size, r2, Operand(kSystemPointerSizeLog2));
1733     __ AddP(new_sp, sp, arg_size);
1734     __ LoadRoot(scratch, RootIndex::kUndefinedValue);
1735     __ LoadRR(r4, scratch);
1736     __ LoadP(r3, MemOperand(new_sp, 0));  // receiver
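    // The CmpP below sets the condition code once; the intervening LoadP
    // does not change it, so blt catches argc == 0, beq catches argc == 1,
    // and argArray is only loaded when argc >= 2.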
1737     __ CmpP(arg_size, Operand(kSystemPointerSize));
1738     __ blt(&skip);
1739     __ LoadP(scratch, MemOperand(new_sp, 1 * -kSystemPointerSize));  // thisArg
1740     __ beq(&skip);
1741     __ LoadP(r4, MemOperand(new_sp, 2 * -kSystemPointerSize));  // argArray
1742     __ bind(&skip);
1743     __ LoadRR(sp, new_sp);
1744     __ StoreP(scratch, MemOperand(sp, 0));
1745   }
1746 
1747   // ----------- S t a t e -------------
1748   //  -- r4    : argArray
1749   //  -- r3    : receiver
1750   //  -- sp[0] : thisArg
1751   // -----------------------------------
1752 
1753   // 2. We don't need to check explicitly for callable receiver here,
1754   // since that's the first thing the Call/CallWithArrayLike builtins
1755   // will do.
1756 
1757   // 3. Tail call with no arguments if argArray is null or undefined.
1758   Label no_arguments;
1759   __ JumpIfRoot(r4, RootIndex::kNullValue, &no_arguments);
1760   __ JumpIfRoot(r4, RootIndex::kUndefinedValue, &no_arguments);
1761 
1762   // 4a. Apply the receiver to the given argArray.
1763   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1764           RelocInfo::CODE_TARGET);
1765 
1766   // 4b. The argArray is either null or undefined, so we tail call without any
1767   // arguments to the receiver.
1768   __ bind(&no_arguments);
1769   {
1770     __ LoadImmP(r2, Operand::Zero());
1771     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1772   }
1773 }
1774 
1775 // static
1776 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1777   // 1. Make sure we have at least one argument.
1778   // r2: actual number of arguments
1779   {
1780     Label done;
1781     __ CmpP(r2, Operand::Zero());
1782     __ bne(&done, Label::kNear);
1783     __ PushRoot(RootIndex::kUndefinedValue);
1784     __ AddP(r2, Operand(1));
1785     __ bind(&done);
1786   }
1787 
1788   // r2: actual number of arguments
1789   // 2. Get the callable to call (passed as receiver) from the stack.
1790   __ ShiftLeftP(r4, r2, Operand(kSystemPointerSizeLog2));
1791   __ LoadP(r3, MemOperand(sp, r4));
1792 
1793   // 3. Shift arguments and return address one slot down on the stack
1794   //    (overwriting the original receiver).  Adjust argument count to make
1795   //    the original first argument the new receiver.
1796   // r2: actual number of arguments
1797   // r3: callable
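  // For example, for f.call(a, b) the stack holds [f, a, b] (receiver
  // deepest) with argc == 2. The loop below shifts each value one slot
  // toward the receiver, giving [a, b, b]; the duplicated top element is
  // popped and argc becomes 1, so f is invoked with receiver a and
  // argument b.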
1798   {
1799     Register scratch = r5;
1800     Label loop;
1801     // Calculate the copy start address (destination). Copy end address is sp.
1802     __ AddP(r4, sp, r4);
1803 
1804     __ bind(&loop);
1805     __ LoadP(scratch, MemOperand(r4, -kSystemPointerSize));
1806     __ StoreP(scratch, MemOperand(r4));
1807     __ SubP(r4, Operand(kSystemPointerSize));
1808     __ CmpP(r4, sp);
1809     __ bne(&loop);
1810     // Adjust the actual number of arguments and remove the top element
1811     // (which is a copy of the last argument).
1812     __ SubP(r2, Operand(1));
1813     __ pop();
1814   }
1815 
1816   // 4. Call the callable.
1817   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1818 }
1819 
1820 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1821   // ----------- S t a t e -------------
1822   //  -- r2     : argc
1823   //  -- sp[0]  : argumentsList
1824   //  -- sp[4]  : thisArgument
1825   //  -- sp[8]  : target
1826   //  -- sp[12] : receiver
1827   // -----------------------------------
1828 
1829   // 1. Load target into r3 (if present), argumentsList into r4 (if present),
1830   // remove all arguments from the stack (including the receiver), and push
1831   // thisArgument (if present) instead.
1832   {
1833     Label skip;
1834     Register arg_size = r7;
1835     Register new_sp = r5;
1836     Register scratch = r6;
1837     __ ShiftLeftP(arg_size, r2, Operand(kSystemPointerSizeLog2));
1838     __ AddP(new_sp, sp, arg_size);
1839     __ LoadRoot(r3, RootIndex::kUndefinedValue);
1840     __ LoadRR(scratch, r3);
1841     __ LoadRR(r4, r3);
1842     __ CmpP(arg_size, Operand(kSystemPointerSize));
1843     __ blt(&skip);
1844     __ LoadP(r3, MemOperand(new_sp, 1 * -kSystemPointerSize));  // target
1845     __ beq(&skip);
1846     __ LoadP(scratch,
1847              MemOperand(new_sp, 2 * -kSystemPointerSize));  // thisArgument
1848     __ CmpP(arg_size, Operand(2 * kSystemPointerSize));
1849     __ beq(&skip);
1850     __ LoadP(r4, MemOperand(new_sp, 3 * -kSystemPointerSize));  // argumentsList
1851     __ bind(&skip);
1852     __ LoadRR(sp, new_sp);
1853     __ StoreP(scratch, MemOperand(sp, 0));
1854   }
1855 
1856   // ----------- S t a t e -------------
1857   //  -- r4    : argumentsList
1858   //  -- r3    : target
1859   //  -- sp[0] : thisArgument
1860   // -----------------------------------
1861 
1862   // 2. We don't need to check explicitly for callable target here,
1863   // since that's the first thing the Call/CallWithArrayLike builtins
1864   // will do.
1865 
1866   // 3. Apply the target to the given argumentsList.
1867   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1868           RelocInfo::CODE_TARGET);
1869 }
1870 
1871 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1872   // ----------- S t a t e -------------
1873   //  -- r2     : argc
1874   //  -- sp[0]  : new.target (optional)
1875   //  -- sp[4]  : argumentsList
1876   //  -- sp[8]  : target
1877   //  -- sp[12] : receiver
1878   // -----------------------------------
1879 
1880   // 1. Load target into r3 (if present), argumentsList into r4 (if present),
1881   // new.target into r5 (if present, otherwise use target), remove all
1882   // arguments from the stack (including the receiver), and push thisArgument
1883   // (if present) instead.
1884   {
1885     Label skip;
1886     Register arg_size = r7;
1887     Register new_sp = r6;
1888     __ ShiftLeftP(arg_size, r2, Operand(kSystemPointerSizeLog2));
1889     __ AddP(new_sp, sp, arg_size);
1890     __ LoadRoot(r3, RootIndex::kUndefinedValue);
1891     __ LoadRR(r4, r3);
1892     __ LoadRR(r5, r3);
1893     __ StoreP(r3, MemOperand(new_sp, 0));  // receiver (undefined)
1894     __ CmpP(arg_size, Operand(kSystemPointerSize));
1895     __ blt(&skip);
1896     __ LoadP(r3, MemOperand(new_sp, 1 * -kSystemPointerSize));  // target
1897     __ LoadRR(r5, r3);  // new.target defaults to target
1898     __ beq(&skip);
1899     __ LoadP(r4, MemOperand(new_sp, 2 * -kSystemPointerSize));  // argumentsList
1900     __ CmpP(arg_size, Operand(2 * kSystemPointerSize));
1901     __ beq(&skip);
1902     __ LoadP(r5, MemOperand(new_sp, 3 * -kSystemPointerSize));  // new.target
1903     __ bind(&skip);
1904     __ LoadRR(sp, new_sp);
1905   }
1906 
1907   // ----------- S t a t e -------------
1908   //  -- r4    : argumentsList
1909   //  -- r5    : new.target
1910   //  -- r3    : target
1911   //  -- sp[0] : receiver (undefined)
1912   // -----------------------------------
1913 
1914   // 2. We don't need to check explicitly for constructor target here,
1915   // since that's the first thing the Construct/ConstructWithArrayLike
1916   // builtins will do.
1917 
1918   // 3. We don't need to check explicitly for constructor new.target here,
1919   // since that's the second thing the Construct/ConstructWithArrayLike
1920   // builtins will do.
1921 
1922   // 4. Construct the target with the given new.target and argumentsList.
1923   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
1924           RelocInfo::CODE_TARGET);
1925 }
1926 
1927 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1928   __ SmiTag(r2);
1929   __ Load(r6, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1930   // Stack updated as such:
1931   //    old SP --->
1932   //                 R14 Return Addr
1933   //                 Old FP                     <--- New FP
1934   //                 Argument Adapter SMI
1935   //                 Function
1936   //                 ArgC as SMI
1937   //                 Padding                    <--- New SP
1938   __ lay(sp, MemOperand(sp, -5 * kSystemPointerSize));
1939 
1940   // Cleanse the top nibble of 31-bit pointers.
1941   __ CleanseP(r14);
1942   __ StoreP(r14, MemOperand(sp, 4 * kSystemPointerSize));
1943   __ StoreP(fp, MemOperand(sp, 3 * kSystemPointerSize));
1944   __ StoreP(r6, MemOperand(sp, 2 * kSystemPointerSize));
1945   __ StoreP(r3, MemOperand(sp, 1 * kSystemPointerSize));
1946   __ StoreP(r2, MemOperand(sp, 0 * kSystemPointerSize));
1947   __ Push(Smi::zero());  // Padding.
1948   __ la(fp,
1949         MemOperand(sp, ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp));
1950 }
1951 
1952 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1953   // ----------- S t a t e -------------
1954   //  -- r2 : result being passed through
1955   // -----------------------------------
1956   // Get the number of arguments passed (as a smi), tear down the frame and
1957   // then tear down the parameters.
1958   __ LoadP(r3, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1959   int stack_adjustment = kSystemPointerSize;  // adjust for receiver
1960   __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
1961   __ SmiToPtrArrayOffset(r3, r3);
1962   __ lay(sp, MemOperand(sp, r3));
1963 }
1964 
1965 // static
1966 void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
1967                                                Handle<Code> code) {
1968   // ----------- S t a t e -------------
1969   //  -- r3 : target
1970   //  -- r2 : number of parameters on the stack (not including the receiver)
1971   //  -- r4 : arguments list (a FixedArray)
1972   //  -- r6 : len (number of elements to push from args)
1973   //  -- r5 : new.target (for [[Construct]])
1974   // -----------------------------------
1975 
1976   Register scratch = ip;
1977 
1978   if (masm->emit_debug_code()) {
1979     // Allow r4 to be a FixedArray, or a FixedDoubleArray if r6 == 0.
1980     Label ok, fail;
1981     __ AssertNotSmi(r4);
1982     __ LoadTaggedPointerField(scratch,
1983                               FieldMemOperand(r4, HeapObject::kMapOffset));
1984     __ LoadHalfWordP(scratch,
1985                      FieldMemOperand(scratch, Map::kInstanceTypeOffset));
1986     __ CmpP(scratch, Operand(FIXED_ARRAY_TYPE));
1987     __ beq(&ok);
1988     __ CmpP(scratch, Operand(FIXED_DOUBLE_ARRAY_TYPE));
1989     __ bne(&fail);
1990     __ CmpP(r6, Operand::Zero());
1991     __ beq(&ok);
1992     // Fall through.
1993     __ bind(&fail);
1994     __ Abort(AbortReason::kOperandIsNotAFixedArray);
1995 
1996     __ bind(&ok);
1997   }
1998 
1999   // Check for stack overflow.
2000   Label stack_overflow;
2001   Generate_StackOverflowCheck(masm, r6, scratch, &stack_overflow);
2002 
2003   // Push arguments onto the stack (thisArgument is already on the stack).
2004   {
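    // Arguments that are the-hole (holes in the source array) are replaced
    // with undefined as they are pushed; see the CompareRoot check below.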
2005     Label loop, no_args, skip;
2006     __ CmpP(r6, Operand::Zero());
2007     __ beq(&no_args);
2008     __ AddP(r4, r4,
2009             Operand(FixedArray::kHeaderSize - kHeapObjectTag - kTaggedSize));
2010     __ LoadRR(r1, r6);
2011     __ bind(&loop);
2012     __ LoadAnyTaggedField(scratch, MemOperand(r4, kTaggedSize), r0);
2013     __ la(r4, MemOperand(r4, kTaggedSize));
2014     __ CompareRoot(scratch, RootIndex::kTheHoleValue);
2015     __ bne(&skip, Label::kNear);
2016     __ LoadRoot(scratch, RootIndex::kUndefinedValue);
2017     __ bind(&skip);
2018     __ push(scratch);
2019     __ BranchOnCount(r1, &loop);
2020     __ bind(&no_args);
2021     __ AddP(r2, r2, r6);
2022   }
2023 
2024   // Tail-call to the actual Call or Construct builtin.
2025   __ Jump(code, RelocInfo::CODE_TARGET);
2026 
2027   __ bind(&stack_overflow);
2028   __ TailCallRuntime(Runtime::kThrowStackOverflow);
2029 }
2030 
2031 // static
2032 void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
2033                                                       CallOrConstructMode mode,
2034                                                       Handle<Code> code) {
2035   // ----------- S t a t e -------------
2036   //  -- r2 : the number of arguments (not including the receiver)
2037   //  -- r5 : the new.target (for [[Construct]] calls)
2038   //  -- r3 : the target to call (can be any Object)
2039   //  -- r4 : start index (to support rest parameters)
2040   // -----------------------------------
2041 
2042   Register scratch = r8;
2043 
2044   if (mode == CallOrConstructMode::kConstruct) {
2045     Label new_target_constructor, new_target_not_constructor;
2046     __ JumpIfSmi(r5, &new_target_not_constructor);
2047     __ LoadTaggedPointerField(scratch,
2048                               FieldMemOperand(r5, HeapObject::kMapOffset));
2049     __ LoadlB(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
2050     __ tmll(scratch, Operand(Map::Bits1::IsConstructorBit::kShift));
2051     __ bne(&new_target_constructor);
2052     __ bind(&new_target_not_constructor);
2053     {
2054       FrameScope scope(masm, StackFrame::MANUAL);
2055       __ EnterFrame(StackFrame::INTERNAL);
2056       __ Push(r5);
2057       __ CallRuntime(Runtime::kThrowNotConstructor);
2058     }
2059     __ bind(&new_target_constructor);
2060   }
2061 
2062   // Check if we have an arguments adaptor frame below the function frame.
2063   Label arguments_adaptor, arguments_done;
2064   __ LoadP(r6, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2065   __ LoadP(scratch,
2066            MemOperand(r6, CommonFrameConstants::kContextOrFrameTypeOffset));
2067   __ CmpP(scratch,
2068           Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
2069   __ beq(&arguments_adaptor);
2070   {
2071     __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
2072     __ LoadTaggedPointerField(
2073         r7, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
2074     __ LoadLogicalHalfWordP(
2075         r7,
2076         FieldMemOperand(r7, SharedFunctionInfo::kFormalParameterCountOffset));
2077     __ LoadRR(r6, fp);
2078   }
2079   __ b(&arguments_done);
2080   __ bind(&arguments_adaptor);
2081   {
2082     // Load the length from the ArgumentsAdaptorFrame.
2083     __ LoadP(r7, MemOperand(r6, ArgumentsAdaptorFrameConstants::kLengthOffset));
2084     __ SmiUntag(r7);
2085   }
2086   __ bind(&arguments_done);
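  // r6 now points at the frame that holds the caller's arguments and r7
  // holds the caller's actual argument count; only the arguments past the
  // start index in r4 are forwarded below.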
2087 
2088   Label stack_done, stack_overflow;
2089   __ SubP(r7, r7, r4);
2090   __ CmpP(r7, Operand::Zero());
2091   __ ble(&stack_done);
2092   {
2093     // Check for stack overflow.
2094     Generate_StackOverflowCheck(masm, r7, r4, &stack_overflow);
2095 
2096     // Forward the arguments from the caller frame.
2097     {
2098       Label loop;
2099       __ AddP(r6, r6, Operand(kSystemPointerSize));
2100       __ AddP(r2, r2, r7);
2101       __ bind(&loop);
2102       {
2103         __ ShiftLeftP(scratch, r7, Operand(kSystemPointerSizeLog2));
2104         __ LoadP(scratch, MemOperand(r6, scratch));
2105         __ push(scratch);
2106         __ SubP(r7, r7, Operand(1));
2107         __ CmpP(r7, Operand::Zero());
2108         __ bne(&loop);
2109       }
2110     }
2111   }
2112   __ b(&stack_done);
2113   __ bind(&stack_overflow);
2114   __ TailCallRuntime(Runtime::kThrowStackOverflow);
2115   __ bind(&stack_done);
2116 
2117   // Tail-call to the {code} handler.
2118   __ Jump(code, RelocInfo::CODE_TARGET);
2119 }
2120 
2121 // static
2122 void Builtins::Generate_CallFunction(MacroAssembler* masm,
2123                                      ConvertReceiverMode mode) {
2124   // ----------- S t a t e -------------
2125   //  -- r2 : the number of arguments (not including the receiver)
2126   //  -- r3 : the function to call (checked to be a JSFunction)
2127   // -----------------------------------
2128   __ AssertFunction(r3);
2129 
2130   // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2131   // Check that the function is not a "classConstructor".
2132   Label class_constructor;
2133   __ LoadTaggedPointerField(
2134       r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
2135   __ LoadlW(r5, FieldMemOperand(r4, SharedFunctionInfo::kFlagsOffset));
2136   __ TestBitMask(r5, SharedFunctionInfo::IsClassConstructorBit::kMask, r0);
2137   __ bne(&class_constructor);
2138 
2139   // Enter the context of the function; ToObject has to run in the function
2140   // context, and we also need to take the global proxy from the function
2141   // context in case of conversion.
2142   __ LoadTaggedPointerField(cp,
2143                             FieldMemOperand(r3, JSFunction::kContextOffset));
2144   // We need to convert the receiver for non-native sloppy mode functions.
2145   Label done_convert;
2146   __ AndP(r0, r5,
2147           Operand(SharedFunctionInfo::IsStrictBit::kMask |
2148                   SharedFunctionInfo::IsNativeBit::kMask));
2149   __ bne(&done_convert);
2150   {
2151     // ----------- S t a t e -------------
2152     //  -- r2 : the number of arguments (not including the receiver)
2153     //  -- r3 : the function to call (checked to be a JSFunction)
2154     //  -- r4 : the shared function info.
2155     //  -- cp : the function context.
2156     // -----------------------------------
2157 
2158     if (mode == ConvertReceiverMode::kNullOrUndefined) {
2159       // Patch receiver to global proxy.
2160       __ LoadGlobalProxy(r5);
2161     } else {
2162       Label convert_to_object, convert_receiver;
2163       __ ShiftLeftP(r5, r2, Operand(kSystemPointerSizeLog2));
2164       __ LoadP(r5, MemOperand(sp, r5));
2165       __ JumpIfSmi(r5, &convert_to_object);
2166       STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2167       __ CompareObjectType(r5, r6, r6, FIRST_JS_RECEIVER_TYPE);
2168       __ bge(&done_convert);
2169       if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2170         Label convert_global_proxy;
2171         __ JumpIfRoot(r5, RootIndex::kUndefinedValue, &convert_global_proxy);
2172         __ JumpIfNotRoot(r5, RootIndex::kNullValue, &convert_to_object);
2173         __ bind(&convert_global_proxy);
2174         {
2175           // Patch receiver to global proxy.
2176           __ LoadGlobalProxy(r5);
2177         }
2178         __ b(&convert_receiver);
2179       }
2180       __ bind(&convert_to_object);
2181       {
2182         // Convert receiver using ToObject.
2183         // TODO(bmeurer): Inline the allocation here to avoid building the frame
2184         // in the fast case? (fall back to AllocateInNewSpace?)
2185         FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2186         __ SmiTag(r2);
2187         __ Push(r2, r3);
2188         __ LoadRR(r2, r5);
2189         __ Push(cp);
2190         __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
2191                 RelocInfo::CODE_TARGET);
2192         __ Pop(cp);
2193         __ LoadRR(r5, r2);
2194         __ Pop(r2, r3);
2195         __ SmiUntag(r2);
2196       }
2197       __ LoadTaggedPointerField(
2198           r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
2199       __ bind(&convert_receiver);
2200     }
2201     __ ShiftLeftP(r6, r2, Operand(kSystemPointerSizeLog2));
2202     __ StoreP(r5, MemOperand(sp, r6));
2203   }
2204   __ bind(&done_convert);
2205 
2206   // ----------- S t a t e -------------
2207   //  -- r2 : the number of arguments (not including the receiver)
2208   //  -- r3 : the function to call (checked to be a JSFunction)
2209   //  -- r4 : the shared function info.
2210   //  -- cp : the function context.
2211   // -----------------------------------
2212 
2213   __ LoadLogicalHalfWordP(
2214       r4, FieldMemOperand(r4, SharedFunctionInfo::kFormalParameterCountOffset));
2215   __ InvokeFunctionCode(r3, no_reg, r4, r2, JUMP_FUNCTION);
2216 
2217   // The function is a "classConstructor", need to raise an exception.
2218   __ bind(&class_constructor);
2219   {
2220     FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
2221     __ push(r3);
2222     __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2223   }
2224 }
2225 
2226 namespace {
2227 
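// For a function bound as f.bind(this_arg, b0, b1), a call with arguments
// (a0, a1) must end up invoking f with arguments (b0, b1, a0, a1). Roughly,
// the helper below makes room on the stack, relocates the already-pushed
// arguments downwards, and copies [[BoundArguments]] into the gap between
// the receiver and the relocated arguments.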
2228 void Generate_PushBoundArguments(MacroAssembler* masm) {
2229   // ----------- S t a t e -------------
2230   //  -- r2 : the number of arguments (not including the receiver)
2231   //  -- r3 : target (checked to be a JSBoundFunction)
2232   //  -- r5 : new.target (only in case of [[Construct]])
2233   // -----------------------------------
2234 
2235   // Load [[BoundArguments]] into r4 and length of that into r6.
2236   Label no_bound_arguments;
2237   __ LoadTaggedPointerField(
2238       r4, FieldMemOperand(r3, JSBoundFunction::kBoundArgumentsOffset));
2239   __ SmiUntagField(r6, FieldMemOperand(r4, FixedArray::kLengthOffset));
2240   __ LoadAndTestP(r6, r6);
2241   __ beq(&no_bound_arguments);
2242   {
2243     // ----------- S t a t e -------------
2244     //  -- r2 : the number of arguments (not including the receiver)
2245     //  -- r3 : target (checked to be a JSBoundFunction)
2246     //  -- r4 : the [[BoundArguments]] (implemented as FixedArray)
2247     //  -- r5 : new.target (only in case of [[Construct]])
2248     //  -- r6 : the number of [[BoundArguments]]
2249     // -----------------------------------
2250 
2251     Register scratch = r8;
2252     // Reserve stack space for the [[BoundArguments]].
2253     {
2254       Label done;
2255       __ ShiftLeftP(r9, r6, Operand(kSystemPointerSizeLog2));
2256       __ SubP(r1, sp, r9);
2257       // Check the stack for overflow. We are not trying to catch interruptions
2258       // (i.e. debug break and preemption) here, so check the "real stack
2259       // limit".
2260       __ CmpLogicalP(
2261           r1, StackLimitAsMemOperand(masm, StackLimitKind::kRealStackLimit));
2262       __ bgt(&done);  // Unsigned comparison (CmpLogicalP above).
2263       // The stack would overflow; throw a stack-overflow exception.
2264       {
2265         FrameScope scope(masm, StackFrame::MANUAL);
2266         __ EnterFrame(StackFrame::INTERNAL);
2267         __ CallRuntime(Runtime::kThrowStackOverflow);
2268       }
2269       __ bind(&done);
2270     }
2271 
2272     __ LoadRR(scratch, sp);
2273     __ LoadRR(sp, r1);
2274 
2275     // Relocate arguments down the stack.
2276     //  -- r2 : the number of arguments (not including the receiver)
2277     //  -- r8 : the previous stack pointer
2278     {
2279       Label skip, loop;
2280       __ LoadImmP(r7, Operand::Zero());
2281       __ CmpP(r2, Operand::Zero());
2282       __ beq(&skip);
2283       __ LoadRR(r1, r2);
2284       __ bind(&loop);
2285       __ LoadP(r0, MemOperand(scratch, r7));
2286       __ StoreP(r0, MemOperand(sp, r7));
2287       __ lay(r7, MemOperand(r7, kSystemPointerSize));
2288       __ BranchOnCount(r1, &loop);
2289       __ bind(&skip);
2290     }
2291 
2292     // Copy [[BoundArguments]] to the stack (below the arguments).
2293     {
2294       Label loop;
2295       __ ShiftLeftP(r9, r6, Operand(kTaggedSizeLog2));
2296       __ lay(r4, MemOperand(r4, r9, FixedArray::kHeaderSize - kHeapObjectTag));
2297       __ LoadRR(r1, r6);
2298       __ bind(&loop);
2299       __ LoadAnyTaggedField(ip, MemOperand(r4, -kTaggedSize), r0);
2300       __ lay(r4, MemOperand(r4, -kTaggedSize));
2301       __ StoreP(ip, MemOperand(sp, r7));
2302       __ lay(r7, MemOperand(r7, kSystemPointerSize));
2303       __ BranchOnCount(r1, &loop);
2304       __ AddP(r2, r2, r6);
2305     }
2306   }
2307   __ bind(&no_bound_arguments);
2308 }
2309 
2310 }  // namespace
2311 
2312 // static
2313 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
2314   // ----------- S t a t e -------------
2315   //  -- r2 : the number of arguments (not including the receiver)
2316   //  -- r3 : the function to call (checked to be a JSBoundFunction)
2317   // -----------------------------------
2318   __ AssertBoundFunction(r3);
2319 
2320   // Patch the receiver to [[BoundThis]].
2321   __ LoadAnyTaggedField(r5,
2322                         FieldMemOperand(r3, JSBoundFunction::kBoundThisOffset));
2323   __ ShiftLeftP(r1, r2, Operand(kSystemPointerSizeLog2));
2324   __ StoreP(r5, MemOperand(sp, r1));
2325 
2326   // Push the [[BoundArguments]] onto the stack.
2327   Generate_PushBoundArguments(masm);
2328 
2329   // Call the [[BoundTargetFunction]] via the Call builtin.
2330   __ LoadTaggedPointerField(
2331       r3, FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
2332   __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
2333           RelocInfo::CODE_TARGET);
2334 }
2335 
2336 // static
2337 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
2338   // ----------- S t a t e -------------
2339   //  -- r2 : the number of arguments (not including the receiver)
2340   //  -- r3 : the target to call (can be any Object).
2341   // -----------------------------------
2342 
2343   Label non_callable, non_smi;
2344   __ JumpIfSmi(r3, &non_callable);
2345   __ bind(&non_smi);
2346   __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE);
2347   __ Jump(masm->isolate()->builtins()->CallFunction(mode),
2348           RelocInfo::CODE_TARGET, eq);
2349   __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
2350   __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
2351           RelocInfo::CODE_TARGET, eq);
2352 
2353   // Check if target has a [[Call]] internal method.
2354   __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
2355   __ TestBit(r6, Map::Bits1::IsCallableBit::kShift);
2356   __ beq(&non_callable);
2357 
2358   // Check if the target is a proxy and, if so, call the CallProxy builtin.
2359   __ CmpP(r7, Operand(JS_PROXY_TYPE));
2360   __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET, eq);
2361 
2362   // 2. Call to something else, which might have a [[Call]] internal method (if
2363   // not we raise an exception).
2364   // Overwrite the original receiver with the (original) target.
2365   __ ShiftLeftP(r7, r2, Operand(kSystemPointerSizeLog2));
2366   __ StoreP(r3, MemOperand(sp, r7));
2367   // Let the "call_as_function_delegate" take care of the rest.
2368   __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r3);
2369   __ Jump(masm->isolate()->builtins()->CallFunction(
2370               ConvertReceiverMode::kNotNullOrUndefined),
2371           RelocInfo::CODE_TARGET);
2372 
2373   // 3. Call to something that is not callable.
2374   __ bind(&non_callable);
2375   {
2376     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2377     __ Push(r3);
2378     __ CallRuntime(Runtime::kThrowCalledNonCallable);
2379   }
2380 }
2381 
2382 // static
2383 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2384   // ----------- S t a t e -------------
2385   //  -- r2 : the number of arguments (not including the receiver)
2386   //  -- r3 : the constructor to call (checked to be a JSFunction)
2387   //  -- r5 : the new target (checked to be a constructor)
2388   // -----------------------------------
2389   __ AssertConstructor(r3, r1);
2390   __ AssertFunction(r3);
2391 
2392   // The calling convention for function-specific ConstructStubs requires
2393   // r4 to contain either an AllocationSite or undefined.
2394   __ LoadRoot(r4, RootIndex::kUndefinedValue);
2395 
2396   Label call_generic_stub;
2397 
2398   // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
2399   __ LoadTaggedPointerField(
2400       r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
2401   __ LoadlW(r6, FieldMemOperand(r6, SharedFunctionInfo::kFlagsOffset));
2402   __ AndP(r6, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2403   __ beq(&call_generic_stub);
2404 
2405   __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
2406           RelocInfo::CODE_TARGET);
2407 
2408   __ bind(&call_generic_stub);
2409   __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
2410           RelocInfo::CODE_TARGET);
2411 }
2412 
2413 // static
2414 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2415   // ----------- S t a t e -------------
2416   //  -- r2 : the number of arguments (not including the receiver)
2417   //  -- r3 : the function to call (checked to be a JSBoundFunction)
2418   //  -- r5 : the new target (checked to be a constructor)
2419   // -----------------------------------
2420   __ AssertConstructor(r3, r1);
2421   __ AssertBoundFunction(r3);
2422 
2423   // Push the [[BoundArguments]] onto the stack.
2424   Generate_PushBoundArguments(masm);
2425 
2426   // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2427   Label skip;
2428   __ CompareTagged(r3, r5);
2429   __ bne(&skip);
2430   __ LoadTaggedPointerField(
2431       r5, FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
2432   __ bind(&skip);
2433 
2434   // Construct the [[BoundTargetFunction]] via the Construct builtin.
2435   __ LoadTaggedPointerField(
2436       r3, FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
2437   __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
2438 }
2439 
2440 // static
2441 void Builtins::Generate_Construct(MacroAssembler* masm) {
2442   // ----------- S t a t e -------------
2443   //  -- r2 : the number of arguments (not including the receiver)
2444   //  -- r3 : the constructor to call (can be any Object)
2445   //  -- r5 : the new target (either the same as the constructor or
2446   //          the JSFunction on which new was invoked initially)
2447   // -----------------------------------
2448 
2449   // Check if target is a Smi.
2450   Label non_constructor, non_proxy;
2451   __ JumpIfSmi(r3, &non_constructor);
2452 
2453   // Check if target has a [[Construct]] internal method.
2454   __ LoadTaggedPointerField(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
2455   __ LoadlB(r4, FieldMemOperand(r6, Map::kBitFieldOffset));
2456   __ TestBit(r4, Map::Bits1::IsConstructorBit::kShift);
2457   __ beq(&non_constructor);
2458 
2459   // Dispatch based on instance type.
2460   __ CompareInstanceType(r6, r7, JS_FUNCTION_TYPE);
2461   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
2462           RelocInfo::CODE_TARGET, eq);
2463 
2464   // Only dispatch to bound functions after checking whether they are
2465   // constructors.
2466   __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
2467   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
2468           RelocInfo::CODE_TARGET, eq);
2469 
2470   // Only dispatch to proxies after checking whether they are constructors.
2471   __ CmpP(r7, Operand(JS_PROXY_TYPE));
2472   __ bne(&non_proxy);
2473   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
2474           RelocInfo::CODE_TARGET);
2475 
2476   // Called Construct on an exotic Object with a [[Construct]] internal method.
2477   __ bind(&non_proxy);
2478   {
2479     // Overwrite the original receiver with the (original) target.
2480     __ ShiftLeftP(r7, r2, Operand(kSystemPointerSizeLog2));
2481     __ StoreP(r3, MemOperand(sp, r7));
2482     // Let the "call_as_constructor_delegate" take care of the rest.
2483     __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r3);
2484     __ Jump(masm->isolate()->builtins()->CallFunction(),
2485             RelocInfo::CODE_TARGET);
2486   }
2487 
2488   // Called Construct on an Object that doesn't have a [[Construct]] internal
2489   // method.
2490   __ bind(&non_constructor);
2491   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
2492           RelocInfo::CODE_TARGET);
2493 }
2494 
2495 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2496   // ----------- S t a t e -------------
2497   //  -- r2 : actual number of arguments
2498   //  -- r3 : function (passed through to callee)
2499   //  -- r4 : expected number of arguments
2500   //  -- r5 : new target (passed through to callee)
2501   // -----------------------------------
2502 
2503   Label dont_adapt_arguments, stack_overflow, skip_adapt_arguments;
2504   __ tmll(r4, Operand(kDontAdaptArgumentsSentinel));
2505   __ b(Condition(1), &dont_adapt_arguments);
2506   __ LoadTaggedPointerField(
2507       r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
2508   __ LoadlW(r6, FieldMemOperand(r6, SharedFunctionInfo::kFlagsOffset));
2509   __ tmlh(r6,
2510           Operand(SharedFunctionInfo::IsSafeToSkipArgumentsAdaptorBit::kMask >>
2511                   16));
2512   __ bne(&skip_adapt_arguments);
2513 
2514   // -------------------------------------------
2515   // Adapt arguments.
2516   // -------------------------------------------
2517   {
2518     Label under_application, over_application, invoke;
2519     __ CmpP(r2, r4);
2520     __ blt(&under_application);
2521 
2522     // Enough parameters: actual >= expected
2523     __ bind(&over_application);
2524     {
2525       EnterArgumentsAdaptorFrame(masm);
2526       Generate_StackOverflowCheck(masm, r4, r7, &stack_overflow);
2527 
2528       // Calculate copy start address into r2 and copy end address into r6.
2529       // r2: actual number of arguments as a smi
2530       // r3: function
2531       // r4: expected number of arguments
2532       // r5: new target (passed through to callee)
2533       __ SmiToPtrArrayOffset(r2, r2);
2534       __ AddP(r2, fp);
2535       // adjust for return address and receiver
2536       __ AddP(r2, r2, Operand(2 * kSystemPointerSize));
2537       __ ShiftLeftP(r6, r4, Operand(kSystemPointerSizeLog2));
2538       __ SubP(r6, r2, r6);
2539 
2540       // Copy the arguments (including the receiver) to the new stack frame.
2541       // r2: copy start address
2542       // r3: function
2543       // r4: expected number of arguments
2544       // r5: new target (passed through to callee)
2545       // r6: copy end address
2546 
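      // The copy walks from the highest argument slot (the receiver) down
      // to the copy end address, pushing as it goes, so the arguments keep
      // their original order in the new frame.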
2547       Label copy;
2548       __ bind(&copy);
2549       __ LoadP(r0, MemOperand(r2, 0));
2550       __ push(r0);
2551       __ CmpP(r2, r6);  // Compare before moving to next argument.
2552       __ lay(r2, MemOperand(r2, -kSystemPointerSize));
2553       __ bne(&copy);
2554 
2555       __ b(&invoke);
2556     }
2557 
2558     // Too few parameters: actual < expected
2559     __ bind(&under_application);
2560     {
2561       EnterArgumentsAdaptorFrame(masm);
2562       Generate_StackOverflowCheck(masm, r4, r7, &stack_overflow);
2563 
2564       // Calculate the copy start address into r2; the copy end address is fp.
2565       // r2: actual number of arguments as a smi
2566       // r3: function
2567       // r4: expected number of arguments
2568       // r5: new target (passed through to callee)
2569       __ SmiToPtrArrayOffset(r2, r2);
2570       __ lay(r2, MemOperand(r2, fp));
2571 
2572       // Copy the arguments (including the receiver) to the new stack frame.
2573       // r2: copy start address
2574       // r3: function
2575       // r4: expected number of arguments
2576       // r5: new target (passed through to callee)
2577       Label copy;
2578       __ bind(&copy);
2579       // Adjust load for return address and receiver.
2580       __ LoadP(r0, MemOperand(r2, 2 * kSystemPointerSize));
2581       __ push(r0);
2582       __ CmpP(r2, fp);  // Compare before moving to next argument.
2583       __ lay(r2, MemOperand(r2, -kSystemPointerSize));
2584       __ bne(&copy);
2585 
2586       // Fill the remaining expected arguments with undefined.
2587       // r3: function
2588       // r4: expected number of argumentus
2589       __ LoadRoot(r0, RootIndex::kUndefinedValue);
2590       __ ShiftLeftP(r6, r4, Operand(kSystemPointerSizeLog2));
2591       __ SubP(r6, fp, r6);
2592       // Adjust for frame.
2593       __ SubP(r6, r6,
2594               Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp +
2595                       kSystemPointerSize));
2596 
2597       Label fill;
2598       __ bind(&fill);
2599       __ push(r0);
2600       __ CmpP(sp, r6);
2601       __ bne(&fill);
2602     }

    // Call the entry point.
    __ bind(&invoke);
    __ LoadRR(r2, r4);
    // r2 : expected number of arguments
    // r3 : function (passed through to callee)
    // r5 : new target (passed through to callee)
    static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
    __ LoadTaggedPointerField(r4, FieldMemOperand(r3, JSFunction::kCodeOffset));
    __ CallCodeObject(r4);

    // Store offset of return address for deoptimizer.
    masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(
        masm->pc_offset());

    // Exit frame and return.
    LeaveArgumentsAdaptorFrame(masm);
    __ Ret();
  }

  // -------------------------------------------
  // Skip adapt arguments.
  // -------------------------------------------
  __ bind(&skip_adapt_arguments);
  {
    // The callee cannot observe the actual arguments, so it's safe to just
    // pass the expected arguments by massaging the stack appropriately. See
    // http://bit.ly/v8-faster-calls-with-arguments-mismatch for details.
    Label under_application, over_application;
    __ CmpP(r2, r4);
    __ blt(&under_application);

    __ bind(&over_application);
    {
      // Remove superfluous parameters from the stack.
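      // (sp advances by (actual - expected) * kSystemPointerSize, and the
      // actual argument count in r2 is clamped down to the expected count.)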
      __ SubP(r6, r2, r4);
      __ lgr(r2, r4);
      __ ShiftLeftP(r6, r6, Operand(kSystemPointerSizeLog2));
      __ lay(sp, MemOperand(sp, r6));
      __ b(&dont_adapt_arguments);
    }

    __ bind(&under_application);
    {
      // Fill remaining expected arguments with undefined values.
      Label fill;
      __ LoadRoot(r6, RootIndex::kUndefinedValue);
      __ bind(&fill);
      __ AddP(r2, r2, Operand(1));
      __ push(r6);
      __ CmpP(r2, r4);
      __ blt(&fill);
      __ b(&dont_adapt_arguments);
    }
  }

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ RecordComment("-- Call without adapting args --");
  static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
  __ LoadTaggedPointerField(r4, FieldMemOperand(r3, JSFunction::kCodeOffset));
  __ JumpCodeObject(r4);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}

void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // The function index was put in a register by the jump table trampoline.
  // Convert to Smi for the runtime call.
  __ SmiTag(kWasmCompileLazyFuncIndexRegister,
            kWasmCompileLazyFuncIndexRegister);
  {
    HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
    FrameAndConstantPoolScope scope(masm, StackFrame::WASM_COMPILE_LAZY);

    // Save all parameter registers (see wasm-linkage.cc). They might be
    // overwritten in the runtime call below. We don't have any callee-saved
    // registers in wasm, so no need to store anything else.
    constexpr RegList gp_regs = Register::ListOf(r2, r3, r4, r5, r6);
#if V8_TARGET_ARCH_S390X
    constexpr RegList fp_regs = DoubleRegister::ListOf(d0, d2, d4, d6);
#else
    constexpr RegList fp_regs = DoubleRegister::ListOf(d0, d2);
#endif
    __ MultiPush(gp_regs);
    __ MultiPushDoubles(fp_regs);

    // Pass instance and function index as explicit arguments to the runtime
    // function.
    __ Push(kWasmInstanceRegister, r7);
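    // (Note: r7 is kWasmCompileLazyFuncIndexRegister, Smi-tagged above.)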
    // Initialize the JavaScript context with 0. CEntry will use it to
    // set the current context on the isolate.
    __ LoadSmiLiteral(cp, Smi::zero());
    __ CallRuntime(Runtime::kWasmCompileLazy, 2);
    // The entrypoint address is the return value.
    __ LoadRR(ip, r2);

    // Restore registers.
    __ MultiPopDoubles(fp_regs);
    __ MultiPop(gp_regs);
  }
  // Finally, jump to the entrypoint.
  __ Jump(ip);
}

void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
  HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::WASM_DEBUG_BREAK);

    // Save all parameter registers. They might hold live values; we restore
    // them after the runtime call.
    __ MultiPush(WasmDebugBreakFrameConstants::kPushedGpRegs);
    __ MultiPushDoubles(WasmDebugBreakFrameConstants::kPushedFpRegs);

    // Initialize the JavaScript context with 0. CEntry will use it to
    // set the current context on the isolate.
    __ LoadSmiLiteral(cp, Smi::zero());
    __ CallRuntime(Runtime::kWasmDebugBreak, 0);

    // Restore registers.
    __ MultiPopDoubles(WasmDebugBreakFrameConstants::kPushedFpRegs);
    __ MultiPop(WasmDebugBreakFrameConstants::kPushedGpRegs);
  }
  __ Ret();
}

void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
                               SaveFPRegsMode save_doubles, ArgvMode argv_mode,
                               bool builtin_exit_frame) {
  // Called from JavaScript; parameters are on the stack as if calling a JS
  // function.
  // r2: number of arguments including receiver
  // r3: pointer to builtin function
  // fp: frame pointer  (restored after C call)
  // sp: stack pointer  (restored as callee's sp after C call)
  // cp: current context  (C callee-saved)
  //
  // If argv_mode == kArgvInRegister:
  // r4: pointer to the first argument

  __ LoadRR(r7, r3);

  if (argv_mode == kArgvInRegister) {
    // Move argv into the correct register.
    __ LoadRR(r3, r4);
  } else {
    // Compute the argv pointer.
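    // argv = sp + (argc - 1) * kSystemPointerSize, i.e. the address of the
    // first argument, matching the kArgvInRegister contract above.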
    __ ShiftLeftP(r3, r2, Operand(kSystemPointerSizeLog2));
    __ lay(r3, MemOperand(r3, sp, -kSystemPointerSize));
  }

  // Enter the exit frame that transitions from JavaScript to C++.
  FrameScope scope(masm, StackFrame::MANUAL);

  // Need at least one extra slot for return address location.
  int arg_stack_space = 1;

  // Pass a buffer for the return value on the stack if necessary.
  bool needs_return_buffer =
      result_size == 2 && !ABI_RETURNS_OBJECTPAIR_IN_REGS;
  if (needs_return_buffer) {
    arg_stack_space += result_size;
  }

#if V8_TARGET_ARCH_S390X
  // 64-bit Linux passes the Arguments object by reference, not by value.
  arg_stack_space += 2;
#endif

  __ EnterExitFrame(
      save_doubles, arg_stack_space,
      builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);

  // Store a copy of argc, argv in callee-saved registers for later.
  __ LoadRR(r6, r2);
  __ LoadRR(r8, r3);
  // r2, r6: number of arguments including receiver  (C callee-saved)
  // r3, r8: pointer to the first argument
  // r7: pointer to builtin function  (C callee-saved)

  // Result returned in registers or stack, depending on result size and ABI.

  Register isolate_reg = r4;
  if (needs_return_buffer) {
    // The return value is a 16-byte non-scalar value.
    // Use frame storage reserved by the calling function to pass the return
    // buffer as an implicit first argument in r2. Shift the original
    // parameters by one register each.
    __ LoadRR(r4, r3);
    __ LoadRR(r3, r2);
    __ la(r2,
          MemOperand(sp, (kStackFrameExtraParamSlot + 1) * kSystemPointerSize));
    isolate_reg = r5;
    // Clang doesn't preserve r2 (the result buffer); save it to r8
    // (callee-saved) before the call.
    __ LoadRR(r8, r2);
  }
  // Call C built-in.
  __ Move(isolate_reg, ExternalReference::isolate_address(masm->isolate()));

  __ StoreReturnAddressAndCall(r7);

  // If the return value is on the stack, pop it to registers.
  if (needs_return_buffer) {
    __ LoadRR(r2, r8);
    __ LoadP(r3, MemOperand(r2, kSystemPointerSize));
    __ LoadP(r2, MemOperand(r2));
  }

  // Check result for exception sentinel.
  Label exception_returned;
  __ CompareRoot(r2, RootIndex::kException);
  __ beq(&exception_returned, Label::kNear);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    ExternalReference pending_exception_address = ExternalReference::Create(
        IsolateAddressId::kPendingExceptionAddress, masm->isolate());
    __ Move(r1, pending_exception_address);
    __ LoadP(r1, MemOperand(r1));
    __ CompareRoot(r1, RootIndex::kTheHoleValue);
    // Cannot use Check here, as it attempts to generate a call into the
    // runtime.
    __ beq(&okay, Label::kNear);
    __ stop();
    __ bind(&okay);
  }

  // Exit C frame and return.
  // r2:r3: result
  // sp: stack pointer
  // fp: frame pointer
  Register argc = argv_mode == kArgvInRegister
                      // We don't want to pop arguments so set argc to no_reg.
                      ? no_reg
                      // r6: still holds argc (callee-saved).
                      : r6;
  __ LeaveExitFrame(save_doubles, argc);
  __ b(r14);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ExternalReference pending_handler_entrypoint_address =
      ExternalReference::Create(
          IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ExternalReference pending_handler_fp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ExternalReference pending_handler_sp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());

  // Ask the runtime for help to determine the handler. This will set r3 to
  // contain the current pending exception; don't clobber it.
  ExternalReference find_handler =
      ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, 0, r2);
    __ LoadImmP(r2, Operand::Zero());
    __ LoadImmP(r3, Operand::Zero());
    __ Move(r4, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ Move(cp, pending_handler_context_address);
  __ LoadP(cp, MemOperand(cp));
  __ Move(sp, pending_handler_sp_address);
  __ LoadP(sp, MemOperand(sp));
  __ Move(fp, pending_handler_fp_address);
  __ LoadP(fp, MemOperand(fp));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (cp == 0) for non-JS frames.
  Label skip;
  __ CmpP(cp, Operand::Zero());
  __ beq(&skip, Label::kNear);
  __ StoreP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ bind(&skip);

  // Reset the masking register. This is done independent of the underlying
  // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
  // with both configurations. It is safe to always do this, because the
  // underlying register is caller-saved and can be arbitrarily clobbered.
  __ ResetSpeculationPoisonRegister();

  // Compute the handler entry address and jump to it.
  __ Move(r3, pending_handler_entrypoint_address);
  __ LoadP(r3, MemOperand(r3));
  __ Jump(r3);
}

void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  Label out_of_range, only_low, negate, done, fastpath_done;
  Register result_reg = r2;

  HardAbortScope hard_abort(masm);  // Avoid calls to Abort.

  // Immediate values for this stub fit in instructions, so it's safe to use ip.
  Register scratch = GetRegisterThatIsNotOneOf(result_reg);
  Register scratch_low = GetRegisterThatIsNotOneOf(result_reg, scratch);
  Register scratch_high =
      GetRegisterThatIsNotOneOf(result_reg, scratch, scratch_low);
  DoubleRegister double_scratch = kScratchDoubleReg;

  __ Push(result_reg, scratch);
  // Account for saved regs.
  int argument_offset = 2 * kSystemPointerSize;

  // Load double input.
  __ LoadDouble(double_scratch, MemOperand(sp, argument_offset));

  // Do fast-path convert from double to int.
  __ ConvertDoubleToInt64(result_reg, double_scratch);

  // Test for overflow.
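  // (The fast path succeeds only if the converted value fits in an int32.)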
  __ TestIfInt32(result_reg);
  __ beq(&fastpath_done, Label::kNear);

  __ Push(scratch_high, scratch_low);
  // Account for saved regs.
  argument_offset += 2 * kSystemPointerSize;

  __ LoadlW(scratch_high,
            MemOperand(sp, argument_offset + Register::kExponentOffset));
  __ LoadlW(scratch_low,
            MemOperand(sp, argument_offset + Register::kMantissaOffset));

  __ ExtractBitMask(scratch, scratch_high, HeapNumber::kExponentMask);
  // Load scratch with exponent - 1. This is faster than loading
  // with exponent because Bias + 1 = 1024, which is an S390 immediate value.
  STATIC_ASSERT(HeapNumber::kExponentBias + 1 == 1024);
  __ SubP(scratch, Operand(HeapNumber::kExponentBias + 1));
  // If the exponent is greater than or equal to 84, the 32 least significant
  // bits are 0s (52 significant mantissa bits + 32 uncoded bits), so the
  // result is 0.
  // Compare the exponent with 84 (compare exponent - 1 with 83).
  __ CmpP(scratch, Operand(83));
  __ bge(&out_of_range, Label::kNear);

  // If we reach this code, 31 <= exponent <= 83.
  // So, we don't have to handle cases where 0 <= exponent <= 20 for
  // which we would need to shift right the high part of the mantissa.
  // Scratch contains exponent - 1.
  // Load scratch with 52 - exponent (load with 51 - (exponent - 1)).
  __ Load(r0, Operand(51));
  __ SubP(scratch, r0, scratch);
  __ CmpP(scratch, Operand::Zero());
  __ ble(&only_low, Label::kNear);
  // 21 <= exponent <= 51, shift scratch_low and scratch_high
  // to generate the result.
  __ ShiftRight(scratch_low, scratch_low, scratch);
  // Scratch contains: 52 - exponent.
  // We need: exponent - 20.
  // So we use: 32 - scratch = 32 - 52 + exponent = exponent - 20.
  __ Load(r0, Operand(32));
  __ SubP(scratch, r0, scratch);
  __ ExtractBitMask(result_reg, scratch_high, HeapNumber::kMantissaMask);
  // Set the implicit 1 before the mantissa part in scratch_high.
  STATIC_ASSERT(HeapNumber::kMantissaBitsInTopWord >= 16);
  __ Load(r0, Operand(1 << ((HeapNumber::kMantissaBitsInTopWord)-16)));
  __ ShiftLeftP(r0, r0, Operand(16));
  __ OrP(result_reg, result_reg, r0);
  __ ShiftLeft(r0, result_reg, scratch);
  __ OrP(result_reg, scratch_low, r0);
  __ b(&negate, Label::kNear);

  __ bind(&out_of_range);
  __ mov(result_reg, Operand::Zero());
  __ b(&done, Label::kNear);

  __ bind(&only_low);
  // 52 <= exponent <= 83, shift only scratch_low.
  // On entry, scratch contains: 52 - exponent.
  __ LoadComplementRR(scratch, scratch);
  __ ShiftLeft(result_reg, scratch_low, scratch);

  __ bind(&negate);
  // If the input was positive, scratch_high ASR 31 equals 0 and
  // scratch_high LSR 31 equals 0.
  // New result = (result eor 0) + 0 = result.
  // If the input was negative, we have to negate the result.
  // scratch_high ASR 31 equals 0xFFFFFFFF and scratch_high LSR 31 equals 1.
  // New result = (result eor 0xFFFFFFFF) + 1 = 0 - result.
  __ ShiftRightArith(r0, scratch_high, Operand(31));
#if V8_TARGET_ARCH_S390X
  __ lgfr(r0, r0);
  __ ShiftRightP(r0, r0, Operand(32));
#endif
  __ XorP(result_reg, r0);
  __ ShiftRight(r0, scratch_high, Operand(31));
  __ AddP(result_reg, r0);

  __ bind(&done);
  __ Pop(scratch_high, scratch_low);
  argument_offset -= 2 * kSystemPointerSize;

  __ bind(&fastpath_done);
  __ StoreP(result_reg, MemOperand(sp, argument_offset));
  __ Pop(result_reg, scratch);

  __ Ret();
}

namespace {

static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
  return ref0.address() - ref1.address();
}

// Calls an API function. Allocates HandleScope, extracts the returned value
// from the handle, and propagates exceptions. Restores context. stack_space
// - space to be unwound on exit (includes the call JS arguments space and
// the additional space allocated for the fast call).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     int stack_space,
                                     MemOperand* stack_space_operand,
                                     MemOperand return_value_operand) {
  Isolate* isolate = masm->isolate();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  const int kNextOffset = 0;
  const int kLimitOffset = AddressOffset(
      ExternalReference::handle_scope_limit_address(isolate), next_address);
  const int kLevelOffset = AddressOffset(
      ExternalReference::handle_scope_level_address(isolate), next_address);

  // Additional parameter is the address of the actual callback.
  DCHECK(function_address == r3 || function_address == r4);
  Register scratch = r5;

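  // Decide whether to call the API function directly or through the profiler
  // thunk: the thunk is required when CPU profiling or runtime call stats
  // are active, so the call can be properly accounted.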
  __ Move(scratch, ExternalReference::is_profiling_address(isolate));
  __ LoadlB(scratch, MemOperand(scratch, 0));
  __ CmpP(scratch, Operand::Zero());

  Label profiler_enabled, end_profiler_check;
  __ bne(&profiler_enabled, Label::kNear);
  __ Move(scratch, ExternalReference::address_of_runtime_stats_flag());
  __ LoadlW(scratch, MemOperand(scratch, 0));
  __ CmpP(scratch, Operand::Zero());
  __ bne(&profiler_enabled, Label::kNear);
  {
    // Call the API function directly.
    __ LoadRR(scratch, function_address);
    __ b(&end_profiler_check, Label::kNear);
  }
  __ bind(&profiler_enabled);
  {
    // Additional parameter is the address of the actual callback.
    __ Move(scratch, thunk_ref);
  }
  __ bind(&end_profiler_check);

  // Allocate HandleScope in callee-save registers.
  // r9 - next_address
  // r6 - next_address->kNextOffset
  // r7 - next_address->kLimitOffset
  // r8 - next_address->kLevelOffset
  __ Move(r9, next_address);
  __ LoadP(r6, MemOperand(r9, kNextOffset));
  __ LoadP(r7, MemOperand(r9, kLimitOffset));
  __ LoadlW(r8, MemOperand(r9, kLevelOffset));
  __ AddP(r8, Operand(1));
  __ StoreW(r8, MemOperand(r9, kLevelOffset));

  __ StoreReturnAddressAndCall(scratch);

  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label return_value_loaded;

  // Load value from ReturnValue.
  __ LoadP(r2, return_value_operand);
  __ bind(&return_value_loaded);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ StoreP(r6, MemOperand(r9, kNextOffset));
  if (__ emit_debug_code()) {
    __ LoadlW(r3, MemOperand(r9, kLevelOffset));
    __ CmpP(r3, r8);
    __ Check(eq, AbortReason::kUnexpectedLevelAfterReturnFromApiCall);
  }
  __ SubP(r8, Operand(1));
  __ StoreW(r8, MemOperand(r9, kLevelOffset));
  __ CmpP(r7, MemOperand(r9, kLimitOffset));
  __ bne(&delete_allocated_handles, Label::kNear);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  // LeaveExitFrame expects unwind space to be in a register.
  if (stack_space_operand == nullptr) {
    DCHECK_NE(stack_space, 0);
    __ mov(r6, Operand(stack_space));
  } else {
    DCHECK_EQ(stack_space, 0);
    __ LoadP(r6, *stack_space_operand);
  }
  __ LeaveExitFrame(false, r6, stack_space_operand != nullptr);

  // Check if the function scheduled an exception.
  __ Move(r7, ExternalReference::scheduled_exception_address(isolate));
  __ LoadP(r7, MemOperand(r7));
  __ CompareRoot(r7, RootIndex::kTheHoleValue);
  __ bne(&promote_scheduled_exception, Label::kNear);

  __ b(r14);

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  __ bind(&delete_allocated_handles);
  __ StoreP(r7, MemOperand(r9, kLimitOffset));
  __ LoadRR(r6, r2);
  __ PrepareCallCFunction(1, r7);
  __ Move(r2, ExternalReference::isolate_address(isolate));
  __ CallCFunction(ExternalReference::delete_handle_scope_extensions(), 1);
  __ LoadRR(r2, r6);
  __ b(&leave_exit_frame, Label::kNear);
}

}  // namespace

void Builtins::Generate_CallApiCallback(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- cp                  : context
  //  -- r3                  : api function address
  //  -- r4                  : arguments count (not including the receiver)
  //  -- r5                  : call data
  //  -- r2                  : holder
  //  -- sp[0]               : last argument
  //  -- ...
  //  -- sp[(argc - 1) * kSystemPointerSize]  : first argument
  //  -- sp[(argc + 0) * kSystemPointerSize]  : receiver
  // -----------------------------------

  Register api_function_address = r3;
  Register argc = r4;
  Register call_data = r5;
  Register holder = r2;
  Register scratch = r6;
  DCHECK(!AreAliased(api_function_address, argc, call_data, holder, scratch));

  using FCA = FunctionCallbackArguments;

  STATIC_ASSERT(FCA::kArgsLength == 6);
  STATIC_ASSERT(FCA::kNewTargetIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);

  // Set up FunctionCallbackInfo's implicit_args on the stack as follows:
  //
  // Target state:
  //   sp[0 * kSystemPointerSize]: kHolder
  //   sp[1 * kSystemPointerSize]: kIsolate
  //   sp[2 * kSystemPointerSize]: undefined (kReturnValueDefaultValue)
  //   sp[3 * kSystemPointerSize]: undefined (kReturnValue)
  //   sp[4 * kSystemPointerSize]: kData
  //   sp[5 * kSystemPointerSize]: undefined (kNewTarget)

  // Reserve space on the stack.
  __ lay(sp, MemOperand(sp, -(FCA::kArgsLength * kSystemPointerSize)));

  // kHolder.
  __ StoreP(holder, MemOperand(sp, 0 * kSystemPointerSize));

  // kIsolate.
  __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
  __ StoreP(scratch, MemOperand(sp, 1 * kSystemPointerSize));

  // kReturnValueDefaultValue and kReturnValue.
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  __ StoreP(scratch, MemOperand(sp, 2 * kSystemPointerSize));
  __ StoreP(scratch, MemOperand(sp, 3 * kSystemPointerSize));

  // kData.
  __ StoreP(call_data, MemOperand(sp, 4 * kSystemPointerSize));

  // kNewTarget.
  __ StoreP(scratch, MemOperand(sp, 5 * kSystemPointerSize));

  // Keep a pointer to kHolder (= implicit_args) in a scratch register.
  // We use it below to set up the FunctionCallbackInfo object.
  __ LoadRR(scratch, sp);

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  // S390 LINUX ABI:
  //
  // Create 4 extra slots on stack:
  //    [0] space for DirectCEntryStub's LR save
  //    [1-3] FunctionCallbackInfo
  //    [4] number of bytes to drop from the stack after returning
  static constexpr int kApiStackSpace = 5;
  static constexpr bool kDontSaveDoubles = false;

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(kDontSaveDoubles, kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_ (points at kHolder as set up above).
  // Arguments are after the return address (pushed by EnterExitFrame()).
  __ StoreP(scratch, MemOperand(sp, (kStackFrameExtraParamSlot + 1) *
                                        kSystemPointerSize));

  // FunctionCallbackInfo::values_ (points at the first varargs argument passed
  // on the stack).
  __ AddP(scratch, scratch,
          Operand((FCA::kArgsLength - 1) * kSystemPointerSize));
  __ ShiftLeftP(r1, argc, Operand(kSystemPointerSizeLog2));
  __ AddP(scratch, scratch, r1);
  __ StoreP(scratch, MemOperand(sp, (kStackFrameExtraParamSlot + 2) *
                                        kSystemPointerSize));

  // FunctionCallbackInfo::length_.
  __ StoreW(argc, MemOperand(sp, (kStackFrameExtraParamSlot + 3) *
                                     kSystemPointerSize));

  // We also store the number of bytes to drop from the stack after returning
  // from the API function here.
  __ mov(scratch,
         Operand((FCA::kArgsLength + 1 /* receiver */) * kSystemPointerSize));
  __ ShiftLeftP(r1, argc, Operand(kSystemPointerSizeLog2));
  __ AddP(scratch, r1);
  __ StoreP(scratch, MemOperand(sp, (kStackFrameExtraParamSlot + 4) *
                                        kSystemPointerSize));
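  // (Total bytes dropped = (FCA::kArgsLength + 1 + argc) * kSystemPointerSize:
  // the implicit args, the receiver and the stack-passed arguments.)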

  // v8::InvocationCallback's argument.
  __ lay(r2,
         MemOperand(sp, (kStackFrameExtraParamSlot + 1) * kSystemPointerSize));

  ExternalReference thunk_ref = ExternalReference::invoke_function_callback();

  // There are two stack slots above the arguments we constructed on the stack.
  // TODO(jgruber): Document what these arguments are.
  static constexpr int kStackSlotsAboveFCA = 2;
  MemOperand return_value_operand(
      fp, (kStackSlotsAboveFCA + FCA::kReturnValueOffset) * kSystemPointerSize);

  static constexpr int kUseStackSpaceOperand = 0;
  MemOperand stack_space_operand(
      sp, (kStackFrameExtraParamSlot + 4) * kSystemPointerSize);

  AllowExternalCallThatCantCauseGC scope(masm);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           kUseStackSpaceOperand, &stack_space_operand,
                           return_value_operand);
}

void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
  int arg0Slot = 0;
  int accessorInfoSlot = 0;
  int apiStackSpace = 0;
  // Build the v8::PropertyCallbackInfo::args_ array on the stack and push the
  // property name below the exit frame to make the GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = r6;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  Register api_function_address = r4;

  __ push(receiver);
  // Push data from AccessorInfo.
  __ LoadAnyTaggedField(
      scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset), r1);
  __ push(scratch);
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  __ Push(scratch, scratch);
  __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
  __ Push(scratch, holder);
  __ Push(Smi::zero());  // should_throw_on_error -> false
  __ LoadTaggedPointerField(
      scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset), r1);
  __ push(scratch);
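  // The pushes above lay out PropertyCallbackArguments::args_ in reverse
  // index order (kThisIndex deepest), with the name handle on top.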

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Load address of v8::PropertyAccessorInfo::args_ array and name handle.
  __ LoadRR(r2, sp);                                 // r2 = Handle<Name>
  __ AddP(r3, r2, Operand(1 * kSystemPointerSize));  // r3 = v8::PCI::args_

  // If ABI passes Handles (pointer-sized struct) in a register:
  //
  // Create 2 extra slots on stack:
  //    [0] space for DirectCEntryStub's LR save
  //    [1] AccessorInfo&
  //
  // Otherwise:
  //
  // Create 3 extra slots on stack:
  //    [0] space for DirectCEntryStub's LR save
  //    [1] copy of Handle (first arg)
  //    [2] AccessorInfo&
  if (ABI_PASSES_HANDLES_IN_REGS) {
    accessorInfoSlot = kStackFrameExtraParamSlot + 1;
    apiStackSpace = 2;
  } else {
    arg0Slot = kStackFrameExtraParamSlot + 1;
    accessorInfoSlot = arg0Slot + 1;
    apiStackSpace = 3;
  }

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, apiStackSpace);

  if (!ABI_PASSES_HANDLES_IN_REGS) {
    // Pass the 1st arg by reference.
    __ StoreP(r2, MemOperand(sp, arg0Slot * kSystemPointerSize));
    __ AddP(r2, sp, Operand(arg0Slot * kSystemPointerSize));
  }

  // Create the v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  __ StoreP(r3, MemOperand(sp, accessorInfoSlot * kSystemPointerSize));
  __ AddP(r3, sp, Operand(accessorInfoSlot * kSystemPointerSize));
  // r3 = v8::PropertyCallbackInfo&

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback();

  __ LoadTaggedPointerField(
      scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset));
  __ LoadP(api_function_address,
           FieldMemOperand(scratch, Foreign::kForeignAddressOffset));

  // +3 is to skip the prolog, the return address and the name handle.
  MemOperand return_value_operand(
      fp,
      (PropertyCallbackArguments::kReturnValueOffset + 3) * kSystemPointerSize);
  MemOperand* const kUseStackSpaceConstant = nullptr;
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           kStackUnwindSpace, kUseStackSpaceConstant,
                           return_value_operand);
}

void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
  // Unused.
  __ stop();
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_S390