// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC || V8_TARGET_ARCH_PPC64

#include "src/api/api-arguments.h"
#include "src/codegen/code-factory.h"
// For interpreter_entry_return_pc_offset. TODO(jkummerow): Drop.
#include "src/codegen/macro-assembler-inl.h"
#include "src/codegen/register-configuration.h"
#include "src/debug/debug.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/frame-constants.h"
#include "src/execution/frames.h"
#include "src/heap/heap-inl.h"
#include "src/logging/counters.h"
#include "src/objects/cell.h"
#include "src/objects/foreign.h"
#include "src/objects/heap-number.h"
#include "src/objects/js-generator.h"
#include "src/objects/smi.h"
#include "src/runtime/runtime.h"
#include "src/wasm/wasm-linkage.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address) {
  __ Move(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));
  __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
          RelocInfo::CODE_TARGET);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r4 : target function (preserved for callee)
  //  -- r6 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ Push(r4, r6, r4);

    __ CallRuntime(function_id, 1);
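    // The runtime function returns the Code object to execute in r3; move it
    // into r5 so the tail call below can use it (r5 is
    // kJavaScriptCallCodeStartRegister, see the static_assert).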
    __ mr(r5, r3);

    // Restore target function and new target.
    __ Pop(r4, r6);
  }
  static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
  __ JumpCodeObject(r5);
}

namespace {

enum StackLimitKind { kInterruptStackLimit, kRealStackLimit };

void LoadStackLimit(MacroAssembler* masm, Register destination,
                    StackLimitKind kind) {
  DCHECK(masm->root_array_available());
  Isolate* isolate = masm->isolate();
  ExternalReference limit =
      kind == StackLimitKind::kRealStackLimit
          ? ExternalReference::address_of_real_jslimit(isolate)
          : ExternalReference::address_of_jslimit(isolate);
  DCHECK(TurboAssembler::IsAddressableThroughRootRegister(isolate, limit));

  intptr_t offset =
      TurboAssembler::RootRegisterOffsetForExternalReference(isolate, limit);
  CHECK(is_int32(offset));
  __ LoadP(destination, MemOperand(kRootRegister, offset), r0);
}

void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                 Register scratch, Label* stack_overflow) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  LoadStackLimit(masm, scratch, StackLimitKind::kRealStackLimit);
  // Make scratch the space we have left. The stack might already have
  // overflowed here, which will cause scratch to become negative.
  __ sub(scratch, sp, scratch);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftImm(r0, num_args, Operand(kPointerSizeLog2));
  __ cmp(scratch, r0);
  __ ble(stack_overflow);  // Signed comparison.
}

void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- r4     : constructor function
  //  -- r6     : new target
  //  -- cp     : context
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Register scratch = r5;

  Label stack_overflow;

  Generate_StackOverflowCheck(masm, r3, r8, &stack_overflow);
  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.

    __ SmiTag(r3);
    __ Push(cp, r3);
    __ SmiUntag(r3, SetRC);
    // The receiver for the builtin/api call.
    __ PushRoot(RootIndex::kTheHoleValue);
    // Set up pointer to last argument.
    __ addi(r7, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.

    Label loop, no_args;
    // ----------- S t a t e -------------
    //  --                 r3: number of arguments (untagged)
    //  --                 r4: constructor function
    //  --                 r6: new target
    //  --                 r7: pointer to last argument
    //  --                 cr0: condition indicating whether r3 is zero
    //  -- sp[0*kPointerSize]: the hole (receiver)
    //  -- sp[1*kPointerSize]: number of arguments (tagged)
    //  -- sp[2*kPointerSize]: context
    // -----------------------------------
    __ beq(&no_args, cr0);
    __ ShiftLeftImm(scratch, r3, Operand(kPointerSizeLog2));
    __ sub(sp, sp, scratch);
    __ mtctr(r3);
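    // Copy loop: |scratch| starts at argc * kPointerSize and is walked down by
    // one slot per iteration, moving each argument from the caller's frame
    // (based at r7) into the space just reserved below sp. CTR holds the
    // iteration count.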
    __ bind(&loop);
    __ subi(scratch, scratch, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r7, scratch));
    __ StorePX(r0, MemOperand(sp, scratch));
    __ bdnz(&loop);
    __ bind(&no_args);

    // Call the function.
    // r3: number of arguments (untagged)
    // r4: constructor function
    // r6: new target
    {
      ConstantPoolUnavailableScope constant_pool_unavailable(masm);
      __ InvokeFunctionWithNewTarget(r4, r6, r3, CALL_FUNCTION);
    }

    // Restore context from the frame.
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    // Restore smi-tagged arguments count from the frame.
    __ LoadP(r4, MemOperand(fp, ConstructFrameConstants::kLengthOffset));

    // Leave construct frame.
  }
  // Remove caller arguments from the stack and return.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);

  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  __ blr();

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);  // Unreachable code.
  }
}

}  // namespace

// The construct stub for ES5 constructor functions and ES6 class constructors.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  --      r3: number of arguments (untagged)
  //  --      r4: constructor function
  //  --      r6: new target
  //  --      cp: context
  //  --      lr: return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
    Label post_instantiation_deopt_entry, not_create_implicit_receiver;

    // Preserve the incoming parameters on the stack.
    __ SmiTag(r3);
    __ Push(cp, r3, r4);
    __ PushRoot(RootIndex::kUndefinedValue);
    __ Push(r6);

    // ----------- S t a t e -------------
    //  --        sp[0*kPointerSize]: new target
    //  --        sp[1*kPointerSize]: padding
    //  -- r4 and sp[2*kPointerSize]: constructor function
    //  --        sp[3*kPointerSize]: number of arguments (tagged)
    //  --        sp[4*kPointerSize]: context
    // -----------------------------------

    __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ lwz(r7, FieldMemOperand(r7, SharedFunctionInfo::kFlagsOffset));
    __ DecodeField<SharedFunctionInfo::FunctionKindBits>(r7);
    __ JumpIfIsInRange(r7, kDefaultDerivedConstructor, kDerivedConstructor,
                       &not_create_implicit_receiver);

    // If not derived class constructor: Allocate the new receiver object.
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
                        r7, r8);
    __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
            RelocInfo::CODE_TARGET);
    __ b(&post_instantiation_deopt_entry);

    // Else: use TheHoleValue as receiver for constructor call
    __ bind(&not_create_implicit_receiver);
    __ LoadRoot(r3, RootIndex::kTheHoleValue);

    // ----------- S t a t e -------------
    //  --                          r3: receiver
    //  -- Slot 4 / sp[0*kPointerSize]: new target
    //  -- Slot 3 / sp[1*kPointerSize]: padding
    //  -- Slot 2 / sp[2*kPointerSize]: constructor function
    //  -- Slot 1 / sp[3*kPointerSize]: number of arguments (tagged)
    //  -- Slot 0 / sp[4*kPointerSize]: context
    // -----------------------------------
    // Deoptimizer enters here.
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());
    __ bind(&post_instantiation_deopt_entry);

    // Restore new target.
    __ Pop(r6);
    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ Push(r3, r3);

    // ----------- S t a t e -------------
    //  --                 r6: new target
    //  -- sp[0*kPointerSize]: implicit receiver
    //  -- sp[1*kPointerSize]: implicit receiver
    //  -- sp[2*kPointerSize]: padding
    //  -- sp[3*kPointerSize]: constructor function
    //  -- sp[4*kPointerSize]: number of arguments (tagged)
    //  -- sp[5*kPointerSize]: context
    // -----------------------------------

    // Restore constructor function and argument count.
    __ LoadP(r4, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
    __ LoadP(r3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    __ SmiUntag(r3);

    // Set up pointer to last argument.
    __ addi(r7, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, r3, r8, &stack_overflow);
    __ b(&enough_stack_space);

    __ bind(&stack_overflow);
    // Restore the context from the frame.
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);

    __ bind(&enough_stack_space);

    // Copy arguments and receiver to the expression stack.
    Label loop, no_args;
    // ----------- S t a t e -------------
    //  --                        r3: number of arguments (untagged)
    //  --                        r6: new target
    //  --                        r7: pointer to last argument
    //  --                        cr0: condition indicating whether r3 is zero
    //  --        sp[0*kPointerSize]: implicit receiver
    //  --        sp[1*kPointerSize]: implicit receiver
    //  --        sp[2*kPointerSize]: padding
    //  -- r4 and sp[3*kPointerSize]: constructor function
    //  --        sp[4*kPointerSize]: number of arguments (tagged)
    //  --        sp[5*kPointerSize]: context
    // -----------------------------------
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_args);
    __ ShiftLeftImm(r9, r3, Operand(kPointerSizeLog2));
    __ sub(sp, sp, r9);
    __ mtctr(r3);
    __ bind(&loop);
    __ subi(r9, r9, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r7, r9));
    __ StorePX(r0, MemOperand(sp, r9));
    __ bdnz(&loop);
    __ bind(&no_args);

    // Call the function.
    {
      ConstantPoolUnavailableScope constant_pool_unavailable(masm);
      __ InvokeFunctionWithNewTarget(r4, r6, r3, CALL_FUNCTION);
    }

    // ----------- S t a t e -------------
    //  --                 r0: constructor result
    //  -- sp[0*kPointerSize]: implicit receiver
    //  -- sp[1*kPointerSize]: padding
    //  -- sp[2*kPointerSize]: constructor function
    //  -- sp[3*kPointerSize]: number of arguments
    //  -- sp[4*kPointerSize]: context
    // -----------------------------------

    // Store offset of return address for deoptimizer.
    masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
        masm->pc_offset());

    // Restore the context from the frame.
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, do_throw, leave_frame;

    // If the result is undefined, we jump out to using the implicit receiver.
    __ JumpIfRoot(r3, RootIndex::kUndefinedValue, &use_receiver);

    // Otherwise we do a smi check and fall through to check if the return value
    // is a valid receiver.

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(r3, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CompareObjectType(r3, r7, r7, FIRST_JS_RECEIVER_TYPE);
    __ bge(&leave_frame);
    __ b(&use_receiver);

    __ bind(&do_throw);
    __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ LoadP(r3, MemOperand(sp));
    __ JumpIfRoot(r3, RootIndex::kTheHoleValue, &do_throw);

    __ bind(&leave_frame);
    // Restore smi-tagged arguments count from the frame.
    __ LoadP(r4, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);

  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  __ blr();
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}

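// If |sfi_data| holds an InterpreterData object, unwrap it to get the
// underlying BytecodeArray; otherwise the value is left untouched (it is then
// expected to already be the BytecodeArray).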
static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
                                          Register sfi_data,
                                          Register scratch1) {
  Label done;

  __ CompareObjectType(sfi_data, scratch1, scratch1, INTERPRETER_DATA_TYPE);
  __ bne(&done);
  __ LoadP(sfi_data,
           FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
  __ bind(&done);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the value to pass to the generator
  //  -- r4 : the JSGeneratorObject to resume
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(r4);

  // Store input value into generator object.
  __ StoreP(r3, FieldMemOperand(r4, JSGeneratorObject::kInputOrDebugPosOffset),
            r0);
  __ RecordWriteField(r4, JSGeneratorObject::kInputOrDebugPosOffset, r3, r6,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Load suspended function and context.
  __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
  __ LoadP(cp, FieldMemOperand(r7, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  Register scratch = r8;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ Move(scratch, debug_hook);
  __ LoadByte(scratch, MemOperand(scratch), r0);
  __ extsb(scratch, scratch);
  __ CmpSmiLiteral(scratch, Smi::zero(), r0);
  __ bne(&prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.

  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());

  __ Move(scratch, debug_suspended_generator);
  __ LoadP(scratch, MemOperand(scratch));
  __ cmp(scratch, r4);
  __ beq(&prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Check the stack for overflow. We are not trying to catch interruptions
  // (i.e. debug break and preemption) here, so check the "real stack limit".
  Label stack_overflow;
  LoadStackLimit(masm, scratch, StackLimitKind::kRealStackLimit);
  __ cmpl(sp, scratch);
  __ blt(&stack_overflow);

  // Push receiver.
  __ LoadP(scratch, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
  __ Push(scratch);

  // ----------- S t a t e -------------
  //  -- r4    : the JSGeneratorObject to resume
  //  -- r7    : generator function
  //  -- cp    : generator context
  //  -- lr    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Copy the function arguments from the generator object's register file.
  __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
  __ LoadHalfWord(
      r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadP(r5, FieldMemOperand(
                   r4, JSGeneratorObject::kParametersAndRegistersOffset));
  {
    Label loop, done_loop;
    __ cmpi(r6, Operand::Zero());
    __ ble(&done_loop);

    // Set up r9 to the address of the first element minus kPointerSize.
    __ addi(r9, r5,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));

    __ mtctr(r6);
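    // Push the generator's formal parameters from the parameters-and-registers
    // FixedArray (r5). LoadPU (load with update) pre-increments r9 by one slot
    // each iteration, and CTR counts the r6 iterations.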
    __ bind(&loop);
    __ LoadPU(scratch, MemOperand(r9, kPointerSize));
    __ push(scratch);
    __ bdnz(&loop);

    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));
    GetSharedFunctionInfoBytecode(masm, r6, r3);
    __ CompareObjectType(r6, r6, r6, BYTECODE_ARRAY_TYPE);
    __ Assert(eq, AbortReason::kMissingBytecodeArray);
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ mr(r6, r4);
    __ mr(r4, r7);
    static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
    __ JumpCodeObject(r5);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4, r7);
    // Push hole as receiver since we do not use it for stepping.
    __ PushRoot(RootIndex::kTheHoleValue);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(r4);
    __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(r4);
    __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);  // This should be unreachable.
  }
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ push(r4);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

namespace {

// Called with the native C calling convention. The corresponding function
// signature is either:
//
//   using JSEntryFunction = GeneratedCode<Address(
//       Address root_register_value, Address new_target, Address target,
//       Address receiver, intptr_t argc, Address** args)>;
// or
//   using JSEntryFunction = GeneratedCode<Address(
//       Address root_register_value, MicrotaskQueue* microtask_queue)>;
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
                             Builtins::Name entry_trampoline) {
  // The register state is either:
  //   r3: root_register_value
  //   r4: code entry
  //   r5: function
  //   r6: receiver
  //   r7: argc
  //   r8: argv
  // or
  //   r3: root_register_value
  //   r4: microtask_queue

  Label invoke, handler_entry, exit;

  {
    NoRootArrayScope no_root_array(masm);

    // PPC LINUX ABI:
    // preserve LR in pre-reserved slot in caller's frame
    __ mflr(r0);
    __ StoreP(r0, MemOperand(sp, kStackFrameLRSlot * kPointerSize));

    // Save callee saved registers on the stack.
    __ MultiPush(kCalleeSaved);

    // Save callee-saved double registers.
    __ MultiPushDoubles(kCalleeSavedDoubles);
    // Set up the reserved register for 0.0.
    __ LoadDoubleLiteral(kDoubleRegZero, Double(0.0), r0);

    // Initialize the root register.
    // C calling convention. The first argument is passed in r3.
    __ mr(kRootRegister, r3);
  }

  // Push a frame with special values setup to mark it as an entry frame.
  // r4: code entry
  // r5: function
  // r6: receiver
  // r7: argc
  // r8: argv
  __ li(r0, Operand(-1));  // Push a bad frame pointer to fail if it is used.
  __ push(r0);
  if (FLAG_enable_embedded_constant_pool) {
    __ li(kConstantPoolRegister, Operand::Zero());
    __ push(kConstantPoolRegister);
  }
  __ mov(r0, Operand(StackFrame::TypeToMarker(type)));
  __ push(r0);
  __ push(r0);
  // Save copies of the top frame descriptor on the stack.
  __ Move(r3, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
                                        masm->isolate()));
  __ LoadP(r0, MemOperand(r3));
  __ push(r0);

  Register scratch = r9;
  // Set up frame pointer for the frame to be pushed.
  __ addi(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));

  // If this is the outermost JS call, set js_entry_sp value.
  Label non_outermost_js;
  ExternalReference js_entry_sp =
      ExternalReference::Create(IsolateAddressId::kJSEntrySPAddress,
                                masm->isolate());
  __ Move(r3, js_entry_sp);
  __ LoadP(scratch, MemOperand(r3));
  __ cmpi(scratch, Operand::Zero());
  __ bne(&non_outermost_js);
  __ StoreP(fp, MemOperand(r3));
  __ mov(scratch, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
  Label cont;
  __ b(&cont);
  __ bind(&non_outermost_js);
  __ mov(scratch, Operand(StackFrame::INNER_JSENTRY_FRAME));
  __ bind(&cont);
  __ push(scratch);  // frame-type

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ b(&invoke);

  // Block literal pool emission whilst taking the position of the handler
  // entry. This avoids making the assumption that literal pools are always
  // emitted after an instruction is emitted, rather than before.
  {
    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ bind(&handler_entry);

    // Store the current pc as the handler offset. It's used later to create the
    // handler table.
    masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());

    // Caught exception: Store result (exception) in the pending exception
    // field in the JSEnv and return a failure sentinel.  Coming in here the
    // fp will be invalid because the PushStackHandler below sets it to 0 to
    // signal the existence of the JSEntry frame.
    __ Move(scratch,
            ExternalReference::Create(
                IsolateAddressId::kPendingExceptionAddress, masm->isolate()));
  }

  __ StoreP(r3, MemOperand(scratch));
  __ LoadRoot(r3, RootIndex::kException);
  __ b(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  // Must preserve r4-r8.
  __ PushStackHandler();
  // If an exception not caught by another handler occurs, this handler
  // returns control to the code after the b(&invoke) above, which
  // restores all kCalleeSaved registers (including cp and fp) to their
  // saved values before returning a failure to C.

  // Invoke the function by calling through JS entry trampoline builtin.
  // Notice that we cannot store a reference to the trampoline code directly in
  // this stub, because runtime stubs are not traversed when doing GC.

  // Invoke the function by calling through JS entry trampoline builtin and
  // pop the faked function when we return.
  Handle<Code> trampoline_code =
      masm->isolate()->builtins()->builtin_handle(entry_trampoline);
  __ Call(trampoline_code, RelocInfo::CODE_TARGET);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);  // r3 holds result
  // Check if the current stack frame is marked as the outermost JS frame.
  Label non_outermost_js_2;
  __ pop(r8);
  __ cmpi(r8, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ bne(&non_outermost_js_2);
  __ mov(scratch, Operand::Zero());
  __ Move(r8, js_entry_sp);
  __ StoreP(scratch, MemOperand(r8));
  __ bind(&non_outermost_js_2);

  // Restore the top frame descriptors from the stack.
  __ pop(r6);
  __ Move(scratch, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
                                             masm->isolate()));
  __ StoreP(r6, MemOperand(scratch));

  // Reset the stack to the callee saved registers.
  __ addi(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));

  // Restore callee-saved double registers.
  __ MultiPopDoubles(kCalleeSavedDoubles);

  // Restore callee-saved registers.
  __ MultiPop(kCalleeSaved);

  // Return
  __ LoadP(r0, MemOperand(sp, kStackFrameLRSlot * kPointerSize));
  __ mtlr(r0);
  __ blr();
}

}  // namespace

void Builtins::Generate_JSEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtins::kJSEntryTrampoline);
}

void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
                          Builtins::kJSConstructEntryTrampoline);
}

void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtins::kRunMicrotasksTrampoline);
}

// Clobbers scratch1 and scratch2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        Register scratch1, Register scratch2) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  LoadStackLimit(masm, scratch1, StackLimitKind::kRealStackLimit);
  // Make scratch1 the space we have left. The stack might already have
  // overflowed here, which will cause scratch1 to become negative.
  __ sub(scratch1, sp, scratch1);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftImm(scratch2, argc, Operand(kPointerSizeLog2));
  __ cmp(scratch1, scratch2);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r4: new.target
  // r5: function
  // r6: receiver
  // r7: argc
  // r8: argv
  // r0,r3,r9, cp may be clobbered

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ Move(cp, context_address);
    __ LoadP(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(r5, r6);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r3 and r6.
    Generate_CheckStackOverflow(masm, r7, r3, r6);

    // r4: new.target
    // r5: function
    // r7: argc
    // r8: argv
    // r0,r3,r6,r9, cp may be clobbered

    // Setup new.target, argc and function.
    __ mr(r3, r7);
    __ mr(r6, r4);
    __ mr(r4, r5);

    // r3: argc
    // r4: function
    // r6: new.target
    // r8: argv

    // Copy arguments to the stack in a loop.
    // r4: function
    // r3: argc
    // r8: argv, i.e. points to first arg
    Label loop, entry;
    __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
    __ add(r5, r8, r0);
    // r5 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ LoadP(r9, MemOperand(r8));  // read next parameter
    __ addi(r8, r8, Operand(kPointerSize));
    __ LoadP(r0, MemOperand(r9));  // dereference handle
    __ push(r0);                   // push parameter
    __ bind(&entry);
    __ cmp(r8, r5);
    __ bne(&loop);

    // r3: argc
    // r4: function
    // r6: new.target

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r7, RootIndex::kUndefinedValue);
    __ mr(r8, r7);
    __ mr(r14, r7);
    __ mr(r15, r7);
    __ mr(r16, r7);
    __ mr(r17, r7);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? BUILTIN_CODE(masm->isolate(), Construct)
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ blr();

  // r3: result
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
  // This expects two C++ function parameters passed by Invoke() in
  // execution.cc.
  //   r3: root_register_value
  //   r4: microtask_queue

  __ mr(RunMicrotasksDescriptor::MicrotaskQueueRegister(), r4);
  __ Jump(BUILTIN_CODE(masm->isolate(), RunMicrotasks), RelocInfo::CODE_TARGET);
}

static void ReplaceClosureCodeWithOptimizedCode(MacroAssembler* masm,
                                                Register optimized_code,
                                                Register closure,
                                                Register scratch1,
                                                Register scratch2) {
  // Store code entry in the closure.
  __ StoreP(optimized_code, FieldMemOperand(closure, JSFunction::kCodeOffset),
            r0);
  __ mr(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ LoadP(args_count,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lwz(args_count,
         FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::INTERPRETED);

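  // Note: the value loaded above is added to sp without further scaling, so it
  // is treated as a byte offset covering the receiver and all arguments.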
  __ add(sp, sp, args_count);
}

// Tail-call |function_id| if |smi_entry| == |marker|
static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
                                          Register smi_entry,
                                          OptimizationMarker marker,
                                          Runtime::FunctionId function_id) {
  Label no_match;
  __ CmpSmiLiteral(smi_entry, Smi::FromEnum(marker), r0);
  __ bne(&no_match);
  GenerateTailCallToReturnedCode(masm, function_id);
  __ bind(&no_match);
}

static void TailCallOptimizedCodeSlot(MacroAssembler* masm,
                                      Register optimized_code_entry,
                                      Register scratch) {
  // ----------- S t a t e -------------
  //  -- r6 : new target (preserved for callee if needed, and caller)
  //  -- r4 : target function (preserved for callee if needed, and caller)
  // -----------------------------------
  DCHECK(!AreAliased(r4, r6, optimized_code_entry, scratch));

  Register closure = r4;

  // Check if the optimized code is marked for deopt. If it is, call the
  // runtime to clear it.
  Label found_deoptimized_code;
  __ LoadP(scratch, FieldMemOperand(optimized_code_entry,
                                    Code::kCodeDataContainerOffset));
  __ LoadWordArith(
      scratch,
      FieldMemOperand(scratch, CodeDataContainer::kKindSpecificFlagsOffset));
  __ TestBit(scratch, Code::kMarkedForDeoptimizationBit, r0);
  __ bne(&found_deoptimized_code, cr0);

  // Optimized code is good, get it into the closure and link the closure
  // into the optimized functions list, then tail call the optimized code.
  ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
                                      scratch, r8);
  static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
  __ LoadCodeObjectEntry(r5, optimized_code_entry);
  __ Jump(r5);

  // Optimized code slot contains deoptimized code, evict it and re-enter
  // the closure's code.
  __ bind(&found_deoptimized_code);
  GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
}

static void MaybeOptimizeCode(MacroAssembler* masm, Register feedback_vector,
                              Register optimization_marker) {
  // ----------- S t a t e -------------
  //  -- r6 : new target (preserved for callee if needed, and caller)
  //  -- r4 : target function (preserved for callee if needed, and caller)
  //  -- feedback vector (preserved for caller if needed)
  //  -- optimization_marker : a Smi containing a non-zero optimization marker.
  // -----------------------------------
  DCHECK(!AreAliased(feedback_vector, r4, r6, optimization_marker));

  // TODO(v8:8394): The logging of first execution will break if
  // feedback vectors are not allocated. We need to find a different way of
  // logging these events if required.
  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
                                OptimizationMarker::kLogFirstExecution,
                                Runtime::kFunctionFirstExecution);
  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
                                OptimizationMarker::kCompileOptimized,
                                Runtime::kCompileOptimized_NotConcurrent);
  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
                                OptimizationMarker::kCompileOptimizedConcurrent,
                                Runtime::kCompileOptimized_Concurrent);

  // Otherwise, the marker is InOptimizationQueue, so fall through hoping
  // that an interrupt will eventually update the slot with optimized code.
  if (FLAG_debug_code) {
    __ CmpSmiLiteral(optimization_marker,
                     Smi::FromEnum(OptimizationMarker::kInOptimizationQueue),
                     r0);
    __ Assert(eq, AbortReason::kExpectedOptimizationSentinel);
  }
}

// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation. Will bail out to a
// label if the bytecode (without prefix) is a return bytecode. Will not advance
// the bytecode offset if the current bytecode is a JumpLoop, instead just
// re-executing the JumpLoop to jump to the correct bytecode.
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register bytecode, Register scratch1,
                                          Register scratch2, Label* if_return) {
  Register bytecode_size_table = scratch1;
  Register scratch3 = bytecode;

  // The bytecode offset value will be increased by one in wide and extra wide
  // cases. In the case of having a wide or extra wide JumpLoop bytecode, we
  // will restore the original bytecode. In order to simplify the code, we have
  // a backup of it.
  Register original_bytecode_offset = scratch2;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode, original_bytecode_offset));
  __ Move(bytecode_size_table,
          ExternalReference::bytecode_size_table_address());
  __ Move(original_bytecode_offset, bytecode_offset);

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  Label process_bytecode, extra_wide;
  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  STATIC_ASSERT(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ cmpi(bytecode, Operand(0x3));
  __ bgt(&process_bytecode);
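  // Per the static asserts above, bytecodes 0-3 are the (DebugBreak)Wide and
  // (DebugBreak)ExtraWide prefixes; the even ones select the wide table and
  // the odd ones the extra-wide table, so bit 0 distinguishes the two cases.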
  __ andi(r0, bytecode, Operand(0x1));
  __ bne(&extra_wide, cr0);

  // Load the next bytecode and update table to the wide scaled table.
  __ addi(bytecode_offset, bytecode_offset, Operand(1));
  __ lbzx(bytecode, MemOperand(bytecode_array, bytecode_offset));
  __ addi(bytecode_size_table, bytecode_size_table,
          Operand(kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ b(&process_bytecode);

  __ bind(&extra_wide);
  // Load the next bytecode and update table to the extra wide scaled table.
  __ addi(bytecode_offset, bytecode_offset, Operand(1));
  __ lbzx(bytecode, MemOperand(bytecode_array, bytecode_offset));
  __ addi(bytecode_size_table, bytecode_size_table,
          Operand(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));

  // Load the size of the current bytecode.
  __ bind(&process_bytecode);

  // Bailout to the return label if this is a return bytecode.
#define JUMP_IF_EQUAL(NAME)                                           \
  __ cmpi(bytecode,                                                   \
          Operand(static_cast<int>(interpreter::Bytecode::k##NAME))); \
  __ beq(if_return);
  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
#undef JUMP_IF_EQUAL

  // If this is a JumpLoop, re-execute it to perform the jump to the beginning
  // of the loop.
  Label end, not_jump_loop;
  __ cmpi(bytecode,
          Operand(static_cast<int>(interpreter::Bytecode::kJumpLoop)));
  __ bne(&not_jump_loop);
  // We need to restore the original bytecode_offset since we might have
  // increased it to skip the wide / extra-wide prefix bytecode.
  __ Move(bytecode_offset, original_bytecode_offset);
  __ b(&end);

  __ bind(&not_jump_loop);
  // Otherwise, load the size of the current bytecode and advance the offset.
  __ ShiftLeftImm(scratch3, bytecode, Operand(2));
  __ lwzx(scratch3, MemOperand(bytecode_size_table, scratch3));
  __ add(bytecode_offset, bytecode_offset, scratch3);

  __ bind(&end);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r4: the JS function object being called.
//   o r6: the incoming new target or generator object
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  Register closure = r4;
  Register feedback_vector = r5;

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ LoadP(r3, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  // Load original bytecode array or the debug copy.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, r7);

  // The bytecode array could have been flushed from the shared function info;
  // if so, call into CompileLazy.
  Label compile_lazy;
  __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                       BYTECODE_ARRAY_TYPE);
  __ bne(&compile_lazy);

  // Load the feedback vector from the closure.
  __ LoadP(feedback_vector,
           FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
  __ LoadP(feedback_vector,
           FieldMemOperand(feedback_vector, Cell::kValueOffset));

  Label push_stack_frame;
  // Check if feedback vector is valid. If valid, check for optimized code
  // and update invocation count. Otherwise, setup the stack frame.
  __ LoadP(r7, FieldMemOperand(feedback_vector, HeapObject::kMapOffset));
  __ LoadHalfWord(r7, FieldMemOperand(r7, Map::kInstanceTypeOffset));
  __ cmpi(r7, Operand(FEEDBACK_VECTOR_TYPE));
  __ bne(&push_stack_frame);

  Register optimized_code_entry = r7;

  // Read off the optimized code slot in the feedback vector.
  __ LoadP(optimized_code_entry,
           FieldMemOperand(feedback_vector,
                           FeedbackVector::kOptimizedCodeWeakOrSmiOffset));
  // Check if the optimized code slot is not empty.
  Label optimized_code_slot_not_empty;
  __ CmpSmiLiteral(optimized_code_entry,
                   Smi::FromEnum(OptimizationMarker::kNone), r0);
  __ bne(&optimized_code_slot_not_empty);

  Label not_optimized;
  __ bind(&not_optimized);

  // Increment invocation count for the function.
  __ LoadWord(
      r8,
      FieldMemOperand(feedback_vector, FeedbackVector::kInvocationCountOffset),
      r0);
  __ addi(r8, r8, Operand(1));
  __ StoreWord(
      r8,
      FieldMemOperand(feedback_vector, FeedbackVector::kInvocationCountOffset),
      r0);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).

  __ bind(&push_stack_frame);

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(closure);

  // Reset code age and the OSR arming. The OSR field and BytecodeAgeOffset are
  // 8-bit fields next to each other, so we can clear both with a single 16-bit
  // store. These static asserts guard that this assumption is valid.
  STATIC_ASSERT(BytecodeArray::kBytecodeAgeOffset ==
                BytecodeArray::kOsrNestingLevelOffset + kCharSize);
  STATIC_ASSERT(BytecodeArray::kNoAgeBytecodeAge == 0);
  __ li(r8, Operand(0));
  __ StoreHalfWord(r8,
                   FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                   BytecodeArray::kOsrNestingLevelOffset),
                   r0);

  // Load initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r3, kInterpreterBytecodeOffsetRegister);
  __ Push(kInterpreterBytecodeArrayRegister, r3);

  // Allocate the local and temporary register file on the stack.
  Label stack_overflow;
  {
    // Load frame size (word) from the BytecodeArray object.
    __ lwz(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    __ sub(r8, sp, r5);
    LoadStackLimit(masm, r0, StackLimitKind::kRealStackLimit);
    __ cmpl(r8, r0);
    __ blt(&stack_overflow);

    // If ok, push undefined as the initial value for all register file entries.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop, no_args;
    __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
    __ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2), SetRC);
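    // r5 now holds the frame size in slots; SetRC set cr0 from the shift
    // result, so the beq below skips the loop when the frame size is zero.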
    __ beq(&no_args, cr0);
    __ mtctr(r5);
    __ bind(&loop);
    __ push(kInterpreterAccumulatorRegister);
    __ bdnz(&loop);
    __ bind(&no_args);
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with incoming value which was passed in r6.
  Label no_incoming_new_target_or_generator_register;
  __ LoadWordArith(
      r8, FieldMemOperand(
              kInterpreterBytecodeArrayRegister,
              BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ cmpi(r8, Operand::Zero());
  __ beq(&no_incoming_new_target_or_generator_register);
  __ ShiftLeftImm(r8, r8, Operand(kPointerSizeLog2));
  __ StorePX(r6, MemOperand(fp, r8));
  __ bind(&no_incoming_new_target_or_generator_register);

  // Perform interrupt stack check.
  // TODO(solanes): Merge with the real stack limit check above.
  Label stack_check_interrupt, after_stack_check_interrupt;
  LoadStackLimit(masm, r6, StackLimitKind::kInterruptStackLimit);
  __ cmpl(sp, r6);
  __ blt(&stack_check_interrupt);
  __ bind(&after_stack_check_interrupt);

  // The accumulator is already loaded with undefined.

  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
  __ lbzx(r6, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(r6, r6, Operand(kPointerSizeLog2));
  __ LoadPX(kJavaScriptCallCodeStartRegister,
            MemOperand(kInterpreterDispatchTableRegister, r6));
  __ Call(kJavaScriptCallCodeStartRegister);

  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // Any returns to the entry trampoline are either due to the return bytecode
  // or the interpreter tail calling a builtin and then a dispatch.

  // Get bytecode array and bytecode offset from the stack frame.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Either return, or advance to the next bytecode and dispatch.
  Label do_return;
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, r4, r5, r6,
                                &do_return);
  __ b(&do_dispatch);

  __ bind(&do_return);
  // The return value is in r3.
  LeaveInterpreterFrame(masm, r5);
  __ blr();

  __ bind(&stack_check_interrupt);
  // Modify the bytecode offset in the stack to be kFunctionEntryBytecodeOffset
  // for the call to the StackGuard.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag +
                              kFunctionEntryBytecodeOffset)));
  __ StoreP(kInterpreterBytecodeOffsetRegister,
            MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ CallRuntime(Runtime::kStackGuard);

  // After the call, restore the bytecode array, bytecode offset and accumulator
  // registers again. Also, restore the bytecode offset in the stack to its
  // previous value.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);

  __ SmiTag(r6, kInterpreterBytecodeOffsetRegister);
  __ StoreP(r6,
            MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));

  __ jmp(&after_stack_check_interrupt);

  __ bind(&optimized_code_slot_not_empty);
  Label maybe_has_optimized_code;
  // Check if optimized code marker is actually a weak reference to the
  // optimized code.
  __ JumpIfNotSmi(optimized_code_entry, &maybe_has_optimized_code);
  MaybeOptimizeCode(masm, feedback_vector, optimized_code_entry);
  // Fall through if there's no runnable optimized code.
  __ jmp(&not_optimized);

  __ bind(&maybe_has_optimized_code);
  // Load code entry from the weak reference, if it was cleared, resume
  // execution of unoptimized code.
  __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &not_optimized);
  TailCallOptimizedCodeSlot(masm, optimized_code_entry, r9);

  __ bind(&compile_lazy);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);

  __ bind(&stack_overflow);
  __ CallRuntime(Runtime::kThrowStackOverflow);
  __ bkpt(0);  // Should not return.
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register count, Register scratch) {
  Label loop, skip;
  __ cmpi(count, Operand::Zero());
  __ beq(&skip);
  __ addi(index, index, Operand(kPointerSize));  // Bias up for LoadPU
  __ mtctr(count);
1287   __ bind(&loop);
1288   __ LoadPU(scratch, MemOperand(index, -kPointerSize));
1289   __ push(scratch);
1290   __ bdnz(&loop);
1291   __ bind(&skip);
1292 }
1293 
1294 // static
1295 void Builtins::Generate_InterpreterPushArgsThenCallImpl(
1296     MacroAssembler* masm, ConvertReceiverMode receiver_mode,
1297     InterpreterPushArgsMode mode) {
1298   DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
1299   // ----------- S t a t e -------------
1300   //  -- r3 : the number of arguments (not including the receiver)
1301   //  -- r5 : the address of the first argument to be pushed. Subsequent
1302   //          arguments should be consecutive above this, in the same order as
1303   //          they are to be pushed onto the stack.
1304   //  -- r4 : the target to call (can be any Object).
1305   // -----------------------------------
1306   Label stack_overflow;
1307 
1308   // Calculate number of arguments (add one for receiver).
1309   __ addi(r6, r3, Operand(1));
1310 
1311   Generate_StackOverflowCheck(masm, r6, ip, &stack_overflow);
1312 
1313   // Push "undefined" as the receiver arg if we need to.
1314   if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1315     __ PushRoot(RootIndex::kUndefinedValue);
1316     __ mr(r6, r3);  // The receiver was just pushed; only r3 args remain.
1317   }
1318 
1319   // Push the arguments. r5 and r7 will be modified.
1320   Generate_InterpreterPushArgs(masm, r6, r5, r6, r7);
1321 
1322   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1323     __ Pop(r5);                   // Pass the spread in a register
1324     __ subi(r3, r3, Operand(1));  // Subtract one for spread
1325   }
1326 
1327   // Call the target.
1328   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1329     __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
1330             RelocInfo::CODE_TARGET);
1331   } else {
1332     __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
1333             RelocInfo::CODE_TARGET);
1334   }
1335 
1336   __ bind(&stack_overflow);
1337   {
1338     __ TailCallRuntime(Runtime::kThrowStackOverflow);
1339     // Unreachable Code.
1340     __ bkpt(0);
1341   }
1342 }
1343 
1344 // static
1345 void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
1346     MacroAssembler* masm, InterpreterPushArgsMode mode) {
1347   // ----------- S t a t e -------------
1348   // -- r3 : argument count (not including receiver)
1349   // -- r6 : new target
1350   // -- r4 : constructor to call
1351   // -- r5 : allocation site feedback if available, undefined otherwise.
1352   // -- r7 : address of the first argument
1353   // -----------------------------------
1354   Label stack_overflow;
1355 
1356   // Push a slot for the receiver to be constructed.
1357   __ li(r0, Operand::Zero());
1358   __ push(r0);
1359 
1360   // Push the arguments (skip if none).
1361   Label skip;
1362   __ cmpi(r3, Operand::Zero());
1363   __ beq(&skip);
1364   Generate_StackOverflowCheck(masm, r3, ip, &stack_overflow);
1365   // Push the arguments. r7 and r9 will be modified.
1366   Generate_InterpreterPushArgs(masm, r3, r7, r3, r9);
1367   __ bind(&skip);
1368   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1369     __ Pop(r5);                   // Pass the spread in a register
1370     __ subi(r3, r3, Operand(1));  // Subtract one for spread
1371   } else {
1372     __ AssertUndefinedOrAllocationSite(r5, r8);
1373   }
1374   if (mode == InterpreterPushArgsMode::kArrayFunction) {
1375     __ AssertFunction(r4);
1376 
1377     // Tail call to the array construct stub (still in the caller
1378     // context at this point).
1379     Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
1380     __ Jump(code, RelocInfo::CODE_TARGET);
1381   } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1382     // Call the constructor with r3, r4, and r6 unmodified.
1383     __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
1384             RelocInfo::CODE_TARGET);
1385   } else {
1386     DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1387     // Call the constructor with r3, r4, and r6 unmodified.
1388     __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
1389   }
1390 
1391   __ bind(&stack_overflow);
1392   {
1393     __ TailCallRuntime(Runtime::kThrowStackOverflow);
1394     // Unreachable Code.
1395     __ bkpt(0);
1396   }
1397 }
1398 
1399 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1400   // Set the return address to the correct point in the interpreter entry
1401   // trampoline.
1402   Label builtin_trampoline, trampoline_loaded;
1403   Smi interpreter_entry_return_pc_offset(
1404       masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1405   DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());
1406 
1407   // If the SFI function_data is an InterpreterData, the function will have a
1408   // custom copy of the interpreter entry trampoline for profiling. If so,
1409   // get the custom trampoline, otherwise grab the entry address of the global
1410   // trampoline.
1411   __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
1412   __ LoadP(r5, FieldMemOperand(r5, JSFunction::kSharedFunctionInfoOffset));
1413   __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));
1414   __ CompareObjectType(r5, kInterpreterDispatchTableRegister,
1415                        kInterpreterDispatchTableRegister,
1416                        INTERPRETER_DATA_TYPE);
1417   __ bne(&builtin_trampoline);
1418 
1419   __ LoadP(r5,
1420            FieldMemOperand(r5, InterpreterData::kInterpreterTrampolineOffset));
1421   __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
1422   __ b(&trampoline_loaded);
1423 
1424   __ bind(&builtin_trampoline);
1425   __ Move(r5, ExternalReference::
1426                   address_of_interpreter_entry_trampoline_instruction_start(
1427                       masm->isolate()));
1428   __ LoadP(r5, MemOperand(r5));
1429 
1430   __ bind(&trampoline_loaded);
1431   __ addi(r0, r5, Operand(interpreter_entry_return_pc_offset.value()));
1432   __ mtlr(r0);
1433 
1434   // Initialize the dispatch table register.
1435   __ Move(
1436       kInterpreterDispatchTableRegister,
1437       ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1438 
1439   // Get the bytecode array pointer from the frame.
1440   __ LoadP(kInterpreterBytecodeArrayRegister,
1441            MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1442 
1443   if (FLAG_debug_code) {
1444     // Check function data field is actually a BytecodeArray object.
1445     __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
1446     __ Assert(ne,
1447               AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
1448               cr0);
1449     __ CompareObjectType(kInterpreterBytecodeArrayRegister, r4, no_reg,
1450                          BYTECODE_ARRAY_TYPE);
1451     __ Assert(
1452         eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1453   }
1454 
1455   // Get the target bytecode offset from the frame.
1456   __ LoadP(kInterpreterBytecodeOffsetRegister,
1457            MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1458   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1459 
1460   if (FLAG_debug_code) {
1461     Label okay;
1462     __ cmpi(kInterpreterBytecodeOffsetRegister,
1463             Operand(BytecodeArray::kHeaderSize - kHeapObjectTag +
1464                     kFunctionEntryBytecodeOffset));
1465     __ bge(&okay);
1466     __ bkpt(0);
1467     __ bind(&okay);
1468   }
1469 
1470   // Dispatch to the target bytecode.
1471   UseScratchRegisterScope temps(masm);
1472   Register scratch = temps.Acquire();
1473   __ lbzx(ip, MemOperand(kInterpreterBytecodeArrayRegister,
1474                          kInterpreterBytecodeOffsetRegister));
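  // The bytecode value in ip indexes the dispatch table; shifting it by
  // kPointerSizeLog2 turns it into the byte offset of the handler address.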
1475   __ ShiftLeftImm(scratch, ip, Operand(kPointerSizeLog2));
1476   __ LoadPX(kJavaScriptCallCodeStartRegister,
1477             MemOperand(kInterpreterDispatchTableRegister, scratch));
1478   __ Jump(kJavaScriptCallCodeStartRegister);
1479 }
1480 
1481 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1482   // Get bytecode array and bytecode offset from the stack frame.
1483   __ LoadP(kInterpreterBytecodeArrayRegister,
1484            MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1485   __ LoadP(kInterpreterBytecodeOffsetRegister,
1486            MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1487   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1488 
1489   Label enter_bytecode, function_entry_bytecode;
1490   __ cmpi(kInterpreterBytecodeOffsetRegister,
1491           Operand(BytecodeArray::kHeaderSize - kHeapObjectTag +
1492                   kFunctionEntryBytecodeOffset));
1493   __ beq(&function_entry_bytecode);
1494 
1495   // Load the current bytecode.
1496   __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
1497                          kInterpreterBytecodeOffsetRegister));
1498 
1499   // Advance to the next bytecode.
1500   Label if_return;
1501   AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1502                                 kInterpreterBytecodeOffsetRegister, r4, r5, r6,
1503                                 &if_return);
1504 
1505   __ bind(&enter_bytecode);
1506   // Convert new bytecode offset to a Smi and save in the stackframe.
1507   __ SmiTag(r5, kInterpreterBytecodeOffsetRegister);
1508   __ StoreP(r5,
1509             MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1510 
1511   Generate_InterpreterEnterBytecode(masm);
1512 
1513   __ bind(&function_entry_bytecode);
1514   // If the code deoptimizes during the implicit function entry stack interrupt
1515   // check, it will have a bailout ID of kFunctionEntryBytecodeOffset, which is
1516   // not a valid bytecode offset. Detect this case and advance to the first
1517   // actual bytecode.
1518   __ mov(kInterpreterBytecodeOffsetRegister,
1519          Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
1520   __ b(&enter_bytecode);
1521 
1522   // We should never take the if_return path.
1523   __ bind(&if_return);
1524   __ Abort(AbortReason::kInvalidBytecodeAdvance);
1525 }
1526 
1527 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1528   Generate_InterpreterEnterBytecode(masm);
1529 }
1530 
1531 namespace {
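// Restores the state laid out by the deoptimizer for a builtin continuation
// frame: all allocatable general registers are popped (the argument count is
// Smi-untagged for JavaScript builtins), fp and lr are restored, and the
// builtin whose index was saved in the frame is tail-called. When
// |with_result| is set, the value in r3 first overwrites the hole slot that
// was reserved for the continuation's result.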
1532 void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
1533                                       bool java_script_builtin,
1534                                       bool with_result) {
1535   const RegisterConfiguration* config(RegisterConfiguration::Default());
1536   int allocatable_register_count = config->num_allocatable_general_registers();
1537   if (with_result) {
1538     // Overwrite the hole inserted by the deoptimizer with the return value from
1539     // the LAZY deopt point.
1540     __ StoreP(
1541         r3, MemOperand(
1542                 sp, config->num_allocatable_general_registers() * kPointerSize +
1543                         BuiltinContinuationFrameConstants::kFixedFrameSize));
1544   }
1545   for (int i = allocatable_register_count - 1; i >= 0; --i) {
1546     int code = config->GetAllocatableGeneralCode(i);
1547     __ Pop(Register::from_code(code));
1548     if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
1549       __ SmiUntag(Register::from_code(code));
1550     }
1551   }
1552   __ LoadP(
1553       fp,
1554       MemOperand(sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1555   // Load builtin index (stored as a Smi) and use it to get the builtin start
1556   // address from the builtins table.
1557   UseScratchRegisterScope temps(masm);
1558   Register builtin = temps.Acquire();
1559   __ Pop(builtin);
1560   __ addi(sp, sp,
1561           Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1562   __ Pop(r0);
1563   __ mtlr(r0);
1564   __ LoadEntryFromBuiltinIndex(builtin);
1565   __ Jump(builtin);
1566 }
1567 }  // namespace
1568 
1569 void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
1570   Generate_ContinueToBuiltinHelper(masm, false, false);
1571 }
1572 
1573 void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
1574     MacroAssembler* masm) {
1575   Generate_ContinueToBuiltinHelper(masm, false, true);
1576 }
1577 
1578 void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
1579   Generate_ContinueToBuiltinHelper(masm, true, false);
1580 }
1581 
1582 void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
1583     MacroAssembler* masm) {
1584   Generate_ContinueToBuiltinHelper(masm, true, true);
1585 }
1586 
1587 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1588   {
1589     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1590     __ CallRuntime(Runtime::kNotifyDeoptimized);
1591   }
1592 
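  // The value left on top of the stack for the accumulator is reloaded into
  // r3 (the interpreter accumulator register) and its slot is dropped before
  // returning.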
1593   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r3.code());
1594   __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
1595   __ addi(sp, sp, Operand(1 * kPointerSize));
1596   __ Ret();
1597 }
1598 
1599 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
1600   {
1601     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1602     __ CallRuntime(Runtime::kCompileForOnStackReplacement);
1603   }
1604 
1605   // If the code object is null, just return to the caller.
1606   Label skip;
1607   __ CmpSmiLiteral(r3, Smi::zero(), r0);
1608   __ bne(&skip);
1609   __ Ret();
1610 
1611   __ bind(&skip);
1612 
1613   // Drop the handler frame that is sitting on top of the actual
1614   // JavaScript frame. This is the case when OSR is triggered from bytecode.
1615   __ LeaveFrame(StackFrame::STUB);
1616 
1617   // Load deoptimization data from the code object.
1618   // <deopt_data> = <code>[#deoptimization_data_offset]
1619   __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));
1620 
1621   {
1622     ConstantPoolUnavailableScope constant_pool_unavailable(masm);
1623     __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start
1624 
1625     if (FLAG_enable_embedded_constant_pool) {
1626       __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
1627     }
1628 
1629     // Load the OSR entrypoint offset from the deoptimization data.
1630     // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1631     __ LoadP(r4,
1632              FieldMemOperand(r4, FixedArray::OffsetOfElementAt(
1633                                      DeoptimizationData::kOsrPcOffsetIndex)));
1634     __ SmiUntag(r4);
1635 
1636     // Compute the target address = code start + osr_offset
1637     __ add(r0, r3, r4);
1638 
1639     // And "return" to the OSR entry point of the function.
1640     __ mtlr(r0);
1641     __ blr();
1642   }
1643 }
1644 
1645 // static
1646 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1647   // ----------- S t a t e -------------
1648   //  -- r3    : argc
1649   //  -- sp[0] : argArray
1650   //  -- sp[4] : thisArg
1651   //  -- sp[8] : receiver
1652   // -----------------------------------
1653 
1654   // 1. Load receiver into r4, argArray into r5 (if present), remove all
1655   // arguments from the stack (including the receiver), and push thisArg (if
1656   // present) instead.
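  // Note: the single cmpi against kPointerSize below drives both conditional
  // loads: blt skips thisArg and argArray when argc == 0, and beq skips only
  // the argArray load when argc == 1.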
1657   {
1658     Label skip;
1659     Register arg_size = r8;
1660     Register new_sp = r6;
1661     Register scratch = r7;
1662     __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
1663     __ add(new_sp, sp, arg_size);
1664     __ LoadRoot(scratch, RootIndex::kUndefinedValue);
1665     __ mr(r5, scratch);
1666     __ LoadP(r4, MemOperand(new_sp, 0));  // receiver
1667     __ cmpi(arg_size, Operand(kPointerSize));
1668     __ blt(&skip);
1669     __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize));  // thisArg
1670     __ beq(&skip);
1671     __ LoadP(r5, MemOperand(new_sp, 2 * -kPointerSize));  // argArray
1672     __ bind(&skip);
1673     __ mr(sp, new_sp);
1674     __ StoreP(scratch, MemOperand(sp, 0));
1675   }
1676 
1677   // ----------- S t a t e -------------
1678   //  -- r5    : argArray
1679   //  -- r4    : receiver
1680   //  -- sp[0] : thisArg
1681   // -----------------------------------
1682 
1683   // 2. We don't need to check explicitly for callable receiver here,
1684   // since that's the first thing the Call/CallWithArrayLike builtins
1685   // will do.
1686 
1687   // 3. Tail call with no arguments if argArray is null or undefined.
1688   Label no_arguments;
1689   __ JumpIfRoot(r5, RootIndex::kNullValue, &no_arguments);
1690   __ JumpIfRoot(r5, RootIndex::kUndefinedValue, &no_arguments);
1691 
1692   // 4a. Apply the receiver to the given argArray.
1693   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1694           RelocInfo::CODE_TARGET);
1695 
1696   // 4b. The argArray is either null or undefined, so we tail call without any
1697   // arguments to the receiver.
1698   __ bind(&no_arguments);
1699   {
1700     __ li(r3, Operand::Zero());
1701     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1702   }
1703 }
1704 
1705 // static
1706 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1707   // 1. Make sure we have at least one argument.
1708   // r3: actual number of arguments
1709   {
1710     Label done;
1711     __ cmpi(r3, Operand::Zero());
1712     __ bne(&done);
1713     __ PushRoot(RootIndex::kUndefinedValue);
1714     __ addi(r3, r3, Operand(1));
1715     __ bind(&done);
1716   }
1717 
1718   // 2. Get the callable to call (passed as receiver) from the stack.
1719   // r3: actual number of arguments
1720   __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
1721   __ LoadPX(r4, MemOperand(sp, r5));
1722 
1723   // 3. Shift arguments one slot down on the stack (overwriting the
1724   //    original receiver).  Adjust argument count to make the original
1725   //    first argument the new receiver.
1726   // r3: actual number of arguments
1727   // r4: callable
1728   {
1729     Register scratch = r6;
1730     Label loop;
1731     // Calculate the copy start address (destination). Copy end address is sp.
1732     __ add(r5, sp, r5);
1733 
1734     __ mtctr(r3);
1735     __ bind(&loop);
1736     __ LoadP(scratch, MemOperand(r5, -kPointerSize));
1737     __ StoreP(scratch, MemOperand(r5));
1738     __ subi(r5, r5, Operand(kPointerSize));
1739     __ bdnz(&loop);
1740     // Adjust the actual number of arguments and remove the top element
1741     // (which is a copy of the last argument).
1742     __ subi(r3, r3, Operand(1));
1743     __ pop();
1744   }
1745 
1746   // 4. Call the callable.
1747   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1748 }
1749 
1750 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1751   // ----------- S t a t e -------------
1752   //  -- r3     : argc
1753   //  -- sp[0]  : argumentsList
1754   //  -- sp[4]  : thisArgument
1755   //  -- sp[8]  : target
1756   //  -- sp[12] : receiver
1757   // -----------------------------------
1758 
1759   // 1. Load target into r4 (if present), argumentsList into r5 (if present),
1760   // remove all arguments from the stack (including the receiver), and push
1761   // thisArgument (if present) instead.
1762   {
1763     Label skip;
1764     Register arg_size = r8;
1765     Register new_sp = r6;
1766     Register scratch = r7;
1767     __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
1768     __ add(new_sp, sp, arg_size);
1769     __ LoadRoot(r4, RootIndex::kUndefinedValue);
1770     __ mr(scratch, r4);
1771     __ mr(r5, r4);
1772     __ cmpi(arg_size, Operand(kPointerSize));
1773     __ blt(&skip);
1774     __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
1775     __ beq(&skip);
1776     __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize));  // thisArgument
1777     __ cmpi(arg_size, Operand(2 * kPointerSize));
1778     __ beq(&skip);
1779     __ LoadP(r5, MemOperand(new_sp, 3 * -kPointerSize));  // argumentsList
1780     __ bind(&skip);
1781     __ mr(sp, new_sp);
1782     __ StoreP(scratch, MemOperand(sp, 0));
1783   }
1784 
1785   // ----------- S t a t e -------------
1786   //  -- r5    : argumentsList
1787   //  -- r4    : target
1788   //  -- sp[0] : thisArgument
1789   // -----------------------------------
1790 
1791   // 2. We don't need to check explicitly for callable target here,
1792   // since that's the first thing the Call/CallWithArrayLike builtins
1793   // will do.
1794 
1795   // 3. Apply the target to the given argumentsList.
1796   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1797           RelocInfo::CODE_TARGET);
1798 }
1799 
1800 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1801   // ----------- S t a t e -------------
1802   //  -- r3     : argc
1803   //  -- sp[0]  : new.target (optional)
1804   //  -- sp[4]  : argumentsList
1805   //  -- sp[8]  : target
1806   //  -- sp[12] : receiver
1807   // -----------------------------------
1808 
1809   // 1. Load target into r4 (if present), argumentsList into r5 (if present),
1810   // new.target into r6 (if present, otherwise use target), remove all
1811   // arguments from the stack (including the receiver), and push thisArgument
1812   // (if present) instead.
1813   {
1814     Label skip;
1815     Register arg_size = r8;
1816     Register new_sp = r7;
1817     __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
1818     __ add(new_sp, sp, arg_size);
1819     __ LoadRoot(r4, RootIndex::kUndefinedValue);
1820     __ mr(r5, r4);
1821     __ mr(r6, r4);
1822     __ StoreP(r4, MemOperand(new_sp, 0));  // receiver (undefined)
1823     __ cmpi(arg_size, Operand(kPointerSize));
1824     __ blt(&skip);
1825     __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
1826     __ mr(r6, r4);  // new.target defaults to target
1827     __ beq(&skip);
1828     __ LoadP(r5, MemOperand(new_sp, 2 * -kPointerSize));  // argumentsList
1829     __ cmpi(arg_size, Operand(2 * kPointerSize));
1830     __ beq(&skip);
1831     __ LoadP(r6, MemOperand(new_sp, 3 * -kPointerSize));  // new.target
1832     __ bind(&skip);
1833     __ mr(sp, new_sp);
1834   }
1835 
1836   // ----------- S t a t e -------------
1837   //  -- r5    : argumentsList
1838   //  -- r6    : new.target
1839   //  -- r4    : target
1840   //  -- sp[0] : receiver (undefined)
1841   // -----------------------------------
1842 
1843   // 2. We don't need to check explicitly for constructor target here,
1844   // since that's the first thing the Construct/ConstructWithArrayLike
1845   // builtins will do.
1846 
1847   // 3. We don't need to check explicitly for constructor new.target here,
1848   // since that's the second thing the Construct/ConstructWithArrayLike
1849   // builtins will do.
1850 
1851   // 4. Construct the target with the given new.target and argumentsList.
1852   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
1853           RelocInfo::CODE_TARGET);
1854 }
1855 
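// Builds an arguments adaptor frame: lr and fp are saved, followed by the
// constant pool pointer (when enabled), the ARGUMENTS_ADAPTOR frame marker,
// the function, the Smi-tagged argument count and a padding slot, and fp is
// then pointed at the fixed part of the new frame.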
1856 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1857   __ SmiTag(r3);
1858   __ mov(r7, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1859   __ mflr(r0);
1860   __ push(r0);
1861   if (FLAG_enable_embedded_constant_pool) {
1862     __ Push(fp, kConstantPoolRegister, r7, r4, r3);
1863   } else {
1864     __ Push(fp, r7, r4, r3);
1865   }
1866   __ Push(Smi::zero());  // Padding.
1867   __ addi(fp, sp,
1868           Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp));
1869 }
1870 
1871 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1872   // ----------- S t a t e -------------
1873   //  -- r3 : result being passed through
1874   // -----------------------------------
1875   // Get the number of arguments passed (as a smi), tear down the frame and
1876   // then tear down the parameters.
1877   __ LoadP(r4, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1878   int stack_adjustment = kPointerSize;  // adjust for receiver
1879   __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
1880   __ SmiToPtrArrayOffset(r0, r4);
1881   __ add(sp, sp, r0);
1882 }
1883 
1884 // static
1885 void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
1886                                                Handle<Code> code) {
1887   // ----------- S t a t e -------------
1888   //  -- r4 : target
1889   //  -- r3 : number of parameters on the stack (not including the receiver)
1890   //  -- r5 : arguments list (a FixedArray)
1891   //  -- r7 : len (number of elements to push from args)
1892   //  -- r6 : new.target (for [[Construct]])
1893   // -----------------------------------
1894 
1895   Register scratch = ip;
1896 
1897   if (masm->emit_debug_code()) {
1898     // Allow r5 to be a FixedArray, or a FixedDoubleArray if r7 == 0.
1899     Label ok, fail;
1900     __ AssertNotSmi(r5);
1901     __ LoadP(scratch, FieldMemOperand(r5, HeapObject::kMapOffset));
1902     __ LoadHalfWord(scratch,
1903                     FieldMemOperand(scratch, Map::kInstanceTypeOffset));
1904     __ cmpi(scratch, Operand(FIXED_ARRAY_TYPE));
1905     __ beq(&ok);
1906     __ cmpi(scratch, Operand(FIXED_DOUBLE_ARRAY_TYPE));
1907     __ bne(&fail);
1908     __ cmpi(r7, Operand::Zero());
1909     __ beq(&ok);
1910     // Fall through.
1911     __ bind(&fail);
1912     __ Abort(AbortReason::kOperandIsNotAFixedArray);
1913 
1914     __ bind(&ok);
1915   }
1916 
1917   // Check for stack overflow.
1918   Label stack_overflow;
1919   Generate_StackOverflowCheck(masm, r7, scratch, &stack_overflow);
1920 
1921   // Push arguments onto the stack (thisArgument is already on the stack).
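  // While copying, elements equal to the-hole are replaced with undefined so
  // that holes in the arguments FixedArray never reach the stack.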
1922   {
1923     Label loop, no_args, skip;
1924     __ cmpi(r7, Operand::Zero());
1925     __ beq(&no_args);
1926     __ addi(r5, r5,
1927             Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
1928     __ mtctr(r7);
1929     __ bind(&loop);
1930     __ LoadPU(scratch, MemOperand(r5, kPointerSize));
1931     __ CompareRoot(scratch, RootIndex::kTheHoleValue);
1932     __ bne(&skip);
1933     __ LoadRoot(scratch, RootIndex::kUndefinedValue);
1934     __ bind(&skip);
1935     __ push(scratch);
1936     __ bdnz(&loop);
1937     __ bind(&no_args);
1938     __ add(r3, r3, r7);
1939   }
1940 
1941   // Tail-call to the actual Call or Construct builtin.
1942   __ Jump(code, RelocInfo::CODE_TARGET);
1943 
1944   __ bind(&stack_overflow);
1945   __ TailCallRuntime(Runtime::kThrowStackOverflow);
1946 }
1947 
1948 // static
1949 void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
1950                                                       CallOrConstructMode mode,
1951                                                       Handle<Code> code) {
1952   // ----------- S t a t e -------------
1953   //  -- r3 : the number of arguments (not including the receiver)
1954   //  -- r6 : the new.target (for [[Construct]] calls)
1955   //  -- r4 : the target to call (can be any Object)
1956   //  -- r5 : start index (to support rest parameters)
1957   // -----------------------------------
1958 
1959   Register scratch = r9;
1960 
1961   if (mode == CallOrConstructMode::kConstruct) {
1962     Label new_target_constructor, new_target_not_constructor;
1963     __ JumpIfSmi(r6, &new_target_not_constructor);
1964     __ LoadP(scratch, FieldMemOperand(r6, HeapObject::kMapOffset));
1965     __ lbz(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
1966     __ TestBit(scratch, Map::Bits1::IsConstructorBit::kShift, r0);
1967     __ bne(&new_target_constructor, cr0);
1968     __ bind(&new_target_not_constructor);
1969     {
1970       FrameScope scope(masm, StackFrame::MANUAL);
1971       __ EnterFrame(StackFrame::INTERNAL);
1972       __ Push(r6);
1973       __ CallRuntime(Runtime::kThrowNotConstructor);
1974     }
1975     __ bind(&new_target_constructor);
1976   }
1977 
1978   // Check if we have an arguments adaptor frame below the function frame.
1979   Label arguments_adaptor, arguments_done;
1980   __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1981   __ LoadP(scratch,
1982            MemOperand(r7, CommonFrameConstants::kContextOrFrameTypeOffset));
1983   __ cmpi(scratch,
1984           Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1985   __ beq(&arguments_adaptor);
1986   {
1987     __ LoadP(r8, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
1988     __ LoadP(r8, FieldMemOperand(r8, JSFunction::kSharedFunctionInfoOffset));
1989     __ LoadHalfWord(
1990         r8,
1991         FieldMemOperand(r8, SharedFunctionInfo::kFormalParameterCountOffset));
1992     __ mr(r7, fp);
1993   }
1994   __ b(&arguments_done);
1995   __ bind(&arguments_adaptor);
1996   {
1997     // Load the length from the ArgumentsAdaptorFrame.
1998     __ LoadP(r8, MemOperand(r7, ArgumentsAdaptorFrameConstants::kLengthOffset));
1999     __ SmiUntag(r8);
2000   }
2001   __ bind(&arguments_done);
2002 
2003   Label stack_done, stack_overflow;
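  // r8 holds the caller's argument count; subtracting the start index gives
  // the number of arguments to forward (nothing to do if none remain).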
2004   __ sub(r8, r8, r5);
2005   __ cmpi(r8, Operand::Zero());
2006   __ ble(&stack_done);
2007   {
2008     // Check for stack overflow.
2009     Generate_StackOverflowCheck(masm, r8, r5, &stack_overflow);
2010 
2011     // Forward the arguments from the caller frame.
2012     {
2013       Label loop;
2014       __ addi(r7, r7, Operand(kPointerSize));
2015       __ add(r3, r3, r8);
2016       __ bind(&loop);
2017       {
2018         __ ShiftLeftImm(scratch, r8, Operand(kPointerSizeLog2));
2019         __ LoadPX(scratch, MemOperand(r7, scratch));
2020         __ push(scratch);
2021         __ subi(r8, r8, Operand(1));
2022         __ cmpi(r8, Operand::Zero());
2023         __ bne(&loop);
2024       }
2025     }
2026   }
2027   __ b(&stack_done);
2028   __ bind(&stack_overflow);
2029   __ TailCallRuntime(Runtime::kThrowStackOverflow);
2030   __ bind(&stack_done);
2031 
2032   // Tail-call to the {code} handler.
2033   __ Jump(code, RelocInfo::CODE_TARGET);
2034 }
2035 
2036 // static
2037 void Builtins::Generate_CallFunction(MacroAssembler* masm,
2038                                      ConvertReceiverMode mode) {
2039   // ----------- S t a t e -------------
2040   //  -- r3 : the number of arguments (not including the receiver)
2041   //  -- r4 : the function to call (checked to be a JSFunction)
2042   // -----------------------------------
2043   __ AssertFunction(r4);
2044 
2045   // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2046   // Check that the function is not a "classConstructor".
2047   Label class_constructor;
2048   __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2049   __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kFlagsOffset));
2050   __ TestBitMask(r6, SharedFunctionInfo::IsClassConstructorBit::kMask, r0);
2051   __ bne(&class_constructor, cr0);
2052 
2053   // Enter the context of the function; ToObject has to run in the function
2054   // context, and we also need to take the global proxy from the function
2055   // context in case of conversion.
2056   __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
2057   // We need to convert the receiver for non-native sloppy mode functions.
2058   Label done_convert;
2059   __ andi(r0, r6,
2060           Operand(SharedFunctionInfo::IsStrictBit::kMask |
2061                   SharedFunctionInfo::IsNativeBit::kMask));
2062   __ bne(&done_convert, cr0);
2063   {
2064     // ----------- S t a t e -------------
2065     //  -- r3 : the number of arguments (not including the receiver)
2066     //  -- r4 : the function to call (checked to be a JSFunction)
2067     //  -- r5 : the shared function info.
2068     //  -- cp : the function context.
2069     // -----------------------------------
2070 
2071     if (mode == ConvertReceiverMode::kNullOrUndefined) {
2072       // Patch receiver to global proxy.
2073       __ LoadGlobalProxy(r6);
2074     } else {
2075       Label convert_to_object, convert_receiver;
2076       __ ShiftLeftImm(r6, r3, Operand(kPointerSizeLog2));
2077       __ LoadPX(r6, MemOperand(sp, r6));
2078       __ JumpIfSmi(r6, &convert_to_object);
2079       STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2080       __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE);
2081       __ bge(&done_convert);
2082       if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2083         Label convert_global_proxy;
2084         __ JumpIfRoot(r6, RootIndex::kUndefinedValue, &convert_global_proxy);
2085         __ JumpIfNotRoot(r6, RootIndex::kNullValue, &convert_to_object);
2086         __ bind(&convert_global_proxy);
2087         {
2088           // Patch receiver to global proxy.
2089           __ LoadGlobalProxy(r6);
2090         }
2091         __ b(&convert_receiver);
2092       }
2093       __ bind(&convert_to_object);
2094       {
2095         // Convert receiver using ToObject.
2096         // TODO(bmeurer): Inline the allocation here to avoid building the frame
2097         // in the fast case? (fall back to AllocateInNewSpace?)
2098         FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2099         __ SmiTag(r3);
2100         __ Push(r3, r4);
2101         __ mr(r3, r6);
2102         __ Push(cp);
2103         __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
2104                 RelocInfo::CODE_TARGET);
2105         __ Pop(cp);
2106         __ mr(r6, r3);
2107         __ Pop(r3, r4);
2108         __ SmiUntag(r3);
2109       }
2110       __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2111       __ bind(&convert_receiver);
2112     }
2113     __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
2114     __ StorePX(r6, MemOperand(sp, r7));
2115   }
2116   __ bind(&done_convert);
2117 
2118   // ----------- S t a t e -------------
2119   //  -- r3 : the number of arguments (not including the receiver)
2120   //  -- r4 : the function to call (checked to be a JSFunction)
2121   //  -- r5 : the shared function info.
2122   //  -- cp : the function context.
2123   // -----------------------------------
2124 
2125   __ LoadHalfWord(
2126       r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
2127   __ InvokeFunctionCode(r4, no_reg, r5, r3, JUMP_FUNCTION);
2128 
2129   // The function is a "classConstructor", need to raise an exception.
2130   __ bind(&class_constructor);
2131   {
2132     FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
2133     __ push(r4);
2134     __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2135   }
2136 }
2137 
2138 namespace {
2139 
2140 void Generate_PushBoundArguments(MacroAssembler* masm) {
2141   // ----------- S t a t e -------------
2142   //  -- r3 : the number of arguments (not including the receiver)
2143   //  -- r4 : target (checked to be a JSBoundFunction)
2144   //  -- r6 : new.target (only in case of [[Construct]])
2145   // -----------------------------------
2146 
2147   // Load [[BoundArguments]] into r5 and length of that into r7.
2148   Label no_bound_arguments;
2149   __ LoadP(r5, FieldMemOperand(r4, JSBoundFunction::kBoundArgumentsOffset));
2150   __ LoadP(r7, FieldMemOperand(r5, FixedArray::kLengthOffset));
2151   __ SmiUntag(r7, SetRC);
2152   __ beq(&no_bound_arguments, cr0);
2153   {
2154     // ----------- S t a t e -------------
2155     //  -- r3 : the number of arguments (not including the receiver)
2156     //  -- r4 : target (checked to be a JSBoundFunction)
2157     //  -- r5 : the [[BoundArguments]] (implemented as FixedArray)
2158     //  -- r6 : new.target (only in case of [[Construct]])
2159     //  -- r7 : the number of [[BoundArguments]]
2160     // -----------------------------------
2161 
2162     Register scratch = r9;
2163     // Reserve stack space for the [[BoundArguments]].
2164     {
2165       Label done;
2166       __ mr(scratch, sp);  // preserve previous stack pointer
2167       __ ShiftLeftImm(r10, r7, Operand(kPointerSizeLog2));
2168       __ sub(sp, sp, r10);
2169       // Check the stack for overflow. We are not trying to catch interruptions
2170       // (i.e. debug break and preemption) here, so check the "real stack
2171       // limit".
2172       {
2173         UseScratchRegisterScope temps(masm);
2174         Register scratch = temps.Acquire();
2175         LoadStackLimit(masm, scratch, StackLimitKind::kRealStackLimit);
2176         __ cmpl(sp, scratch);
2177       }
2178       __ bgt(&done);  // Unsigned comparison (cmpl above).
2179       // Restore the stack pointer.
2180       __ mr(sp, scratch);
2181       {
2182         FrameScope scope(masm, StackFrame::MANUAL);
2183         __ EnterFrame(StackFrame::INTERNAL);
2184         __ CallRuntime(Runtime::kThrowStackOverflow);
2185       }
2186       __ bind(&done);
2187     }
2188 
2189     // Relocate arguments down the stack.
2190     //  -- r3 : the number of arguments (not including the receiver)
2191     //  -- r9 : the previous stack pointer
2192     //  -- r10: the size of the [[BoundArguments]]
2193     {
2194       Label skip, loop;
2195       __ li(r8, Operand::Zero());
2196       __ cmpi(r3, Operand::Zero());
2197       __ beq(&skip);
2198       __ mtctr(r3);
2199       __ bind(&loop);
2200       __ LoadPX(r0, MemOperand(scratch, r8));
2201       __ StorePX(r0, MemOperand(sp, r8));
2202       __ addi(r8, r8, Operand(kPointerSize));
2203       __ bdnz(&loop);
2204       __ bind(&skip);
2205     }
2206 
2207     // Copy [[BoundArguments]] to the stack (below the arguments).
2208     {
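      // r5 is advanced just past the last bound argument so the LoadPU below
      // walks the FixedArray backwards, while r8 keeps growing from where the
      // relocated arguments ended; the bound arguments therefore fill the gap
      // left between the existing arguments and the receiver.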
2209       Label loop;
2210       __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2211       __ add(r5, r5, r10);
2212       __ mtctr(r7);
2213       __ bind(&loop);
2214       __ LoadPU(r0, MemOperand(r5, -kPointerSize));
2215       __ StorePX(r0, MemOperand(sp, r8));
2216       __ addi(r8, r8, Operand(kPointerSize));
2217       __ bdnz(&loop);
2218       __ add(r3, r3, r7);
2219     }
2220   }
2221   __ bind(&no_bound_arguments);
2222 }
2223 
2224 }  // namespace
2225 
2226 // static
2227 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
2228   // ----------- S t a t e -------------
2229   //  -- r3 : the number of arguments (not including the receiver)
2230   //  -- r4 : the function to call (checked to be a JSBoundFunction)
2231   // -----------------------------------
2232   __ AssertBoundFunction(r4);
2233 
2234   // Patch the receiver to [[BoundThis]].
2235   __ LoadP(r6, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset));
2236   __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
2237   __ StorePX(r6, MemOperand(sp, r0));
2238 
2239   // Push the [[BoundArguments]] onto the stack.
2240   Generate_PushBoundArguments(masm);
2241 
2242   // Call the [[BoundTargetFunction]] via the Call builtin.
2243   __ LoadP(r4,
2244            FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
2245   __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
2246           RelocInfo::CODE_TARGET);
2247 }
2248 
2249 // static
2250 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
2251   // ----------- S t a t e -------------
2252   //  -- r3 : the number of arguments (not including the receiver)
2253   //  -- r4 : the target to call (can be any Object).
2254   // -----------------------------------
2255 
2256   Label non_callable, non_smi;
2257   __ JumpIfSmi(r4, &non_callable);
2258   __ bind(&non_smi);
2259   __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
2260   __ Jump(masm->isolate()->builtins()->CallFunction(mode),
2261           RelocInfo::CODE_TARGET, eq);
2262   __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
2263   __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
2264           RelocInfo::CODE_TARGET, eq);
2265 
2266   // Check if target has a [[Call]] internal method.
2267   __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
2268   __ TestBit(r7, Map::Bits1::IsCallableBit::kShift, r0);
2269   __ beq(&non_callable, cr0);
2270 
2271   // Check if target is a proxy and call CallProxy external builtin
2272   __ cmpi(r8, Operand(JS_PROXY_TYPE));
2273   __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET, eq);
2274 
2275   // 2. Call to something else, which might have a [[Call]] internal method (if
2276   // not we raise an exception).
2277   // Overwrite the original receiver with the (original) target.
2278   __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
2279   __ StorePX(r4, MemOperand(sp, r8));
2280   // Let the "call_as_function_delegate" take care of the rest.
2281   __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r4);
2282   __ Jump(masm->isolate()->builtins()->CallFunction(
2283               ConvertReceiverMode::kNotNullOrUndefined),
2284           RelocInfo::CODE_TARGET);
2285 
2286   // 3. Call to something that is not callable.
2287   __ bind(&non_callable);
2288   {
2289     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2290     __ Push(r4);
2291     __ CallRuntime(Runtime::kThrowCalledNonCallable);
2292   }
2293 }
2294 
2295 // static
2296 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2297   // ----------- S t a t e -------------
2298   //  -- r3 : the number of arguments (not including the receiver)
2299   //  -- r4 : the constructor to call (checked to be a JSFunction)
2300   //  -- r6 : the new target (checked to be a constructor)
2301   // -----------------------------------
2302   __ AssertConstructor(r4);
2303   __ AssertFunction(r4);
2304 
2305   // Calling convention for function specific ConstructStubs require
2306   // r5 to contain either an AllocationSite or undefined.
2307   __ LoadRoot(r5, RootIndex::kUndefinedValue);
2308 
2309   Label call_generic_stub;
2310 
2311   // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
2312   __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2313   __ lwz(r7, FieldMemOperand(r7, SharedFunctionInfo::kFlagsOffset));
2314   __ mov(ip, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2315   __ and_(r7, r7, ip, SetRC);
2316   __ beq(&call_generic_stub, cr0);
2317 
2318   __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
2319           RelocInfo::CODE_TARGET);
2320 
2321   __ bind(&call_generic_stub);
2322   __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
2323           RelocInfo::CODE_TARGET);
2324 }
2325 
2326 // static
2327 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2328   // ----------- S t a t e -------------
2329   //  -- r3 : the number of arguments (not including the receiver)
2330   //  -- r4 : the function to call (checked to be a JSBoundFunction)
2331   //  -- r6 : the new target (checked to be a constructor)
2332   // -----------------------------------
2333   __ AssertConstructor(r4);
2334   __ AssertBoundFunction(r4);
2335 
2336   // Push the [[BoundArguments]] onto the stack.
2337   Generate_PushBoundArguments(masm);
2338 
2339   // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2340   Label skip;
2341   __ cmp(r4, r6);
2342   __ bne(&skip);
2343   __ LoadP(r6,
2344            FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
2345   __ bind(&skip);
2346 
2347   // Construct the [[BoundTargetFunction]] via the Construct builtin.
2348   __ LoadP(r4,
2349            FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
2350   __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
2351 }
2352 
2353 // static
2354 void Builtins::Generate_Construct(MacroAssembler* masm) {
2355   // ----------- S t a t e -------------
2356   //  -- r3 : the number of arguments (not including the receiver)
2357   //  -- r4 : the constructor to call (can be any Object)
2358   //  -- r6 : the new target (either the same as the constructor or
2359   //          the JSFunction on which new was invoked initially)
2360   // -----------------------------------
2361 
2362   // Check if target is a Smi.
2363   Label non_constructor, non_proxy;
2364   __ JumpIfSmi(r4, &non_constructor);
2365 
2366   // Check if target has a [[Construct]] internal method.
2367   __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
2368   __ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset));
2369   __ TestBit(r5, Map::Bits1::IsConstructorBit::kShift, r0);
2370   __ beq(&non_constructor, cr0);
2371 
2372   // Dispatch based on instance type.
2373   __ CompareInstanceType(r7, r8, JS_FUNCTION_TYPE);
2374   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
2375           RelocInfo::CODE_TARGET, eq);
2376 
2377   // Only dispatch to bound functions after checking whether they are
2378   // constructors.
2379   __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
2380   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
2381           RelocInfo::CODE_TARGET, eq);
2382 
2383   // Only dispatch to proxies after checking whether they are constructors.
2384   __ cmpi(r8, Operand(JS_PROXY_TYPE));
2385   __ bne(&non_proxy);
2386   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
2387           RelocInfo::CODE_TARGET);
2388 
2389   // Called Construct on an exotic Object with a [[Construct]] internal method.
2390   __ bind(&non_proxy);
2391   {
2392     // Overwrite the original receiver with the (original) target.
2393     __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
2394     __ StorePX(r4, MemOperand(sp, r8));
2395     // Let the "call_as_constructor_delegate" take care of the rest.
2396     __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r4);
2397     __ Jump(masm->isolate()->builtins()->CallFunction(),
2398             RelocInfo::CODE_TARGET);
2399   }
2400 
2401   // Called Construct on an Object that doesn't have a [[Construct]] internal
2402   // method.
2403   __ bind(&non_constructor);
2404   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
2405           RelocInfo::CODE_TARGET);
2406 }
2407 
2408 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2409   // ----------- S t a t e -------------
2410   //  -- r3 : actual number of arguments
2411   //  -- r4 : function (passed through to callee)
2412   //  -- r5 : expected number of arguments
2413   //  -- r6 : new target (passed through to callee)
2414   // -----------------------------------
2415 
2416   Label dont_adapt_arguments, stack_overflow, skip_adapt_arguments;
2417   __ cmpli(r5, Operand(kDontAdaptArgumentsSentinel));
2418   __ beq(&dont_adapt_arguments);
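  // kDontAdaptArgumentsSentinel marks callees that can cope with any actual
  // argument count, so the adaptor frame is skipped entirely for them.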
2419   __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2420   __ lwz(r7, FieldMemOperand(r7, SharedFunctionInfo::kFlagsOffset));
2421   __ TestBitMask(r7, SharedFunctionInfo::IsSafeToSkipArgumentsAdaptorBit::kMask,
2422                  r0);
2423   __ bne(&skip_adapt_arguments, cr0);
2424 
2425   // -------------------------------------------
2426   // Adapt arguments.
2427   // -------------------------------------------
2428   {
2429     Label under_application, over_application, invoke;
2430     __ cmp(r3, r5);
2431     __ blt(&under_application);
2432 
2433     // Enough parameters: actual >= expected
2434     __ bind(&over_application);
2435     {
2436       EnterArgumentsAdaptorFrame(masm);
2437       Generate_StackOverflowCheck(masm, r5, r8, &stack_overflow);
2438 
2439       // Calculate copy start address into r3 and copy end address into r7.
2440       // r3: actual number of arguments as a smi
2441       // r4: function
2442       // r5: expected number of arguments
2443       // r6: new target (passed through to callee)
2444       __ SmiToPtrArrayOffset(r3, r3);
2445       __ add(r3, r3, fp);
2446       // adjust for return address and receiver
2447       __ addi(r3, r3, Operand(2 * kPointerSize));
2448       __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
2449       __ sub(r7, r3, r7);
2450 
2451       // Copy the arguments (including the receiver) to the new stack frame.
2452       // r3: copy start address
2453       // r4: function
2454       // r5: expected number of arguments
2455       // r6: new target (passed through to callee)
2456       // r7: copy end address
2457 
2458       Label copy;
2459       __ bind(&copy);
2460       __ LoadP(r0, MemOperand(r3, 0));
2461       __ push(r0);
2462       __ cmp(r3, r7);  // Compare before moving to next argument.
2463       __ subi(r3, r3, Operand(kPointerSize));
2464       __ bne(&copy);
2465 
2466       __ b(&invoke);
2467     }
2468 
2469     // Too few parameters: Actual < expected
2470     __ bind(&under_application);
2471     {
2472       EnterArgumentsAdaptorFrame(masm);
2473       Generate_StackOverflowCheck(masm, r5, r8, &stack_overflow);
2474 
2475       // Calculate the copy start address into r3; the copy end address is fp.
2476       // r3: actual number of arguments as a smi
2477       // r4: function
2478       // r5: expected number of arguments
2479       // r6: new target (passed through to callee)
2480       __ SmiToPtrArrayOffset(r3, r3);
2481       __ add(r3, r3, fp);
2482 
2483       // Copy the arguments (including the receiver) to the new stack frame.
2484       // r3: copy start address
2485       // r4: function
2486       // r5: expected number of arguments
2487       // r6: new target (passed through to callee)
2488       Label copy;
2489       __ bind(&copy);
2490       // Adjust load for return address and receiver.
2491       __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
2492       __ push(r0);
2493       __ cmp(r3, fp);  // Compare before moving to next argument.
2494       __ subi(r3, r3, Operand(kPointerSize));
2495       __ bne(&copy);
2496 
2497       // Fill the remaining expected arguments with undefined.
2498       // r4: function
2499       // r5: expected number of arguments
2500       // r6: new target (passed through to callee)
2501       __ LoadRoot(r0, RootIndex::kUndefinedValue);
2502       __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
2503       __ sub(r7, fp, r7);
2504       // Adjust for frame.
2505       __ subi(r7, r7,
2506               Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp +
2507                       kPointerSize));
2508 
2509       Label fill;
2510       __ bind(&fill);
2511       __ push(r0);
2512       __ cmp(sp, r7);
2513       __ bne(&fill);
2514     }
2515 
2516     // Call the entry point.
2517     __ bind(&invoke);
2518     __ mr(r3, r5);
2519     // r3 : expected number of arguments
2520     // r4 : function (passed through to callee)
2521     // r6 : new target (passed through to callee)
2522     static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
2523     __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
2524     __ CallCodeObject(r5);
2525 
2526     // Store offset of return address for deoptimizer.
2527     masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(
2528         masm->pc_offset());
2529 
2530     // Exit frame and return.
2531     LeaveArgumentsAdaptorFrame(masm);
2532     __ blr();
2533   }
2534 
2535   // -------------------------------------------
2536   // Skip adapt arguments.
2537   // -------------------------------------------
2538   __ bind(&skip_adapt_arguments);
2539   {
2540     // The callee cannot observe the actual arguments, so it's safe to just
2541     // pass the expected arguments by massaging the stack appropriately. See
2542     // http://bit.ly/v8-faster-calls-with-arguments-mismatch for details.
2543     Label under_application, over_application;
2544     __ cmp(r3, r5);
2545     __ blt(&under_application);
2546 
2547     __ bind(&over_application);
2548     {
2549       // Remove superfluous parameters from the stack.
2550       __ sub(r7, r3, r5);
2551       __ mr(r3, r5);
2552       __ ShiftLeftImm(r7, r7, Operand(kPointerSizeLog2));
2553       __ add(sp, sp, r7);
2554       __ b(&dont_adapt_arguments);
2555     }
2556 
2557     __ bind(&under_application);
2558     {
2559       // Fill remaining expected arguments with undefined values.
2560       Label fill;
2561       __ LoadRoot(r7, RootIndex::kUndefinedValue);
2562       __ bind(&fill);
2563       __ addi(r3, r3, Operand(1));
2564       __ push(r7);
2565       __ cmp(r3, r5);
2566       __ blt(&fill);
2567       __ b(&dont_adapt_arguments);
2568     }
2569   }
2570 
2571   // -------------------------------------------
2572   // Don't adapt arguments.
2573   // -------------------------------------------
2574   __ bind(&dont_adapt_arguments);
2575   static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
2576   __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
2577   __ JumpCodeObject(r5);
2578 
2579   __ bind(&stack_overflow);
2580   {
2581     FrameScope frame(masm, StackFrame::MANUAL);
2582     __ CallRuntime(Runtime::kThrowStackOverflow);
2583     __ bkpt(0);
2584   }
2585 }
2586 
2587 void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
2588   // The function index was put in a register by the jump table trampoline.
2589   // Convert to Smi for the runtime call.
2590   __ SmiTag(kWasmCompileLazyFuncIndexRegister,
2591             kWasmCompileLazyFuncIndexRegister);
2592   {
2593     HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
2594     FrameAndConstantPoolScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
2595 
2596     // Save all parameter registers (see wasm-linkage.cc). They might be
2597     // overwritten in the runtime call below. We don't have any callee-saved
2598     // registers in wasm, so no need to store anything else.
2599     constexpr RegList gp_regs =
2600         Register::ListOf(r3, r4, r5, r6, r7, r8, r9, r10);
2601     constexpr RegList fp_regs =
2602         DoubleRegister::ListOf(d1, d2, d3, d4, d5, d6, d7, d8);
2603     __ MultiPush(gp_regs);
2604     __ MultiPushDoubles(fp_regs);
2605 
2606     // Pass instance and function index as explicit arguments to the runtime
2607     // function.
2608     __ Push(kWasmInstanceRegister, kWasmCompileLazyFuncIndexRegister);
2609     // Initialize the JavaScript context with 0. CEntry will use it to
2610     // set the current context on the isolate.
2611     __ LoadSmiLiteral(cp, Smi::zero());
2612     __ CallRuntime(Runtime::kWasmCompileLazy, 2);
2613     // The entrypoint address is the return value.
2614     __ mr(r11, kReturnRegister0);
2615 
2616     // Restore registers.
2617     __ MultiPopDoubles(fp_regs);
2618     __ MultiPop(gp_regs);
2619   }
2620   // Finally, jump to the entrypoint.
2621   __ Jump(r11);
2622 }
2623 
2624 void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
2625   HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
2626   {
2627     FrameAndConstantPoolScope scope(masm, StackFrame::WASM_DEBUG_BREAK);
2628 
2629     // Save all parameter registers. They might hold live values; we
2630     // restore them after the runtime call.
2631     __ MultiPush(WasmDebugBreakFrameConstants::kPushedGpRegs);
2632     __ MultiPushDoubles(WasmDebugBreakFrameConstants::kPushedFpRegs);
2633 
2634     // Initialize the JavaScript context with 0. CEntry will use it to
2635     // set the current context on the isolate.
2636     __ LoadSmiLiteral(cp, Smi::zero());
2637     __ CallRuntime(Runtime::kWasmDebugBreak, 0);
2638 
2639     // Restore registers.
2640     __ MultiPopDoubles(WasmDebugBreakFrameConstants::kPushedFpRegs);
2641     __ MultiPop(WasmDebugBreakFrameConstants::kPushedGpRegs);
2642   }
2643   __ Ret();
2644 }
2645 
2646 void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
2647                                SaveFPRegsMode save_doubles, ArgvMode argv_mode,
2648                                bool builtin_exit_frame) {
2649   // Called from JavaScript; parameters are on stack as if calling JS function.
2650   // r3: number of arguments including receiver
2651   // r4: pointer to builtin function
2652   // fp: frame pointer  (restored after C call)
2653   // sp: stack pointer  (restored as callee's sp after C call)
2654   // cp: current context  (C callee-saved)
2655   //
2656   // If argv_mode == kArgvInRegister:
2657   // r5: pointer to the first argument
2658 
2659   __ mr(r15, r4);
2660 
2661   if (argv_mode == kArgvInRegister) {
2662     // Move argv into the correct register.
2663     __ mr(r4, r5);
2664   } else {
2665     // Compute the argv pointer.
2666     __ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2));
2667     __ add(r4, r4, sp);
2668     __ subi(r4, r4, Operand(kPointerSize));
2669   }
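  // As a sketch of the arithmetic above: with r3 == 3 arguments and 8-byte
  // pointers, r4 becomes sp + 3 * 8 - 8 == sp + 16, the highest-addressed of
  // the three argument slots starting at sp, which this convention treats as
  // the first argument.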
2670 
2671   // Enter the exit frame that transitions from JavaScript to C++.
2672   FrameScope scope(masm, StackFrame::MANUAL);
2673 
2674   // Need at least one extra slot for return address location.
2675   int arg_stack_space = 1;
2676 
2677   // Pass buffer for return value on stack if necessary
2678   bool needs_return_buffer =
2679       (result_size == 2 && !ABI_RETURNS_OBJECT_PAIRS_IN_REGS);
2680   if (needs_return_buffer) {
2681     arg_stack_space += result_size;
2682   }
2683 
2684   __ EnterExitFrame(
2685       save_doubles, arg_stack_space,
2686       builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
2687 
2688   // Store a copy of argc in callee-saved registers for later.
2689   __ mr(r14, r3);
2690 
2691   // r3, r14: number of arguments including receiver  (C callee-saved)
2692   // r4: pointer to the first argument
2693   // r15: pointer to builtin function  (C callee-saved)
2694 
2695   // Result returned in registers or stack, depending on result size and ABI.
2696 
2697   Register isolate_reg = r5;
2698   if (needs_return_buffer) {
2699     // The return value is a non-scalar value.
2700     // Use frame storage reserved by calling function to pass return
2701     // buffer as implicit first argument.
2702     __ mr(r5, r4);
2703     __ mr(r4, r3);
2704     __ addi(r3, sp, Operand((kStackFrameExtraParamSlot + 1) * kPointerSize));
2705     isolate_reg = r6;
2706   }
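  // Rough sketch of the shuffle above when a return buffer is needed: the C
  // builtin is effectively invoked as f(buffer, argc, argv, isolate), with
  // the buffer carved out of the arg_stack_space reserved by EnterExitFrame,
  // instead of the usual f(argc, argv, isolate).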
2707 
2708   // Call C built-in.
2709   __ Move(isolate_reg, ExternalReference::isolate_address(masm->isolate()));
2710 
2711   Register target = r15;
2712   __ StoreReturnAddressAndCall(target);
2713 
2714   // If return value is on the stack, pop it to registers.
2715   if (needs_return_buffer) {
2716     __ LoadP(r4, MemOperand(r3, kPointerSize));
2717     __ LoadP(r3, MemOperand(r3));
2718   }
2719 
2720   // Check result for exception sentinel.
2721   Label exception_returned;
2722   __ CompareRoot(r3, RootIndex::kException);
2723   __ beq(&exception_returned);
2724 
2725   // Check that there is no pending exception, otherwise we
2726   // should have returned the exception sentinel.
2727   if (FLAG_debug_code) {
2728     Label okay;
2729     ExternalReference pending_exception_address = ExternalReference::Create(
2730         IsolateAddressId::kPendingExceptionAddress, masm->isolate());
2731 
2732     __ Move(r6, pending_exception_address);
2733     __ LoadP(r6, MemOperand(r6));
2734     __ CompareRoot(r6, RootIndex::kTheHoleValue);
2735     // Cannot use Check here as it would generate a call into the runtime.
2736     __ beq(&okay);
2737     __ stop();
2738     __ bind(&okay);
2739   }
2740 
2741   // Exit C frame and return.
2742   // r3:r4: result
2743   // sp: stack pointer
2744   // fp: frame pointer
2745   Register argc = argv_mode == kArgvInRegister
2746                       // We don't want to pop arguments so set argc to no_reg.
2747                       ? no_reg
2748                       // r14: still holds argc (callee-saved).
2749                       : r14;
2750   __ LeaveExitFrame(save_doubles, argc);
2751   __ blr();
2752 
2753   // Handling of exception.
2754   __ bind(&exception_returned);
2755 
2756   ExternalReference pending_handler_context_address = ExternalReference::Create(
2757       IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
2758   ExternalReference pending_handler_entrypoint_address =
2759       ExternalReference::Create(
2760           IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
2761   ExternalReference pending_handler_constant_pool_address =
2762       ExternalReference::Create(
2763           IsolateAddressId::kPendingHandlerConstantPoolAddress,
2764           masm->isolate());
2765   ExternalReference pending_handler_fp_address = ExternalReference::Create(
2766       IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
2767   ExternalReference pending_handler_sp_address = ExternalReference::Create(
2768       IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
2769 
2770   // Ask the runtime for help to determine the handler. This will set r3 to
2771   // contain the current pending exception; don't clobber it.
2772   ExternalReference find_handler =
2773       ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
2774   {
2775     FrameScope scope(masm, StackFrame::MANUAL);
2776     __ PrepareCallCFunction(3, 0, r3);
2777     __ li(r3, Operand::Zero());
2778     __ li(r4, Operand::Zero());
2779     __ Move(r5, ExternalReference::isolate_address(masm->isolate()));
2780     __ CallCFunction(find_handler, 3);
2781   }
2782 
2783   // Retrieve the handler context, SP and FP.
2784   __ Move(cp, pending_handler_context_address);
2785   __ LoadP(cp, MemOperand(cp));
2786   __ Move(sp, pending_handler_sp_address);
2787   __ LoadP(sp, MemOperand(sp));
2788   __ Move(fp, pending_handler_fp_address);
2789   __ LoadP(fp, MemOperand(fp));
2790 
2791   // If the handler is a JS frame, restore the context to the frame. Note that
2792   // the context will be set to (cp == 0) for non-JS frames.
2793   Label skip;
2794   __ cmpi(cp, Operand::Zero());
2795   __ beq(&skip);
2796   __ StoreP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2797   __ bind(&skip);
2798 
2799   // Reset the masking register. This is done independent of the underlying
2800   // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
2801   // with both configurations. It is safe to always do this, because the
2802   // underlying register is caller-saved and can be arbitrarily clobbered.
2803   __ ResetSpeculationPoisonRegister();
2804 
2805   // Compute the handler entry address and jump to it.
2806   ConstantPoolUnavailableScope constant_pool_unavailable(masm);
2807   __ Move(ip, pending_handler_entrypoint_address);
2808   __ LoadP(ip, MemOperand(ip));
2809   if (FLAG_enable_embedded_constant_pool) {
2810     __ Move(kConstantPoolRegister, pending_handler_constant_pool_address);
2811     __ LoadP(kConstantPoolRegister, MemOperand(kConstantPoolRegister));
2812   }
2813   __ Jump(ip);
2814 }
2815 
2816 void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
2817   Label out_of_range, only_low, negate, done, fastpath_done;
2818   Register result_reg = r3;
2819 
2820   HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
2821 
2822   // Immediate values for this stub fit in instructions, so it's safe to use ip.
2823   Register scratch = GetRegisterThatIsNotOneOf(result_reg);
2824   Register scratch_low = GetRegisterThatIsNotOneOf(result_reg, scratch);
2825   Register scratch_high =
2826       GetRegisterThatIsNotOneOf(result_reg, scratch, scratch_low);
2827   DoubleRegister double_scratch = kScratchDoubleReg;
2828 
2829   __ Push(result_reg, scratch);
2830   // Account for saved regs.
2831   int argument_offset = 2 * kPointerSize;
2832 
2833   // Load double input.
2834   __ lfd(double_scratch, MemOperand(sp, argument_offset));
2835 
2836   // Do fast-path convert from double to int.
2837   __ ConvertDoubleToInt64(double_scratch,
2838 #if !V8_TARGET_ARCH_PPC64
2839                           scratch,
2840 #endif
2841                           result_reg, d0);
2842 
2843 // Test for overflow
2844 #if V8_TARGET_ARCH_PPC64
2845   __ TestIfInt32(result_reg, r0);
2846 #else
2847   __ TestIfInt32(scratch, result_reg, r0);
2848 #endif
2849   __ beq(&fastpath_done);
2850 
2851   __ Push(scratch_high, scratch_low);
2852   // Account for saved regs.
2853   argument_offset += 2 * kPointerSize;
2854 
2855   __ lwz(scratch_high,
2856          MemOperand(sp, argument_offset + Register::kExponentOffset));
2857   __ lwz(scratch_low,
2858          MemOperand(sp, argument_offset + Register::kMantissaOffset));
2859 
2860   __ ExtractBitMask(scratch, scratch_high, HeapNumber::kExponentMask);
2861   // Load scratch with exponent - 1. This is faster than loading
2862   // with exponent because Bias + 1 = 1024 which is a *PPC* immediate value.
2863   STATIC_ASSERT(HeapNumber::kExponentBias + 1 == 1024);
2864   __ subi(scratch, scratch, Operand(HeapNumber::kExponentBias + 1));
2865   // If the exponent is greater than or equal to 84, the 32 least significant
2866   // bits of the integer value are all zero (the lowest mantissa bit then has
2867   // weight at least 2^(84 - 52) = 2^32), so the result is 0.
2868   // Compare exponent with 84 (compare exponent - 1 with 83).
2869   __ cmpi(scratch, Operand(83));
2870   __ bge(&out_of_range);
2871 
2872   // If we reach this code, 31 <= exponent <= 83.
2873   // So, we don't have to handle cases where 0 <= exponent <= 20 for
2874   // which we would need to shift right the high part of the mantissa.
2875   // Scratch contains exponent - 1.
2876   // Load scratch with 52 - exponent (load with 51 - (exponent - 1)).
2877   __ subfic(scratch, scratch, Operand(51));
2878   __ cmpi(scratch, Operand::Zero());
2879   __ ble(&only_low);
2880   // 21 <= exponent <= 51, shift scratch_low and scratch_high
2881   // to generate the result.
2882   __ srw(scratch_low, scratch_low, scratch);
2883   // Scratch contains: 52 - exponent.
2884   // We need: exponent - 20.
2885   // So we use: 32 - scratch = 32 - 52 + exponent = exponent - 20.
2886   __ subfic(scratch, scratch, Operand(32));
2887   __ ExtractBitMask(result_reg, scratch_high, HeapNumber::kMantissaMask);
2888   // Set the implicit 1 before the mantissa part in scratch_high.
2889   STATIC_ASSERT(HeapNumber::kMantissaBitsInTopWord >= 16);
2890   __ oris(result_reg, result_reg,
2891           Operand(1 << ((HeapNumber::kMantissaBitsInTopWord)-16)));
2892   __ slw(r0, result_reg, scratch);
2893   __ orx(result_reg, scratch_low, r0);
2894   __ b(&negate);
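  // Rough worked example of the shift amounts above: for an input with
  // unbiased exponent 40, scratch holds 39 after the earlier subtraction,
  // the first subfic leaves 52 - 40 = 12 (the number of low-mantissa bits
  // kept by the srw), and the second subfic leaves 32 - 12 = 20
  // == exponent - 20, the left shift applied to the high mantissa bits.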
2895 
2896   __ bind(&out_of_range);
2897   __ mov(result_reg, Operand::Zero());
2898   __ b(&done);
2899 
2900   __ bind(&only_low);
2901   // 52 <= exponent <= 83, shift only scratch_low.
2902   // On entry, scratch contains: 52 - exponent.
2903   __ neg(scratch, scratch);
2904   __ slw(result_reg, scratch_low, scratch);
2905 
2906   __ bind(&negate);
2907   // If the input was positive, scratch_high ASR 31 equals 0 and
2908   // scratch_high LSR 31 equals 0.
2909   // New result = (result eor 0) + 0 = result.
2910   // If the input was negative, we have to negate the result.
2911   // scratch_high ASR 31 equals 0xFFFFFFFF and scratch_high LSR 31 equals 1.
2912   // New result = (result eor 0xFFFFFFFF) + 1 = 0 - result.
2913   __ srawi(r0, scratch_high, 31);
2914 #if V8_TARGET_ARCH_PPC64
2915   __ srdi(r0, r0, Operand(32));
2916 #endif
2917   __ xor_(result_reg, result_reg, r0);
2918   __ srwi(r0, scratch_high, Operand(31));
2919   __ add(result_reg, result_reg, r0);
2920 
2921   __ bind(&done);
2922   __ Pop(scratch_high, scratch_low);
2923   // Account for saved regs.
2924   argument_offset -= 2 * kPointerSize;
2925 
2926   __ bind(&fastpath_done);
2927   __ StoreP(result_reg, MemOperand(sp, argument_offset));
2928   __ Pop(result_reg, scratch);
2929 
2930   __ Ret();
2931 }
2932 
2933 namespace {
2934 
2935 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
2936   return ref0.address() - ref1.address();
2937 }
2938 
2939 
2940 // Calls an API function. Allocates a HandleScope, extracts the returned
2941 // value from the handle, and propagates exceptions. Restores the context.
2942 // stack_space - space to be unwound on exit (includes the space for the JS
2943 // call arguments and the additional space allocated for the fast call).
2944 static void CallApiFunctionAndReturn(MacroAssembler* masm,
2945                                      Register function_address,
2946                                      ExternalReference thunk_ref,
2947                                      int stack_space,
2948                                      MemOperand* stack_space_operand,
2949                                      MemOperand return_value_operand) {
2950   Isolate* isolate = masm->isolate();
2951   ExternalReference next_address =
2952       ExternalReference::handle_scope_next_address(isolate);
2953   const int kNextOffset = 0;
2954   const int kLimitOffset = AddressOffset(
2955       ExternalReference::handle_scope_limit_address(isolate), next_address);
2956   const int kLevelOffset = AddressOffset(
2957       ExternalReference::handle_scope_level_address(isolate), next_address);
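  // The limit and level offsets are byte distances from next_address, so once
  // next_address is materialized in r17 below, all three HandleScope fields
  // can be reached from that single base register.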
2958 
2959   // Additional parameter is the address of the actual callback.
2960   DCHECK(function_address == r4 || function_address == r5);
2961   Register scratch = r6;
2962 
2963   __ Move(scratch, ExternalReference::is_profiling_address(isolate));
2964   __ lbz(scratch, MemOperand(scratch, 0));
2965   __ cmpi(scratch, Operand::Zero());
2966 
2967   if (CpuFeatures::IsSupported(ISELECT)) {
2968     __ Move(scratch, thunk_ref);
2969     __ isel(eq, scratch, function_address, scratch);
2970   } else {
2971     Label profiler_enabled, end_profiler_check;
2972     __ bne(&profiler_enabled);
2973     __ Move(scratch, ExternalReference::address_of_runtime_stats_flag());
2974     __ lwz(scratch, MemOperand(scratch, 0));
2975     __ cmpi(scratch, Operand::Zero());
2976     __ bne(&profiler_enabled);
2977     {
2978       // Call the api function directly.
2979       __ mr(scratch, function_address);
2980       __ b(&end_profiler_check);
2981     }
2982     __ bind(&profiler_enabled);
2983     {
2984       // Additional parameter is the address of the actual callback.
2985       __ Move(scratch, thunk_ref);
2986     }
2987     __ bind(&end_profiler_check);
2988   }
2989 
2990   // Allocate HandleScope in callee-save registers.
2991   // r17 - next_address
2992   // r14 - next_address->kNextOffset
2993   // r15 - next_address->kLimitOffset
2994   // r16 - next_address->kLevelOffset
2995   __ Move(r17, next_address);
2996   __ LoadP(r14, MemOperand(r17, kNextOffset));
2997   __ LoadP(r15, MemOperand(r17, kLimitOffset));
2998   __ lwz(r16, MemOperand(r17, kLevelOffset));
2999   __ addi(r16, r16, Operand(1));
3000   __ stw(r16, MemOperand(r17, kLevelOffset));
3001 
3002   __ StoreReturnAddressAndCall(scratch);
3003 
3004   Label promote_scheduled_exception;
3005   Label delete_allocated_handles;
3006   Label leave_exit_frame;
3007   Label return_value_loaded;
3008 
3009   // load value from ReturnValue
3010   __ LoadP(r3, return_value_operand);
3011   __ bind(&return_value_loaded);
3012   // No more valid handles (the result handle was the last one). Restore
3013   // previous handle scope.
3014   __ StoreP(r14, MemOperand(r17, kNextOffset));
3015   if (__ emit_debug_code()) {
3016     __ lwz(r4, MemOperand(r17, kLevelOffset));
3017     __ cmp(r4, r16);
3018     __ Check(eq, AbortReason::kUnexpectedLevelAfterReturnFromApiCall);
3019   }
3020   __ subi(r16, r16, Operand(1));
3021   __ stw(r16, MemOperand(r17, kLevelOffset));
3022   __ LoadP(r0, MemOperand(r17, kLimitOffset));
3023   __ cmp(r15, r0);
3024   __ bne(&delete_allocated_handles);
3025 
3026   // Leave the API exit frame.
3027   __ bind(&leave_exit_frame);
3028   // LeaveExitFrame expects unwind space to be in a register.
3029   if (stack_space_operand != nullptr) {
3030     __ LoadP(r14, *stack_space_operand);
3031   } else {
3032     __ mov(r14, Operand(stack_space));
3033   }
3034   __ LeaveExitFrame(false, r14, stack_space_operand != nullptr);
3035 
3036   // Check if the function scheduled an exception.
3037   __ LoadRoot(r14, RootIndex::kTheHoleValue);
3038   __ Move(r15, ExternalReference::scheduled_exception_address(isolate));
3039   __ LoadP(r15, MemOperand(r15));
3040   __ cmp(r14, r15);
3041   __ bne(&promote_scheduled_exception);
3042 
3043   __ blr();
3044 
3045   // Re-throw by promoting a scheduled exception.
3046   __ bind(&promote_scheduled_exception);
3047   __ TailCallRuntime(Runtime::kPromoteScheduledException);
3048 
3049   // HandleScope limit has changed. Delete allocated extensions.
3050   __ bind(&delete_allocated_handles);
3051   __ StoreP(r15, MemOperand(r17, kLimitOffset));
3052   __ mr(r14, r3);
3053   __ PrepareCallCFunction(1, r15);
3054   __ Move(r3, ExternalReference::isolate_address(isolate));
3055   __ CallCFunction(ExternalReference::delete_handle_scope_extensions(), 1);
3056   __ mr(r3, r14);
3057   __ b(&leave_exit_frame);
3058 }
3059 
3060 }  // namespace
3061 
3062 void Builtins::Generate_CallApiCallback(MacroAssembler* masm) {
3063   // ----------- S t a t e -------------
3064   //  -- cp                  : context
3065   //  -- r4                  : api function address
3066   //  -- r5                  : arguments count (not including the receiver)
3067   //  -- r6                  : call data
3068   //  -- r3                  : holder
3069   //  -- sp[0]               : last argument
3070   //  -- ...
3071   //  -- sp[(argc - 1)* 4]   : first argument
3072   //  -- sp[(argc + 0) * 4]  : receiver
3073   // -----------------------------------
3074 
3075   Register api_function_address = r4;
3076   Register argc = r5;
3077   Register call_data = r6;
3078   Register holder = r3;
3079   Register scratch = r7;
3080   DCHECK(!AreAliased(api_function_address, argc, call_data, holder, scratch));
3081 
3082   using FCA = FunctionCallbackArguments;
3083 
3084   STATIC_ASSERT(FCA::kArgsLength == 6);
3085   STATIC_ASSERT(FCA::kNewTargetIndex == 5);
3086   STATIC_ASSERT(FCA::kDataIndex == 4);
3087   STATIC_ASSERT(FCA::kReturnValueOffset == 3);
3088   STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
3089   STATIC_ASSERT(FCA::kIsolateIndex == 1);
3090   STATIC_ASSERT(FCA::kHolderIndex == 0);
3091 
3092   // Set up FunctionCallbackInfo's implicit_args on the stack as follows:
3093   //
3094   // Target state:
3095   //   sp[0 * kPointerSize]: kHolder
3096   //   sp[1 * kPointerSize]: kIsolate
3097   //   sp[2 * kPointerSize]: undefined (kReturnValueDefaultValue)
3098   //   sp[3 * kPointerSize]: undefined (kReturnValue)
3099   //   sp[4 * kPointerSize]: kData
3100   //   sp[5 * kPointerSize]: undefined (kNewTarget)
3101 
3102   // Reserve space on the stack.
3103   __ subi(sp, sp, Operand(FCA::kArgsLength * kPointerSize));
3104 
3105   // kHolder.
3106   __ StoreP(holder, MemOperand(sp, 0 * kPointerSize));
3107 
3108   // kIsolate.
3109   __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
3110   __ StoreP(scratch, MemOperand(sp, 1 * kPointerSize));
3111 
3112   // kReturnValueDefaultValue and kReturnValue.
3113   __ LoadRoot(scratch, RootIndex::kUndefinedValue);
3114   __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize));
3115   __ StoreP(scratch, MemOperand(sp, 3 * kPointerSize));
3116 
3117   // kData.
3118   __ StoreP(call_data, MemOperand(sp, 4 * kPointerSize));
3119 
3120   // kNewTarget.
3121   __ StoreP(scratch, MemOperand(sp, 5 * kPointerSize));
3122 
3123   // Keep a pointer to kHolder (= implicit_args) in a scratch register.
3124   // We use it below to set up the FunctionCallbackInfo object.
3125   __ mr(scratch, sp);
3126 
3127   // Allocate the v8::Arguments structure in the arguments' space since
3128   // it's not controlled by GC.
3129   // PPC LINUX ABI:
3130   //
3131   // Create 4 extra slots on stack:
3132   //    [0] space for DirectCEntryStub's LR save
3133   //    [1-3] FunctionCallbackInfo
3134   //    [4] number of bytes to drop from the stack after returning
3135   static constexpr int kApiStackSpace = 5;
3136   static constexpr bool kDontSaveDoubles = false;
3137 
3138   FrameScope frame_scope(masm, StackFrame::MANUAL);
3139   __ EnterExitFrame(kDontSaveDoubles, kApiStackSpace);
3140 
3141   // FunctionCallbackInfo::implicit_args_ (points at kHolder as set up above).
3142   // Arguments are after the return address (pushed by EnterExitFrame()).
3143   __ StoreP(scratch,
3144             MemOperand(sp, (kStackFrameExtraParamSlot + 1) * kPointerSize));
3145 
3146   // FunctionCallbackInfo::values_ (points at the first varargs argument passed
3147   // on the stack).
3148   __ addi(scratch, scratch, Operand((FCA::kArgsLength - 1) * kPointerSize));
3149   __ ShiftLeftImm(ip, argc, Operand(kPointerSizeLog2));
3150   __ add(scratch, scratch, ip);
3151   __ StoreP(scratch,
3152             MemOperand(sp, (kStackFrameExtraParamSlot + 2) * kPointerSize));
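  // Rough example of the address computed above: with argc == 2 and 8-byte
  // pointers, values_ == implicit_args + (6 - 1) * 8 + 2 * 8 ==
  // implicit_args + 56, i.e. the slot of the first JS argument now that the
  // six implicit slots have been reserved below it.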
3153 
3154   // FunctionCallbackInfo::length_.
3155   __ stw(argc, MemOperand(sp, (kStackFrameExtraParamSlot + 3) * kPointerSize));
3156 
3157   // We also store the number of bytes to drop from the stack after returning
3158   // from the API function here.
3159   __ mov(scratch,
3160          Operand((FCA::kArgsLength + 1 /* receiver */) * kPointerSize));
3161   __ ShiftLeftImm(ip, argc, Operand(kPointerSizeLog2));
3162   __ add(scratch, scratch, ip);
3163   __ StoreP(scratch,
3164             MemOperand(sp, (kStackFrameExtraParamSlot + 4) * kPointerSize));
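  // Continuing the argc == 2 example: (6 + 1 + 2) * 8 == 72 bytes are
  // recorded here (the six implicit slots, the receiver and the two JS
  // arguments), which CallApiFunctionAndReturn later drops via
  // stack_space_operand.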
3165 
3166   // v8::InvocationCallback's argument.
3167   __ addi(r3, sp, Operand((kStackFrameExtraParamSlot + 1) * kPointerSize));
3168 
3169   ExternalReference thunk_ref = ExternalReference::invoke_function_callback();
3170 
3171   // There are two stack slots above the arguments we constructed on the stack.
3172   // TODO(jgruber): Document what these arguments are.
3173   static constexpr int kStackSlotsAboveFCA = 2;
3174   MemOperand return_value_operand(
3175       fp, (kStackSlotsAboveFCA + FCA::kReturnValueOffset) * kPointerSize);
3176 
3177   static constexpr int kUseStackSpaceOperand = 0;
3178   MemOperand stack_space_operand(
3179       sp, (kStackFrameExtraParamSlot + 4) * kPointerSize);
3180 
3181   AllowExternalCallThatCantCauseGC scope(masm);
3182   CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
3183                            kUseStackSpaceOperand, &stack_space_operand,
3184                            return_value_operand);
3185 }
3186 
3187 
3188 void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
3189   int arg0Slot = 0;
3190   int accessorInfoSlot = 0;
3191   int apiStackSpace = 0;
3192   // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
3193   // name below the exit frame to make GC aware of them.
3194   STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
3195   STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
3196   STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
3197   STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
3198   STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
3199   STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
3200   STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
3201   STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);
3202 
3203   Register receiver = ApiGetterDescriptor::ReceiverRegister();
3204   Register holder = ApiGetterDescriptor::HolderRegister();
3205   Register callback = ApiGetterDescriptor::CallbackRegister();
3206   Register scratch = r7;
3207   DCHECK(!AreAliased(receiver, holder, callback, scratch));
3208 
3209   Register api_function_address = r5;
3210 
3211   __ push(receiver);
3212   // Push data from AccessorInfo.
3213   __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset));
3214   __ push(scratch);
3215   __ LoadRoot(scratch, RootIndex::kUndefinedValue);
3216   __ Push(scratch, scratch);
3217   __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
3218   __ Push(scratch, holder);
3219   __ Push(Smi::zero());  // should_throw_on_error -> false
3220   __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));
3221   __ push(scratch);
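  // Roughly, assuming Push(a, b) leaves b closest to sp, the stack now reads
  // (from sp upwards): name handle, should_throw_on_error (Smi 0), holder,
  // isolate, undefined (return value default), undefined (return value),
  // data, receiver, matching the index asserts above with the name handle
  // sitting just below the args_ array.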
3222 
3223   // v8::PropertyCallbackInfo::args_ array and name handle.
3224   const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
3225 
3226   // Load address of v8::PropertyAccessorInfo::args_ array and name handle.
3227   __ mr(r3, sp);                               // r3 = Handle<Name>
3228   __ addi(r4, r3, Operand(1 * kPointerSize));  // r4 = v8::PCI::args_
3229 
3230 // If ABI passes Handles (pointer-sized struct) in a register:
3231 //
3232 // Create 2 extra slots on stack:
3233 //    [0] space for DirectCEntryStub's LR save
3234 //    [1] AccessorInfo&
3235 //
3236 // Otherwise:
3237 //
3238 // Create 3 extra slots on stack:
3239 //    [0] space for DirectCEntryStub's LR save
3240 //    [1] copy of Handle (first arg)
3241 //    [2] AccessorInfo&
3242   if (ABI_PASSES_HANDLES_IN_REGS) {
3243     accessorInfoSlot = kStackFrameExtraParamSlot + 1;
3244     apiStackSpace = 2;
3245   } else {
3246     arg0Slot = kStackFrameExtraParamSlot + 1;
3247     accessorInfoSlot = arg0Slot + 1;
3248     apiStackSpace = 3;
3249   }
3250 
3251   FrameScope frame_scope(masm, StackFrame::MANUAL);
3252   __ EnterExitFrame(false, apiStackSpace);
3253 
3254   if (!ABI_PASSES_HANDLES_IN_REGS) {
3255     // pass 1st arg by reference
3256     __ StoreP(r3, MemOperand(sp, arg0Slot * kPointerSize));
3257     __ addi(r3, sp, Operand(arg0Slot * kPointerSize));
3258   }
3259 
3260   // Create a v8::PropertyCallbackInfo object on the stack and initialize
3261   // its args_ field.
3262   __ StoreP(r4, MemOperand(sp, accessorInfoSlot * kPointerSize));
3263   __ addi(r4, sp, Operand(accessorInfoSlot * kPointerSize));
3264   // r4 = v8::PropertyCallbackInfo&
3265 
3266   ExternalReference thunk_ref =
3267       ExternalReference::invoke_accessor_getter_callback();
3268 
3269   __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset));
3270   __ LoadP(api_function_address,
3271            FieldMemOperand(scratch, Foreign::kForeignAddressOffset));
3272 
3273   // +3 is to skip prolog, return address and name handle.
3274   MemOperand return_value_operand(
3275       fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
3276   MemOperand* const kUseStackSpaceConstant = nullptr;
3277   CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
3278                            kStackUnwindSpace, kUseStackSpaceConstant,
3279                            return_value_operand);
3280 }
3281 
3282 void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
3283   UseScratchRegisterScope temps(masm);
3284   Register temp2 = temps.Acquire();
3285   // Place the return address on the stack, making the call
3286   // GC safe. The RegExp backend also relies on this.
3287   __ mflr(r0);
3288   __ StoreP(r0, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize));
3289 
3290   if (ABI_USES_FUNCTION_DESCRIPTORS) {
3291     // AIX/PPC64BE Linux use a function descriptor.
3292     __ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(temp2, kPointerSize));
3293     __ LoadP(temp2, MemOperand(temp2, 0));  // Instruction address
3294   }
3295 
3296   __ Call(temp2);  // Call the C++ function.
3297   __ LoadP(r0, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize));
3298   __ mtlr(r0);
3299   __ blr();
3300 }
3301 
3302 #undef __
3303 }  // namespace internal
3304 }  // namespace v8
3305 
3306 #endif  // V8_TARGET_ARCH_PPC || V8_TARGET_ARCH_PPC64
3307