// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS64

#include "src/api/api-arguments.h"
#include "src/codegen/code-factory.h"
#include "src/debug/debug.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/frame-constants.h"
#include "src/execution/frames.h"
#include "src/logging/counters.h"
// For interpreter_entry_return_pc_offset. TODO(jkummerow): Drop.
#include "src/codegen/macro-assembler-inl.h"
#include "src/codegen/mips64/constants-mips64.h"
#include "src/codegen/register-configuration.h"
#include "src/heap/heap-inl.h"
#include "src/objects/cell.h"
#include "src/objects/foreign.h"
#include "src/objects/heap-number.h"
#include "src/objects/js-generator.h"
#include "src/objects/objects-inl.h"
#include "src/objects/smi.h"
#include "src/runtime/runtime.h"
#include "src/wasm/wasm-linkage.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address) {
  __ li(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));
  __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
          RelocInfo::CODE_TARGET);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- a1 : target function (preserved for callee)
  //  -- a3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target, plus the
    // function again as an argument to the runtime call below.
    __ Push(a1, a3, a1);

    __ CallRuntime(function_id, 1);
    // Restore target function and new target.
    __ Pop(a1, a3);
  }

  static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
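  // The runtime call above returns the Code object in v0. Adding
  // (Code::kHeaderSize - kHeapObjectTag) skips the Code header and clears the
  // heap-object tag, yielding the address of the first instruction.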
  __ Daddu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(a2);
}

namespace {

enum StackLimitKind { kInterruptStackLimit, kRealStackLimit };

void LoadStackLimit(MacroAssembler* masm, Register destination,
                    StackLimitKind kind) {
  DCHECK(masm->root_array_available());
  Isolate* isolate = masm->isolate();
  ExternalReference limit =
      kind == StackLimitKind::kRealStackLimit
          ? ExternalReference::address_of_real_jslimit(isolate)
          : ExternalReference::address_of_jslimit(isolate);
  DCHECK(TurboAssembler::IsAddressableThroughRootRegister(isolate, limit));

  intptr_t offset =
      TurboAssembler::RootRegisterOffsetForExternalReference(isolate, limit);
  CHECK(is_int32(offset));
  __ Ld(destination, MemOperand(kRootRegister, static_cast<int32_t>(offset)));
}

void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a3     : new target
  //  -- cp     : context
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(a0);
    __ Push(cp, a0);
    __ SmiUntag(a0);

    // The receiver for the builtin/api call.
    __ PushRoot(RootIndex::kTheHoleValue);

    // Set up pointer to last argument.
    __ Daddu(t2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
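    // Arguments are copied from the caller's frame highest-index first, so
    // that argument 0 ends up at the top of the stack (sp[0]).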
    Label loop, entry;
    __ mov(t3, a0);
    // ----------- S t a t e -------------
    //  --                        a0: number of arguments (untagged)
    //  --                        a3: new target
    //  --                        t2: pointer to last argument
    //  --                        t3: counter
    //  --        sp[0*kPointerSize]: the hole (receiver)
    //  --        sp[1*kPointerSize]: number of arguments (tagged)
    //  --        sp[2*kPointerSize]: context
    // -----------------------------------
    __ jmp(&entry);
    __ bind(&loop);
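    // t0 = t2 + (t3 << kPointerSizeLog2): the address of argument t3 in the
    // caller's frame.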
    __ Dlsa(t0, t2, t3, kPointerSizeLog2);
    __ Ld(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Daddu(t3, t3, Operand(-1));
    __ Branch(&loop, greater_equal, t3, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments (untagged)
    // a1: constructor function
    // a3: new target
    __ InvokeFunctionWithNewTarget(a1, a3, a0, CALL_FUNCTION);

    // Restore context from the frame.
    __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    // Restore smi-tagged arguments count from the frame.
    __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
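  // SmiScale converts the Smi-tagged argument count in a1 into a byte
  // offset; the extra kPointerSize pops the receiver as well.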
  __ SmiScale(a4, a1, kPointerSizeLog2);
  __ Daddu(sp, sp, a4);
  __ Daddu(sp, sp, kPointerSize);
  __ Ret();
}

static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                        Register scratch1, Register scratch2,
                                        Label* stack_overflow) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  LoadStackLimit(masm, scratch1, StackLimitKind::kRealStackLimit);
  // Make scratch1 the space we have left. The stack might already have
  // overflowed here, which will cause scratch1 to become negative.
  __ dsubu(scratch1, sp, scratch1);
  // Check if the arguments will overflow the stack.
  __ dsll(scratch2, num_args, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, scratch1, Operand(scratch2));
}

}  // namespace

// The construct stub for ES5 constructor functions and ES6 class constructors.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  --      a0: number of arguments (untagged)
  //  --      a1: constructor function
  //  --      a3: new target
  //  --      cp: context
  //  --      ra: return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);
    Label post_instantiation_deopt_entry, not_create_implicit_receiver;

    // Preserve the incoming parameters on the stack.
    __ SmiTag(a0);
    __ Push(cp, a0, a1);
    __ PushRoot(RootIndex::kTheHoleValue);
    __ Push(a3);

    // ----------- S t a t e -------------
    //  --        sp[0*kPointerSize]: new target
    //  --        sp[1*kPointerSize]: padding
    //  -- a1 and sp[2*kPointerSize]: constructor function
    //  --        sp[3*kPointerSize]: number of arguments (tagged)
    //  --        sp[4*kPointerSize]: context
    // -----------------------------------

    __ Ld(t2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lwu(t2, FieldMemOperand(t2, SharedFunctionInfo::kFlagsOffset));
    __ DecodeField<SharedFunctionInfo::FunctionKindBits>(t2);
    __ JumpIfIsInRange(t2, kDefaultDerivedConstructor, kDerivedConstructor,
                       &not_create_implicit_receiver);

    // If not derived class constructor: Allocate the new receiver object.
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
                        t2, t3);
    __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
            RelocInfo::CODE_TARGET);
    __ Branch(&post_instantiation_deopt_entry);

    // Else: use TheHoleValue as receiver for constructor call
    __ bind(&not_create_implicit_receiver);
    __ LoadRoot(v0, RootIndex::kTheHoleValue);

    // ----------- S t a t e -------------
    //  --                          v0: receiver
    //  -- Slot 4 / sp[0*kPointerSize]: new target
    //  -- Slot 3 / sp[1*kPointerSize]: padding
    //  -- Slot 2 / sp[2*kPointerSize]: constructor function
    //  -- Slot 1 / sp[3*kPointerSize]: number of arguments (tagged)
    //  -- Slot 0 / sp[4*kPointerSize]: context
    // -----------------------------------
    // Deoptimizer enters here.
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());
    __ bind(&post_instantiation_deopt_entry);

    // Restore new target.
    __ Pop(a3);
    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ Push(v0, v0);

    // ----------- S t a t e -------------
    //  --                 a3: new target
    //  -- sp[0*kPointerSize]: implicit receiver
    //  -- sp[1*kPointerSize]: implicit receiver
    //  -- sp[2*kPointerSize]: padding
    //  -- sp[3*kPointerSize]: constructor function
    //  -- sp[4*kPointerSize]: number of arguments (tagged)
    //  -- sp[5*kPointerSize]: context
    // -----------------------------------

    // Restore constructor function and argument count.
    __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
    __ Ld(a0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    __ SmiUntag(a0);

    // Set up pointer to last argument.
    __ Daddu(t2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, a0, t0, t1, &stack_overflow);
    __ Branch(&enough_stack_space);

    __ bind(&stack_overflow);
    // Restore the context from the frame.
    __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ break_(0xCC);

    __ bind(&enough_stack_space);

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ mov(t3, a0);
    // ----------- S t a t e -------------
    //  --                        a0: number of arguments (untagged)
    //  --                        a3: new target
    //  --                        t2: pointer to last argument
    //  --                        t3: counter
    //  --        sp[0*kPointerSize]: implicit receiver
    //  --        sp[1*kPointerSize]: implicit receiver
    //  --        sp[2*kPointerSize]: padding
    //  -- a1 and sp[3*kPointerSize]: constructor function
    //  --        sp[4*kPointerSize]: number of arguments (tagged)
    //  --        sp[5*kPointerSize]: context
    // -----------------------------------
    __ jmp(&entry);
    __ bind(&loop);
    __ Dlsa(t0, t2, t3, kPointerSizeLog2);
    __ Ld(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Daddu(t3, t3, Operand(-1));
    __ Branch(&loop, greater_equal, t3, Operand(zero_reg));

    // Call the function.
    __ InvokeFunctionWithNewTarget(a1, a3, a0, CALL_FUNCTION);

    // ----------- S t a t e -------------
    //  --                 v0: constructor result
    //  -- sp[0*kPointerSize]: implicit receiver
    //  -- sp[1*kPointerSize]: padding
    //  -- sp[2*kPointerSize]: constructor function
    //  -- sp[3*kPointerSize]: number of arguments
    //  -- sp[4*kPointerSize]: context
    // -----------------------------------

    // Store offset of return address for deoptimizer.
    masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
        masm->pc_offset());

    // Restore the context from the frame.
    __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, do_throw, leave_frame;

    // If the result is undefined, we jump out to using the implicit receiver.
    __ JumpIfRoot(v0, RootIndex::kUndefinedValue, &use_receiver);

    // Otherwise we do a smi check and fall through to check if the return value
    // is a valid receiver.

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(v0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
    __ GetObjectType(v0, t2, t2);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ Branch(&leave_frame, greater_equal, t2, Operand(FIRST_JS_RECEIVER_TYPE));
    __ Branch(&use_receiver);

    __ bind(&do_throw);
    __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ Ld(v0, MemOperand(sp, 0 * kPointerSize));
    __ JumpIfRoot(v0, RootIndex::kTheHoleValue, &do_throw);

    __ bind(&leave_frame);
    // Restore smi-tagged arguments count from the frame.
    __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }
  // Remove caller arguments from the stack and return.
  __ SmiScale(a4, a1, kPointerSizeLog2);
  __ Daddu(sp, sp, a4);
  __ Daddu(sp, sp, kPointerSize);
  __ Ret();
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}

static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
                                          Register sfi_data,
                                          Register scratch1) {
  Label done;

  __ GetObjectType(sfi_data, scratch1, scratch1);
  __ Branch(&done, ne, scratch1, Operand(INTERPRETER_DATA_TYPE));
  __ Ld(sfi_data,
        FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));

  __ bind(&done);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : the value to pass to the generator
  //  -- a1 : the JSGeneratorObject to resume
  //  -- ra : return address
  // -----------------------------------
  __ AssertGeneratorObject(a1);

  // Store input value into generator object.
  __ Sd(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3,
                      kRAHasNotBeenSaved, kDontSaveFPRegs);

  // Load suspended function and context.
  __ Ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
  __ Ld(cp, FieldMemOperand(a4, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ li(a5, debug_hook);
  __ Lb(a5, MemOperand(a5));
  __ Branch(&prepare_step_in_if_stepping, ne, a5, Operand(zero_reg));

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ li(a5, debug_suspended_generator);
  __ Ld(a5, MemOperand(a5));
  __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(a5));
  __ bind(&stepping_prepared);

  // Check the stack for overflow. We are not trying to catch interruptions
  // (i.e. debug break and preemption) here, so check the "real stack limit".
  Label stack_overflow;
  LoadStackLimit(masm, kScratchReg, StackLimitKind::kRealStackLimit);
  __ Branch(&stack_overflow, lo, sp, Operand(kScratchReg));

  // Push receiver.
  __ Ld(a5, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ Push(a5);

  // ----------- S t a t e -------------
  //  -- a1    : the JSGeneratorObject to resume
  //  -- a4    : generator function
  //  -- cp    : generator context
  //  -- ra    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ Ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
  __ Lhu(a3,
         FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ Ld(t1,
        FieldMemOperand(a1, JSGeneratorObject::kParametersAndRegistersOffset));
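  // t1 now holds the generator's parameters-and-registers FixedArray; the
  // loop below pushes one of its elements per formal parameter (these are
  // the dummy values mentioned above).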
  {
    Label done_loop, loop;
    __ Move(t2, zero_reg);
    __ bind(&loop);
    __ Dsubu(a3, a3, Operand(1));
    __ Branch(&done_loop, lt, a3, Operand(zero_reg));
    __ Dlsa(kScratchReg, t1, t2, kPointerSizeLog2);
    __ Ld(kScratchReg, FieldMemOperand(kScratchReg, FixedArray::kHeaderSize));
    __ Push(kScratchReg);
    __ Daddu(t2, t2, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ Ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
    __ Ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset));
    GetSharedFunctionInfoBytecode(masm, a3, a0);
    __ GetObjectType(a3, a3, a3);
    __ Assert(eq, AbortReason::kMissingBytecodeArray, a3,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    __ Ld(a0, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
    __ Lhu(a0, FieldMemOperand(
                   a0, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(a3, a1);
    __ Move(a1, a4);
    static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
    __ Ld(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
    __ Daddu(a2, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ Jump(a2);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a4);
    // Push hole as receiver since we do not use it for stepping.
    __ PushRoot(RootIndex::kTheHoleValue);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(a1);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ Ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(a1);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ Ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ break_(0xCC);  // This should be unreachable.
  }
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(a1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

// Clobbers scratch1 and scratch2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        Register scratch1, Register scratch2) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  LoadStackLimit(masm, scratch1, StackLimitKind::kRealStackLimit);
  // Make scratch1 the space we have left. The stack might already have
  // overflowed here, which will cause scratch1 to become negative.
  __ dsubu(scratch1, sp, scratch1);
  // Check if the arguments will overflow the stack.
  __ dsll(scratch2, argc, kPointerSizeLog2);
  __ Branch(&okay, gt, scratch1, Operand(scratch2));  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}

namespace {

// Called with the native C calling convention. The corresponding function
// signature is either:
//
//   using JSEntryFunction = GeneratedCode<Address(
//       Address root_register_value, Address new_target, Address target,
//       Address receiver, intptr_t argc, Address** args)>;
// or
//   using JSEntryFunction = GeneratedCode<Address(
//       Address root_register_value, MicrotaskQueue* microtask_queue)>;
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
                             Builtins::Name entry_trampoline) {
  Label invoke, handler_entry, exit;

  {
    NoRootArrayScope no_root_array(masm);

    // TODO(plind): unify the ABI description here.
    // Registers:
    //  either
    //   a0: root register value
    //   a1: entry address
    //   a2: function
    //   a3: receiver
    //   a4: argc
    //   a5: argv
    //  or
    //   a0: root register value
    //   a1: microtask_queue
    //
    // Stack:
    // 0 arg slots on mips64 (4 arg slots on mips)

    // Save callee saved registers on the stack.
    __ MultiPush(kCalleeSaved | ra.bit());

    // Save callee-saved FPU registers.
    __ MultiPushFPU(kCalleeSavedFPU);
    // Set up the reserved register for 0.0.
    __ Move(kDoubleRegZero, 0.0);

    // Initialize the root register.
    // C calling convention. The first argument is passed in a0.
    __ mov(kRootRegister, a0);
  }

  // a1: entry address
  // a2: function
  // a3: receiver
  // a4: argc
  // a5: argv

  // We build an EntryFrame.
  __ li(s1, Operand(-1));  // Push a bad frame pointer to fail if it is used.
  __ li(s2, Operand(StackFrame::TypeToMarker(type)));
  __ li(s3, Operand(StackFrame::TypeToMarker(type)));
  ExternalReference c_entry_fp = ExternalReference::Create(
      IsolateAddressId::kCEntryFPAddress, masm->isolate());
  __ li(s4, c_entry_fp);
  __ Ld(s4, MemOperand(s4));
  __ Push(s1, s2, s3, s4);
  // Set up frame pointer for the frame to be pushed.
  __ daddiu(fp, sp, -EntryFrameConstants::kCallerFPOffset);
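  // fp is positioned so that MemOperand(fp, EntryFrameConstants::kCallerFPOffset)
  // addresses the saved c_entry_fp slot just pushed above.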

  // Registers:
  //  either
  //   a1: entry address
  //   a2: function
  //   a3: receiver
  //   a4: argc
  //   a5: argv
  //  or
  //   a1: microtask_queue
  //
  // Stack:
  // caller fp          |
  // function slot      | entry frame
  // context slot       |
  // bad fp (0xFF...F)  |
  // callee saved registers + ra
  // [ O32: 4 args slots]
  // args

  // If this is the outermost JS call, set js_entry_sp value.
  Label non_outermost_js;
  ExternalReference js_entry_sp = ExternalReference::Create(
      IsolateAddressId::kJSEntrySPAddress, masm->isolate());
  __ li(s1, js_entry_sp);
  __ Ld(s2, MemOperand(s1));
  __ Branch(&non_outermost_js, ne, s2, Operand(zero_reg));
  __ Sd(fp, MemOperand(s1));
  __ li(s3, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
  Label cont;
  __ b(&cont);
  __ nop();  // Branch delay slot nop.
  __ bind(&non_outermost_js);
  __ li(s3, Operand(StackFrame::INNER_JSENTRY_FRAME));
  __ bind(&cont);
  __ push(s3);

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);

  // Store the current pc as the handler offset. It's used later to create the
  // handler table.
  masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());

  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.  Coming in here the
  // fp will be invalid because the PushStackHandler below sets it to 0 to
  // signal the existence of the JSEntry frame.
  __ li(s1, ExternalReference::Create(
                IsolateAddressId::kPendingExceptionAddress, masm->isolate()));
  __ Sd(v0, MemOperand(s1));  // We come back from 'invoke'. result is in v0.
  __ LoadRoot(v0, RootIndex::kException);
  __ b(&exit);  // b exposes branch delay slot.
  __ nop();     // Branch delay slot nop.

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler();
  // If an exception not caught by another handler occurs, this handler
  // returns control to the code after the jump to &invoke above, which
  // restores all kCalleeSaved registers (including cp and fp) to their
  // saved values before returning a failure to C.
  //
  // Registers:
  //  either
  //   a0: root register value
  //   a1: entry address
  //   a2: function
  //   a3: receiver
  //   a4: argc
  //   a5: argv
  //  or
  //   a0: root register value
  //   a1: microtask_queue
  //
  // Stack:
  // handler frame
  // entry frame
  // callee saved registers + ra
  // [ O32: 4 args slots]
  // args
  //
  // Invoke the function by calling through JS entry trampoline builtin and
  // pop the faked function when we return.

  Handle<Code> trampoline_code =
      masm->isolate()->builtins()->builtin_handle(entry_trampoline);
  __ Call(trampoline_code, RelocInfo::CODE_TARGET);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);  // v0 holds result
  // Check if the current stack frame is marked as the outermost JS frame.
  Label non_outermost_js_2;
  __ pop(a5);
  __ Branch(&non_outermost_js_2, ne, a5,
            Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ li(a5, js_entry_sp);
  __ Sd(zero_reg, MemOperand(a5));
  __ bind(&non_outermost_js_2);

  // Restore the top frame descriptors from the stack.
  __ pop(a5);
  __ li(a4, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
                                      masm->isolate()));
  __ Sd(a5, MemOperand(a4));

  // Reset the stack to the callee saved registers.
  __ daddiu(sp, sp, -EntryFrameConstants::kCallerFPOffset);

  // Restore callee-saved fpu registers.
  __ MultiPopFPU(kCalleeSavedFPU);

  // Restore callee saved registers from the stack.
  __ MultiPop(kCalleeSaved | ra.bit());
  // Return.
  __ Jump(ra);
}

}  // namespace

void Builtins::Generate_JSEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtins::kJSEntryTrampoline);
}

void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
                          Builtins::kJSConstructEntryTrampoline);
}

void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtins::kRunMicrotasksTrampoline);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // ----------- S t a t e -------------
  //  -- a1: new.target
  //  -- a2: function
  //  -- a3: receiver_pointer
  //  -- a4: argc
  //  -- a5: argv
  // -----------------------------------

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ li(cp, context_address);
    __ Ld(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(a2, a3);

    // Check if we have enough stack space to push all arguments.
    // Clobbers a0 and a3.
    Generate_CheckStackOverflow(masm, a4, a0, a3);

    // Set up new.target, function and argc.
    __ mov(a3, a1);
    __ mov(a1, a2);
    __ mov(a0, a4);

    // a0: argc
    // a1: function
    // a3: new.target
    // a5: argv

    // Copy arguments to the stack in a loop.
    // a0: argc
    // a5: argv, i.e. points to first arg
    Label loop, entry;
    __ Dlsa(s1, a5, a4, kPointerSizeLog2);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // s1 points past last arg.
    __ bind(&loop);
    __ Ld(s2, MemOperand(a5));  // Read next parameter.
    __ daddiu(a5, a5, kPointerSize);
    __ Ld(s2, MemOperand(s2));  // Dereference handle.
    __ push(s2);                // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, a5, Operand(s1));

    // a0: argc
    // a1: function
    // a3: new.target

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(a4, RootIndex::kUndefinedValue);
    __ mov(a5, a4);
    __ mov(s1, a4);
    __ mov(s2, a4);
    __ mov(s3, a4);
    __ mov(s4, a4);
    __ mov(s5, a4);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? BUILTIN_CODE(masm->isolate(), Construct)
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ Jump(ra);
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
  // a1: microtask_queue
  __ mov(RunMicrotasksDescriptor::MicrotaskQueueRegister(), a1);
  __ Jump(BUILTIN_CODE(masm->isolate(), RunMicrotasks), RelocInfo::CODE_TARGET);
}

static void ReplaceClosureCodeWithOptimizedCode(MacroAssembler* masm,
                                                Register optimized_code,
                                                Register closure,
                                                Register scratch1,
                                                Register scratch2) {
  // Store code entry in the closure.
  __ Sd(optimized_code, FieldMemOperand(closure, JSFunction::kCodeOffset));
  __ mov(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
                      kRAHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ Ld(args_count,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ Lw(args_count,
        FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::INTERPRETED);

  // Drop receiver + arguments.
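  // args_count holds the parameter area size in bytes (receiver included),
  // so it can be added to sp directly without scaling.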
  __ Daddu(sp, sp, args_count);
}

// Tail-call |function_id| if |smi_entry| == |marker|.
static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
                                          Register smi_entry,
                                          OptimizationMarker marker,
                                          Runtime::FunctionId function_id) {
  Label no_match;
  __ Branch(&no_match, ne, smi_entry, Operand(Smi::FromEnum(marker)));
  GenerateTailCallToReturnedCode(masm, function_id);
  __ bind(&no_match);
}

static void TailCallOptimizedCodeSlot(MacroAssembler* masm,
                                      Register optimized_code_entry,
                                      Register scratch1, Register scratch2) {
  // ----------- S t a t e -------------
  //  -- a3 : new target (preserved for callee if needed, and caller)
  //  -- a1 : target function (preserved for callee if needed, and caller)
  // -----------------------------------
  DCHECK(!AreAliased(optimized_code_entry, a1, a3, scratch1, scratch2));

  Register closure = a1;

  // Check if the optimized code is marked for deopt. If it is, call the
  // runtime to clear it.
  Label found_deoptimized_code;
  __ Ld(a5,
        FieldMemOperand(optimized_code_entry, Code::kCodeDataContainerOffset));
  __ Lw(a5, FieldMemOperand(a5, CodeDataContainer::kKindSpecificFlagsOffset));
  __ And(a5, a5, Operand(1 << Code::kMarkedForDeoptimizationBit));
  __ Branch(&found_deoptimized_code, ne, a5, Operand(zero_reg));

  // Optimized code is good, get it into the closure and link the closure into
  // the optimized functions list, then tail call the optimized code.
  // The feedback vector is no longer used, so re-use it as a scratch
  // register.
  ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
                                      scratch1, scratch2);

  static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
  __ Daddu(a2, optimized_code_entry,
           Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(a2);

  // Optimized code slot contains deoptimized code, evict it and re-enter the
  // closure's code.
  __ bind(&found_deoptimized_code);
  GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
}

static void MaybeOptimizeCode(MacroAssembler* masm, Register feedback_vector,
                              Register optimization_marker) {
  // ----------- S t a t e -------------
  //  -- a3 : new target (preserved for callee if needed, and caller)
  //  -- a1 : target function (preserved for callee if needed, and caller)
  //  -- feedback vector (preserved for caller if needed)
  //  -- optimization_marker : a Smi containing a non-zero optimization marker.
  // -----------------------------------
  DCHECK(!AreAliased(feedback_vector, a1, a3, optimization_marker));

  // TODO(v8:8394): The logging of first execution will break if
  // feedback vectors are not allocated. We need to find a different way of
  // logging these events if required.
  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
                                OptimizationMarker::kLogFirstExecution,
                                Runtime::kFunctionFirstExecution);
  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
                                OptimizationMarker::kCompileOptimized,
                                Runtime::kCompileOptimized_NotConcurrent);
  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
                                OptimizationMarker::kCompileOptimizedConcurrent,
                                Runtime::kCompileOptimized_Concurrent);

  // Otherwise, the marker is InOptimizationQueue, so fall through hoping
  // that an interrupt will eventually update the slot with optimized code.
  if (FLAG_debug_code) {
    __ Assert(eq, AbortReason::kExpectedOptimizationSentinel,
              optimization_marker,
              Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
  }
}

// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation. Will bail out to a
// label if the bytecode (without prefix) is a return bytecode.
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register bytecode, Register scratch1,
                                          Register scratch2, Label* if_return) {
  Register bytecode_size_table = scratch1;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode));
  __ li(bytecode_size_table, ExternalReference::bytecode_size_table_address());

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  Label process_bytecode, extra_wide;
  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  STATIC_ASSERT(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ Branch(&process_bytecode, hi, bytecode, Operand(3));
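  // Of the four prefix bytecodes, the even ones (kWide, kDebugBreakWide)
  // select the 2x-scaled size table and the odd ones (kExtraWide,
  // kDebugBreakExtraWide) the 4x-scaled table, so testing bit 0 suffices.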
  __ And(scratch2, bytecode, Operand(1));
  __ Branch(&extra_wide, ne, scratch2, Operand(zero_reg));

  // Load the next bytecode and update table to the wide scaled table.
  __ Daddu(bytecode_offset, bytecode_offset, Operand(1));
  __ Daddu(scratch2, bytecode_array, bytecode_offset);
  __ Lbu(bytecode, MemOperand(scratch2));
  __ Daddu(bytecode_size_table, bytecode_size_table,
           Operand(kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ jmp(&process_bytecode);

  __ bind(&extra_wide);
  // Load the next bytecode and update table to the extra wide scaled table.
  __ Daddu(bytecode_offset, bytecode_offset, Operand(1));
  __ Daddu(scratch2, bytecode_array, bytecode_offset);
  __ Lbu(bytecode, MemOperand(scratch2));
  __ Daddu(bytecode_size_table, bytecode_size_table,
           Operand(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));

  __ bind(&process_bytecode);

// Bailout to the return label if this is a return bytecode.
#define JUMP_IF_EQUAL(NAME)          \
  __ Branch(if_return, eq, bytecode, \
            Operand(static_cast<int>(interpreter::Bytecode::k##NAME)));
  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
#undef JUMP_IF_EQUAL

  // Otherwise, load the size of the current bytecode and advance the offset.
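  // Each size table entry is an int32, so the entry address is
  // bytecode_size_table + (bytecode << 2).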
  __ Dlsa(scratch2, bytecode_size_table, bytecode, 2);
  __ Lw(scratch2, MemOperand(scratch2));
  __ Daddu(bytecode_offset, bytecode_offset, scratch2);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o a1: the JS function object being called.
//   o a3: the incoming new target or generator object
//   o cp: our context
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  Register closure = a1;
  Register feedback_vector = a2;

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ Ld(a0, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ Ld(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, a4);

  // The bytecode array could have been flushed from the shared function info;
  // if so, call into CompileLazy.
  Label compile_lazy;
  __ GetObjectType(kInterpreterBytecodeArrayRegister, a0, a0);
  __ Branch(&compile_lazy, ne, a0, Operand(BYTECODE_ARRAY_TYPE));

  // Load the feedback vector from the closure.
  __ Ld(feedback_vector,
        FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
  __ Ld(feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset));

  Label push_stack_frame;
  // Check if the feedback vector is valid. If valid, check for optimized code
  // and update the invocation count. Otherwise, set up the stack frame.
  __ Ld(a4, FieldMemOperand(feedback_vector, HeapObject::kMapOffset));
  __ Lhu(a4, FieldMemOperand(a4, Map::kInstanceTypeOffset));
  __ Branch(&push_stack_frame, ne, a4, Operand(FEEDBACK_VECTOR_TYPE));

  // Read off the optimized code slot in the feedback vector, and if there
  // is optimized code or an optimization marker, call that instead.
  Register optimized_code_entry = a4;
  __ Ld(optimized_code_entry,
        FieldMemOperand(feedback_vector,
                        FeedbackVector::kOptimizedCodeWeakOrSmiOffset));

  // Check if the optimized code slot is not empty.
  Label optimized_code_slot_not_empty;

  __ Branch(&optimized_code_slot_not_empty, ne, optimized_code_entry,
            Operand(Smi::FromEnum(OptimizationMarker::kNone)));

  Label not_optimized;
  __ bind(&not_optimized);

  // Increment invocation count for the function.
  __ Lw(a4, FieldMemOperand(feedback_vector,
                            FeedbackVector::kInvocationCountOffset));
  __ Addu(a4, a4, Operand(1));
  __ Sw(a4, FieldMemOperand(feedback_vector,
                            FeedbackVector::kInvocationCountOffset));

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  __ bind(&push_stack_frame);
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(closure);

  // Reset code age and the OSR arming. The OSR field and BytecodeAgeOffset
  // are 8-bit fields next to each other, so we can clear both with a single
  // 16-bit store. These static asserts guard that this assumption is valid.
  STATIC_ASSERT(BytecodeArray::kBytecodeAgeOffset ==
                BytecodeArray::kOsrNestingLevelOffset + kCharSize);
  STATIC_ASSERT(BytecodeArray::kNoAgeBytecodeAge == 0);
  __ sh(zero_reg, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                  BytecodeArray::kOsrNestingLevelOffset));

  // Load initial bytecode offset.
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
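  // (kHeaderSize - kHeapObjectTag is the offset of the first bytecode
  // relative to the tagged BytecodeArray pointer.)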

  // Push bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(a4, kInterpreterBytecodeOffsetRegister);
  __ Push(kInterpreterBytecodeArrayRegister, a4);

  // Allocate the local and temporary register file on the stack.
  Label stack_overflow;
  {
    // Load frame size (word) from the BytecodeArray object.
    __ Lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    __ Dsubu(a5, sp, Operand(a4));
    LoadStackLimit(masm, a2, StackLimitKind::kRealStackLimit);
    __ Branch(&stack_overflow, lo, a5, Operand(a2));

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(a5, RootIndex::kUndefinedValue);
    __ Branch(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(a5);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ Dsubu(a4, a4, Operand(kPointerSize));
    __ Branch(&loop_header, ge, a4, Operand(zero_reg));
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with the incoming value passed in a3.
  Label no_incoming_new_target_or_generator_register;
  __ Lw(a5, FieldMemOperand(
                kInterpreterBytecodeArrayRegister,
                BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ Branch(&no_incoming_new_target_or_generator_register, eq, a5,
            Operand(zero_reg));
  __ Dlsa(a5, fp, a5, kPointerSizeLog2);
  __ Sd(a3, MemOperand(a5));
  __ bind(&no_incoming_new_target_or_generator_register);

  // Perform interrupt stack check.
  // TODO(solanes): Merge with the real stack limit check above.
  Label stack_check_interrupt, after_stack_check_interrupt;
  LoadStackLimit(masm, a5, StackLimitKind::kInterruptStackLimit);
  __ Branch(&stack_check_interrupt, lo, sp, Operand(a5));
  __ bind(&after_stack_check_interrupt);

  // Load accumulator as undefined.
  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);

  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ li(kInterpreterDispatchTableRegister,
        ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
  __ Daddu(a0, kInterpreterBytecodeArrayRegister,
           kInterpreterBytecodeOffsetRegister);
  __ Lbu(a7, MemOperand(a0));
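  // Index the dispatch table with the bytecode in a7; each table entry is the
  // entry address of the corresponding bytecode handler.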
  __ Dlsa(kScratchReg, kInterpreterDispatchTableRegister, a7, kPointerSizeLog2);
  __ Ld(kJavaScriptCallCodeStartRegister, MemOperand(kScratchReg));
  __ Call(kJavaScriptCallCodeStartRegister);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // Any returns to the entry trampoline are either due to the return bytecode
  // or the interpreter tail calling a builtin and then a dispatch.

  // Get bytecode array and bytecode offset from the stack frame.
  __ Ld(kInterpreterBytecodeArrayRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ Ld(kInterpreterBytecodeOffsetRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Either return, or advance to the next bytecode and dispatch.
  Label do_return;
  __ Daddu(a1, kInterpreterBytecodeArrayRegister,
           kInterpreterBytecodeOffsetRegister);
  __ Lbu(a1, MemOperand(a1));
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, a1, a2, a3,
                                &do_return);
  __ jmp(&do_dispatch);

  __ bind(&do_return);
  // The return value is in v0.
  LeaveInterpreterFrame(masm, t0);
  __ Jump(ra);

  __ bind(&stack_check_interrupt);
  // Modify the bytecode offset in the stack to be kFunctionEntryBytecodeOffset
  // for the call to the StackGuard.
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag +
                             kFunctionEntryBytecodeOffset)));
  __ Sd(kInterpreterBytecodeOffsetRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ CallRuntime(Runtime::kStackGuard);

  // After the call, restore the bytecode array, bytecode offset and accumulator
  // registers again. Also, restore the bytecode offset in the stack to its
  // previous value.
  __ Ld(kInterpreterBytecodeArrayRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);

  __ SmiTag(a5, kInterpreterBytecodeOffsetRegister);
  __ Sd(a5, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));

  __ jmp(&after_stack_check_interrupt);

  __ bind(&optimized_code_slot_not_empty);
  Label maybe_has_optimized_code;
  // Check if optimized code marker is actually a weak reference to the
  // optimized code as opposed to an optimization marker.
  __ JumpIfNotSmi(optimized_code_entry, &maybe_has_optimized_code);
  MaybeOptimizeCode(masm, feedback_vector, optimized_code_entry);
  // Fall through if there's no runnable optimized code.
  __ jmp(&not_optimized);

  __ bind(&maybe_has_optimized_code);
  // Load the code entry from the weak reference; if it was cleared, resume
  // execution of the unoptimized code.
  __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &not_optimized);
  TailCallOptimizedCodeSlot(masm, optimized_code_entry, t3, a5);

  __ bind(&compile_lazy);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
  // Unreachable code.
  __ break_(0xCC);

  __ bind(&stack_overflow);
  __ CallRuntime(Runtime::kThrowStackOverflow);
  // Unreachable code.
  __ break_(0xCC);
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register scratch, Register scratch2) {
  // Find the address of the last argument.
  __ mov(scratch2, num_args);
  __ dsll(scratch2, scratch2, kPointerSizeLog2);
  __ Dsubu(scratch2, index, Operand(scratch2));
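  // scratch2 now points one slot below the last argument; the loop below
  // pushes arguments from the highest address (index) downwards until index
  // reaches scratch2.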

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ Ld(scratch, MemOperand(index));
  __ Daddu(index, index, Operand(-kPointerSize));
  __ push(scratch);
  __ bind(&loop_check);
  __ Branch(&loop_header, hi, index, Operand(scratch2));
}

// static
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    InterpreterPushArgsMode mode) {
  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  __ Daddu(a3, a0, Operand(1));  // Add one for receiver.

  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(RootIndex::kUndefinedValue);
    __ Dsubu(a3, a3, Operand(1));  // Subtract one for receiver.
  }

  Generate_StackOverflowCheck(masm, a3, a4, t0, &stack_overflow);

  // This function modifies a2, t0 and a4.
  Generate_InterpreterPushArgs(masm, a3, a2, a4, t0);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(a2);                    // Pass the spread in a register.
    __ Dsubu(a0, a0, Operand(1));  // Subtract one for spread.
  }

  // Call the target.
  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ break_(0xCC);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  // -- a0 : argument count (not including receiver)
  // -- a3 : new target
  // -- a1 : constructor to call
  // -- a2 : allocation site feedback if available, undefined otherwise.
  // -- a4 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  // Push a slot for the receiver.
  __ push(zero_reg);

  Generate_StackOverflowCheck(masm, a0, a5, t0, &stack_overflow);

  // This function modifies t0, a4 and a5.
  Generate_InterpreterPushArgs(masm, a0, a4, a5, t0);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(a2);                    // Pass the spread in a register.
    __ Dsubu(a0, a0, Operand(1));  // Subtract one for spread.
  } else {
    __ AssertUndefinedOrAllocationSite(a2, t0);
  }

  if (mode == InterpreterPushArgsMode::kArrayFunction) {
    __ AssertFunction(a1);

    // Tail call to the function-specific construct stub (still in the caller
    // context at this point).
    __ Jump(BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl),
            RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor with a0, a1, and a3 unmodified.
    __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor with a0, a1, and a3 unmodified.
    __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ break_(0xCC);
  }
}

static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Label builtin_trampoline, trampoline_loaded;
  Smi interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());

  // If the SFI function_data is an InterpreterData, the function will have a
  // custom copy of the interpreter entry trampoline for profiling. If so,
  // get the custom trampoline, otherwise grab the entry address of the global
  // trampoline.
  __ Ld(t0, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ Ld(t0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ Ld(t0, FieldMemOperand(t0, SharedFunctionInfo::kFunctionDataOffset));
  __ GetObjectType(t0, kInterpreterDispatchTableRegister,
                   kInterpreterDispatchTableRegister);
  __ Branch(&builtin_trampoline, ne, kInterpreterDispatchTableRegister,
            Operand(INTERPRETER_DATA_TYPE));

  __ Ld(t0, FieldMemOperand(t0, InterpreterData::kInterpreterTrampolineOffset));
  __ Daddu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Branch(&trampoline_loaded);

  __ bind(&builtin_trampoline);
  __ li(t0, ExternalReference::
                address_of_interpreter_entry_trampoline_instruction_start(
                    masm->isolate()));
  __ Ld(t0, MemOperand(t0));

  __ bind(&trampoline_loaded);
  __ Daddu(ra, t0, Operand(interpreter_entry_return_pc_offset.value()));

  // Initialize the dispatch table register.
  __ li(kInterpreterDispatchTableRegister,
        ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the bytecode array pointer from the frame.
  __ Ld(kInterpreterBytecodeArrayRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister, kScratchReg);
    __ Assert(ne,
              AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
              kScratchReg, Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
    __ Assert(eq,
              AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
              a1, Operand(BYTECODE_ARRAY_TYPE));
  }

  // Get the target bytecode offset from the frame.
  __ SmiUntag(kInterpreterBytecodeOffsetRegister,
              MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));

  if (FLAG_debug_code) {
    Label okay;
    __ Branch(&okay, ge, kInterpreterBytecodeOffsetRegister,
              Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
    // Unreachable code.
    __ break_(0xCC);
    __ bind(&okay);
  }

  // Dispatch to the target bytecode.
  __ Daddu(a1, kInterpreterBytecodeArrayRegister,
           kInterpreterBytecodeOffsetRegister);
  __ Lbu(a7, MemOperand(a1));
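  // Each dispatch-table entry is one pointer wide, so the handler for the
  // current bytecode lives at dispatch_table + bytecode * kPointerSize.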
  __ Dlsa(a1, kInterpreterDispatchTableRegister, a7, kPointerSizeLog2);
  __ Ld(kJavaScriptCallCodeStartRegister, MemOperand(a1));
  __ Jump(kJavaScriptCallCodeStartRegister);
}

void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Advance the current bytecode offset stored within the given interpreter
  // stack frame. This simulates what all bytecode handlers do upon completion
  // of the underlying operation.
  __ Ld(kInterpreterBytecodeArrayRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ Ld(kInterpreterBytecodeOffsetRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  Label enter_bytecode, function_entry_bytecode;
  __ Branch(&function_entry_bytecode, eq, kInterpreterBytecodeOffsetRegister,
            Operand(BytecodeArray::kHeaderSize - kHeapObjectTag +
                    kFunctionEntryBytecodeOffset));

  // Load the current bytecode.
  __ Daddu(a1, kInterpreterBytecodeArrayRegister,
           kInterpreterBytecodeOffsetRegister);
  __ Lbu(a1, MemOperand(a1));

  // Advance to the next bytecode.
  Label if_return;
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, a1, a2, a3,
                                &if_return);

  __ bind(&enter_bytecode);
  // Convert the new bytecode offset to a Smi and save it in the stack frame.
  __ SmiTag(a2, kInterpreterBytecodeOffsetRegister);
  __ Sd(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));

  Generate_InterpreterEnterBytecode(masm);

  __ bind(&function_entry_bytecode);
  // If the code deoptimizes during the implicit function entry stack interrupt
  // check, it will have a bailout ID of kFunctionEntryBytecodeOffset, which is
  // not a valid bytecode offset. Detect this case and advance to the first
  // actual bytecode.
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ Branch(&enter_bytecode);

  // We should never take the if_return path.
  __ bind(&if_return);
  __ Abort(AbortReason::kInvalidBytecodeAdvance);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}

namespace {
void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                      bool java_script_builtin,
                                      bool with_result) {
  const RegisterConfiguration* config(RegisterConfiguration::Default());
  int allocatable_register_count = config->num_allocatable_general_registers();
  if (with_result) {
    // Overwrite the hole inserted by the deoptimizer with the return value
    // from the LAZY deopt point.
    __ Sd(v0,
          MemOperand(
              sp, config->num_allocatable_general_registers() * kPointerSize +
                      BuiltinContinuationFrameConstants::kFixedFrameSize));
  }
  for (int i = allocatable_register_count - 1; i >= 0; --i) {
    int code = config->GetAllocatableGeneralCode(i);
    __ Pop(Register::from_code(code));
    if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
      __ SmiUntag(Register::from_code(code));
    }
  }
  __ Ld(fp, MemOperand(
                sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  // Load the builtin index (stored as a Smi) and use it to get the builtin
  // start address from the builtins table.
  __ Pop(t0);
  __ Daddu(sp, sp,
           Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  __ Pop(ra);
  __ LoadEntryFromBuiltinIndex(t0);
  __ Jump(t0);
}
}  // namespace

void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}

void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}

void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}

void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code());
  __ Ld(v0, MemOperand(sp, 0 * kPointerSize));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill the delay slot: Daddu emits exactly one instruction.
  __ Daddu(sp, sp, Operand(1 * kPointerSize));  // Remove state.
}

void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the caller.
  __ Ret(eq, v0, Operand(Smi::zero()));

  // Drop the handler frame that is sitting on top of the actual
  // JavaScript frame. This is the case when OSR is triggered from bytecode.
  __ LeaveFrame(StackFrame::STUB);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ Ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ SmiUntag(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
                                     DeoptimizationData::kOsrPcOffsetIndex) -
                                     kHeapObjectTag));

  // Compute the target address:
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ Daddu(v0, v0, a1);
  __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : argArray
  //  -- sp[8]  : thisArg
  //  -- sp[16] : receiver
  // -----------------------------------

  Register argc = a0;
  Register arg_array = a2;
  Register receiver = a1;
  Register this_arg = a5;
  Register undefined_value = a3;
  Register scratch = a4;

  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);

  // 1. Load receiver into a1, argArray into a2 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    // Claim (2 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
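    // Concretely: sp moves by (argc - 2) * kPointerSize, so the two-word Pop
    // below never reads past the caller's frame; the Movz instructions then
    // substitute undefined for any slot the caller did not actually pass.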

    __ Dsubu(sp, sp, Operand(2 * kPointerSize));
    __ Dlsa(sp, sp, argc, kPointerSizeLog2);
    __ mov(scratch, argc);
    __ Pop(this_arg, arg_array);                   // Overwrite argc.
    __ Movz(arg_array, undefined_value, scratch);  // if argc == 0
    __ Movz(this_arg, undefined_value, scratch);   // if argc == 0
    __ Dsubu(scratch, scratch, Operand(1));
    __ Movz(arg_array, undefined_value, scratch);  // if argc == 1
    __ Ld(receiver, MemOperand(sp));
    __ Sd(this_arg, MemOperand(sp));
  }

  // ----------- S t a t e -------------
  //  -- a2    : argArray
  //  -- a1    : receiver
  //  -- a3    : undefined root value
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. We don't need to check explicitly for callable receiver here,
  // since that's the first thing the Call/CallWithArrayLike builtins
  // will do.

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(arg_array, RootIndex::kNullValue, &no_arguments);
  __ Branch(&no_arguments, eq, arg_array, Operand(undefined_value));

  // 4a. Apply the receiver to the given argArray.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ mov(a0, zero_reg);
    DCHECK(receiver == a1);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }
}

// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  {
    Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ PushRoot(RootIndex::kUndefinedValue);
    __ Daddu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack.
  // a0: actual number of arguments
  __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
  __ Ld(a1, MemOperand(kScratchReg));

  // 3. Shift arguments one slot down on the stack (overwriting the original
  //    receiver), and adjust the argument count to make the original first
  //    argument the new receiver.
  // a0: actual number of arguments
  // a1: function
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ Dlsa(a2, sp, a0, kPointerSizeLog2);

    __ bind(&loop);
    __ Ld(kScratchReg, MemOperand(a2, -kPointerSize));
    __ Sd(kScratchReg, MemOperand(a2));
    __ Dsubu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Dsubu(a0, a0, Operand(1));
    __ Pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : argumentsList  (if argc == 3)
  //  -- sp[8]  : thisArgument   (if argc >= 2)
  //  -- sp[16] : target         (if argc >= 1)
  //  -- sp[24] : receiver
  // -----------------------------------

  Register argc = a0;
  Register arguments_list = a2;
  Register target = a1;
  Register this_argument = a5;
  Register undefined_value = a3;
  Register scratch = a4;

  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);

  // 1. Load target into a1 (if present), argumentsList into a2 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
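    // As in Generate_FunctionPrototypeApply above: sp moves by
    // (argc - 3) * kPointerSize, so the three-word Pop below is always safe.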

    __ Dsubu(sp, sp, Operand(3 * kPointerSize));
    __ Dlsa(sp, sp, argc, kPointerSizeLog2);
    __ mov(scratch, argc);
    __ Pop(target, this_argument, arguments_list);
    __ Movz(arguments_list, undefined_value, scratch);  // if argc == 0
    __ Movz(this_argument, undefined_value, scratch);   // if argc == 0
    __ Movz(target, undefined_value, scratch);          // if argc == 0
    __ Dsubu(scratch, scratch, Operand(1));
    __ Movz(arguments_list, undefined_value, scratch);  // if argc == 1
    __ Movz(this_argument, undefined_value, scratch);   // if argc == 1
    __ Dsubu(scratch, scratch, Operand(1));
    __ Movz(arguments_list, undefined_value, scratch);  // if argc == 2

    __ Sd(this_argument, MemOperand(sp, 0));  // Overwrite receiver.
  }

  // ----------- S t a t e -------------
  //  -- a2    : argumentsList
  //  -- a1    : target
  //  -- a3    : undefined root value
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. We don't need to check explicitly for callable target here,
  // since that's the first thing the Call/CallWithArrayLike builtins
  // will do.

  // 3. Apply the target to the given argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : new.target (optional) (dummy value if argc <= 2)
  //  -- sp[8]  : argumentsList         (dummy value if argc <= 1)
  //  -- sp[16] : target                (dummy value if argc == 0)
  //  -- sp[24] : receiver
  // -----------------------------------
  Register argc = a0;
  Register arguments_list = a2;
  Register target = a1;
  Register new_target = a3;
  Register undefined_value = a4;
  Register scratch = a5;

  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);

  // 1. Load target into a1 (if present), argumentsList into a2 (if present),
  // new.target into a3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and leave undefined in
  // the receiver slot.
  {
    // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
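    // Same claiming trick as above: sp moves by (argc - 3) * kPointerSize
    // before the three-word Pop.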

    __ Dsubu(sp, sp, Operand(3 * kPointerSize));
    __ Dlsa(sp, sp, argc, kPointerSizeLog2);
    __ mov(scratch, argc);
    __ Pop(target, arguments_list, new_target);
    __ Movz(arguments_list, undefined_value, scratch);  // if argc == 0
    __ Movz(new_target, undefined_value, scratch);      // if argc == 0
    __ Movz(target, undefined_value, scratch);          // if argc == 0
    __ Dsubu(scratch, scratch, Operand(1));
    __ Movz(arguments_list, undefined_value, scratch);  // if argc == 1
    __ Movz(new_target, target, scratch);               // if argc == 1
    __ Dsubu(scratch, scratch, Operand(1));
    __ Movz(new_target, target, scratch);  // if argc == 2

    __ Sd(undefined_value, MemOperand(sp, 0));  // Overwrite receiver.
  }

  // ----------- S t a t e -------------
  //  -- a2    : argumentsList
  //  -- a1    : target
  //  -- a3    : new.target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. We don't need to check explicitly for constructor target here,
  // since that's the first thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 3. We don't need to check explicitly for constructor new.target here,
  // since that's the second thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 4. Construct the target with the given new.target and argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
          RelocInfo::CODE_TARGET);
}

static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(a0);
  __ li(a4, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit());
  __ Push(Smi::zero());  // Padding.
  __ Daddu(fp, sp,
           Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp));
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then drop the parameters.
  __ Ld(a1, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  __ SmiScale(a4, a1, kPointerSizeLog2);
  __ Daddu(sp, sp, a4);
  // Adjust for the receiver.
  __ Daddu(sp, sp, Operand(kPointerSize));
}

// static
void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
                                               Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- a1 : target
  //  -- a0 : number of parameters on the stack (not including the receiver)
  //  -- a2 : arguments list (a FixedArray)
  //  -- a4 : len (number of elements to push from args)
  //  -- a3 : new.target (for [[Construct]])
  // -----------------------------------
  if (masm->emit_debug_code()) {
    // Allow a2 to be a FixedArray, or a FixedDoubleArray if a4 == 0.
    Label ok, fail;
    __ AssertNotSmi(a2);
    __ GetObjectType(a2, t8, t8);
    __ Branch(&ok, eq, t8, Operand(FIXED_ARRAY_TYPE));
    __ Branch(&fail, ne, t8, Operand(FIXED_DOUBLE_ARRAY_TYPE));
    __ Branch(&ok, eq, a4, Operand(zero_reg));
    // Fall through.
    __ bind(&fail);
    __ Abort(AbortReason::kOperandIsNotAFixedArray);

    __ bind(&ok);
  }

  Register args = a2;
  Register len = a4;

  // Check for stack overflow.
  Label stack_overflow;
  Generate_StackOverflowCheck(masm, len, kScratchReg, a5, &stack_overflow);

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label done, push, loop;
    Register src = a6;
    Register scratch = len;

    __ daddiu(src, args, FixedArray::kHeaderSize - kHeapObjectTag);
    __ Branch(&done, eq, len, Operand(zero_reg), i::USE_DELAY_SLOT);
    __ Daddu(a0, a0, len);  // The 'len' argument for Call() or Construct().
    __ dsll(scratch, len, kPointerSizeLog2);
    __ Dsubu(scratch, sp, Operand(scratch));
    __ LoadRoot(t1, RootIndex::kTheHoleValue);
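    // Any the-hole entries in the arguments list are replaced with undefined
    // as they are pushed.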
    __ bind(&loop);
    __ Ld(a5, MemOperand(src));
    __ Branch(&push, ne, a5, Operand(t1));
    __ LoadRoot(a5, RootIndex::kUndefinedValue);
    __ bind(&push);
    __ daddiu(src, src, kPointerSize);
    __ Push(a5);
    __ Branch(&loop, ne, scratch, Operand(sp));
    __ bind(&done);
  }

  // Tail-call to the actual Call or Construct builtin.
  __ Jump(code, RelocInfo::CODE_TARGET);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
}

// static
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
                                                      CallOrConstructMode mode,
                                                      Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a3 : the new.target (for [[Construct]] calls)
  //  -- a1 : the target to call (can be any Object)
  //  -- a2 : start index (to support rest parameters)
  // -----------------------------------

  // Check if new.target has a [[Construct]] internal method.
  if (mode == CallOrConstructMode::kConstruct) {
    Label new_target_constructor, new_target_not_constructor;
    __ JumpIfSmi(a3, &new_target_not_constructor);
    __ ld(t1, FieldMemOperand(a3, HeapObject::kMapOffset));
    __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
    __ And(t1, t1, Operand(Map::Bits1::IsConstructorBit::kMask));
    __ Branch(&new_target_constructor, ne, t1, Operand(zero_reg));
    __ bind(&new_target_not_constructor);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ Push(a3);
      __ CallRuntime(Runtime::kThrowNotConstructor);
    }
    __ bind(&new_target_constructor);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ Ld(a6, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ Ld(a7, MemOperand(a6, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Branch(&arguments_adaptor, eq, a7,
            Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  {
    __ Ld(a7, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
    __ Ld(a7, FieldMemOperand(a7, JSFunction::kSharedFunctionInfoOffset));
    __ Lhu(a7, FieldMemOperand(
                   a7, SharedFunctionInfo::kFormalParameterCountOffset));
    __ mov(a6, fp);
  }
  __ Branch(&arguments_done);
  __ bind(&arguments_adaptor);
  {
    // Just get the length from the ArgumentsAdaptorFrame.
    __ SmiUntag(a7,
                MemOperand(a6, ArgumentsAdaptorFrameConstants::kLengthOffset));
  }
  __ bind(&arguments_done);

  Label stack_done, stack_overflow;
  __ Subu(a7, a7, a2);
  __ Branch(&stack_done, le, a7, Operand(zero_reg));
  {
    // Check for stack overflow.
    Generate_StackOverflowCheck(masm, a7, a4, a5, &stack_overflow);

    // Forward the arguments from the caller frame.
    {
      Label loop;
      __ Daddu(a0, a0, a7);
      __ bind(&loop);
      {
        __ Dlsa(kScratchReg, a6, a7, kPointerSizeLog2);
        __ Ld(kScratchReg, MemOperand(kScratchReg, 1 * kPointerSize));
        __ push(kScratchReg);
        __ Subu(a7, a7, Operand(1));
        __ Branch(&loop, ne, a7, Operand(zero_reg));
      }
    }
  }
  __ Branch(&stack_done);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ bind(&stack_done);

  // Tail-call to the {code} handler.
  __ Jump(code, RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(a1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList )
  // Check that function is not a "classConstructor".
  Label class_constructor;
  __ Ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ Lwu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFlagsOffset));
  __ And(kScratchReg, a3,
         Operand(SharedFunctionInfo::IsClassConstructorBit::kMask));
  __ Branch(&class_constructor, ne, kScratchReg, Operand(zero_reg));

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ Ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ Lwu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFlagsOffset));
  __ And(kScratchReg, a3,
         Operand(SharedFunctionInfo::IsNativeBit::kMask |
                 SharedFunctionInfo::IsStrictBit::kMask));
  __ Branch(&done_convert, ne, kScratchReg, Operand(zero_reg));
  {
    // ----------- S t a t e -------------
    //  -- a0 : the number of arguments (not including the receiver)
    //  -- a1 : the function to call (checked to be a JSFunction)
    //  -- a2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(a3);
    } else {
      Label convert_to_object, convert_receiver;
      __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
      __ Ld(a3, MemOperand(kScratchReg));
      __ JumpIfSmi(a3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ GetObjectType(a3, a4, a4);
      __ Branch(&done_convert, hs, a4, Operand(FIRST_JS_RECEIVER_TYPE));
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(a3, RootIndex::kUndefinedValue, &convert_global_proxy);
        __ JumpIfNotRoot(a3, RootIndex::kNullValue, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(a3);
        }
        __ Branch(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(a0);
        __ Push(a0, a1);
        __ mov(a0, a3);
        __ Push(cp);
        __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
                RelocInfo::CODE_TARGET);
        __ Pop(cp);
        __ mov(a3, v0);
        __ Pop(a0, a1);
        __ SmiUntag(a0);
      }
      __ Ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
    __ Sd(a3, MemOperand(kScratchReg));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  //  -- a2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  __ Lhu(a2,
         FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
  __ InvokeFunctionCode(a1, no_reg, a2, a0, JUMP_FUNCTION);

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}

// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  // Patch the receiver to [[BoundThis]].
  {
    __ Ld(kScratchReg, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
    __ Dlsa(a4, sp, a0, kPointerSizeLog2);
    __ Sd(kScratchReg, MemOperand(a4));
  }

  // Load [[BoundArguments]] into a2 and length of that into a4.
  __ Ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ SmiUntag(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- a4 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ dsll(a5, a4, kPointerSizeLog2);
    __ Dsubu(sp, sp, Operand(a5));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    LoadStackLimit(masm, kScratchReg, StackLimitKind::kRealStackLimit);
    __ Branch(&done, hs, sp, Operand(kScratchReg));
    // Restore the stack pointer.
    __ Daddu(sp, sp, Operand(a5));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(a5, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, gt, a5, Operand(a0));
    __ Dlsa(a6, sp, a4, kPointerSizeLog2);
    __ Ld(kScratchReg, MemOperand(a6));
    __ Dlsa(a6, sp, a5, kPointerSizeLog2);
    __ Sd(kScratchReg, MemOperand(a6));
    __ Daddu(a4, a4, Operand(1));
    __ Daddu(a5, a5, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ SmiUntag(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Dsubu(a4, a4, Operand(1));
    __ Branch(&done_loop, lt, a4, Operand(zero_reg));
    __ Dlsa(a5, a2, a4, kPointerSizeLog2);
    __ Ld(kScratchReg, MemOperand(a5));
    __ Dlsa(a5, sp, a0, kPointerSizeLog2);
    __ Sd(kScratchReg, MemOperand(a5));
    __ Daddu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ Ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_smi;
  __ JumpIfSmi(a1, &non_callable);
  __ bind(&non_smi);
  __ GetObjectType(a1, t1, t2);
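  // GetObjectType left the map of the target in t1 and its instance type in
  // t2; the dispatch below keys off t2, the callability check off t1.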
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Check if target has a [[Call]] internal method.
  __ Lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t1, t1, Operand(Map::Bits1::IsCallableBit::kMask));
  __ Branch(&non_callable, eq, t1, Operand(zero_reg));

  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_PROXY_TYPE));

  // Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  // Overwrite the original receiver with the (original) target.
  __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
  __ Sd(a1, MemOperand(kScratchReg));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertConstructor(a1);
  __ AssertFunction(a1);

  // The calling convention for function-specific ConstructStubs requires
  // a2 to contain either an AllocationSite or undefined.
  __ LoadRoot(a2, RootIndex::kUndefinedValue);

  Label call_generic_stub;

  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
  __ Ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lwu(a4, FieldMemOperand(a4, SharedFunctionInfo::kFlagsOffset));
  __ And(a4, a4, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ Branch(&call_generic_stub, eq, a4, Operand(zero_reg));

  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
          RelocInfo::CODE_TARGET);

  __ bind(&call_generic_stub);
  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertConstructor(a1);
  __ AssertBoundFunction(a1);

  // Load [[BoundArguments]] into a2 and length of that into a4.
  __ Ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ SmiUntag(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- a3 : the new target (checked to be a constructor)
  //  -- a4 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ dsll(a5, a4, kPointerSizeLog2);
    __ Dsubu(sp, sp, Operand(a5));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    LoadStackLimit(masm, kScratchReg, StackLimitKind::kRealStackLimit);
    __ Branch(&done, hs, sp, Operand(kScratchReg));
    // Restore the stack pointer.
    __ Daddu(sp, sp, Operand(a5));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(a5, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, ge, a5, Operand(a0));
    __ Dlsa(a6, sp, a4, kPointerSizeLog2);
    __ Ld(kScratchReg, MemOperand(a6));
    __ Dlsa(a6, sp, a5, kPointerSizeLog2);
    __ Sd(kScratchReg, MemOperand(a6));
    __ Daddu(a4, a4, Operand(1));
    __ Daddu(a5, a5, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ SmiUntag(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Dsubu(a4, a4, Operand(1));
    __ Branch(&done_loop, lt, a4, Operand(zero_reg));
    __ Dlsa(a5, a2, a4, kPointerSizeLog2);
    __ Ld(kScratchReg, MemOperand(a5));
    __ Dlsa(a5, sp, a0, kPointerSizeLog2);
    __ Sd(kScratchReg, MemOperand(a5));
    __ Daddu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label skip_load;
    __ Branch(&skip_load, ne, a1, Operand(a3));
    __ Ld(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&skip_load);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ Ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (can be any Object)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor, non_proxy;
  __ JumpIfSmi(a1, &non_constructor);

  // Check if target has a [[Construct]] internal method.
  __ ld(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t3, t3, Operand(Map::Bits1::IsConstructorBit::kMask));
  __ Branch(&non_constructor, eq, t3, Operand(zero_reg));

  // Dispatch based on instance type.
  __ Lhu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Only dispatch to proxies after checking whether they are constructors.
  __ Branch(&non_proxy, ne, t2, Operand(JS_PROXY_TYPE));
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
          RelocInfo::CODE_TARGET);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  __ bind(&non_proxy);
  {
    // Overwrite the original receiver with the (original) target.
    __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
    __ Sd(a1, MemOperand(kScratchReg));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  //  -- a3: new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Branch(&dont_adapt_arguments, eq, a2,
            Operand(kDontAdaptArgumentsSentinel));
  // We use Uless, as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, a2, a5, kScratchReg, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into a4.
    __ SmiScale(a0, a0, kPointerSizeLog2);
    __ Daddu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Daddu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ dsll(a4, a2, kPointerSizeLog2);
    __ dsubu(a4, a0, a4);
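    // The copy loop below is inclusive of both endpoints: it pushes a2 + 1
    // words (the receiver plus the first a2 arguments); surplus actual
    // arguments are not copied and are dropped when the frame is torn down.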

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // a4: copy end address

    Label copy;
    __ bind(&copy);
    __ Ld(a5, MemOperand(a0));
    __ push(a5);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a4));
    __ daddiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, a2, a5, kScratchReg, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into a7.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ SmiScale(a0, a0, kPointerSizeLog2);
    __ Daddu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Daddu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Daddu(a7, fp, kPointerSize);
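    // Only the actual arguments plus the receiver exist to copy here; the
    // fill loop further below supplies undefined for the remaining expected
    // slots.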
2420 
2421     // Copy the arguments (including the receiver) to the new stack frame.
2422     // a0: copy start address
2423     // a1: function
2424     // a2: expected number of arguments
2425     // a3: new target (passed through to callee)
2426     // a7: copy end address
2427     Label copy;
2428     __ bind(&copy);
2429     __ Ld(a4, MemOperand(a0));  // Adjusted above for return addr and receiver.
2430     __ Dsubu(sp, sp, kPointerSize);
2431     __ Dsubu(a0, a0, kPointerSize);
2432     __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a7));
2433     __ Sd(a4, MemOperand(sp));  // In the delay slot.
2434 
2435     // Fill the remaining expected arguments with undefined.
2436     // a1: function
2437     // a2: expected number of arguments
2438     // a3: new target (passed through to callee)
2439     __ LoadRoot(a5, RootIndex::kUndefinedValue);
2440     __ dsll(a6, a2, kPointerSizeLog2);
2441     __ Dsubu(a4, fp, Operand(a6));
2442     // Adjust for frame.
2443     __ Dsubu(a4, a4,
2444              Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp +
2445                      kPointerSize));
2446 
2447     Label fill;
2448     __ bind(&fill);
2449     __ Dsubu(sp, sp, kPointerSize);
2450     __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a4));
2451     __ Sd(a5, MemOperand(sp));
2452   }
2453 
2454   // Call the entry point.
2455   __ bind(&invoke);
2456   __ mov(a0, a2);
2457   // a0 : expected number of arguments
2458   // a1 : function (passed through to callee)
2459   // a3: new target (passed through to callee)
2460   static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
2461   __ Ld(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
2462   __ Daddu(a2, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
2463   __ Call(a2);
2464 
2465   // Store offset of return address for deoptimizer.
2466   masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2467 
2468   // Exit frame and return.
2469   LeaveArgumentsAdaptorFrame(masm);
2470   __ Ret();
2471 
2472   // -------------------------------------------
2473   // Don't adapt arguments.
2474   // -------------------------------------------
2475   __ bind(&dont_adapt_arguments);
2476   static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
2477   __ Ld(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
2478   __ Daddu(a2, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
2479   __ Jump(a2);
2480 
2481   __ bind(&stack_overflow);
2482   {
2483     FrameScope frame(masm, StackFrame::MANUAL);
2484     __ CallRuntime(Runtime::kThrowStackOverflow);
2485     __ break_(0xCC);
2486   }
2487 }
2488 
Generate_WasmCompileLazy(MacroAssembler * masm)2489 void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
2490   // The function index was put in t0 by the jump table trampoline.
2491   // Convert to Smi for the runtime call
2492   __ SmiTag(kWasmCompileLazyFuncIndexRegister);
2493   {
2494     HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
2495     FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
2496 
2497     // Save all parameter registers (see wasm-linkage.cc). They might be
2498     // overwritten in the runtime call below. We don't have any callee-saved
2499     // registers in wasm, so no need to store anything else.
2500     constexpr RegList gp_regs =
2501         Register::ListOf(a0, a2, a3, a4, a5, a6, a7);
2502     constexpr RegList fp_regs =
2503         DoubleRegister::ListOf(f2, f4, f6, f8, f10, f12, f14);
2504     __ MultiPush(gp_regs);
2505     __ MultiPushFPU(fp_regs);
2506 
2507     // Pass instance and function index as an explicit arguments to the runtime
2508     // function.
2509     __ Push(kWasmInstanceRegister, kWasmCompileLazyFuncIndexRegister);
2510     // Initialize the JavaScript context with 0. CEntry will use it to
2511     // set the current context on the isolate.
2512     __ Move(kContextRegister, Smi::zero());
2513     __ CallRuntime(Runtime::kWasmCompileLazy, 2);
2514 
2515     // Restore registers.
2516     __ MultiPopFPU(fp_regs);
2517     __ MultiPop(gp_regs);
2518   }
2519   // Finally, jump to the entrypoint.
2520   __ Jump(v0);
2521 }
2522 
Generate_WasmDebugBreak(MacroAssembler * masm)2523 void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
2524   HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
2525   {
2526     FrameScope scope(masm, StackFrame::WASM_DEBUG_BREAK);
2527 
2528     // Save all parameter registers. They might hold live values, we restore
2529     // them after the runtime call.
2530     __ MultiPush(WasmDebugBreakFrameConstants::kPushedGpRegs);
2531     __ MultiPushFPU(WasmDebugBreakFrameConstants::kPushedFpRegs);
2532 
2533     // Initialize the JavaScript context with 0. CEntry will use it to
2534     // set the current context on the isolate.
2535     __ Move(cp, Smi::zero());
2536     __ CallRuntime(Runtime::kWasmDebugBreak, 0);
2537 
2538     // Restore registers.
2539     __ MultiPopFPU(WasmDebugBreakFrameConstants::kPushedFpRegs);
2540     __ MultiPop(WasmDebugBreakFrameConstants::kPushedGpRegs);
2541   }
2542   __ Ret();
2543 }
2544 
void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
                               SaveFPRegsMode save_doubles, ArgvMode argv_mode,
                               bool builtin_exit_frame) {
  // Called from JavaScript; parameters are on stack as if calling JS function
  // a0: number of arguments including receiver
  // a1: pointer to builtin function
  // fp: frame pointer    (restored after C call)
  // sp: stack pointer    (restored as callee's sp after C call)
  // cp: current context  (C callee-saved)
  //
  // If argv_mode == kArgvInRegister:
  // a2: pointer to the first argument

  if (argv_mode == kArgvInRegister) {
    // Move argv into the correct register.
    __ mov(s1, a2);
  } else {
    // Compute the argv pointer in a callee-saved register.
    __ Dlsa(s1, sp, a0, kPointerSizeLog2);
    __ Dsubu(s1, s1, kPointerSize);
  }
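    // Dlsa computes s1 = sp + (a0 << kPointerSizeLog2); subtracting one
    // pointer leaves s1 pointing at the highest-addressed argument slot,
    // i.e. the first argument.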
  }

  // Enter the exit frame that transitions from JavaScript to C++.
  FrameScope scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(
      save_doubles == kSaveFPRegs, 0,
      builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);

  // s0: number of arguments including receiver (C callee-saved)
  // s1: pointer to first argument (C callee-saved)
  // s2: pointer to builtin function (C callee-saved)

  // Prepare arguments for C routine.
  // a0 = argc
  __ mov(s0, a0);
  __ mov(s2, a1);

  // We are calling compiled C/C++ code. a0 and a1 hold our two arguments. We
  // also need to reserve the 4 argument slots on the stack.

  __ AssertStackIsAligned();

  // a0 = argc, a1 = argv, a2 = isolate
  __ li(a2, ExternalReference::isolate_address(masm->isolate()));
  __ mov(a1, s1);

  __ StoreReturnAddressAndCall(s2);

  // Result returned in v0 or v1:v0 - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ LoadRoot(a4, RootIndex::kException);
  __ Branch(&exception_returned, eq, a4, Operand(v0));

  // Check that there is no pending exception; otherwise we should have
  // returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    ExternalReference pending_exception_address = ExternalReference::Create(
        IsolateAddressId::kPendingExceptionAddress, masm->isolate());
    __ li(a2, pending_exception_address);
    __ Ld(a2, MemOperand(a2));
    __ LoadRoot(a4, RootIndex::kTheHoleValue);
    // Cannot use Check here, as it attempts to generate a call into the
    // runtime.
    __ Branch(&okay, eq, a4, Operand(a2));
    __ stop();
    __ bind(&okay);
  }

  // Exit C frame and return.
  // v0:v1: result
  // sp: stack pointer
  // fp: frame pointer
  Register argc = argv_mode == kArgvInRegister
                      // We don't want to pop arguments so set argc to no_reg.
                      ? no_reg
                      // s0: still holds argc (callee-saved).
                      : s0;
  __ LeaveExitFrame(save_doubles == kSaveFPRegs, argc, EMIT_RETURN);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ExternalReference pending_handler_entrypoint_address =
      ExternalReference::Create(
          IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ExternalReference pending_handler_fp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ExternalReference pending_handler_sp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());

  // Ask the runtime for help to determine the handler. This will set v0 to
  // contain the current pending exception; don't clobber it.
  ExternalReference find_handler =
      ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, 0, a0);
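    // Runtime functions are invoked as (args_length, args_object, isolate);
    // this one ignores the first two, so pass zeros (an assumption based on
    // the generic runtime-call convention).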
    __ mov(a0, zero_reg);
    __ mov(a1, zero_reg);
    __ li(a2, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ li(cp, pending_handler_context_address);
  __ Ld(cp, MemOperand(cp));
  __ li(sp, pending_handler_sp_address);
  __ Ld(sp, MemOperand(sp));
  __ li(fp, pending_handler_fp_address);
  __ Ld(fp, MemOperand(fp));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (cp == 0) for non-JS frames.
  Label zero;
  __ Branch(&zero, eq, cp, Operand(zero_reg));
  __ Sd(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ bind(&zero);

  // Reset the masking register. This is done independent of the underlying
  // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
  // with both configurations. It is safe to always do this, because the
  // underlying register is caller-saved and can be arbitrarily clobbered.
  __ ResetSpeculationPoisonRegister();

  // Compute the handler entry address and jump to it.
  __ li(t9, pending_handler_entrypoint_address);
  __ Ld(t9, MemOperand(t9));
  __ Jump(t9);
}

void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  Label done;
  Register result_reg = t0;

  Register scratch = GetRegisterThatIsNotOneOf(result_reg);
  Register scratch2 = GetRegisterThatIsNotOneOf(result_reg, scratch);
  Register scratch3 = GetRegisterThatIsNotOneOf(result_reg, scratch, scratch2);
  DoubleRegister double_scratch = kScratchDoubleReg;

  // Account for saved regs.
  const int kArgumentOffset = 4 * kPointerSize;

  __ Push(result_reg);
  __ Push(scratch, scratch2, scratch3);

  // Load double input.
  __ Ldc1(double_scratch, MemOperand(sp, kArgumentOffset));

  // Clear cumulative exception flags and save the FCSR.
  __ cfc1(scratch2, FCSR);
  __ ctc1(zero_reg, FCSR);

  // Try a conversion to a signed integer.
  __ Trunc_w_d(double_scratch, double_scratch);
  // Move the converted value into the result register.
  __ mfc1(scratch3, double_scratch);

  // Retrieve and restore the FCSR.
  __ cfc1(scratch, FCSR);
  __ ctc1(scratch2, FCSR);

  // Check for overflow and NaNs.
  __ And(
      scratch, scratch,
      kFCSROverflowFlagMask | kFCSRUnderflowFlagMask | kFCSRInvalidOpFlagMask);
  // If we had no exceptions then set result_reg and we are done.
  Label error;
  __ Branch(&error, ne, scratch, Operand(zero_reg));
  __ Move(result_reg, scratch3);
  __ Branch(&done);
  __ bind(&error);

  // Load the double value and perform a manual truncation.
  Register input_high = scratch2;
  Register input_low = scratch3;

  __ Lw(input_low, MemOperand(sp, kArgumentOffset + Register::kMantissaOffset));
  __ Lw(input_high,
        MemOperand(sp, kArgumentOffset + Register::kExponentOffset));

  Label normal_exponent;
  // Extract the biased exponent in result.
  __ Ext(result_reg, input_high, HeapNumber::kExponentShift,
         HeapNumber::kExponentBits);

  // Check for Infinity and NaNs, which should return 0.
  __ Subu(scratch, result_reg, HeapNumber::kExponentMask);
  __ Movz(result_reg, zero_reg, scratch);
  __ Branch(&done, eq, scratch, Operand(zero_reg));

  // Express exponent as delta to (number of mantissa bits + 31).
  __ Subu(result_reg, result_reg,
          Operand(HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 31));

  // If the delta is strictly positive, all bits would be shifted away,
  // which means that we can return 0.
  __ Branch(&normal_exponent, le, result_reg, Operand(zero_reg));
  __ mov(result_reg, zero_reg);
  __ Branch(&done);

  __ bind(&normal_exponent);
  const int kShiftBase = HeapNumber::kNonMantissaBitsInTopWord - 1;
  // Calculate shift.
  __ Addu(scratch, result_reg, Operand(kShiftBase + HeapNumber::kMantissaBits));
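  // Worked example (assuming standard IEEE-754 double layout constants): for
  // input 2^31 the biased exponent is 1054, so the delta above is
  // 1054 - (1023 + 52 + 31) = -52 and the shift is -52 + 11 + 52 = 11, which
  // moves the implicit mantissa bit from bit 20 of the high word up to
  // bit 31, producing the expected truncated bit pattern.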

  // Save the sign.
  Register sign = result_reg;
  result_reg = no_reg;
  __ And(sign, input_high, Operand(HeapNumber::kSignMask));

  // On ARM shifts > 31 bits are valid and will result in zero. On MIPS we need
  // to check for this specific case.
  Label high_shift_needed, high_shift_done;
  __ Branch(&high_shift_needed, lt, scratch, Operand(32));
  __ mov(input_high, zero_reg);
  __ Branch(&high_shift_done);
  __ bind(&high_shift_needed);

  // Set the implicit 1 before the mantissa part in input_high.
  __ Or(input_high, input_high,
        Operand(1 << HeapNumber::kMantissaBitsInTopWord));
  // Shift the mantissa bits to the correct position.
  // We don't need to clear non-mantissa bits as they will be shifted away.
  // If they weren't, it would mean that the answer is in the 32-bit range.
  __ sllv(input_high, input_high, scratch);

  __ bind(&high_shift_done);

  // Replace the shifted bits with bits from the lower mantissa word.
  Label pos_shift, shift_done;
  __ li(kScratchReg, 32);
  __ subu(scratch, kScratchReg, scratch);
  __ Branch(&pos_shift, ge, scratch, Operand(zero_reg));

  // Negate scratch.
  __ Subu(scratch, zero_reg, scratch);
  __ sllv(input_low, input_low, scratch);
  __ Branch(&shift_done);

  __ bind(&pos_shift);
  __ srlv(input_low, input_low, scratch);

  __ bind(&shift_done);
  __ Or(input_high, input_high, Operand(input_low));
  // Restore sign if necessary.
  __ mov(scratch, sign);
  result_reg = sign;
  sign = no_reg;
  __ Subu(result_reg, zero_reg, input_high);
  __ Movz(result_reg, input_high, scratch);

  __ bind(&done);

  __ Sd(result_reg, MemOperand(sp, kArgumentOffset));
  __ Pop(scratch, scratch2, scratch3);
  __ Pop(result_reg);
  __ Ret();
}

namespace {

int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  DCHECK(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}

// Calls an API function. Allocates a HandleScope, extracts the returned value
// from the handle, and propagates exceptions. Restores the context.
// stack_space is the space to be unwound on exit (it includes the JS call
// arguments space and the additional space allocated for the fast call).
void CallApiFunctionAndReturn(MacroAssembler* masm, Register function_address,
                              ExternalReference thunk_ref, int stack_space,
                              MemOperand* stack_space_operand,
                              MemOperand return_value_operand) {
  Isolate* isolate = masm->isolate();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  const int kNextOffset = 0;
  const int kLimitOffset = AddressOffset(
      ExternalReference::handle_scope_limit_address(isolate), next_address);
  const int kLevelOffset = AddressOffset(
      ExternalReference::handle_scope_level_address(isolate), next_address);

  DCHECK(function_address == a1 || function_address == a2);

  Label profiler_enabled, end_profiler_check;
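  // If profiling or runtime call stats are active, route the call through the
  // thunk (which receives the real callback address as an extra parameter);
  // otherwise call the API function directly.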
  __ li(t9, ExternalReference::is_profiling_address(isolate));
  __ Lb(t9, MemOperand(t9, 0));
  __ Branch(&profiler_enabled, ne, t9, Operand(zero_reg));
  __ li(t9, ExternalReference::address_of_runtime_stats_flag());
  __ Lw(t9, MemOperand(t9, 0));
  __ Branch(&profiler_enabled, ne, t9, Operand(zero_reg));
  {
    // Call the api function directly.
    __ mov(t9, function_address);
    __ Branch(&end_profiler_check);
  }

  __ bind(&profiler_enabled);
  {
    // Additional parameter is the address of the actual callback.
    __ li(t9, thunk_ref);
  }
  __ bind(&end_profiler_check);

  // Allocate HandleScope in callee-save registers.
  __ li(s5, next_address);
  __ Ld(s0, MemOperand(s5, kNextOffset));
  __ Ld(s1, MemOperand(s5, kLimitOffset));
  __ Lw(s2, MemOperand(s5, kLevelOffset));
  __ Addu(s2, s2, Operand(1));
  __ Sw(s2, MemOperand(s5, kLevelOffset));
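  // s0 now caches handle_scope.next, s1 handle_scope.limit and s2 the
  // incremented level; s-registers are callee-saved in the n64 ABI, so these
  // values survive the C call below.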

  __ StoreReturnAddressAndCall(t9);

  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label return_value_loaded;

  // Load value from ReturnValue.
  __ Ld(v0, return_value_operand);
  __ bind(&return_value_loaded);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ Sd(s0, MemOperand(s5, kNextOffset));
  if (__ emit_debug_code()) {
    __ Lw(a1, MemOperand(s5, kLevelOffset));
    __ Check(eq, AbortReason::kUnexpectedLevelAfterReturnFromApiCall, a1,
             Operand(s2));
  }
  __ Subu(s2, s2, Operand(1));
  __ Sw(s2, MemOperand(s5, kLevelOffset));
  __ Ld(kScratchReg, MemOperand(s5, kLimitOffset));
  __ Branch(&delete_allocated_handles, ne, s1, Operand(kScratchReg));

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);

  if (stack_space_operand == nullptr) {
    DCHECK_NE(stack_space, 0);
    __ li(s0, Operand(stack_space));
  } else {
    DCHECK_EQ(stack_space, 0);
    STATIC_ASSERT(kCArgSlotCount == 0);
    __ Ld(s0, *stack_space_operand);
  }

  static constexpr bool kDontSaveDoubles = false;
  static constexpr bool kRegisterContainsSlotCount = false;
  __ LeaveExitFrame(kDontSaveDoubles, s0, NO_EMIT_RETURN,
                    kRegisterContainsSlotCount);

  // Check if the function scheduled an exception.
  __ LoadRoot(a4, RootIndex::kTheHoleValue);
  __ li(kScratchReg, ExternalReference::scheduled_exception_address(isolate));
  __ Ld(a5, MemOperand(kScratchReg));
  __ Branch(&promote_scheduled_exception, ne, a4, Operand(a5));

  __ Ret();

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  __ bind(&delete_allocated_handles);
  __ Sd(s1, MemOperand(s5, kLimitOffset));
  __ mov(s0, v0);
  __ mov(a0, v0);
  __ PrepareCallCFunction(1, s1);
  __ li(a0, ExternalReference::isolate_address(isolate));
  __ CallCFunction(ExternalReference::delete_handle_scope_extensions(), 1);
  __ mov(v0, s0);
  __ jmp(&leave_exit_frame);
}

}  // namespace

void Builtins::Generate_CallApiCallback(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- cp                  : context
  //  -- a1                  : api function address
  //  -- a2                  : arguments count (not including the receiver)
  //  -- a3                  : call data
  //  -- a0                  : holder
  //  --
  //  -- sp[0]               : last argument
  //  -- ...
  //  -- sp[(argc - 1) * 8]  : first argument
  //  -- sp[(argc + 0) * 8]  : receiver
  // -----------------------------------

  Register api_function_address = a1;
  Register argc = a2;
  Register call_data = a3;
  Register holder = a0;
  Register scratch = t0;
  Register base = t1;  // For addressing MemOperands on the stack.

  DCHECK(!AreAliased(api_function_address, argc, call_data,
                     holder, scratch, base));

  using FCA = FunctionCallbackArguments;

  STATIC_ASSERT(FCA::kArgsLength == 6);
  STATIC_ASSERT(FCA::kNewTargetIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);

  // Set up FunctionCallbackInfo's implicit_args on the stack as follows:
  //
  // Target state:
  //   sp[0 * kPointerSize]: kHolder
  //   sp[1 * kPointerSize]: kIsolate
  //   sp[2 * kPointerSize]: undefined (kReturnValueDefaultValue)
  //   sp[3 * kPointerSize]: undefined (kReturnValue)
  //   sp[4 * kPointerSize]: kData
  //   sp[5 * kPointerSize]: undefined (kNewTarget)

  // Set up the base register for addressing through MemOperands. It will point
  // at the receiver (located at sp + argc * kPointerSize).
  __ Dlsa(base, sp, argc, kPointerSizeLog2);

  // Reserve space on the stack.
  __ Dsubu(sp, sp, Operand(FCA::kArgsLength * kPointerSize));

  // kHolder.
  __ Sd(holder, MemOperand(sp, 0 * kPointerSize));

  // kIsolate.
  __ li(scratch, ExternalReference::isolate_address(masm->isolate()));
  __ Sd(scratch, MemOperand(sp, 1 * kPointerSize));

  // kReturnValueDefaultValue and kReturnValue.
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  __ Sd(scratch, MemOperand(sp, 2 * kPointerSize));
  __ Sd(scratch, MemOperand(sp, 3 * kPointerSize));

  // kData.
  __ Sd(call_data, MemOperand(sp, 4 * kPointerSize));

  // kNewTarget.
  __ Sd(scratch, MemOperand(sp, 5 * kPointerSize));

  // Keep a pointer to kHolder (= implicit_args) in a scratch register.
  // We use it below to set up the FunctionCallbackInfo object.
  __ mov(scratch, sp);

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  static constexpr int kApiStackSpace = 4;
  static constexpr bool kDontSaveDoubles = false;
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(kDontSaveDoubles, kApiStackSpace);

  // EnterExitFrame may align the sp.

  // FunctionCallbackInfo::implicit_args_ (points at kHolder as set up above).
  // Arguments are after the return address (pushed by EnterExitFrame()).
  __ Sd(scratch, MemOperand(sp, 1 * kPointerSize));

  // FunctionCallbackInfo::values_ (points at the first varargs argument passed
  // on the stack).
  __ Dsubu(scratch, base, Operand(1 * kPointerSize));
  __ Sd(scratch, MemOperand(sp, 2 * kPointerSize));

  // FunctionCallbackInfo::length_.
  // Stored as int field, 32-bit integers within struct on stack always left
  // justified by n64 ABI.
  __ Sw(argc, MemOperand(sp, 3 * kPointerSize));

  // We also store the number of bytes to drop from the stack after returning
  // from the API function here.
  // Note: Unlike on other architectures, this stores the number of slots to
  // drop, not the number of bytes.
  __ Daddu(scratch, argc, Operand(FCA::kArgsLength + 1 /* receiver */));
  __ Sd(scratch, MemOperand(sp, 4 * kPointerSize));
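  // Exit-frame layout at this point (offsets in kPointerSize slots):
  // sp[0] holds the return address slot, sp[1] = implicit_args,
  // sp[2] = values_, sp[3] = length_ (32-bit), sp[4] = number of stack slots
  // to drop on return.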

  // v8::InvocationCallback's argument.
  DCHECK(!AreAliased(api_function_address, scratch, a0));
  __ Daddu(a0, sp, Operand(1 * kPointerSize));

  ExternalReference thunk_ref = ExternalReference::invoke_function_callback();

  // There are two stack slots above the arguments we constructed on the stack.
  // TODO(jgruber): Document what these arguments are.
  static constexpr int kStackSlotsAboveFCA = 2;
  MemOperand return_value_operand(
      fp, (kStackSlotsAboveFCA + FCA::kReturnValueOffset) * kPointerSize);

  static constexpr int kUseStackSpaceOperand = 0;
  MemOperand stack_space_operand(sp, 4 * kPointerSize);
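  // stack_space_operand aliases the slot written above with the number of
  // slots to drop, so CallApiFunctionAndReturn reads the unwind amount back
  // from the stack after the API call returns.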

  AllowExternalCallThatCantCauseGC scope(masm);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           kUseStackSpaceOperand, &stack_space_operand,
                           return_value_operand);
}

void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
  // Build v8::PropertyCallbackInfo::args_ array on the stack and push the
  // property name below the exit frame to make the GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = a4;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  Register api_function_address = a2;

  // Here and below +1 is for name() pushed after the args_ array.
  using PCA = PropertyCallbackArguments;
  __ Dsubu(sp, sp, (PCA::kArgsLength + 1) * kPointerSize);
  __ Sd(receiver, MemOperand(sp, (PCA::kThisIndex + 1) * kPointerSize));
  __ Ld(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset));
  __ Sd(scratch, MemOperand(sp, (PCA::kDataIndex + 1) * kPointerSize));
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  __ Sd(scratch, MemOperand(sp, (PCA::kReturnValueOffset + 1) * kPointerSize));
  __ Sd(scratch, MemOperand(sp, (PCA::kReturnValueDefaultValueIndex + 1) *
                                    kPointerSize));
  __ li(scratch, ExternalReference::isolate_address(masm->isolate()));
  __ Sd(scratch, MemOperand(sp, (PCA::kIsolateIndex + 1) * kPointerSize));
  __ Sd(holder, MemOperand(sp, (PCA::kHolderIndex + 1) * kPointerSize));
  // should_throw_on_error -> false
  DCHECK_EQ(0, Smi::zero().ptr());
  __ Sd(zero_reg,
        MemOperand(sp, (PCA::kShouldThrowOnErrorIndex + 1) * kPointerSize));
  __ Ld(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));
  __ Sd(scratch, MemOperand(sp, 0 * kPointerSize));
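  // Stack now (in kPointerSize slots): sp[0] = name handle, sp[1..7] = the
  // args_ array in index order (ShouldThrowOnError, Holder, Isolate,
  // ReturnValueDefaultValue, ReturnValue, Data, This).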

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Load address of v8::PropertyCallbackInfo::args_ array and name handle.
  __ mov(a0, sp);                               // a0 = Handle<Name>
  __ Daddu(a1, a0, Operand(1 * kPointerSize));  // a1 = v8::PCI::args_

  const int kApiStackSpace = 1;
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  __ Sd(a1, MemOperand(sp, 1 * kPointerSize));
  __ Daddu(a1, sp, Operand(1 * kPointerSize));
  // a1 = v8::PropertyCallbackInfo&

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback();

  __ Ld(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset));
  __ Ld(api_function_address,
        FieldMemOperand(scratch, Foreign::kForeignAddressOffset));

  // +3 is to skip the prologue, the return address and the name handle.
  MemOperand return_value_operand(
      fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  MemOperand* const kUseStackSpaceConstant = nullptr;
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           kStackUnwindSpace, kUseStackSpaceConstant,
                           return_value_operand);
}

void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
  // The sole purpose of DirectCEntry is for movable callers (e.g. any general
  // purpose Code object) to be able to call into C functions that may trigger
  // GC and thus move the caller.
  //
  // DirectCEntry places the return address on the stack (updated by the GC),
  // making the call GC safe. The irregexp backend relies on this.

  // Make place for arguments to fit C calling convention. Callers use
  // EnterExitFrame/LeaveExitFrame so they handle stack restoring and we don't
  // have to do that here. Any caller must drop kCArgsSlotsSize stack space
  // after the call.
  __ daddiu(sp, sp, -kCArgsSlotsSize);

  __ Sd(ra, MemOperand(sp, kCArgsSlotsSize));  // Store the return address.
  __ Call(t9);                                 // Call the C++ function.
  __ Ld(t9, MemOperand(sp, kCArgsSlotsSize));  // Reload the (possibly
                                               // GC-updated) return address.

  if (FLAG_debug_code && FLAG_enable_slow_asserts) {
    // In case of an error the return address may point to a memory area
    // filled with kZapValue by the GC. Dereference the address and check for
    // this.
    __ Uld(a4, MemOperand(t9));
    __ Assert(ne, AbortReason::kReceivedInvalidReturnAddress, a4,
              Operand(reinterpret_cast<uint64_t>(kZapValue)));
  }

  __ Jump(t9);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64