// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_BASELINE_RISCV64_BASELINE_ASSEMBLER_RISCV64_INL_H_
#define V8_BASELINE_RISCV64_BASELINE_ASSEMBLER_RISCV64_INL_H_

#include "src/baseline/baseline-assembler.h"
#include "src/codegen/assembler-inl.h"
#include "src/codegen/interface-descriptors.h"

namespace v8 {
namespace internal {
namespace baseline {

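// Typical use, as seen throughout this file: construct a scope on the stack,
// acquire the registers needed, and let the destructor restore the previous
// scope (inner scopes chain to outer ones through prev_scope_).
//
//   ScratchRegisterScope temps(this);
//   Register tmp = temps.AcquireScratch();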
class BaselineAssembler::ScratchRegisterScope {
 public:
  explicit ScratchRegisterScope(BaselineAssembler* assembler)
      : assembler_(assembler),
        prev_scope_(assembler->scratch_register_scope_),
        wrapped_scope_(assembler->masm()) {
    if (!assembler_->scratch_register_scope_) {
      // If we haven't opened a scratch scope yet, for the first one add a
      // couple of extra registers.
      wrapped_scope_.Include(kScratchReg, kScratchReg2);
    }
    assembler_->scratch_register_scope_ = this;
  }
  ~ScratchRegisterScope() { assembler_->scratch_register_scope_ = prev_scope_; }

  Register AcquireScratch() { return wrapped_scope_.Acquire(); }

 private:
  BaselineAssembler* assembler_;
  ScratchRegisterScope* prev_scope_;
  UseScratchRegisterScope wrapped_scope_;
};

enum class Condition : uint32_t {
  kEqual = eq,
  kNotEqual = ne,

  kLessThan = lt,
  kGreaterThan = gt,
  kLessThanEqual = le,
  kGreaterThanEqual = ge,

  kUnsignedLessThan = Uless,
  kUnsignedGreaterThan = Ugreater,
  kUnsignedLessThanEqual = Uless_equal,
  kUnsignedGreaterThanEqual = Ugreater_equal,

  kOverflow = overflow,
  kNoOverflow = no_overflow,

  kZero = eq,
  kNotZero = ne,
};

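// Each Condition enumerator above is defined directly in terms of the
// matching internal::Condition value, so the conversion is a plain cast.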
inline internal::Condition AsMasmCondition(Condition cond) {
  return static_cast<internal::Condition>(cond);
}

namespace detail {

#ifdef DEBUG
inline bool Clobbers(Register target, MemOperand op) {
  return op.is_reg() && op.rm() == target;
}
#endif

}  // namespace detail

#define __ masm_->

MemOperand BaselineAssembler::RegisterFrameOperand(
    interpreter::Register interpreter_register) {
  return MemOperand(fp, interpreter_register.ToOperand() * kSystemPointerSize);
}
MemOperand BaselineAssembler::FeedbackVectorOperand() {
  return MemOperand(fp, BaselineFrameConstants::kFeedbackVectorFromFp);
}

void BaselineAssembler::Bind(Label* label) { __ bind(label); }

void BaselineAssembler::BindWithoutJumpTarget(Label* label) { __ bind(label); }

void BaselineAssembler::JumpTarget() {
  // Nop
}

void BaselineAssembler::Jump(Label* target, Label::Distance distance) {
  __ jmp(target);
}
void BaselineAssembler::JumpIfRoot(Register value, RootIndex index,
                                   Label* target, Label::Distance) {
  __ JumpIfRoot(value, index, target);
}
void BaselineAssembler::JumpIfNotRoot(Register value, RootIndex index,
                                      Label* target, Label::Distance) {
  __ JumpIfNotRoot(value, index, target);
}
void BaselineAssembler::JumpIfSmi(Register value, Label* target,
                                  Label::Distance) {
  __ JumpIfSmi(value, target);
}
void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
                                     Label::Distance) {
  __ JumpIfNotSmi(value, target);
}

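// Note: t6 also serves as the jump-table scratch in Switch below; it is
// assumed safe to clobber here since no baseline value is kept live in it
// across a builtin call.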
void BaselineAssembler::CallBuiltin(Builtin builtin) {
  ASM_CODE_COMMENT_STRING(masm_,
                          __ CommentForOffHeapTrampoline("call", builtin));
  Register temp = t6;
  __ LoadEntryFromBuiltin(builtin, temp);
  __ Call(temp);
}

void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
  ASM_CODE_COMMENT_STRING(masm_,
                          __ CommentForOffHeapTrampoline("tail call", builtin));
  Register temp = t6;
  __ LoadEntryFromBuiltin(builtin, temp);
  __ Jump(temp);
}

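// Example: TestAndBranch(x, 1 << 3, Condition::kNotZero, &on_set) branches to
// on_set when bit 3 of x is set.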
void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
                                      Label* target, Label::Distance) {
  ScratchRegisterScope temps(this);
  Register tmp = temps.AcquireScratch();
  __ And(tmp, value, Operand(mask));
  __ Branch(target, AsMasmCondition(cc), tmp, Operand(zero_reg));
}

void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
                               Label* target, Label::Distance) {
  __ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
}
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
                                         InstanceType instance_type,
                                         Register map, Label* target,
                                         Label::Distance) {
  ScratchRegisterScope temps(this);
  Register type = temps.AcquireScratch();
  __ GetObjectType(object, map, type);
  __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
}
void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                           InstanceType instance_type,
                                           Label* target, Label::Distance) {
  ScratchRegisterScope temps(this);
  Register type = temps.AcquireScratch();
  if (FLAG_debug_code) {
    __ AssertNotSmi(map);
    __ GetObjectType(map, type, type);
    __ Assert(eq, AbortReason::kUnexpectedValue, type, Operand(MAP_TYPE));
  }
  __ Lhu(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
}
void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
                                      MemOperand operand, Label* target,
                                      Label::Distance) {
  ScratchRegisterScope temps(this);
  Register temp = temps.AcquireScratch();
  __ Ld(temp, operand);
  __ Branch(target, AsMasmCondition(cc), value, Operand(temp));
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
                                  Label* target, Label::Distance) {
  ScratchRegisterScope temps(this);
  Register temp = temps.AcquireScratch();
  __ li(temp, Operand(smi));
  __ SmiUntag(temp);
  __ Branch(target, AsMasmCondition(cc), value, Operand(temp));
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
                                  Label* target, Label::Distance) {
  // TODO(riscv64): compress pointer
  __ AssertSmi(lhs);
  __ AssertSmi(rhs);
  __ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
}
void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
                                     MemOperand operand, Label* target,
                                     Label::Distance) {
  // TODO(riscv64): compress pointer
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ Ld(scratch, operand);
  __ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
}
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
                                     Register value, Label* target,
                                     Label::Distance) {
  // TODO(riscv64): compress pointer
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ Ld(scratch, operand);
  __ Branch(target, AsMasmCondition(cc), scratch, Operand(value));
}
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
                                   Label* target, Label::Distance) {
  __ Branch(target, AsMasmCondition(cc), value, Operand(byte));
}

void BaselineAssembler::Move(interpreter::Register output, Register source) {
  Move(RegisterFrameOperand(output), source);
}
void BaselineAssembler::Move(Register output, TaggedIndex value) {
  __ li(output, Operand(value.ptr()));
}
void BaselineAssembler::Move(MemOperand output, Register source) {
  __ Sd(source, output);
}
void BaselineAssembler::Move(Register output, ExternalReference reference) {
  __ li(output, Operand(reference));
}
void BaselineAssembler::Move(Register output, Handle<HeapObject> value) {
  __ li(output, Operand(value));
}
void BaselineAssembler::Move(Register output, int32_t value) {
  __ li(output, Operand(value));
}
void BaselineAssembler::MoveMaybeSmi(Register output, Register source) {
  __ Move(output, source);
}
void BaselineAssembler::MoveSmi(Register output, Register source) {
  __ Move(output, source);
}

namespace detail {

template <typename Arg>
inline Register ToRegister(BaselineAssembler* basm,
                           BaselineAssembler::ScratchRegisterScope* scope,
                           Arg arg) {
  Register reg = scope->AcquireScratch();
  basm->Move(reg, arg);
  return reg;
}
inline Register ToRegister(BaselineAssembler* basm,
                           BaselineAssembler::ScratchRegisterScope* scope,
                           Register reg) {
  return reg;
}

template <typename... Args>
struct PushAllHelper;
template <>
struct PushAllHelper<> {
  static int Push(BaselineAssembler* basm) { return 0; }
  static int PushReverse(BaselineAssembler* basm) { return 0; }
};
template <typename Arg>
struct PushAllHelper<Arg> {
  static int Push(BaselineAssembler* basm, Arg arg) {
    BaselineAssembler::ScratchRegisterScope scope(basm);
    basm->masm()->Push(ToRegister(basm, &scope, arg));
    return 1;
  }
  static int PushReverse(BaselineAssembler* basm, Arg arg) {
    return Push(basm, arg);
  }
};
template <typename Arg, typename... Args>
struct PushAllHelper<Arg, Args...> {
  static int Push(BaselineAssembler* basm, Arg arg, Args... args) {
    PushAllHelper<Arg>::Push(basm, arg);
    return 1 + PushAllHelper<Args...>::Push(basm, args...);
  }
  static int PushReverse(BaselineAssembler* basm, Arg arg, Args... args) {
    int nargs = PushAllHelper<Args...>::PushReverse(basm, args...);
    PushAllHelper<Arg>::Push(basm, arg);
    return nargs + 1;
  }
};
template <>
struct PushAllHelper<interpreter::RegisterList> {
  static int Push(BaselineAssembler* basm, interpreter::RegisterList list) {
    for (int reg_index = 0; reg_index < list.register_count(); ++reg_index) {
      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
    }
    return list.register_count();
  }
  static int PushReverse(BaselineAssembler* basm,
                         interpreter::RegisterList list) {
    for (int reg_index = list.register_count() - 1; reg_index >= 0;
         --reg_index) {
      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
    }
    return list.register_count();
  }
};

template <typename... T>
struct PopAllHelper;
template <>
struct PopAllHelper<> {
  static void Pop(BaselineAssembler* basm) {}
};
template <>
struct PopAllHelper<Register> {
  static void Pop(BaselineAssembler* basm, Register reg) {
    basm->masm()->Pop(reg);
  }
};
template <typename... T>
struct PopAllHelper<Register, T...> {
  static void Pop(BaselineAssembler* basm, Register reg, T... tail) {
    PopAllHelper<Register>::Pop(basm, reg);
    PopAllHelper<T...>::Pop(basm, tail...);
  }
};

}  // namespace detail

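// Push returns the number of slots pushed so callers can keep track of frame
// size; PushReverse pushes the same values last-to-first, for sequences that
// want the first value on top. Usage sketch:
//
//   basm.Push(reg1, reg2);       // pushes reg1, then reg2 (reg2 on top)
//   basm.PushReverse(arg_list);  // highest-indexed register pushed first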
template <typename... T>
int BaselineAssembler::Push(T... vals) {
  return detail::PushAllHelper<T...>::Push(this, vals...);
}

template <typename... T>
void BaselineAssembler::PushReverse(T... vals) {
  detail::PushAllHelper<T...>::PushReverse(this, vals...);
}

template <typename... T>
void BaselineAssembler::Pop(T... registers) {
  detail::PopAllHelper<T...>::Pop(this, registers...);
}

void BaselineAssembler::LoadTaggedPointerField(Register output, Register source,
                                               int offset) {
  __ LoadTaggedPointerField(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
                                              int offset) {
  __ LoadTaggedSignedField(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
                                           int offset) {
  __ LoadAnyTaggedField(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::LoadByteField(Register output, Register source,
                                      int offset) {
  __ Lb(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
                                               Smi value) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope temps(this);
  Register tmp = temps.AcquireScratch();
  __ li(tmp, Operand(value));
  __ StoreTaggedField(tmp, FieldMemOperand(target, offset));
}
void BaselineAssembler::StoreTaggedFieldWithWriteBarrier(Register target,
                                                         int offset,
                                                         Register value) {
  ASM_CODE_COMMENT(masm_);
  __ StoreTaggedField(value, FieldMemOperand(target, offset));
  __ RecordWriteField(target, offset, value, kRAHasNotBeenSaved,
                      SaveFPRegsMode::kIgnore);
}
void BaselineAssembler::StoreTaggedFieldNoWriteBarrier(Register target,
                                                       int offset,
                                                       Register value) {
  __ StoreTaggedField(value, FieldMemOperand(target, offset));
}

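// The interrupt budget is a counter stored on the function's FeedbackCell.
// Both overloads below add a weight (negative when consuming budget) and take
// skip_interrupt_label only while the updated budget is still non-negative;
// once it goes negative the caller falls through and raises the bytecode
// budget interrupt.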
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    int32_t weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);

  Register interrupt_budget = scratch_scope.AcquireScratch();
  __ Lw(interrupt_budget,
        FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  // RISC-V has no flags, so compare the updated budget explicitly below.
  __ Add32(interrupt_budget, interrupt_budget, weight);
  __ Sw(interrupt_budget,
        FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  if (skip_interrupt_label) {
    DCHECK_LT(weight, 0);
    __ Branch(skip_interrupt_label, ge, interrupt_budget, Operand(zero_reg));
  }
}

void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    Register weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);

  Register interrupt_budget = scratch_scope.AcquireScratch();
  __ Lw(interrupt_budget,
        FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  // RISC-V has no flags, so compare the updated budget explicitly below.
  __ Add32(interrupt_budget, interrupt_budget, weight);
  __ Sw(interrupt_budget,
        FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  if (skip_interrupt_label)
    __ Branch(skip_interrupt_label, ge, interrupt_budget, Operand(zero_reg));
}

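// With 31-bit Smis (pointer compression) the tagged value lives in the low 32
// bits, so a sign-extending 32-bit add suffices; with 32-bit Smis the payload
// sits in the upper word and needs a full 64-bit add.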
void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
  ASM_CODE_COMMENT(masm_);
  if (SmiValuesAre31Bits()) {
    __ Add32(lhs, lhs, Operand(rhs));
  } else {
    __ Add64(lhs, lhs, Operand(rhs));
  }
}

void BaselineAssembler::Switch(Register reg, int case_value_base,
                               Label** labels, int num_labels) {
  ASM_CODE_COMMENT(masm_);
  Label fallthrough;
  if (case_value_base != 0) {
    __ Sub64(reg, reg, Operand(case_value_base));
  }

  // Mostly copied from code-generator-riscv64.cc
  ScratchRegisterScope scope(this);
  Label table;
  __ Branch(&fallthrough, AsMasmCondition(Condition::kUnsignedGreaterThanEqual),
            reg, Operand(static_cast<int64_t>(num_labels)));
  int64_t imm64 = __ branch_long_offset(&table);
  CHECK(is_int32(imm64 + 0x800));
  int32_t Hi20 = static_cast<int32_t>((imm64 + 0x800) >> 12);
  int32_t Lo12 = static_cast<int32_t>(imm64) << 20 >> 20;
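  // Example: imm64 = 0x12345FFF gives Hi20 = (0x12345FFF + 0x800) >> 12 =
  // 0x12346 and Lo12 = -1; auipc/addi then compute
  // pc + (0x12346 << 12) - 1 = pc + 0x12345FFF. Adding 0x800 first rounds
  // Hi20 up precisely when Lo12 ends up negative.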
  __ auipc(t6, Hi20);     // t6 = pc + (Hi20 << 12)
  __ addi(t6, t6, Lo12);  // t6 = pc + imm64, the address of the table

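  // Each table entry below is a BranchLong, which expands to two 4-byte
  // instructions, so entries are 8 (= 1 << 3) bytes apart.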
  int entry_size_log2 = 3;
  __ CalcScaledAddress(t6, t6, reg, entry_size_log2);
  __ Jump(t6);
  {
    TurboAssembler::BlockTrampolinePoolScope block_trampoline_pool(masm());
    __ BlockTrampolinePoolFor(num_labels * kInstrSize * 2);
    __ bind(&table);
    for (int i = 0; i < num_labels; ++i) {
      __ BranchLong(labels[i]);
    }
    DCHECK_EQ(num_labels * 2, __ InstructionsGeneratedSince(&table));
    __ bind(&fallthrough);
  }
}

#undef __

#define __ basm.

void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
  ASM_CODE_COMMENT(masm);
  BaselineAssembler basm(masm);

  Register weight = BaselineLeaveFrameDescriptor::WeightRegister();
  Register params_size = BaselineLeaveFrameDescriptor::ParamsSizeRegister();

  {
    ASM_CODE_COMMENT_STRING(masm, "Update Interrupt Budget");

    Label skip_interrupt_label;
    __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, &skip_interrupt_label);
    __ masm()->SmiTag(params_size);
    __ masm()->Push(params_size, kInterpreterAccumulatorRegister);

    __ LoadContext(kContextRegister);
    __ LoadFunction(kJSFunctionRegister);
    __ masm()->Push(kJSFunctionRegister);
    __ CallRuntime(Runtime::kBytecodeBudgetInterruptFromBytecode, 1);

    __ masm()->Pop(params_size, kInterpreterAccumulatorRegister);
    __ masm()->SmiUntag(params_size);

    __ Bind(&skip_interrupt_label);
  }

  BaselineAssembler::ScratchRegisterScope temps(&basm);
  Register actual_params_size = temps.AcquireScratch();
  // Compute the size of the actual parameters + receiver (in bytes).
  __ Move(actual_params_size,
          MemOperand(fp, StandardFrameConstants::kArgCOffset));

  // If actual is bigger than formal, then we should use it to free up the stack
  // arguments.
  Label corrected_args_count;
  __ masm()->Branch(&corrected_args_count, ge, params_size,
                    Operand(actual_params_size), Label::Distance::kNear);
  __ masm()->Move(params_size, actual_params_size);
  __ Bind(&corrected_args_count);

  // Leave the frame (also dropping the register file).
  __ masm()->LeaveFrame(StackFrame::BASELINE);

  // Drop receiver + arguments.
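  // e.g. params_size == 2 drops (2 + 1) << kSystemPointerSizeLog2 = 24 bytes:
  // the receiver plus two arguments.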
  __ masm()->Add64(params_size, params_size, 1);  // Include the receiver.
  __ masm()->slli(params_size, params_size, kSystemPointerSizeLog2);
  __ masm()->Add64(sp, sp, params_size);
  __ masm()->Ret();
}

#undef __

inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
    Register reg) {
  assembler_->masm()->Assert(eq, AbortReason::kUnexpectedValue, reg,
                             Operand(kInterpreterAccumulatorRegister));
}
}  // namespace baseline
}  // namespace internal
}  // namespace v8

#endif  // V8_BASELINE_RISCV64_BASELINE_ASSEMBLER_RISCV64_INL_H_