// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_WASM_BASELINE_RISCV64_LIFTOFF_ASSEMBLER_RISCV64_H_
#define V8_WASM_BASELINE_RISCV64_LIFTOFF_ASSEMBLER_RISCV64_H_

#include "src/base/platform/wrappers.h"
#include "src/heap/memory-chunk.h"
#include "src/wasm/baseline/liftoff-assembler.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {
namespace wasm {

namespace liftoff {

inline constexpr Condition ToCondition(LiftoffCondition liftoff_cond) {
  switch (liftoff_cond) {
    case kEqual:
      return eq;
    case kUnequal:
      return ne;
    case kSignedLessThan:
      return lt;
    case kSignedLessEqual:
      return le;
    case kSignedGreaterThan:
      return gt;
    case kSignedGreaterEqual:
      return ge;
    case kUnsignedLessThan:
      return ult;
    case kUnsignedLessEqual:
      return ule;
    case kUnsignedGreaterThan:
      return ugt;
    case kUnsignedGreaterEqual:
      return uge;
  }
}

// Liftoff Frames.
//
//  slot      Frame
//       +--------------------+---------------------------
//  n+4  | optional padding slot to keep the stack 16 byte aligned.
//  n+3  |   parameter n      |
//  ...  |       ...          |
//   4   |   parameter 1      | or parameter 2
//   3   |   parameter 0      | or parameter 1
//   2   |  (result address)  | or parameter 0
//  -----+--------------------+---------------------------
//   1   | return addr (ra)   |
//   0   | previous frame (fp)|
//  -----+--------------------+  <-- frame ptr (fp)
//  -1   | 0xa: WASM          |
//  -2   |     instance       |
//  -----+--------------------+---------------------------
//  -3   |     slot 0         |   ^
//  -4   |     slot 1         |   |
//       |                    | Frame slots
//       |                    |   |
//       |                    |   v
//       | optional padding slot to keep the stack 16 byte aligned.
//  -----+--------------------+  <-- stack ptr (sp)
//

// fp-8 holds the stack marker, fp-16 is the instance parameter.
constexpr int kInstanceOffset = 16;

inline MemOperand GetStackSlot(int offset) { return MemOperand(fp, -offset); }
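// For example, GetStackSlot(kInstanceOffset) yields MemOperand(fp, -16):
// stack-slot offsets are positive distances below the frame pointer.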

inline MemOperand GetInstanceOperand() { return GetStackSlot(kInstanceOffset); }

inline MemOperand GetMemOp(LiftoffAssembler* assm, Register addr,
                           Register offset, uintptr_t offset_imm) {
  if (is_uint31(offset_imm)) {
    int32_t offset_imm32 = static_cast<int32_t>(offset_imm);
    if (offset == no_reg) return MemOperand(addr, offset_imm32);
    assm->Add64(kScratchReg2, addr, offset);
    return MemOperand(kScratchReg2, offset_imm32);
  }
  // Offset immediate does not fit in 31 bits.
  assm->li(kScratchReg2, offset_imm);
  assm->Add64(kScratchReg2, kScratchReg2, addr);
  if (offset != no_reg) {
    assm->Add64(kScratchReg2, kScratchReg2, offset);
  }
  return MemOperand(kScratchReg2, 0);
}
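// Note: GetMemOp may clobber kScratchReg2, so callers must not pass
// kScratchReg2 as {addr} or {offset} and should consume the returned
// MemOperand before reusing that register.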

inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, MemOperand src,
                 ValueKind kind) {
  switch (kind) {
    case kI32:
      assm->Lw(dst.gp(), src);
      break;
    case kI64:
    case kRef:
    case kOptRef:
    case kRtt:
      assm->Ld(dst.gp(), src);
      break;
    case kF32:
      assm->LoadFloat(dst.fp(), src);
      break;
    case kF64:
      assm->LoadDouble(dst.fp(), src);
      break;
    default:
      UNREACHABLE();
  }
}

inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
                  LiftoffRegister src, ValueKind kind) {
  MemOperand dst(base, offset);
  switch (kind) {
    case kI32:
      assm->Usw(src.gp(), dst);
      break;
    case kI64:
    case kOptRef:
    case kRef:
    case kRtt:
      assm->Usd(src.gp(), dst);
      break;
    case kF32:
      assm->UStoreFloat(src.fp(), dst, kScratchReg);
      break;
    case kF64:
      assm->UStoreDouble(src.fp(), dst, kScratchReg);
      break;
    default:
      UNREACHABLE();
  }
}

inline void push(LiftoffAssembler* assm, LiftoffRegister reg, ValueKind kind) {
  switch (kind) {
    case kI32:
      assm->addi(sp, sp, -kSystemPointerSize);
      assm->Sw(reg.gp(), MemOperand(sp, 0));
      break;
    case kI64:
    case kOptRef:
    case kRef:
    case kRtt:
      assm->push(reg.gp());
      break;
    case kF32:
      assm->addi(sp, sp, -kSystemPointerSize);
      assm->StoreFloat(reg.fp(), MemOperand(sp, 0));
      break;
    case kF64:
      assm->addi(sp, sp, -kSystemPointerSize);
      assm->StoreDouble(reg.fp(), MemOperand(sp, 0));
      break;
    default:
      UNREACHABLE();
  }
}

#if defined(V8_TARGET_BIG_ENDIAN)
inline void ChangeEndiannessLoad(LiftoffAssembler* assm, LiftoffRegister dst,
                                 LoadType type, LiftoffRegList pinned) {
  bool is_float = false;
  LiftoffRegister tmp = dst;
  switch (type.value()) {
    case LoadType::kI64Load8U:
    case LoadType::kI64Load8S:
    case LoadType::kI32Load8U:
    case LoadType::kI32Load8S:
      // No need to change endianness for byte size.
      return;
    case LoadType::kF32Load:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpReg, pinned);
      assm->emit_type_conversion(kExprI32ReinterpretF32, tmp, dst);
      V8_FALLTHROUGH;
    case LoadType::kI64Load32U:
      assm->TurboAssembler::ByteSwapUnsigned(tmp.gp(), tmp.gp(), 4);
      break;
    case LoadType::kI32Load:
    case LoadType::kI64Load32S:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 4);
      break;
    case LoadType::kI32Load16S:
    case LoadType::kI64Load16S:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 2);
      break;
    case LoadType::kI32Load16U:
    case LoadType::kI64Load16U:
      assm->TurboAssembler::ByteSwapUnsigned(tmp.gp(), tmp.gp(), 2);
      break;
    case LoadType::kF64Load:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpReg, pinned);
      assm->emit_type_conversion(kExprI64ReinterpretF64, tmp, dst);
      V8_FALLTHROUGH;
    case LoadType::kI64Load:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 8);
      break;
    default:
      UNREACHABLE();
  }

  if (is_float) {
    switch (type.value()) {
      case LoadType::kF32Load:
        assm->emit_type_conversion(kExprF32ReinterpretI32, dst, tmp);
        break;
      case LoadType::kF64Load:
        assm->emit_type_conversion(kExprF64ReinterpretI64, dst, tmp);
        break;
      default:
        UNREACHABLE();
    }
  }
}

inline void ChangeEndiannessStore(LiftoffAssembler* assm, LiftoffRegister src,
                                  StoreType type, LiftoffRegList pinned) {
  bool is_float = false;
  LiftoffRegister tmp = src;
  switch (type.value()) {
    case StoreType::kI64Store8:
    case StoreType::kI32Store8:
      // No need to change endianness for byte size.
      return;
    case StoreType::kF32Store:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpReg, pinned);
      assm->emit_type_conversion(kExprI32ReinterpretF32, tmp, src);
      V8_FALLTHROUGH;
    case StoreType::kI32Store:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 4);
      break;
    case StoreType::kI32Store16:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 2);
      break;
    case StoreType::kF64Store:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpReg, pinned);
      assm->emit_type_conversion(kExprI64ReinterpretF64, tmp, src);
      V8_FALLTHROUGH;
    case StoreType::kI64Store:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 8);
      break;
    case StoreType::kI64Store32:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 4);
      break;
    case StoreType::kI64Store16:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 2);
      break;
    default:
      UNREACHABLE();
  }

  if (is_float) {
    switch (type.value()) {
      case StoreType::kF32Store:
        assm->emit_type_conversion(kExprF32ReinterpretI32, src, tmp);
        break;
      case StoreType::kF64Store:
        assm->emit_type_conversion(kExprF64ReinterpretI64, src, tmp);
        break;
      default:
        UNREACHABLE();
    }
  }
}
#endif  // V8_TARGET_BIG_ENDIAN

}  // namespace liftoff

int LiftoffAssembler::PrepareStackFrame() {
  int offset = pc_offset();
  // When the frame size is bigger than 4KB, we need two instructions for
  // stack checking, so we reserve space for this case.
  addi(sp, sp, 0);
  nop();
  nop();
  return offset;
}
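// The addi/nop/nop sequence above reserves instruction slots that
// {PatchPrepareStackFrame} later overwrites: a single stack adjustment for
// small frames, or a PC-relative jump to an out-of-line stack check for
// frames of 4KB and larger.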

void LiftoffAssembler::PrepareTailCall(int num_callee_stack_params,
                                       int stack_param_delta) {
  UseScratchRegisterScope temps(this);
  Register scratch = temps.Acquire();

  // Push the return address and frame pointer to complete the stack frame.
  Ld(scratch, MemOperand(fp, 8));
  Push(scratch);
  Ld(scratch, MemOperand(fp, 0));
  Push(scratch);

  // Shift the whole frame upwards.
  int slot_count = num_callee_stack_params + 2;
  for (int i = slot_count - 1; i >= 0; --i) {
    Ld(scratch, MemOperand(sp, i * 8));
    Sd(scratch, MemOperand(fp, (i - stack_param_delta) * 8));
  }

  // Set the new stack and frame pointer.
  Add64(sp, fp, -stack_param_delta * 8);
  Pop(ra, fp);
}

void LiftoffAssembler::AlignFrameSize() {}

void LiftoffAssembler::PatchPrepareStackFrame(
    int offset, SafepointTableBuilder* safepoint_table_builder) {
  // The frame_size includes the frame marker and the instance slot. Both are
  // pushed as part of frame construction, so we don't need to allocate memory
  // for them anymore.
  int frame_size = GetTotalFrameSize() - 2 * kSystemPointerSize;
  // We can't run out of space, just pass anything big enough to not cause the
  // assembler to try to grow the buffer.
  constexpr int kAvailableSpace = 256;
  TurboAssembler patching_assembler(
      nullptr, AssemblerOptions{}, CodeObjectRequired::kNo,
      ExternalAssemblerBuffer(buffer_start_ + offset, kAvailableSpace));

  if (V8_LIKELY(frame_size < 4 * KB)) {
    // This is the standard case for small frames: just subtract from SP and be
    // done with it.
    patching_assembler.Add64(sp, sp, Operand(-frame_size));
    return;
  }

  // The frame size is bigger than 4KB, so we might overflow the available stack
  // space if we first allocate the frame and then do the stack check (we will
  // need some remaining stack space for throwing the exception). That's why we
  // check the available stack space before we allocate the frame. To do this we
  // replace the {__ Add64(sp, sp, -frame_size)} with a jump to OOL code that
  // does this "extended stack check".
  //
  // The OOL code can simply be generated here with the normal assembler,
  // because all other code generation, including OOL code, has already finished
  // when {PatchPrepareStackFrame} is called. The function prologue then jumps
  // to the current {pc_offset()} to execute the OOL code for allocating the
  // large frame.
  // Emit the unconditional branch in the function prologue (from {offset} to
  // {pc_offset()}).

  int imm32 = pc_offset() - offset;
  patching_assembler.GenPCRelativeJump(kScratchReg, imm32);
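  // {GenPCRelativeJump} emits a two-instruction sequence (2 * kInstrSize
  // bytes), which is why the jump back at the end of this function targets
  // {offset + 2 * kInstrSize}.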

  // If the frame is bigger than the stack, we throw the stack overflow
  // exception unconditionally. Thereby we can avoid the integer overflow
  // check in the condition code.
  RecordComment("OOL: stack check for large frame");
  Label continuation;
  if (frame_size < FLAG_stack_size * 1024) {
    Register stack_limit = kScratchReg;
    Ld(stack_limit,
       FieldMemOperand(kWasmInstanceRegister,
                       WasmInstanceObject::kRealStackLimitAddressOffset));
    Ld(stack_limit, MemOperand(stack_limit));
    Add64(stack_limit, stack_limit, Operand(frame_size));
    Branch(&continuation, uge, sp, Operand(stack_limit));
  }

  Call(wasm::WasmCode::kWasmStackOverflow, RelocInfo::WASM_STUB_CALL);
  // The call will not return; just define an empty safepoint.
  safepoint_table_builder->DefineSafepoint(this);
  if (FLAG_debug_code) stop();

  bind(&continuation);

  // Now allocate the stack space. Note that this might do more than just
  // decrementing the SP: {Add64} with an immediate this large first
  // materializes the constant in a scratch register.
  Add64(sp, sp, Operand(-frame_size));

  // Jump back to the start of the function, from {pc_offset()} to
  // right after the reserved space for the {__ Add64(sp, sp, -framesize)}
  // (which is a Branch now).
  int func_start_offset = offset + 2 * kInstrSize;
  imm32 = func_start_offset - pc_offset();
  GenPCRelativeJump(kScratchReg, imm32);
}

void LiftoffAssembler::FinishCode() { ForceConstantPoolEmissionWithoutJump(); }

void LiftoffAssembler::AbortCompilation() { AbortedCodeGeneration(); }

// static
constexpr int LiftoffAssembler::StaticStackFrameSize() {
  return liftoff::kInstanceOffset;
}

int LiftoffAssembler::SlotSizeForType(ValueKind kind) {
  switch (kind) {
    case kS128:
      return element_size_bytes(kind);
    default:
      return kStackSlotSize;
  }
}

bool LiftoffAssembler::NeedsAlignment(ValueKind kind) {
  switch (kind) {
    case kS128:
      return true;
    default:
      // No alignment because all other types are kStackSlotSize.
      return false;
  }
}

void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
                                    RelocInfo::Mode rmode) {
  switch (value.type().kind()) {
    case kI32:
      TurboAssembler::li(reg.gp(), Operand(value.to_i32(), rmode));
      break;
    case kI64:
      TurboAssembler::li(reg.gp(), Operand(value.to_i64(), rmode));
      break;
    case kF32:
      TurboAssembler::LoadFPRImmediate(reg.fp(),
                                       value.to_f32_boxed().get_bits());
      break;
    case kF64:
      TurboAssembler::LoadFPRImmediate(reg.fp(),
                                       value.to_f64_boxed().get_bits());
      break;
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::LoadInstanceFromFrame(Register dst) {
  Ld(dst, liftoff::GetInstanceOperand());
}

void LiftoffAssembler::LoadFromInstance(Register dst, Register instance,
                                        int offset, int size) {
  DCHECK_LE(0, offset);
  MemOperand src{instance, offset};
  switch (size) {
    case 1:
      Lb(dst, MemOperand(src));
      break;
    case 4:
      Lw(dst, MemOperand(src));
      break;
    case 8:
      Ld(dst, MemOperand(src));
      break;
    default:
      UNIMPLEMENTED();
  }
}

void LiftoffAssembler::LoadTaggedPointerFromInstance(Register dst,
                                                     Register instance,
                                                     int offset) {
  DCHECK_LE(0, offset);
  LoadTaggedPointerField(dst, MemOperand{instance, offset});
}

void LiftoffAssembler::SpillInstance(Register instance) {
  Sd(instance, liftoff::GetInstanceOperand());
}

void LiftoffAssembler::ResetOSRTarget() {}

void LiftoffAssembler::FillInstanceInto(Register dst) {
  Ld(dst, liftoff::GetInstanceOperand());
}

void LiftoffAssembler::LoadTaggedPointer(Register dst, Register src_addr,
                                         Register offset_reg,
                                         int32_t offset_imm,
                                         LiftoffRegList pinned) {
  MemOperand src_op = liftoff::GetMemOp(this, src_addr, offset_reg, offset_imm);
  LoadTaggedPointerField(dst, src_op);
}

void LiftoffAssembler::LoadFullPointer(Register dst, Register src_addr,
                                       int32_t offset_imm) {
  MemOperand src_op = liftoff::GetMemOp(this, src_addr, no_reg, offset_imm);
  Ld(dst, src_op);
}

void LiftoffAssembler::StoreTaggedPointer(Register dst_addr,
                                          Register offset_reg,
                                          int32_t offset_imm,
                                          LiftoffRegister src,
                                          LiftoffRegList pinned,
                                          SkipWriteBarrier skip_write_barrier) {
  Register scratch = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();
  MemOperand dst_op = liftoff::GetMemOp(this, dst_addr, offset_reg, offset_imm);
  StoreTaggedField(src.gp(), dst_op);

  if (skip_write_barrier || FLAG_disable_write_barriers) return;

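  // Write-barrier slow path: skip the record-write stub if the destination
  // page does not track incoming pointers, if the stored value is a Smi, or
  // if the value's page does not track outgoing pointers; otherwise compute
  // the exact slot address and call the stub.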
  Label write_barrier;
  Label exit;
  CheckPageFlag(dst_addr, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, ne,
                &write_barrier);
  Branch(&exit);
  bind(&write_barrier);
  JumpIfSmi(src.gp(), &exit);
  CheckPageFlag(src.gp(), scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, eq, &exit);
  Add64(scratch, dst_op.rm(), dst_op.offset());
  CallRecordWriteStubSaveRegisters(
      dst_addr, scratch, RememberedSetAction::kEmit, SaveFPRegsMode::kSave,
      StubCallMode::kCallWasmRuntimeStub);
  bind(&exit);
}

void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
                            Register offset_reg, uintptr_t offset_imm,
                            LoadType type, LiftoffRegList pinned,
                            uint32_t* protected_load_pc, bool is_load_mem,
                            bool i64_offset) {
  MemOperand src_op = liftoff::GetMemOp(this, src_addr, offset_reg, offset_imm);

  if (protected_load_pc) *protected_load_pc = pc_offset();
  switch (type.value()) {
    case LoadType::kI32Load8U:
    case LoadType::kI64Load8U:
      Lbu(dst.gp(), src_op);
      break;
    case LoadType::kI32Load8S:
    case LoadType::kI64Load8S:
      Lb(dst.gp(), src_op);
      break;
    case LoadType::kI32Load16U:
    case LoadType::kI64Load16U:
      TurboAssembler::Ulhu(dst.gp(), src_op);
      break;
    case LoadType::kI32Load16S:
    case LoadType::kI64Load16S:
      TurboAssembler::Ulh(dst.gp(), src_op);
      break;
    case LoadType::kI64Load32U:
      TurboAssembler::Ulwu(dst.gp(), src_op);
      break;
    case LoadType::kI32Load:
    case LoadType::kI64Load32S:
      TurboAssembler::Ulw(dst.gp(), src_op);
      break;
    case LoadType::kI64Load:
      TurboAssembler::Uld(dst.gp(), src_op);
      break;
    case LoadType::kF32Load:
      TurboAssembler::ULoadFloat(dst.fp(), src_op, kScratchReg);
      break;
    case LoadType::kF64Load:
      TurboAssembler::ULoadDouble(dst.fp(), src_op, kScratchReg);
      break;
    case LoadType::kS128Load: {
      VU.set(kScratchReg, E8, m1);
      Register src_reg = src_op.offset() == 0 ? src_op.rm() : kScratchReg;
      if (src_op.offset() != 0) {
        TurboAssembler::Add64(src_reg, src_op.rm(), src_op.offset());
      }
      vl(dst.fp().toV(), src_reg, 0, E8);
      break;
    }
    default:
      UNREACHABLE();
  }

#if defined(V8_TARGET_BIG_ENDIAN)
  if (is_load_mem) {
    pinned.set(src_op.rm());
    liftoff::ChangeEndiannessLoad(this, dst, type, pinned);
  }
#endif
}

void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
                             uintptr_t offset_imm, LiftoffRegister src,
                             StoreType type, LiftoffRegList pinned,
                             uint32_t* protected_store_pc, bool is_store_mem) {
  MemOperand dst_op = liftoff::GetMemOp(this, dst_addr, offset_reg, offset_imm);

#if defined(V8_TARGET_BIG_ENDIAN)
  if (is_store_mem) {
    pinned.set(dst_op.rm());
    LiftoffRegister tmp = GetUnusedRegister(src.reg_class(), pinned);
    // Save original value.
    Move(tmp, src, type.value_type());

    src = tmp;
    pinned.set(tmp);
    liftoff::ChangeEndiannessStore(this, src, type, pinned);
  }
#endif

  if (protected_store_pc) *protected_store_pc = pc_offset();

  switch (type.value()) {
    case StoreType::kI32Store8:
    case StoreType::kI64Store8:
      Sb(src.gp(), dst_op);
      break;
    case StoreType::kI32Store16:
    case StoreType::kI64Store16:
      TurboAssembler::Ush(src.gp(), dst_op);
      break;
    case StoreType::kI32Store:
    case StoreType::kI64Store32:
      TurboAssembler::Usw(src.gp(), dst_op);
      break;
    case StoreType::kI64Store:
      TurboAssembler::Usd(src.gp(), dst_op);
      break;
    case StoreType::kF32Store:
      TurboAssembler::UStoreFloat(src.fp(), dst_op, kScratchReg);
      break;
    case StoreType::kF64Store:
      TurboAssembler::UStoreDouble(src.fp(), dst_op, kScratchReg);
      break;
    case StoreType::kS128Store: {
      VU.set(kScratchReg, E8, m1);
      Register dst_reg = dst_op.offset() == 0 ? dst_op.rm() : kScratchReg;
      if (dst_op.offset() != 0) {
        Add64(kScratchReg, dst_op.rm(), dst_op.offset());
      }
      vs(src.fp().toV(), dst_reg, 0, VSew::E8);
      break;
    }
    default:
      UNREACHABLE();
  }
}

namespace liftoff {
#define __ lasm->

inline Register CalculateActualAddress(LiftoffAssembler* lasm,
                                       Register addr_reg, Register offset_reg,
                                       uintptr_t offset_imm,
                                       Register result_reg) {
  DCHECK_NE(offset_reg, no_reg);
  DCHECK_NE(addr_reg, no_reg);
  __ Add64(result_reg, addr_reg, Operand(offset_reg));
  if (offset_imm != 0) {
    __ Add64(result_reg, result_reg, Operand(offset_imm));
  }
  return result_reg;
}

enum class Binop { kAdd, kSub, kAnd, kOr, kXor, kExchange };

inline void AtomicBinop(LiftoffAssembler* lasm, Register dst_addr,
                        Register offset_reg, uintptr_t offset_imm,
                        LiftoffRegister value, LiftoffRegister result,
                        StoreType type, Binop op) {
  LiftoffRegList pinned =
      LiftoffRegList::ForRegs(dst_addr, offset_reg, value, result);
  Register store_result = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();

  // Make sure that {result} is unique.
  Register result_reg = result.gp();
  if (result_reg == value.gp() || result_reg == dst_addr ||
      result_reg == offset_reg) {
    result_reg = __ GetUnusedRegister(kGpReg, pinned).gp();
  }

  UseScratchRegisterScope temps(lasm);
  Register actual_addr = liftoff::CalculateActualAddress(
      lasm, dst_addr, offset_reg, offset_imm, temps.Acquire());

  // Allocate an additional {temp} register to hold the result that should be
  // stored to memory. Note that {temp} and {store_result} are not allowed to be
  // the same register.
  Register temp = temps.Acquire();

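  // Classic LL/SC retry loop: load the old value (load-reserved for 32/64-bit
  // accesses), compute the new value into {temp}, then attempt the store
  // (store-conditional for 32/64-bit accesses). A failed store-conditional
  // leaves {store_result} non-zero and we retry. The sub-word (8/16-bit)
  // cases use plain loads/stores bracketed by fences instead of a
  // reservation.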
  Label retry;
  __ bind(&retry);
  switch (type.value()) {
    case StoreType::kI64Store8:
    case StoreType::kI32Store8:
      __ lbu(result_reg, actual_addr, 0);
      __ sync();
      break;
    case StoreType::kI64Store16:
    case StoreType::kI32Store16:
      __ lhu(result_reg, actual_addr, 0);
      __ sync();
      break;
    case StoreType::kI64Store32:
    case StoreType::kI32Store:
      __ lr_w(true, false, result_reg, actual_addr);
      break;
    case StoreType::kI64Store:
      __ lr_d(true, false, result_reg, actual_addr);
      break;
    default:
      UNREACHABLE();
  }

  switch (op) {
    case Binop::kAdd:
      __ add(temp, result_reg, value.gp());
      break;
    case Binop::kSub:
      __ sub(temp, result_reg, value.gp());
      break;
    case Binop::kAnd:
      __ and_(temp, result_reg, value.gp());
      break;
    case Binop::kOr:
      __ or_(temp, result_reg, value.gp());
      break;
    case Binop::kXor:
      __ xor_(temp, result_reg, value.gp());
      break;
    case Binop::kExchange:
      __ mv(temp, value.gp());
      break;
  }
  switch (type.value()) {
    case StoreType::kI64Store8:
    case StoreType::kI32Store8:
      __ sync();
      __ sb(temp, actual_addr, 0);
      __ sync();
      __ mv(store_result, zero_reg);
      break;
    case StoreType::kI64Store16:
    case StoreType::kI32Store16:
      __ sync();
      __ sh(temp, actual_addr, 0);
      __ sync();
      __ mv(store_result, zero_reg);
      break;
    case StoreType::kI64Store32:
    case StoreType::kI32Store:
      __ sc_w(false, true, store_result, actual_addr, temp);
      break;
    case StoreType::kI64Store:
      __ sc_d(false, true, store_result, actual_addr, temp);
      break;
    default:
      UNREACHABLE();
  }

  __ bnez(store_result, &retry);
  if (result_reg != result.gp()) {
    __ mv(result.gp(), result_reg);
  }
}

#undef __
}  // namespace liftoff

void LiftoffAssembler::AtomicLoad(LiftoffRegister dst, Register src_addr,
                                  Register offset_reg, uintptr_t offset_imm,
                                  LoadType type, LiftoffRegList pinned) {
  UseScratchRegisterScope temps(this);
  Register src_reg = liftoff::CalculateActualAddress(
      this, src_addr, offset_reg, offset_imm, temps.Acquire());
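  // Sequentially consistent load: map to "fence rw,rw; load; fence r,rw" as
  // recommended for RISC-V.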
  switch (type.value()) {
    case LoadType::kI32Load8U:
    case LoadType::kI64Load8U:
      fence(PSR | PSW, PSR | PSW);
      lbu(dst.gp(), src_reg, 0);
      fence(PSR, PSR | PSW);
      return;
    case LoadType::kI32Load16U:
    case LoadType::kI64Load16U:
      fence(PSR | PSW, PSR | PSW);
      lhu(dst.gp(), src_reg, 0);
      fence(PSR, PSR | PSW);
      return;
    case LoadType::kI32Load:
    case LoadType::kI64Load32U:
      fence(PSR | PSW, PSR | PSW);
      lw(dst.gp(), src_reg, 0);
      fence(PSR, PSR | PSW);
      return;
    case LoadType::kI64Load:
      fence(PSR | PSW, PSR | PSW);
      ld(dst.gp(), src_reg, 0);
      fence(PSR, PSR | PSW);
      return;
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::AtomicStore(Register dst_addr, Register offset_reg,
                                   uintptr_t offset_imm, LiftoffRegister src,
                                   StoreType type, LiftoffRegList pinned) {
  UseScratchRegisterScope temps(this);
  Register dst_reg = liftoff::CalculateActualAddress(
      this, dst_addr, offset_reg, offset_imm, temps.Acquire());
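  // Sequentially consistent store: map to "fence rw,w; store" as recommended
  // for RISC-V.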
  switch (type.value()) {
    case StoreType::kI64Store8:
    case StoreType::kI32Store8:
      fence(PSR | PSW, PSW);
      sb(src.gp(), dst_reg, 0);
      return;
    case StoreType::kI64Store16:
    case StoreType::kI32Store16:
      fence(PSR | PSW, PSW);
      sh(src.gp(), dst_reg, 0);
      return;
    case StoreType::kI64Store32:
    case StoreType::kI32Store:
      fence(PSR | PSW, PSW);
      sw(src.gp(), dst_reg, 0);
      return;
    case StoreType::kI64Store:
      fence(PSR | PSW, PSW);
      sd(src.gp(), dst_reg, 0);
      return;
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::AtomicAdd(Register dst_addr, Register offset_reg,
                                 uintptr_t offset_imm, LiftoffRegister value,
                                 LiftoffRegister result, StoreType type) {
  liftoff::AtomicBinop(this, dst_addr, offset_reg, offset_imm, value, result,
                       type, liftoff::Binop::kAdd);
}

void LiftoffAssembler::AtomicSub(Register dst_addr, Register offset_reg,
                                 uintptr_t offset_imm, LiftoffRegister value,
                                 LiftoffRegister result, StoreType type) {
  liftoff::AtomicBinop(this, dst_addr, offset_reg, offset_imm, value, result,
                       type, liftoff::Binop::kSub);
}

void LiftoffAssembler::AtomicAnd(Register dst_addr, Register offset_reg,
                                 uintptr_t offset_imm, LiftoffRegister value,
                                 LiftoffRegister result, StoreType type) {
  liftoff::AtomicBinop(this, dst_addr, offset_reg, offset_imm, value, result,
                       type, liftoff::Binop::kAnd);
}

void LiftoffAssembler::AtomicOr(Register dst_addr, Register offset_reg,
                                uintptr_t offset_imm, LiftoffRegister value,
                                LiftoffRegister result, StoreType type) {
  liftoff::AtomicBinop(this, dst_addr, offset_reg, offset_imm, value, result,
                       type, liftoff::Binop::kOr);
}

void LiftoffAssembler::AtomicXor(Register dst_addr, Register offset_reg,
                                 uintptr_t offset_imm, LiftoffRegister value,
                                 LiftoffRegister result, StoreType type) {
  liftoff::AtomicBinop(this, dst_addr, offset_reg, offset_imm, value, result,
                       type, liftoff::Binop::kXor);
}

void LiftoffAssembler::AtomicExchange(Register dst_addr, Register offset_reg,
                                      uintptr_t offset_imm,
                                      LiftoffRegister value,
                                      LiftoffRegister result, StoreType type) {
  liftoff::AtomicBinop(this, dst_addr, offset_reg, offset_imm, value, result,
                       type, liftoff::Binop::kExchange);
}

void LiftoffAssembler::AtomicCompareExchange(
    Register dst_addr, Register offset_reg, uintptr_t offset_imm,
    LiftoffRegister expected, LiftoffRegister new_value, LiftoffRegister result,
    StoreType type) {
  LiftoffRegList pinned =
      LiftoffRegList::ForRegs(dst_addr, offset_reg, expected, new_value);

  Register result_reg = result.gp();
  if (pinned.has(result)) {
    result_reg = GetUnusedRegister(kGpReg, pinned).gp();
  }

  UseScratchRegisterScope temps(this);

  Register actual_addr = liftoff::CalculateActualAddress(
      this, dst_addr, offset_reg, offset_imm, temps.Acquire());

  Register store_result = temps.Acquire();

  Label retry;
  Label done;
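  // Compare-exchange via an lr/sc retry loop with the aq and rl bits set:
  // load-reserve the current value, bail out to {done} if it differs from
  // {expected}, otherwise store-conditionally write {new_value} and retry if
  // the store-conditional fails. The sub-word cases fall back to plain
  // loads/stores bracketed by fences.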
  bind(&retry);
  switch (type.value()) {
    case StoreType::kI64Store8:
    case StoreType::kI32Store8:
      lbu(result_reg, actual_addr, 0);
      sync();
      Branch(&done, ne, result_reg, Operand(expected.gp()));
      sync();
      sb(new_value.gp(), actual_addr, 0);
      sync();
      mv(store_result, zero_reg);
      break;
    case StoreType::kI64Store16:
    case StoreType::kI32Store16:
      lhu(result_reg, actual_addr, 0);
      sync();
      Branch(&done, ne, result_reg, Operand(expected.gp()));
      sync();
      sh(new_value.gp(), actual_addr, 0);
      sync();
      mv(store_result, zero_reg);
      break;
    case StoreType::kI64Store32:
    case StoreType::kI32Store:
      lr_w(true, true, result_reg, actual_addr);
      Branch(&done, ne, result_reg, Operand(expected.gp()));
      sc_w(true, true, store_result, actual_addr, new_value.gp());
      break;
    case StoreType::kI64Store:
      lr_d(true, true, result_reg, actual_addr);
      Branch(&done, ne, result_reg, Operand(expected.gp()));
      sc_d(true, true, store_result, actual_addr, new_value.gp());
      break;
    default:
      UNREACHABLE();
  }
  bnez(store_result, &retry);
  bind(&done);

  if (result_reg != result.gp()) {
    mv(result.gp(), result_reg);
  }
}

void LiftoffAssembler::AtomicFence() { sync(); }

void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
                                           uint32_t caller_slot_idx,
                                           ValueKind kind) {
  MemOperand src(fp, kSystemPointerSize * (caller_slot_idx + 1));
  liftoff::Load(this, dst, src, kind);
}

void LiftoffAssembler::StoreCallerFrameSlot(LiftoffRegister src,
                                            uint32_t caller_slot_idx,
                                            ValueKind kind) {
  int32_t offset = kSystemPointerSize * (caller_slot_idx + 1);
  liftoff::Store(this, fp, offset, src, kind);
}

void LiftoffAssembler::LoadReturnStackSlot(LiftoffRegister dst, int offset,
                                           ValueKind kind) {
  liftoff::Load(this, dst, MemOperand(sp, offset), kind);
}

void LiftoffAssembler::MoveStackValue(uint32_t dst_offset, uint32_t src_offset,
                                      ValueKind kind) {
  DCHECK_NE(dst_offset, src_offset);
  LiftoffRegister reg = GetUnusedRegister(reg_class_for(kind), {});
  Fill(reg, src_offset, kind);
  Spill(dst_offset, reg, kind);
}

void LiftoffAssembler::Move(Register dst, Register src, ValueKind kind) {
  DCHECK_NE(dst, src);
  // TODO(ksreten): Handle different sizes here.
  TurboAssembler::Move(dst, src);
}

void LiftoffAssembler::Move(DoubleRegister dst, DoubleRegister src,
                            ValueKind kind) {
  DCHECK_NE(dst, src);
  if (kind != kS128) {
    TurboAssembler::Move(dst, src);
  } else {
    TurboAssembler::vmv_vv(dst.toV(), src.toV());
  }
}

void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueKind kind) {
  RecordUsedSpillOffset(offset);
  MemOperand dst = liftoff::GetStackSlot(offset);
  switch (kind) {
    case kI32:
      Sw(reg.gp(), dst);
      break;
    case kI64:
    case kRef:
    case kOptRef:
    case kRtt:
    case kRttWithDepth:
      Sd(reg.gp(), dst);
      break;
    case kF32:
      StoreFloat(reg.fp(), dst);
      break;
    case kF64:
      TurboAssembler::StoreDouble(reg.fp(), dst);
      break;
    case kS128: {
      VU.set(kScratchReg, E8, m1);
      Register dst_reg = dst.offset() == 0 ? dst.rm() : kScratchReg;
      if (dst.offset() != 0) {
        Add64(kScratchReg, dst.rm(), dst.offset());
      }
      vs(reg.fp().toV(), dst_reg, 0, VSew::E8);
      break;
    }
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::Spill(int offset, WasmValue value) {
  RecordUsedSpillOffset(offset);
  MemOperand dst = liftoff::GetStackSlot(offset);
  switch (value.type().kind()) {
    case kI32: {
      LiftoffRegister tmp = GetUnusedRegister(kGpReg, {});
      TurboAssembler::li(tmp.gp(), Operand(value.to_i32()));
      Sw(tmp.gp(), dst);
      break;
    }
    case kI64:
    case kRef:
    case kOptRef: {
      LiftoffRegister tmp = GetUnusedRegister(kGpReg, {});
      TurboAssembler::li(tmp.gp(), value.to_i64());
      Sd(tmp.gp(), dst);
      break;
    }
    default:
      // kWasmF32 and kWasmF64 are unreachable, since those
      // constants are not tracked.
      UNREACHABLE();
  }
}

void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueKind kind) {
  MemOperand src = liftoff::GetStackSlot(offset);
  switch (kind) {
    case kI32:
      Lw(reg.gp(), src);
      break;
    case kI64:
    case kRef:
    case kOptRef:
      Ld(reg.gp(), src);
      break;
    case kF32:
      LoadFloat(reg.fp(), src);
      break;
    case kF64:
      TurboAssembler::LoadDouble(reg.fp(), src);
      break;
    case kS128: {
      VU.set(kScratchReg, E8, m1);
      Register src_reg = src.offset() == 0 ? src.rm() : kScratchReg;
      if (src.offset() != 0) {
        TurboAssembler::Add64(src_reg, src.rm(), src.offset());
      }
      vl(reg.fp().toV(), src_reg, 0, E8);
      break;
    }
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::FillI64Half(Register, int offset, RegPairHalf) {
  UNREACHABLE();
}

void LiftoffAssembler::FillStackSlotsWithZero(int start, int size) {
  DCHECK_LT(0, size);
  RecordUsedSpillOffset(start + size);
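  // For example, size == 20 zeroes two 8-byte slots with Sd and the final
  // 4 bytes with Sw via the straight-line path below.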

  if (size <= 12 * kStackSlotSize) {
    // Special straight-line code for up to 12 slots. Generates one
    // instruction per slot (<= 12 instructions total).
    uint32_t remainder = size;
    for (; remainder >= kStackSlotSize; remainder -= kStackSlotSize) {
      Sd(zero_reg, liftoff::GetStackSlot(start + remainder));
    }
    DCHECK(remainder == 4 || remainder == 0);
    if (remainder) {
      Sw(zero_reg, liftoff::GetStackSlot(start + remainder));
    }
  } else {
    // General case for bigger counts (12 instructions).
    // Use a0 for start address (inclusive), a1 for end address (exclusive).
    Push(a1, a0);
    Add64(a0, fp, Operand(-start - size));
    Add64(a1, fp, Operand(-start));

    Label loop;
    bind(&loop);
    Sd(zero_reg, MemOperand(a0));
    addi(a0, a0, kSystemPointerSize);
    BranchShort(&loop, ne, a0, Operand(a1));

    Pop(a1, a0);
  }
}

void LiftoffAssembler::emit_i64_clz(LiftoffRegister dst, LiftoffRegister src) {
  TurboAssembler::Clz64(dst.gp(), src.gp());
}

void LiftoffAssembler::emit_i64_ctz(LiftoffRegister dst, LiftoffRegister src) {
  TurboAssembler::Ctz64(dst.gp(), src.gp());
}

bool LiftoffAssembler::emit_i64_popcnt(LiftoffRegister dst,
                                       LiftoffRegister src) {
  TurboAssembler::Popcnt64(dst.gp(), src.gp(), kScratchReg);
  return true;
}

void LiftoffAssembler::emit_i32_mul(Register dst, Register lhs, Register rhs) {
  TurboAssembler::Mul32(dst, lhs, rhs);
}

void LiftoffAssembler::emit_i32_divs(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero,
                                     Label* trap_div_unrepresentable) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));

  // Check if lhs == kMinInt and rhs == -1, since this case is unrepresentable.
  TurboAssembler::CompareI(kScratchReg, lhs, Operand(kMinInt), ne);
  TurboAssembler::CompareI(kScratchReg2, rhs, Operand(-1), ne);
  add(kScratchReg, kScratchReg, kScratchReg2);
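  // The sum is zero iff both compares produced 0, i.e. lhs == kMinInt and
  // rhs == -1.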
1131   TurboAssembler::Branch(trap_div_unrepresentable, eq, kScratchReg,
1132                          Operand(zero_reg));
1133 
1134   TurboAssembler::Div32(dst, lhs, rhs);
1135 }
1136 
emit_i32_divu(Register dst,Register lhs,Register rhs,Label * trap_div_by_zero)1137 void LiftoffAssembler::emit_i32_divu(Register dst, Register lhs, Register rhs,
1138                                      Label* trap_div_by_zero) {
1139   TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));
1140   TurboAssembler::Divu32(dst, lhs, rhs);
1141 }
1142 
emit_i32_rems(Register dst,Register lhs,Register rhs,Label * trap_div_by_zero)1143 void LiftoffAssembler::emit_i32_rems(Register dst, Register lhs, Register rhs,
1144                                      Label* trap_div_by_zero) {
1145   TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));
1146   TurboAssembler::Mod32(dst, lhs, rhs);
1147 }
1148 
emit_i32_remu(Register dst,Register lhs,Register rhs,Label * trap_div_by_zero)1149 void LiftoffAssembler::emit_i32_remu(Register dst, Register lhs, Register rhs,
1150                                      Label* trap_div_by_zero) {
1151   TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));
1152   TurboAssembler::Modu32(dst, lhs, rhs);
1153 }
1154 
1155 #define I32_BINOP(name, instruction)                                 \
1156   void LiftoffAssembler::emit_i32_##name(Register dst, Register lhs, \
1157                                          Register rhs) {             \
1158     instruction(dst, lhs, rhs);                                      \
1159   }
1160 
1161 // clang-format off
I32_BINOP(add,addw)1162 I32_BINOP(add, addw)
1163 I32_BINOP(sub, subw)
1164 I32_BINOP(and, and_)
1165 I32_BINOP(or, or_)
1166 I32_BINOP(xor, xor_)
1167 // clang-format on
1168 
1169 #undef I32_BINOP
1170 
1171 #define I32_BINOP_I(name, instruction)                                  \
1172   void LiftoffAssembler::emit_i32_##name##i(Register dst, Register lhs, \
1173                                             int32_t imm) {              \
1174     instruction(dst, lhs, Operand(imm));                                \
1175   }
1176 
1177 // clang-format off
1178 I32_BINOP_I(add, Add32)
1179 I32_BINOP_I(sub, Sub32)
1180 I32_BINOP_I(and, And)
1181 I32_BINOP_I(or, Or)
1182 I32_BINOP_I(xor, Xor)
1183 // clang-format on
1184 
1185 #undef I32_BINOP_I
1186 
1187 void LiftoffAssembler::emit_i32_clz(Register dst, Register src) {
1188   TurboAssembler::Clz32(dst, src);
1189 }
1190 
emit_i32_ctz(Register dst,Register src)1191 void LiftoffAssembler::emit_i32_ctz(Register dst, Register src) {
1192   TurboAssembler::Ctz32(dst, src);
1193 }
1194 
emit_i32_popcnt(Register dst,Register src)1195 bool LiftoffAssembler::emit_i32_popcnt(Register dst, Register src) {
1196   TurboAssembler::Popcnt32(dst, src, kScratchReg);
1197   return true;
1198 }
1199 
1200 #define I32_SHIFTOP(name, instruction)                               \
1201   void LiftoffAssembler::emit_i32_##name(Register dst, Register src, \
1202                                          Register amount) {          \
1203     instruction(dst, src, amount);                                   \
1204   }
1205 #define I32_SHIFTOP_I(name, instruction)                                \
1206   void LiftoffAssembler::emit_i32_##name##i(Register dst, Register src, \
1207                                             int amount) {               \
1208     instruction(dst, src, amount & 31);                                 \
1209   }
1210 
I32_SHIFTOP(shl,sllw)1211 I32_SHIFTOP(shl, sllw)
1212 I32_SHIFTOP(sar, sraw)
1213 I32_SHIFTOP(shr, srlw)
1214 
1215 I32_SHIFTOP_I(shl, slliw)
1216 I32_SHIFTOP_I(sar, sraiw)
1217 I32_SHIFTOP_I(shr, srliw)
1218 
1219 #undef I32_SHIFTOP
1220 #undef I32_SHIFTOP_I
1221 
1222 void LiftoffAssembler::emit_i64_mul(LiftoffRegister dst, LiftoffRegister lhs,
1223                                     LiftoffRegister rhs) {
1224   TurboAssembler::Mul64(dst.gp(), lhs.gp(), rhs.gp());
1225 }
1226 
emit_i64_divs(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs,Label * trap_div_by_zero,Label * trap_div_unrepresentable)1227 bool LiftoffAssembler::emit_i64_divs(LiftoffRegister dst, LiftoffRegister lhs,
1228                                      LiftoffRegister rhs,
1229                                      Label* trap_div_by_zero,
1230                                      Label* trap_div_unrepresentable) {
1231   TurboAssembler::Branch(trap_div_by_zero, eq, rhs.gp(), Operand(zero_reg));
1232 
1233   // Check if lhs == MinInt64 and rhs == -1, since this case is unrepresentable.
1234   TurboAssembler::CompareI(kScratchReg, lhs.gp(),
1235                            Operand(std::numeric_limits<int64_t>::min()), ne);
1236   TurboAssembler::CompareI(kScratchReg2, rhs.gp(), Operand(-1), ne);
1237   add(kScratchReg, kScratchReg, kScratchReg2);
1238   TurboAssembler::Branch(trap_div_unrepresentable, eq, kScratchReg,
1239                          Operand(zero_reg));
1240 
1241   TurboAssembler::Div64(dst.gp(), lhs.gp(), rhs.gp());
1242   return true;
1243 }
1244 
emit_i64_divu(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs,Label * trap_div_by_zero)1245 bool LiftoffAssembler::emit_i64_divu(LiftoffRegister dst, LiftoffRegister lhs,
1246                                      LiftoffRegister rhs,
1247                                      Label* trap_div_by_zero) {
1248   TurboAssembler::Branch(trap_div_by_zero, eq, rhs.gp(), Operand(zero_reg));
1249   TurboAssembler::Divu64(dst.gp(), lhs.gp(), rhs.gp());
1250   return true;
1251 }
1252 
emit_i64_rems(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs,Label * trap_div_by_zero)1253 bool LiftoffAssembler::emit_i64_rems(LiftoffRegister dst, LiftoffRegister lhs,
1254                                      LiftoffRegister rhs,
1255                                      Label* trap_div_by_zero) {
1256   TurboAssembler::Branch(trap_div_by_zero, eq, rhs.gp(), Operand(zero_reg));
1257   TurboAssembler::Mod64(dst.gp(), lhs.gp(), rhs.gp());
1258   return true;
1259 }
1260 
emit_i64_remu(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs,Label * trap_div_by_zero)1261 bool LiftoffAssembler::emit_i64_remu(LiftoffRegister dst, LiftoffRegister lhs,
1262                                      LiftoffRegister rhs,
1263                                      Label* trap_div_by_zero) {
1264   TurboAssembler::Branch(trap_div_by_zero, eq, rhs.gp(), Operand(zero_reg));
1265   TurboAssembler::Modu64(dst.gp(), lhs.gp(), rhs.gp());
1266   return true;
1267 }
1268 
1269 #define I64_BINOP(name, instruction)                                   \
1270   void LiftoffAssembler::emit_i64_##name(                              \
1271       LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
1272     instruction(dst.gp(), lhs.gp(), rhs.gp());                         \
1273   }
1274 
1275 // clang-format off
I64_BINOP(add,add)1276 I64_BINOP(add, add)
1277 I64_BINOP(sub, sub)
1278 I64_BINOP(and, and_)
1279 I64_BINOP(or, or_)
1280 I64_BINOP(xor, xor_)
1281 // clang-format on
1282 
1283 #undef I64_BINOP
1284 
1285 #define I64_BINOP_I(name, instruction)                         \
1286   void LiftoffAssembler::emit_i64_##name##i(                   \
1287       LiftoffRegister dst, LiftoffRegister lhs, int32_t imm) { \
1288     instruction(dst.gp(), lhs.gp(), Operand(imm));             \
1289   }
1290 
1291 // clang-format off
1292 I64_BINOP_I(and, And)
1293 I64_BINOP_I(or, Or)
1294 I64_BINOP_I(xor, Xor)
1295 // clang-format on
1296 
1297 #undef I64_BINOP_I
1298 
1299 #define I64_SHIFTOP(name, instruction)                             \
1300   void LiftoffAssembler::emit_i64_##name(                          \
1301       LiftoffRegister dst, LiftoffRegister src, Register amount) { \
1302     instruction(dst.gp(), src.gp(), amount);                       \
1303   }
1304 #define I64_SHIFTOP_I(name, instruction)                                       \
1305   void LiftoffAssembler::emit_i64_##name##i(LiftoffRegister dst,               \
1306                                             LiftoffRegister src, int amount) { \
1307     DCHECK(is_uint6(amount));                                                  \
1308     instruction(dst.gp(), src.gp(), amount);                                   \
1309   }
1310 
1311 I64_SHIFTOP(shl, sll)
1312 I64_SHIFTOP(sar, sra)
1313 I64_SHIFTOP(shr, srl)
1314 
1315 I64_SHIFTOP_I(shl, slli)
1316 I64_SHIFTOP_I(sar, srai)
1317 I64_SHIFTOP_I(shr, srli)
1318 
1319 #undef I64_SHIFTOP
1320 #undef I64_SHIFTOP_I
1321 
1322 void LiftoffAssembler::emit_i64_addi(LiftoffRegister dst, LiftoffRegister lhs,
1323                                      int64_t imm) {
1324   TurboAssembler::Add64(dst.gp(), lhs.gp(), Operand(imm));
1325 }
emit_u32_to_intptr(Register dst,Register src)1326 void LiftoffAssembler::emit_u32_to_intptr(Register dst, Register src) {
1327   addw(dst, src, zero_reg);
1328 }
1329 
emit_f32_neg(DoubleRegister dst,DoubleRegister src)1330 void LiftoffAssembler::emit_f32_neg(DoubleRegister dst, DoubleRegister src) {
1331   TurboAssembler::Neg_s(dst, src);
1332 }
1333 
emit_f64_neg(DoubleRegister dst,DoubleRegister src)1334 void LiftoffAssembler::emit_f64_neg(DoubleRegister dst, DoubleRegister src) {
1335   TurboAssembler::Neg_d(dst, src);
1336 }
1337 
emit_f32_min(DoubleRegister dst,DoubleRegister lhs,DoubleRegister rhs)1338 void LiftoffAssembler::emit_f32_min(DoubleRegister dst, DoubleRegister lhs,
1339                                     DoubleRegister rhs) {
1340   TurboAssembler::Float32Min(dst, lhs, rhs);
1341 }
1342 
emit_f32_max(DoubleRegister dst,DoubleRegister lhs,DoubleRegister rhs)1343 void LiftoffAssembler::emit_f32_max(DoubleRegister dst, DoubleRegister lhs,
1344                                     DoubleRegister rhs) {
1345   TurboAssembler::Float32Max(dst, lhs, rhs);
1346 }
1347 
void LiftoffAssembler::emit_f32_copysign(DoubleRegister dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  fsgnj_s(dst, lhs, rhs);
}

void LiftoffAssembler::emit_f64_min(DoubleRegister dst, DoubleRegister lhs,
                                    DoubleRegister rhs) {
  TurboAssembler::Float64Min(dst, lhs, rhs);
}

void LiftoffAssembler::emit_f64_max(DoubleRegister dst, DoubleRegister lhs,
                                    DoubleRegister rhs) {
  TurboAssembler::Float64Max(dst, lhs, rhs);
}

void LiftoffAssembler::emit_f64_copysign(DoubleRegister dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  fsgnj_d(dst, lhs, rhs);
}

#define FP_BINOP(name, instruction)                                          \
  void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister lhs, \
                                     DoubleRegister rhs) {                   \
    instruction(dst, lhs, rhs);                                              \
  }
#define FP_UNOP(name, instruction)                                             \
  void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister src) { \
    instruction(dst, src);                                                     \
  }
#define FP_UNOP_RETURN_TRUE(name, instruction)                                 \
  bool LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister src) { \
    instruction(dst, src, kScratchDoubleReg);                                  \
    return true;                                                               \
  }

FP_BINOP(f32_add, fadd_s)
FP_BINOP(f32_sub, fsub_s)
FP_BINOP(f32_mul, fmul_s)
FP_BINOP(f32_div, fdiv_s)
FP_UNOP(f32_abs, fabs_s)
FP_UNOP_RETURN_TRUE(f32_ceil, Ceil_s_s)
FP_UNOP_RETURN_TRUE(f32_floor, Floor_s_s)
FP_UNOP_RETURN_TRUE(f32_trunc, Trunc_s_s)
FP_UNOP_RETURN_TRUE(f32_nearest_int, Round_s_s)
FP_UNOP(f32_sqrt, fsqrt_s)
FP_BINOP(f64_add, fadd_d)
FP_BINOP(f64_sub, fsub_d)
FP_BINOP(f64_mul, fmul_d)
FP_BINOP(f64_div, fdiv_d)
FP_UNOP(f64_abs, fabs_d)
FP_UNOP_RETURN_TRUE(f64_ceil, Ceil_d_d)
FP_UNOP_RETURN_TRUE(f64_floor, Floor_d_d)
FP_UNOP_RETURN_TRUE(f64_trunc, Trunc_d_d)
FP_UNOP_RETURN_TRUE(f64_nearest_int, Round_d_d)
FP_UNOP(f64_sqrt, fsqrt_d)

#undef FP_BINOP
#undef FP_UNOP
#undef FP_UNOP_RETURN_TRUE

bool LiftoffAssembler::emit_type_conversion(WasmOpcode opcode,
                                            LiftoffRegister dst,
                                            LiftoffRegister src, Label* trap) {
  switch (opcode) {
    case kExprI32ConvertI64:
      // i32.wrap_i64 keeps the low 32 bits of the I64 value. Sign-extending
      // the low word implements this, since 32-bit values are held
      // sign-extended in 64-bit registers on RV64.
      TurboAssembler::SignExtendWord(dst.gp(), src.gp());
      return true;
    case kExprI32SConvertF32:
    case kExprI32UConvertF32:
    case kExprI32SConvertF64:
    case kExprI32UConvertF64:
    case kExprI64SConvertF32:
    case kExprI64UConvertF32:
    case kExprI64SConvertF64:
    case kExprI64UConvertF64:
    case kExprF32ConvertF64: {
      // Real conversion. If src is out of bounds for the target integer
      // type, kScratchReg is set to 0.
      switch (opcode) {
        case kExprI32SConvertF32:
          Trunc_w_s(dst.gp(), src.fp(), kScratchReg);
          break;
        case kExprI32UConvertF32:
          Trunc_uw_s(dst.gp(), src.fp(), kScratchReg);
          break;
        case kExprI32SConvertF64:
          Trunc_w_d(dst.gp(), src.fp(), kScratchReg);
          break;
        case kExprI32UConvertF64:
          Trunc_uw_d(dst.gp(), src.fp(), kScratchReg);
          break;
        case kExprI64SConvertF32:
          Trunc_l_s(dst.gp(), src.fp(), kScratchReg);
          break;
        case kExprI64UConvertF32:
          Trunc_ul_s(dst.gp(), src.fp(), kScratchReg);
          break;
        case kExprI64SConvertF64:
          Trunc_l_d(dst.gp(), src.fp(), kScratchReg);
          break;
        case kExprI64UConvertF64:
          Trunc_ul_d(dst.gp(), src.fp(), kScratchReg);
          break;
        case kExprF32ConvertF64:
          fcvt_s_d(dst.fp(), src.fp());
          break;
        default:
          UNREACHABLE();
      }

      // Trap if the conversion failed (kScratchReg == 0).
      if (trap != nullptr) {
        TurboAssembler::Branch(trap, eq, kScratchReg, Operand(zero_reg));
      }

      return true;
    }
    case kExprI32ReinterpretF32:
      TurboAssembler::ExtractLowWordFromF64(dst.gp(), src.fp());
      return true;
    case kExprI64SConvertI32:
      TurboAssembler::SignExtendWord(dst.gp(), src.gp());
      return true;
    case kExprI64UConvertI32:
      TurboAssembler::ZeroExtendWord(dst.gp(), src.gp());
      return true;
    case kExprI64ReinterpretF64:
      fmv_x_d(dst.gp(), src.fp());
      return true;
    case kExprF32SConvertI32: {
      TurboAssembler::Cvt_s_w(dst.fp(), src.gp());
      return true;
    }
    case kExprF32UConvertI32:
      TurboAssembler::Cvt_s_uw(dst.fp(), src.gp());
      return true;
    case kExprF32ReinterpretI32:
      fmv_w_x(dst.fp(), src.gp());
      return true;
    case kExprF64SConvertI32: {
      TurboAssembler::Cvt_d_w(dst.fp(), src.gp());
      return true;
    }
    case kExprF64UConvertI32:
      TurboAssembler::Cvt_d_uw(dst.fp(), src.gp());
      return true;
    case kExprF64ConvertF32:
      fcvt_d_s(dst.fp(), src.fp());
      return true;
    case kExprF64ReinterpretI64:
      fmv_d_x(dst.fp(), src.gp());
      return true;
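    // Saturating conversions. RISC-V fcvt with RTZ rounding already clamps
    // out-of-range inputs to the integer min/max, but converts NaN to the
    // largest integer; Clear_if_nan_{s,d} then zeroes the result for NaN
    // inputs (as its name suggests), matching Wasm's trunc_sat semantics.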
    case kExprI32SConvertSatF32: {
      fcvt_w_s(dst.gp(), src.fp(), RTZ);
      Clear_if_nan_s(dst.gp(), src.fp());
      return true;
    }
    case kExprI32UConvertSatF32: {
      fcvt_wu_s(dst.gp(), src.fp(), RTZ);
      Clear_if_nan_s(dst.gp(), src.fp());
      return true;
    }
    case kExprI32SConvertSatF64: {
      fcvt_w_d(dst.gp(), src.fp(), RTZ);
      Clear_if_nan_d(dst.gp(), src.fp());
      return true;
    }
    case kExprI32UConvertSatF64: {
      fcvt_wu_d(dst.gp(), src.fp(), RTZ);
      Clear_if_nan_d(dst.gp(), src.fp());
      return true;
    }
    case kExprI64SConvertSatF32: {
      fcvt_l_s(dst.gp(), src.fp(), RTZ);
      Clear_if_nan_s(dst.gp(), src.fp());
      return true;
    }
    case kExprI64UConvertSatF32: {
      fcvt_lu_s(dst.gp(), src.fp(), RTZ);
      Clear_if_nan_s(dst.gp(), src.fp());
      return true;
    }
    case kExprI64SConvertSatF64: {
      fcvt_l_d(dst.gp(), src.fp(), RTZ);
      Clear_if_nan_d(dst.gp(), src.fp());
      return true;
    }
    case kExprI64UConvertSatF64: {
      fcvt_lu_d(dst.gp(), src.fp(), RTZ);
      Clear_if_nan_d(dst.gp(), src.fp());
      return true;
    }
    default:
      return false;
  }
}

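// Sign-extension helpers: shifting left by (width - N) and then
// arithmetic-shifting right by the same amount replicates bit N-1 into all
// upper bits.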
void LiftoffAssembler::emit_i32_signextend_i8(Register dst, Register src) {
  slliw(dst, src, 32 - 8);
  sraiw(dst, dst, 32 - 8);
}

void LiftoffAssembler::emit_i32_signextend_i16(Register dst, Register src) {
  slliw(dst, src, 32 - 16);
  sraiw(dst, dst, 32 - 16);
}

void LiftoffAssembler::emit_i64_signextend_i8(LiftoffRegister dst,
                                              LiftoffRegister src) {
  slli(dst.gp(), src.gp(), 64 - 8);
  srai(dst.gp(), dst.gp(), 64 - 8);
}

void LiftoffAssembler::emit_i64_signextend_i16(LiftoffRegister dst,
                                               LiftoffRegister src) {
  slli(dst.gp(), src.gp(), 64 - 16);
  srai(dst.gp(), dst.gp(), 64 - 16);
}

void LiftoffAssembler::emit_i64_signextend_i32(LiftoffRegister dst,
                                               LiftoffRegister src) {
  slli(dst.gp(), src.gp(), 64 - 32);
  srai(dst.gp(), dst.gp(), 64 - 32);
}

void LiftoffAssembler::emit_jump(Label* label) {
  TurboAssembler::Branch(label);
}

void LiftoffAssembler::emit_jump(Register target) {
  TurboAssembler::Jump(target);
}

void LiftoffAssembler::emit_cond_jump(LiftoffCondition liftoff_cond,
                                      Label* label, ValueKind kind,
                                      Register lhs, Register rhs) {
  Condition cond = liftoff::ToCondition(liftoff_cond);
  if (rhs == no_reg) {
    DCHECK(kind == kI32 || kind == kI64);
    TurboAssembler::Branch(label, cond, lhs, Operand(zero_reg));
  } else {
    DCHECK((kind == kI32 || kind == kI64) ||
           (is_reference(kind) &&
            (liftoff_cond == kEqual || liftoff_cond == kUnequal)));
    TurboAssembler::Branch(label, cond, lhs, Operand(rhs));
  }
}

void LiftoffAssembler::emit_i32_cond_jumpi(LiftoffCondition liftoff_cond,
                                           Label* label, Register lhs,
                                           int32_t imm) {
  Condition cond = liftoff::ToCondition(liftoff_cond);
  TurboAssembler::Branch(label, cond, lhs, Operand(imm));
}

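// eqz: an unsigned compare against 1 sets dst exactly when src == 0.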
void LiftoffAssembler::emit_i32_eqz(Register dst, Register src) {
  TurboAssembler::Sltu(dst, src, 1);
}

void LiftoffAssembler::emit_i32_set_cond(LiftoffCondition liftoff_cond,
                                         Register dst, Register lhs,
                                         Register rhs) {
  Condition cond = liftoff::ToCondition(liftoff_cond);
  TurboAssembler::CompareI(dst, lhs, Operand(rhs), cond);
}

void LiftoffAssembler::emit_i64_eqz(Register dst, LiftoffRegister src) {
  TurboAssembler::Sltu(dst, src.gp(), 1);
}

void LiftoffAssembler::emit_i64_set_cond(LiftoffCondition liftoff_cond,
                                         Register dst, LiftoffRegister lhs,
                                         LiftoffRegister rhs) {
  Condition cond = liftoff::ToCondition(liftoff_cond);
  TurboAssembler::CompareI(dst, lhs.gp(), Operand(rhs.gp()), cond);
}

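// Liftoff encodes FP comparisons with the unsigned condition variants, so
// only those (plus equal/unequal) need to be mapped; any other condition
// is unreachable here.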
static FPUCondition ConditionToConditionCmpFPU(LiftoffCondition condition) {
  switch (condition) {
    case kEqual:
      return EQ;
    case kUnequal:
      return NE;
    case kUnsignedLessThan:
      return LT;
    case kUnsignedGreaterEqual:
      return GE;
    case kUnsignedLessEqual:
      return LE;
    case kUnsignedGreaterThan:
      return GT;
    default:
      break;
  }
  UNREACHABLE();
}

void LiftoffAssembler::emit_f32_set_cond(LiftoffCondition liftoff_cond,
                                         Register dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  FPUCondition fcond = ConditionToConditionCmpFPU(liftoff_cond);
  TurboAssembler::CompareF32(dst, fcond, lhs, rhs);
}

void LiftoffAssembler::emit_f64_set_cond(LiftoffCondition liftoff_cond,
                                         Register dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  FPUCondition fcond = ConditionToConditionCmpFPU(liftoff_cond);
  TurboAssembler::CompareF64(dst, fcond, lhs, rhs);
}

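// Returning false tells Liftoff that no select instruction sequence is
// available on this target, so it falls back to an explicit branch.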
bool LiftoffAssembler::emit_select(LiftoffRegister dst, Register condition,
                                   LiftoffRegister true_value,
                                   LiftoffRegister false_value,
                                   ValueKind kind) {
  return false;
}

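// Smi values have their tag bits clear, so masking with kSmiTagMask yields
// zero exactly for Smis.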
void LiftoffAssembler::emit_smi_check(Register obj, Label* target,
                                      SmiCheckMode mode) {
  UseScratchRegisterScope temps(this);
  Register scratch = temps.Acquire();
  And(scratch, obj, Operand(kSmiTagMask));
  Condition condition = mode == kJumpOnSmi ? eq : ne;
  Branch(target, condition, scratch, Operand(zero_reg));
}

void LiftoffAssembler::LoadTransform(LiftoffRegister dst, Register src_addr,
                                     Register offset_reg, uintptr_t offset_imm,
                                     LoadType type,
                                     LoadTransformationKind transform,
                                     uint32_t* protected_load_pc) {
  bailout(kSimd, "load extend and load splat unimplemented");
}

void LiftoffAssembler::LoadLane(LiftoffRegister dst, LiftoffRegister src,
                                Register addr, Register offset_reg,
                                uintptr_t offset_imm, LoadType type,
                                uint8_t laneidx, uint32_t* protected_load_pc) {
  bailout(kSimd, "loadlane");
}

void LiftoffAssembler::StoreLane(Register dst, Register offset,
                                 uintptr_t offset_imm, LiftoffRegister src,
                                 StoreType type, uint8_t lane,
                                 uint32_t* protected_store_pc) {
  bailout(kSimd, "StoreLane");
}

void LiftoffAssembler::emit_i8x16_shuffle(LiftoffRegister dst,
                                          LiftoffRegister lhs,
                                          LiftoffRegister rhs,
                                          const uint8_t shuffle[16],
                                          bool is_swizzle) {
  VRegister dst_v = dst.fp().toV();
  VRegister lhs_v = lhs.fp().toV();
  VRegister rhs_v = rhs.fp().toV();

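  // Build the 16 shuffle indices in kSimd128ScratchReg (SEW=64): splatting 1
  // into v0 sets mask bit 0, so the first vmerge writes imm1 into lane 0;
  // shifting v0 left by one moves the mask bit to lane 1 for imm2.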
  uint64_t imm1 = *(reinterpret_cast<const uint64_t*>(shuffle));
  uint64_t imm2 = *((reinterpret_cast<const uint64_t*>(shuffle)) + 1);
  VU.set(kScratchReg, VSew::E64, Vlmul::m1);
  li(kScratchReg, 1);
  vmv_vx(v0, kScratchReg);
  li(kScratchReg, imm1);
  vmerge_vx(kSimd128ScratchReg, kScratchReg, kSimd128ScratchReg);
  li(kScratchReg, imm2);
  vsll_vi(v0, v0, 1);
  vmerge_vx(kSimd128ScratchReg, kScratchReg, kSimd128ScratchReg);

  VU.set(kScratchReg, E8, m1);
  VRegister temp =
      GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(lhs, rhs)).fp().toV();
  if (dst_v == lhs_v) {
    vmv_vv(temp, lhs_v);
    lhs_v = temp;
  } else if (dst_v == rhs_v) {
    vmv_vv(temp, rhs_v);
    rhs_v = temp;
  }
  vrgather_vv(dst_v, lhs_v, kSimd128ScratchReg);
  vadd_vi(kSimd128ScratchReg, kSimd128ScratchReg,
          -16);  // Indices in [16, 31] select the (i - 16)-th element of rhs.
  vrgather_vv(kSimd128ScratchReg2, rhs_v, kSimd128ScratchReg);
  vor_vv(dst_v, dst_v, kSimd128ScratchReg2);
}

void LiftoffAssembler::emit_i8x16_popcnt(LiftoffRegister dst,
                                         LiftoffRegister src) {
  bailout(kSimd, "emit_i8x16_popcnt");
}

void LiftoffAssembler::emit_i8x16_swizzle(LiftoffRegister dst,
                                          LiftoffRegister lhs,
                                          LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_swizzle");
}

void LiftoffAssembler::emit_i8x16_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  VU.set(kScratchReg, E8, m1);
  vmv_vx(dst.fp().toV(), src.gp());
}

void LiftoffAssembler::emit_i16x8_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  VU.set(kScratchReg, E16, m1);
  vmv_vx(dst.fp().toV(), src.gp());
}

void LiftoffAssembler::emit_i32x4_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  VU.set(kScratchReg, E32, m1);
  vmv_vx(dst.fp().toV(), src.gp());
}

void LiftoffAssembler::emit_i64x2_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  VU.set(kScratchReg, E64, m1);
  vmv_vx(dst.fp().toV(), src.gp());
}

void LiftoffAssembler::emit_i64x2_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  WasmRvvEq(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E64, m1);
}

void LiftoffAssembler::emit_i64x2_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  WasmRvvNe(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E64, m1);
}

void LiftoffAssembler::emit_i64x2_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  WasmRvvGtS(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E64, m1);
}

void LiftoffAssembler::emit_i64x2_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  WasmRvvGeS(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E64, m1);
}

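// FP splats go through a GPR: fmv.x.{w,d} moves the scalar's bit pattern
// out of the FP register, then vmv.v.x splats it across the lanes.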
void LiftoffAssembler::emit_f32x4_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  VU.set(kScratchReg, E32, m1);
  fmv_x_w(kScratchReg, src.fp());
  vmv_vx(dst.fp().toV(), kScratchReg);
}

void LiftoffAssembler::emit_f64x2_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  VU.set(kScratchReg, E64, m1);
  fmv_x_d(kScratchReg, src.fp());
  vmv_vx(dst.fp().toV(), kScratchReg);
}

#define SIMD_BINOP(name1, name2)                                         \
  void LiftoffAssembler::emit_##name1##_extmul_low_##name2(              \
      LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2) { \
    bailout(kSimd, "emit_" #name1 "_extmul_low_" #name2);                \
  }                                                                      \
  void LiftoffAssembler::emit_##name1##_extmul_high_##name2(             \
      LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2) { \
    bailout(kSimd, "emit_" #name1 "_extmul_high_" #name2);               \
  }

SIMD_BINOP(i16x8, i8x16_s)
SIMD_BINOP(i16x8, i8x16_u)

SIMD_BINOP(i32x4, i16x8_s)
SIMD_BINOP(i32x4, i16x8_u)

SIMD_BINOP(i64x2, i32x4_s)
SIMD_BINOP(i64x2, i32x4_u)

#undef SIMD_BINOP

void LiftoffAssembler::emit_i16x8_q15mulr_sat_s(LiftoffRegister dst,
                                                LiftoffRegister src1,
                                                LiftoffRegister src2) {
  bailout(kSimd, "i16x8_q15mulr_sat_s");
}

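// Bitmask: vmslt against a zero vector writes one mask bit per negative
// lane; reading the mask register back with vmv.x.s yields the scalar
// bitmask.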
void LiftoffAssembler::emit_i64x2_bitmask(LiftoffRegister dst,
                                          LiftoffRegister src) {
  VU.set(kScratchReg, E64, m1);
  vmv_vx(kSimd128RegZero, zero_reg);
  vmslt_vv(kSimd128ScratchReg, src.fp().toV(), kSimd128RegZero);
  VU.set(kScratchReg, E32, m1);
  vmv_xs(dst.gp(), kSimd128ScratchReg);
}

void LiftoffAssembler::emit_i64x2_sconvert_i32x4_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "i64x2_sconvert_i32x4_low");
}

void LiftoffAssembler::emit_i64x2_sconvert_i32x4_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "i64x2_sconvert_i32x4_high");
}

void LiftoffAssembler::emit_i64x2_uconvert_i32x4_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "i64x2_uconvert_i32x4_low");
}

void LiftoffAssembler::emit_i64x2_uconvert_i32x4_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "i64x2_uconvert_i32x4_high");
}

void LiftoffAssembler::emit_i8x16_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  WasmRvvEq(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E8, m1);
}

void LiftoffAssembler::emit_i8x16_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  WasmRvvNe(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E8, m1);
}

void LiftoffAssembler::emit_i8x16_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  WasmRvvGtS(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E8, m1);
}

void LiftoffAssembler::emit_i8x16_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  WasmRvvGtU(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E8, m1);
}

void LiftoffAssembler::emit_i8x16_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  WasmRvvGeS(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E8, m1);
}

void LiftoffAssembler::emit_i8x16_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  WasmRvvGeU(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E8, m1);
}

void LiftoffAssembler::emit_i16x8_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  WasmRvvEq(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E16, m1);
}

void LiftoffAssembler::emit_i16x8_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  WasmRvvNe(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E16, m1);
}

void LiftoffAssembler::emit_i16x8_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  WasmRvvGtS(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E16, m1);
}

void LiftoffAssembler::emit_i16x8_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  WasmRvvGtU(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E16, m1);
}

void LiftoffAssembler::emit_i16x8_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  WasmRvvGeS(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E16, m1);
}

void LiftoffAssembler::emit_i16x8_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  WasmRvvGeU(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E16, m1);
}

void LiftoffAssembler::emit_i32x4_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  WasmRvvEq(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E32, m1);
}

void LiftoffAssembler::emit_i32x4_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  WasmRvvNe(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E32, m1);
}

void LiftoffAssembler::emit_i32x4_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  WasmRvvGtS(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E32, m1);
}

void LiftoffAssembler::emit_i32x4_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  WasmRvvGtU(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E32, m1);
}

void LiftoffAssembler::emit_i32x4_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  WasmRvvGeS(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E32, m1);
}

void LiftoffAssembler::emit_i32x4_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  WasmRvvGeU(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E32, m1);
}

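// FP lane compares: the vmf* compare writes a per-lane mask into v0; the
// 0/-1 lane result is then built by splatting zero and merging -1 into the
// lanes selected by the mask.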
void LiftoffAssembler::emit_f32x4_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  VU.set(kScratchReg, E32, m1);
  vmfeq_vv(v0, rhs.fp().toV(), lhs.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  vmerge_vi(dst.fp().toV(), -1, dst.fp().toV());
}

void LiftoffAssembler::emit_f32x4_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  VU.set(kScratchReg, E32, m1);
  vmfne_vv(v0, rhs.fp().toV(), lhs.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  vmerge_vi(dst.fp().toV(), -1, dst.fp().toV());
}

void LiftoffAssembler::emit_f32x4_lt(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  VU.set(kScratchReg, E32, m1);
  vmflt_vv(v0, rhs.fp().toV(), lhs.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  vmerge_vi(dst.fp().toV(), -1, dst.fp().toV());
}

void LiftoffAssembler::emit_f32x4_le(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  VU.set(kScratchReg, E32, m1);
  vmfle_vv(v0, rhs.fp().toV(), lhs.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  vmerge_vi(dst.fp().toV(), -1, dst.fp().toV());
}

void LiftoffAssembler::emit_f64x2_convert_low_i32x4_s(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "f64x2.convert_low_i32x4_s");
}

void LiftoffAssembler::emit_f64x2_convert_low_i32x4_u(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "f64x2.convert_low_i32x4_u");
}

void LiftoffAssembler::emit_f64x2_promote_low_f32x4(LiftoffRegister dst,
                                                    LiftoffRegister src) {
  bailout(kSimd, "f64x2.promote_low_f32x4");
}

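// Demote: vfncvt narrows the two f64 lanes into f32 lanes 0 and 1; the
// mask 0b1100 in v0 then selects zero for lanes 2 and 3.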
void LiftoffAssembler::emit_f32x4_demote_f64x2_zero(LiftoffRegister dst,
                                                    LiftoffRegister src) {
  VU.set(kScratchReg, E32, m1);
  vfncvt_f_f_w(dst.fp().toV(), src.fp().toV());
  vmv_vi(v0, 12);
  vmerge_vx(dst.fp().toV(), zero_reg, dst.fp().toV());
}

void LiftoffAssembler::emit_i32x4_trunc_sat_f64x2_s_zero(LiftoffRegister dst,
                                                         LiftoffRegister src) {
  bailout(kSimd, "i32x4.trunc_sat_f64x2_s_zero");
}

void LiftoffAssembler::emit_i32x4_trunc_sat_f64x2_u_zero(LiftoffRegister dst,
                                                         LiftoffRegister src) {
  bailout(kSimd, "i32x4.trunc_sat_f64x2_u_zero");
}

void LiftoffAssembler::emit_f64x2_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_eq");
}

void LiftoffAssembler::emit_f64x2_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_ne");
}

void LiftoffAssembler::emit_f64x2_lt(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_lt");
}

void LiftoffAssembler::emit_f64x2_le(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_le");
}

void LiftoffAssembler::emit_s128_const(LiftoffRegister dst,
                                       const uint8_t imms[16]) {
  WasmRvvS128const(dst.fp().toV(), imms);
}

void LiftoffAssembler::emit_s128_not(LiftoffRegister dst, LiftoffRegister src) {
  VU.set(kScratchReg, E8, m1);
  vnot_vv(dst.fp().toV(), src.fp().toV());
}

void LiftoffAssembler::emit_s128_and(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  VU.set(kScratchReg, E8, m1);
  vand_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_s128_or(LiftoffRegister dst, LiftoffRegister lhs,
                                    LiftoffRegister rhs) {
  VU.set(kScratchReg, E8, m1);
  vor_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_s128_xor(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  VU.set(kScratchReg, E8, m1);
  vxor_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_s128_and_not(LiftoffRegister dst,
                                         LiftoffRegister lhs,
                                         LiftoffRegister rhs) {
  VU.set(kScratchReg, E8, m1);
  vnot_vv(dst.fp().toV(), rhs.fp().toV());
  vand_vv(dst.fp().toV(), lhs.fp().toV(), dst.fp().toV());
}

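// Bitwise select: dst = (src1 & mask) | (src2 & ~mask).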
void LiftoffAssembler::emit_s128_select(LiftoffRegister dst,
                                        LiftoffRegister src1,
                                        LiftoffRegister src2,
                                        LiftoffRegister mask) {
  VU.set(kScratchReg, E8, m1);
  vand_vv(kSimd128ScratchReg, src1.fp().toV(), mask.fp().toV());
  vnot_vv(kSimd128ScratchReg2, mask.fp().toV());
  vand_vv(kSimd128ScratchReg2, src2.fp().toV(), kSimd128ScratchReg2);
  vor_vv(dst.fp().toV(), kSimd128ScratchReg, kSimd128ScratchReg2);
}

void LiftoffAssembler::emit_i8x16_neg(LiftoffRegister dst,
                                      LiftoffRegister src) {
  VU.set(kScratchReg, E8, m1);
  vneg_vv(dst.fp().toV(), src.fp().toV());
}

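// anytrue: an unsigned max reduction over all lanes is nonzero iff any lane
// is nonzero; the scalar result is then normalized to 0 or 1.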
void LiftoffAssembler::emit_v128_anytrue(LiftoffRegister dst,
                                         LiftoffRegister src) {
  VU.set(kScratchReg, E8, m1);
  Label t;
  vmv_sx(kSimd128ScratchReg, zero_reg);
  vredmaxu_vs(kSimd128ScratchReg, src.fp().toV(), kSimd128ScratchReg);
  vmv_xs(dst.gp(), kSimd128ScratchReg);
  beq(dst.gp(), zero_reg, &t);
  li(dst.gp(), 1);
  bind(&t);
}

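// alltrue: an unsigned min reduction seeded with all-ones is zero iff some
// lane is zero; normalize to 0 or 1.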
void LiftoffAssembler::emit_i8x16_alltrue(LiftoffRegister dst,
                                          LiftoffRegister src) {
  VU.set(kScratchReg, E8, m1);
  Label alltrue;
  li(kScratchReg, -1);
  vmv_sx(kSimd128ScratchReg, kScratchReg);
  vredminu_vs(kSimd128ScratchReg, src.fp().toV(), kSimd128ScratchReg);
  vmv_xs(dst.gp(), kSimd128ScratchReg);
  beqz(dst.gp(), &alltrue);
  li(dst.gp(), 1);
  bind(&alltrue);
}

void LiftoffAssembler::emit_i8x16_bitmask(LiftoffRegister dst,
                                          LiftoffRegister src) {
  VU.set(kScratchReg, E8, m1);
  vmv_vx(kSimd128RegZero, zero_reg);
  vmslt_vv(kSimd128ScratchReg, src.fp().toV(), kSimd128RegZero);
  VU.set(kScratchReg, E32, m1);
  vmv_xs(dst.gp(), kSimd128ScratchReg);
}

void LiftoffAssembler::emit_i8x16_shl(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  VU.set(kScratchReg, E8, m1);
  vsll_vx(dst.fp().toV(), lhs.fp().toV(), rhs.gp());
}

void LiftoffAssembler::emit_i8x16_shli(LiftoffRegister dst, LiftoffRegister lhs,
                                       int32_t rhs) {
  DCHECK(is_uint5(rhs));
  VU.set(kScratchReg, E8, m1);
  vsll_vi(dst.fp().toV(), lhs.fp().toV(), rhs);
}

void LiftoffAssembler::emit_i8x16_shr_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_shr_s");
}

void LiftoffAssembler::emit_i8x16_shri_s(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  bailout(kSimd, "emit_i8x16_shri_s");
}

void LiftoffAssembler::emit_i8x16_shr_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_shr_u");
}

void LiftoffAssembler::emit_i8x16_shri_u(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  bailout(kSimd, "emit_i8x16_shri_u");
}

void LiftoffAssembler::emit_i8x16_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  VU.set(kScratchReg, E8, m1);
  vadd_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_i8x16_add_sat_s(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  VU.set(kScratchReg, E8, m1);
  vsadd_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_i8x16_add_sat_u(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  VU.set(kScratchReg, E8, m1);
  vsaddu_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_i8x16_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  VU.set(kScratchReg, E8, m1);
  vsub_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_i8x16_sub_sat_s(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  VU.set(kScratchReg, E8, m1);
  vssub_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_i8x16_sub_sat_u(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  VU.set(kScratchReg, E8, m1);
  vssubu_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_i8x16_min_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_min_s");
}

void LiftoffAssembler::emit_i8x16_min_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_min_u");
}

void LiftoffAssembler::emit_i8x16_max_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_max_s");
}

void LiftoffAssembler::emit_i8x16_max_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_max_u");
}

void LiftoffAssembler::emit_i16x8_neg(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_neg");
}

void LiftoffAssembler::emit_i16x8_alltrue(LiftoffRegister dst,
                                          LiftoffRegister src) {
  VU.set(kScratchReg, E16, m1);
  Label alltrue;
  li(kScratchReg, -1);
  vmv_sx(kSimd128ScratchReg, kScratchReg);
  vredminu_vs(kSimd128ScratchReg, src.fp().toV(), kSimd128ScratchReg);
  vmv_xs(dst.gp(), kSimd128ScratchReg);
  beqz(dst.gp(), &alltrue);
  li(dst.gp(), 1);
  bind(&alltrue);
}

void LiftoffAssembler::emit_i16x8_bitmask(LiftoffRegister dst,
                                          LiftoffRegister src) {
  VU.set(kScratchReg, E16, m1);
  vmv_vx(kSimd128RegZero, zero_reg);
  vmslt_vv(kSimd128ScratchReg, src.fp().toV(), kSimd128RegZero);
  VU.set(kScratchReg, E32, m1);
  vmv_xs(dst.gp(), kSimd128ScratchReg);
}

void LiftoffAssembler::emit_i16x8_shl(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  VU.set(kScratchReg, E16, m1);
  vsll_vx(dst.fp().toV(), lhs.fp().toV(), rhs.gp());
}

void LiftoffAssembler::emit_i16x8_shli(LiftoffRegister dst, LiftoffRegister lhs,
                                       int32_t rhs) {
  DCHECK(is_uint5(rhs));
  VU.set(kScratchReg, E16, m1);
  vsll_vi(dst.fp().toV(), lhs.fp().toV(), rhs);
}

void LiftoffAssembler::emit_i16x8_shr_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_shr_s");
}

void LiftoffAssembler::emit_i16x8_shri_s(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  bailout(kSimd, "emit_i16x8_shri_s");
}

void LiftoffAssembler::emit_i16x8_shr_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_shr_u");
}

void LiftoffAssembler::emit_i16x8_shri_u(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  bailout(kSimd, "emit_i16x8_shri_u");
}

void LiftoffAssembler::emit_i16x8_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  VU.set(kScratchReg, E16, m1);
  vadd_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_i16x8_add_sat_s(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_add_sat_s");
}

void LiftoffAssembler::emit_i16x8_add_sat_u(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_add_sat_u");
}

void LiftoffAssembler::emit_i16x8_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  VU.set(kScratchReg, E16, m1);
  vsub_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_i16x8_sub_sat_s(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_sub_sat_s");
}

void LiftoffAssembler::emit_i16x8_sub_sat_u(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_sub_sat_u");
}

void LiftoffAssembler::emit_i16x8_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_mul");
}

void LiftoffAssembler::emit_i16x8_min_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_min_s");
}

void LiftoffAssembler::emit_i16x8_min_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_min_u");
}

void LiftoffAssembler::emit_i16x8_max_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_max_s");
}

void LiftoffAssembler::emit_i16x8_max_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_max_u");
}

void LiftoffAssembler::emit_i32x4_neg(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_neg");
}

void LiftoffAssembler::emit_i32x4_alltrue(LiftoffRegister dst,
                                          LiftoffRegister src) {
  VU.set(kScratchReg, E32, m1);
  Label alltrue;
  li(kScratchReg, -1);
  vmv_sx(kSimd128ScratchReg, kScratchReg);
  vredminu_vs(kSimd128ScratchReg, src.fp().toV(), kSimd128ScratchReg);
  vmv_xs(dst.gp(), kSimd128ScratchReg);
  beqz(dst.gp(), &alltrue);
  li(dst.gp(), 1);
  bind(&alltrue);
}

void LiftoffAssembler::emit_i32x4_bitmask(LiftoffRegister dst,
                                          LiftoffRegister src) {
  VU.set(kScratchReg, E32, m1);
  vmv_vx(kSimd128RegZero, zero_reg);
  vmslt_vv(kSimd128ScratchReg, src.fp().toV(), kSimd128RegZero);
  vmv_xs(dst.gp(), kSimd128ScratchReg);
}

void LiftoffAssembler::emit_i32x4_shl(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  VU.set(kScratchReg, E32, m1);
  vsll_vx(dst.fp().toV(), lhs.fp().toV(), rhs.gp());
}

void LiftoffAssembler::emit_i32x4_shli(LiftoffRegister dst, LiftoffRegister lhs,
                                       int32_t rhs) {
  // Set SEW explicitly, as every sibling shift does; a shift amount that
  // does not fit the 5-bit immediate goes through a scratch register.
  VU.set(kScratchReg, E32, m1);
  if (is_uint5(rhs)) {
    vsll_vi(dst.fp().toV(), lhs.fp().toV(), rhs);
  } else {
    li(kScratchReg, rhs);
    vsll_vx(dst.fp().toV(), lhs.fp().toV(), kScratchReg);
  }
}

void LiftoffAssembler::emit_i32x4_shr_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_shr_s");
}

void LiftoffAssembler::emit_i32x4_shri_s(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  bailout(kSimd, "emit_i32x4_shri_s");
}

void LiftoffAssembler::emit_i32x4_shr_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_shr_u");
}

void LiftoffAssembler::emit_i32x4_shri_u(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  bailout(kSimd, "emit_i32x4_shri_u");
}

void LiftoffAssembler::emit_i32x4_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  VU.set(kScratchReg, E32, m1);
  vadd_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_i32x4_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  VU.set(kScratchReg, E32, m1);
  vsub_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_i32x4_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_mul");
}

void LiftoffAssembler::emit_i32x4_min_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_min_s");
}

void LiftoffAssembler::emit_i32x4_min_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_min_u");
}

void LiftoffAssembler::emit_i32x4_max_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_max_s");
}

void LiftoffAssembler::emit_i32x4_max_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_max_u");
}

void LiftoffAssembler::emit_i32x4_dot_i16x8_s(LiftoffRegister dst,
                                              LiftoffRegister lhs,
                                              LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_dot_i16x8_s");
}

void LiftoffAssembler::emit_i64x2_neg(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i64x2_neg");
}

void LiftoffAssembler::emit_i64x2_alltrue(LiftoffRegister dst,
                                          LiftoffRegister src) {
  VU.set(kScratchReg, E64, m1);
  Label alltrue;
  li(kScratchReg, -1);
  vmv_sx(kSimd128ScratchReg, kScratchReg);
  vredminu_vs(kSimd128ScratchReg, src.fp().toV(), kSimd128ScratchReg);
  vmv_xs(dst.gp(), kSimd128ScratchReg);
  beqz(dst.gp(), &alltrue);
  li(dst.gp(), 1);
  bind(&alltrue);
}

void LiftoffAssembler::emit_i64x2_shl(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  VU.set(kScratchReg, E64, m1);
  vsll_vx(dst.fp().toV(), lhs.fp().toV(), rhs.gp());
}

void LiftoffAssembler::emit_i64x2_shli(LiftoffRegister dst, LiftoffRegister lhs,
                                       int32_t rhs) {
  VU.set(kScratchReg, E64, m1);
  if (is_uint5(rhs)) {
    vsll_vi(dst.fp().toV(), lhs.fp().toV(), rhs);
  } else {
    li(kScratchReg, rhs);
    vsll_vx(dst.fp().toV(), lhs.fp().toV(), kScratchReg);
  }
}

void LiftoffAssembler::emit_i64x2_shr_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i64x2_shr_s");
}

void LiftoffAssembler::emit_i64x2_shri_s(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  bailout(kSimd, "emit_i64x2_shri_s");
}

void LiftoffAssembler::emit_i64x2_shr_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i64x2_shr_u");
}

void LiftoffAssembler::emit_i64x2_shri_u(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  bailout(kSimd, "emit_i64x2_shri_u");
}

void LiftoffAssembler::emit_i64x2_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  VU.set(kScratchReg, E64, m1);
  vadd_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_i64x2_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  VU.set(kScratchReg, E64, m1);
  vsub_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_i64x2_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kSimd, "emit_i64x2_mul");
}

emit_f32x4_abs(LiftoffRegister dst,LiftoffRegister src)2536 void LiftoffAssembler::emit_f32x4_abs(LiftoffRegister dst,
2537                                       LiftoffRegister src) {
2538   VU.set(kScratchReg, E32, m1);
2539   vfabs_vv(dst.fp().toV(), src.fp().toV());
2540 }
2541 
emit_f32x4_neg(LiftoffRegister dst,LiftoffRegister src)2542 void LiftoffAssembler::emit_f32x4_neg(LiftoffRegister dst,
2543                                       LiftoffRegister src) {
2544   VU.set(kScratchReg, E32, m1);
2545   vfneg_vv(dst.fp().toV(), src.fp().toV());
2546 }
2547 
emit_f32x4_sqrt(LiftoffRegister dst,LiftoffRegister src)2548 void LiftoffAssembler::emit_f32x4_sqrt(LiftoffRegister dst,
2549                                        LiftoffRegister src) {
2550   bailout(kSimd, "emit_f32x4_sqrt");
2551 }
2552 
emit_f32x4_ceil(LiftoffRegister dst,LiftoffRegister src)2553 bool LiftoffAssembler::emit_f32x4_ceil(LiftoffRegister dst,
2554                                        LiftoffRegister src) {
2555   Ceil_f(dst.fp().toV(), src.fp().toV(), kScratchReg, kSimd128ScratchReg);
2556   return true;
2557 }
2558 
emit_f32x4_floor(LiftoffRegister dst,LiftoffRegister src)2559 bool LiftoffAssembler::emit_f32x4_floor(LiftoffRegister dst,
2560                                         LiftoffRegister src) {
2561   Floor_f(dst.fp().toV(), src.fp().toV(), kScratchReg, kSimd128ScratchReg);
2562   return true;
2563 }
2564 
emit_f32x4_trunc(LiftoffRegister dst,LiftoffRegister src)2565 bool LiftoffAssembler::emit_f32x4_trunc(LiftoffRegister dst,
2566                                         LiftoffRegister src) {
2567   bailout(kSimd, "emit_f32x4_trunc");
2568   return true;
2569 }
2570 
emit_f32x4_nearest_int(LiftoffRegister dst,LiftoffRegister src)2571 bool LiftoffAssembler::emit_f32x4_nearest_int(LiftoffRegister dst,
2572                                               LiftoffRegister src) {
2573   bailout(kSimd, "emit_f32x4_nearest_int");
2574   return true;
2575 }
2576 
emit_f32x4_add(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2577 void LiftoffAssembler::emit_f32x4_add(LiftoffRegister dst, LiftoffRegister lhs,
2578                                       LiftoffRegister rhs) {
2579   VU.set(kScratchReg, E32, m1);
2580   vfadd_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
2581 }
2582 
emit_f32x4_sub(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2583 void LiftoffAssembler::emit_f32x4_sub(LiftoffRegister dst, LiftoffRegister lhs,
2584                                       LiftoffRegister rhs) {
2585   VU.set(kScratchReg, E32, m1);
2586   vfsub_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
2587 }
2588 
emit_f32x4_mul(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2589 void LiftoffAssembler::emit_f32x4_mul(LiftoffRegister dst, LiftoffRegister lhs,
2590                                       LiftoffRegister rhs) {
2591   VU.set(kScratchReg, E32, m1);
2592   VU.set(RoundingMode::RTZ);
2593   vfmul_vv(dst.fp().toV(), rhs.fp().toV(), lhs.fp().toV());
2594 }
2595 
void LiftoffAssembler::emit_f32x4_div(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  VU.set(kScratchReg, E32, m1);
  // Division is not commutative: the dividend must be the first vector
  // source operand (vd = vs2 / vs1), matching the operand order of vfsub_vv
  // above.
  vfdiv_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

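// f32x4.min/max must return a canonical NaN when either input lane is NaN.
// The mask in v0 marks lanes where both inputs are ordered (x == x fails
// only for NaN); the destination is pre-filled with the canonical quiet-NaN
// bit pattern and the masked min/max overwrites only the ordered lanes.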
void LiftoffAssembler::emit_f32x4_min(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  const int32_t kNaN = 0x7FC00000;
  VU.set(kScratchReg, E32, m1);
  vmfeq_vv(v0, lhs.fp().toV(), lhs.fp().toV());
  vmfeq_vv(kSimd128ScratchReg, rhs.fp().toV(), rhs.fp().toV());
  vand_vv(v0, v0, kSimd128ScratchReg);
  li(kScratchReg, kNaN);
  vmv_vx(kSimd128ScratchReg, kScratchReg);
  vfmin_vv(kSimd128ScratchReg, rhs.fp().toV(), lhs.fp().toV(), Mask);
  vmv_vv(dst.fp().toV(), kSimd128ScratchReg);
}

void LiftoffAssembler::emit_f32x4_max(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  const int32_t kNaN = 0x7FC00000;
  VU.set(kScratchReg, E32, m1);
  vmfeq_vv(v0, lhs.fp().toV(), lhs.fp().toV());
  vmfeq_vv(kSimd128ScratchReg, rhs.fp().toV(), rhs.fp().toV());
  vand_vv(v0, v0, kSimd128ScratchReg);
  li(kScratchReg, kNaN);
  vmv_vx(kSimd128ScratchReg, kScratchReg);
  vfmax_vv(kSimd128ScratchReg, rhs.fp().toV(), lhs.fp().toV(), Mask);
  vmv_vv(dst.fp().toV(), kSimd128ScratchReg);
}

void LiftoffAssembler::emit_f32x4_pmin(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_f32x4_pmin");
}

void LiftoffAssembler::emit_f32x4_pmax(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_f32x4_pmax");
}

void LiftoffAssembler::emit_f64x2_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  VU.set(kScratchReg, E64, m1);
  vfabs_vv(dst.fp().toV(), src.fp().toV());
}

void LiftoffAssembler::emit_f64x2_neg(LiftoffRegister dst,
                                      LiftoffRegister src) {
  VU.set(kScratchReg, E64, m1);
  vfneg_vv(dst.fp().toV(), src.fp().toV());
}

void LiftoffAssembler::emit_f64x2_sqrt(LiftoffRegister dst,
                                       LiftoffRegister src) {
  bailout(kSimd, "emit_f64x2_sqrt");
}

bool LiftoffAssembler::emit_f64x2_ceil(LiftoffRegister dst,
                                       LiftoffRegister src) {
  Ceil_d(dst.fp().toV(), src.fp().toV(), kScratchReg, kSimd128ScratchReg);
  return true;
}

bool LiftoffAssembler::emit_f64x2_floor(LiftoffRegister dst,
                                        LiftoffRegister src) {
  Floor_d(dst.fp().toV(), src.fp().toV(), kScratchReg, kSimd128ScratchReg);
  return true;
}

bool LiftoffAssembler::emit_f64x2_trunc(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "emit_f64x2_trunc");
  return true;
}

bool LiftoffAssembler::emit_f64x2_nearest_int(LiftoffRegister dst,
                                              LiftoffRegister src) {
  bailout(kSimd, "emit_f64x2_nearest_int");
  return true;
}

void LiftoffAssembler::emit_f64x2_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  VU.set(kScratchReg, E64, m1);
  vfadd_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_f64x2_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  VU.set(kScratchReg, E64, m1);
  vfsub_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
}

void LiftoffAssembler::emit_f64x2_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_mul");
}

void LiftoffAssembler::emit_f64x2_div(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_div");
}

void LiftoffAssembler::emit_f64x2_min(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_min");
}

void LiftoffAssembler::emit_f64x2_max(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_max");
}

void LiftoffAssembler::emit_f64x2_pmin(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_pmin");
}

void LiftoffAssembler::emit_f64x2_pmax(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_pmax");
}

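// Wasm float->int SIMD conversions saturate, and NaN lanes convert to zero.
// The mask in v0 marks the non-NaN lanes (x == x); the destination is
// pre-filled with zeros and the masked convert writes only those lanes,
// while out-of-range values clamp per the RISC-V conversion rules.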
void LiftoffAssembler::emit_i32x4_sconvert_f32x4(LiftoffRegister dst,
                                                 LiftoffRegister src) {
  VU.set(kScratchReg, E32, m1);
  VU.set(RoundingMode::RTZ);
  vmfeq_vv(v0, src.fp().toV(), src.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  vfcvt_x_f_v(dst.fp().toV(), src.fp().toV(), Mask);
}

void LiftoffAssembler::emit_i32x4_uconvert_f32x4(LiftoffRegister dst,
                                                 LiftoffRegister src) {
  VU.set(kScratchReg, E32, m1);
  VU.set(RoundingMode::RTZ);
  vmfeq_vv(v0, src.fp().toV(), src.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  vfcvt_xu_f_v(dst.fp().toV(), src.fp().toV(), Mask);
}

void LiftoffAssembler::emit_f32x4_sconvert_i32x4(LiftoffRegister dst,
                                                 LiftoffRegister src) {
  VU.set(kScratchReg, E32, m1);
  VU.set(RoundingMode::RTZ);
  vfcvt_f_x_v(dst.fp().toV(), src.fp().toV());
}

void LiftoffAssembler::emit_f32x4_uconvert_i32x4(LiftoffRegister dst,
                                                 LiftoffRegister src) {
  VU.set(kScratchReg, E32, m1);
  VU.set(RoundingMode::RTZ);
  vfcvt_f_xu_v(dst.fp().toV(), src.fp().toV());
}

void LiftoffAssembler::emit_i8x16_sconvert_i16x8(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_sconvert_i16x8");
}

void LiftoffAssembler::emit_i8x16_uconvert_i16x8(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_uconvert_i16x8");
}

void LiftoffAssembler::emit_i16x8_sconvert_i32x4(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_sconvert_i32x4");
}

void LiftoffAssembler::emit_i16x8_uconvert_i32x4(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_uconvert_i32x4");
}

void LiftoffAssembler::emit_i16x8_sconvert_i8x16_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_sconvert_i8x16_low");
}

void LiftoffAssembler::emit_i16x8_sconvert_i8x16_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_sconvert_i8x16_high");
}

void LiftoffAssembler::emit_i16x8_uconvert_i8x16_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_uconvert_i8x16_low");
}

void LiftoffAssembler::emit_i16x8_uconvert_i8x16_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_uconvert_i8x16_high");
}

void LiftoffAssembler::emit_i32x4_sconvert_i16x8_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_sconvert_i16x8_low");
}

void LiftoffAssembler::emit_i32x4_sconvert_i16x8_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_sconvert_i16x8_high");
}

void LiftoffAssembler::emit_i32x4_uconvert_i16x8_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_uconvert_i16x8_low");
}

void LiftoffAssembler::emit_i32x4_uconvert_i16x8_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_uconvert_i16x8_high");
}

void LiftoffAssembler::emit_i8x16_rounding_average_u(LiftoffRegister dst,
                                                     LiftoffRegister lhs,
                                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_rounding_average_u");
}

void LiftoffAssembler::emit_i16x8_rounding_average_u(LiftoffRegister dst,
                                                     LiftoffRegister lhs,
                                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_rounding_average_u");
}

void LiftoffAssembler::emit_i8x16_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i8x16_abs");
}

void LiftoffAssembler::emit_i16x8_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_abs");
}

void LiftoffAssembler::emit_i64x2_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i64x2_abs");
}

void LiftoffAssembler::emit_i32x4_extadd_pairwise_i16x8_s(LiftoffRegister dst,
                                                          LiftoffRegister src) {
  bailout(kSimd, "i32x4.extadd_pairwise_i16x8_s");
}

void LiftoffAssembler::emit_i32x4_extadd_pairwise_i16x8_u(LiftoffRegister dst,
                                                          LiftoffRegister src) {
  bailout(kSimd, "i32x4.extadd_pairwise_i16x8_u");
}

void LiftoffAssembler::emit_i16x8_extadd_pairwise_i8x16_s(LiftoffRegister dst,
                                                          LiftoffRegister src) {
  bailout(kSimd, "i16x8.extadd_pairwise_i8x16_s");
}

void LiftoffAssembler::emit_i16x8_extadd_pairwise_i8x16_u(LiftoffRegister dst,
                                                          LiftoffRegister src) {
  bailout(kSimd, "i16x8.extadd_pairwise_i8x16_u");
}

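// Integer abs is implemented with a masked negate: v0 marks the negative
// lanes, the source is copied to the destination, and only the masked lanes
// are overwritten with (0 - src).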
void LiftoffAssembler::emit_i32x4_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  VU.set(kScratchReg, E32, m1);
  vmv_vx(kSimd128RegZero, zero_reg);
  vmv_vv(dst.fp().toV(), src.fp().toV());
  vmslt_vv(v0, src.fp().toV(), kSimd128RegZero);
  vsub_vv(dst.fp().toV(), kSimd128RegZero, src.fp().toV(), Mask);
}

void LiftoffAssembler::emit_i8x16_extract_lane_s(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i8x16_extract_lane_s");
}

void LiftoffAssembler::emit_i8x16_extract_lane_u(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i8x16_extract_lane_u");
}

void LiftoffAssembler::emit_i16x8_extract_lane_s(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i16x8_extract_lane_s");
}

void LiftoffAssembler::emit_i16x8_extract_lane_u(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i16x8_extract_lane_u");
}

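// Lane extraction slides the requested lane down to element 0 and then moves
// element 0 into the scalar register; vmv.x.s sign-extends, which matches
// the signed i32x4 extract.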
void LiftoffAssembler::emit_i32x4_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  VU.set(kScratchReg, E32, m1);
  vslidedown_vi(v31, lhs.fp().toV(), imm_lane_idx);
  vmv_xs(dst.gp(), v31);
}

void LiftoffAssembler::emit_i64x2_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i64x2_extract_lane");
}

void LiftoffAssembler::emit_f32x4_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_f32x4_extract_lane");
}

void LiftoffAssembler::emit_f64x2_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_f64x2_extract_lane");
}

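// Lane replacement uses a one-hot mask: 1 << lane is materialized in v0 and
// vmerge.vx then selects the scalar for the masked lane and the original
// vector elements everywhere else.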
void LiftoffAssembler::emit_i8x16_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  VU.set(kScratchReg, E8, m1);
  li(kScratchReg, 0x1 << imm_lane_idx);
  vmv_sx(v0, kScratchReg);
  vmerge_vx(dst.fp().toV(), src2.gp(), src1.fp().toV());
}

void LiftoffAssembler::emit_i16x8_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  VU.set(kScratchReg, E16, m1);
  li(kScratchReg, 0x1 << imm_lane_idx);
  vmv_sx(v0, kScratchReg);
  vmerge_vx(dst.fp().toV(), src2.gp(), src1.fp().toV());
}

void LiftoffAssembler::emit_i32x4_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  VU.set(kScratchReg, E32, m1);
  li(kScratchReg, 0x1 << imm_lane_idx);
  vmv_sx(v0, kScratchReg);
  vmerge_vx(dst.fp().toV(), src2.gp(), src1.fp().toV());
}

void LiftoffAssembler::emit_i64x2_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  VU.set(kScratchReg, E64, m1);
  li(kScratchReg, 0x1 << imm_lane_idx);
  vmv_sx(v0, kScratchReg);
  vmerge_vx(dst.fp().toV(), src2.gp(), src1.fp().toV());
}

void LiftoffAssembler::emit_f32x4_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_f32x4_replace_lane");
}

void LiftoffAssembler::emit_f64x2_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_f64x2_replace_lane");
}

void LiftoffAssembler::emit_s128_set_if_nan(Register dst, LiftoffRegister src,
                                            Register tmp_gp,
                                            LiftoffRegister tmp_s128,
                                            ValueKind lane_kind) {
  bailout(kSimd, "emit_s128_set_if_nan");
}

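// The stack check loads the current limit through {limit_address}
// (clobbering it) and branches to the out-of-line code on an unsigned
// sp <= limit comparison.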
void LiftoffAssembler::StackCheck(Label* ool_code, Register limit_address) {
  TurboAssembler::Uld(limit_address, MemOperand(limit_address));
  TurboAssembler::Branch(ool_code, ule, sp, Operand(limit_address));
}

void LiftoffAssembler::CallTrapCallbackForTesting() {
  PrepareCallCFunction(0, GetUnusedRegister(kGpReg, {}).gp());
  CallCFunction(ExternalReference::wasm_call_trap_callback_for_testing(), 0);
}

void LiftoffAssembler::AssertUnreachable(AbortReason reason) {
  if (FLAG_debug_code) Abort(reason);
}

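// Registers are pushed in one batch: sp is adjusted once, then each register
// is stored at a fixed offset. PopRegisters below walks the GP set from the
// other end (GetLastRegSet), so the two layouts line up.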
void LiftoffAssembler::PushRegisters(LiftoffRegList regs) {
  LiftoffRegList gp_regs = regs & kGpCacheRegList;
  int32_t num_gp_regs = gp_regs.GetNumRegsSet();
  if (num_gp_regs) {
    int32_t offset = num_gp_regs * kSystemPointerSize;
    Add64(sp, sp, Operand(-offset));
    while (!gp_regs.is_empty()) {
      LiftoffRegister reg = gp_regs.GetFirstRegSet();
      offset -= kSystemPointerSize;
      Sd(reg.gp(), MemOperand(sp, offset));
      gp_regs.clear(reg);
    }
    DCHECK_EQ(offset, 0);
  }
  LiftoffRegList fp_regs = regs & kFpCacheRegList;
  int32_t num_fp_regs = fp_regs.GetNumRegsSet();
  if (num_fp_regs) {
    Add64(sp, sp, Operand(-(num_fp_regs * kStackSlotSize)));
    int32_t offset = 0;
    while (!fp_regs.is_empty()) {
      LiftoffRegister reg = fp_regs.GetFirstRegSet();
      TurboAssembler::StoreDouble(reg.fp(), MemOperand(sp, offset));
      fp_regs.clear(reg);
      offset += sizeof(double);
    }
    DCHECK_EQ(offset, num_fp_regs * sizeof(double));
  }
}

void LiftoffAssembler::PopRegisters(LiftoffRegList regs) {
  LiftoffRegList fp_regs = regs & kFpCacheRegList;
  int32_t fp_offset = 0;
  while (!fp_regs.is_empty()) {
    LiftoffRegister reg = fp_regs.GetFirstRegSet();
    TurboAssembler::LoadDouble(reg.fp(), MemOperand(sp, fp_offset));
    fp_regs.clear(reg);
    fp_offset += sizeof(double);
  }
  if (fp_offset) Add64(sp, sp, Operand(fp_offset));
  LiftoffRegList gp_regs = regs & kGpCacheRegList;
  int32_t gp_offset = 0;
  while (!gp_regs.is_empty()) {
    LiftoffRegister reg = gp_regs.GetLastRegSet();
    Ld(reg.gp(), MemOperand(sp, gp_offset));
    gp_regs.clear(reg);
    gp_offset += kSystemPointerSize;
  }
  Add64(sp, sp, Operand(gp_offset));
}

void LiftoffAssembler::RecordSpillsInSafepoint(Safepoint& safepoint,
                                               LiftoffRegList all_spills,
                                               LiftoffRegList ref_spills,
                                               int spill_offset) {
  int spill_space_size = 0;
  while (!all_spills.is_empty()) {
    LiftoffRegister reg = all_spills.GetFirstRegSet();
    if (ref_spills.has(reg)) {
      safepoint.DefinePointerSlot(spill_offset);
    }
    all_spills.clear(reg);
    ++spill_offset;
    spill_space_size += kSystemPointerSize;
  }
  // Record the number of additional spill slots.
  RecordOolSpillSpaceSize(spill_space_size);
}

void LiftoffAssembler::DropStackSlotsAndRet(uint32_t num_stack_slots) {
  TurboAssembler::DropAndRet(static_cast<int>(num_stack_slots));
}

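// C calls go through an on-stack buffer: the arguments are spilled into a
// freshly allocated stack area, a pointer to that buffer is passed as the
// single C argument in {a0}, and an optional out-argument is read back from
// the same buffer after the call returns.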
void LiftoffAssembler::CallC(const ValueKindSig* sig,
                             const LiftoffRegister* args,
                             const LiftoffRegister* rets,
                             ValueKind out_argument_kind, int stack_bytes,
                             ExternalReference ext_ref) {
  Add64(sp, sp, Operand(-stack_bytes));

  int arg_bytes = 0;
  for (ValueKind param_kind : sig->parameters()) {
    liftoff::Store(this, sp, arg_bytes, *args++, param_kind);
    arg_bytes += element_size_bytes(param_kind);
  }
  DCHECK_LE(arg_bytes, stack_bytes);

  // Pass a pointer to the buffer with the arguments to the C function.
  // On RISC-V, the first argument is passed in {a0}.
  constexpr Register kFirstArgReg = a0;
  mv(kFirstArgReg, sp);

  // Now call the C function.
  constexpr int kNumCCallArgs = 1;
  PrepareCallCFunction(kNumCCallArgs, kScratchReg);
  CallCFunction(ext_ref, kNumCCallArgs);

  // Move return value to the right register.
  const LiftoffRegister* next_result_reg = rets;
  if (sig->return_count() > 0) {
    DCHECK_EQ(1, sig->return_count());
    constexpr Register kReturnReg = a0;
    if (kReturnReg != next_result_reg->gp()) {
      Move(*next_result_reg, LiftoffRegister(kReturnReg), sig->GetReturn(0));
    }
    ++next_result_reg;
  }

  // Load potential output value from the buffer on the stack.
  if (out_argument_kind != kVoid) {
    liftoff::Load(this, *next_result_reg, MemOperand(sp, 0), out_argument_kind);
  }

  Add64(sp, sp, Operand(stack_bytes));
}

void LiftoffAssembler::CallNativeWasmCode(Address addr) {
  Call(addr, RelocInfo::WASM_CALL);
}

void LiftoffAssembler::TailCallNativeWasmCode(Address addr) {
  Jump(addr, RelocInfo::WASM_CALL);
}

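// Indirect calls whose target lives on the stack pop the target into t6
// first; t6 (x31) is a caller-saved temporary in the RISC-V calling
// convention, so it may be clobbered here.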
void LiftoffAssembler::CallIndirect(const ValueKindSig* sig,
                                    compiler::CallDescriptor* call_descriptor,
                                    Register target) {
  if (target == no_reg) {
    pop(t6);
    Call(t6);
  } else {
    Call(target);
  }
}

void LiftoffAssembler::TailCallIndirect(Register target) {
  if (target == no_reg) {
    Pop(t6);
    Jump(t6);
  } else {
    Jump(target);
  }
}

void LiftoffAssembler::CallRuntimeStub(WasmCode::RuntimeStubId sid) {
  // A direct call to a wasm runtime stub defined in this module.
  // Just encode the stub index. This will be patched at relocation.
  Call(static_cast<Address>(sid), RelocInfo::WASM_STUB_CALL);
}

void LiftoffAssembler::AllocateStackSlot(Register addr, uint32_t size) {
  Add64(sp, sp, Operand(-size));
  TurboAssembler::Move(addr, sp);
}

void LiftoffAssembler::DeallocateStackSlot(uint32_t size) {
  Add64(sp, sp, Operand(size));
}

void LiftoffAssembler::MaybeOSR() {}

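// Sets the memory at {dst} to a non-zero value if {src} is NaN. The check
// relies on feq, which yields 1 for ordered (non-NaN) inputs and 0 for NaN.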
void LiftoffAssembler::emit_set_if_nan(Register dst, FPURegister src,
                                       ValueKind kind) {
  UseScratchRegisterScope temps(this);
  Register scratch = temps.Acquire();
  if (kind == kF32) {
    feq_s(scratch, src, src);  // scratch <- !isNan(src)
  } else {
    DCHECK_EQ(kind, kF64);
    feq_d(scratch, src, src);  // scratch <- !isNan(src)
  }
  // Invert the feq result so that scratch holds 1 exactly when {src} is NaN;
  // a plain bitwise not would be non-zero in both cases.
  Seqz(scratch, scratch);
  Sd(scratch, MemOperand(dst));
}

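// Pushes the parameters for a call, from the highest stack slot downwards.
// For each slot, the gap up to the previous slot ({stack_decrement} minus
// the bytes actually pushed) is allocated first; this also covers any
// alignment padding accounted for in the slot indices.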
void LiftoffStackSlots::Construct(int param_slots) {
  DCHECK_LT(0, slots_.size());
  SortInPushOrder();
  int last_stack_slot = param_slots;
  for (auto& slot : slots_) {
    const int stack_slot = slot.dst_slot_;
    int stack_decrement = (last_stack_slot - stack_slot) * kSystemPointerSize;
    DCHECK_LT(0, stack_decrement);
    last_stack_slot = stack_slot;
    const LiftoffAssembler::VarState& src = slot.src_;
    switch (src.loc()) {
      case LiftoffAssembler::VarState::kStack:
        if (src.kind() != kS128) {
          asm_->AllocateStackSpace(stack_decrement - kSystemPointerSize);
          asm_->Ld(kScratchReg, liftoff::GetStackSlot(slot.src_offset_));
          asm_->push(kScratchReg);
        } else {
          asm_->AllocateStackSpace(stack_decrement - kSimd128Size);
          asm_->Ld(kScratchReg, liftoff::GetStackSlot(slot.src_offset_ - 8));
          asm_->push(kScratchReg);
          asm_->Ld(kScratchReg, liftoff::GetStackSlot(slot.src_offset_));
          asm_->push(kScratchReg);
        }
        break;
      case LiftoffAssembler::VarState::kRegister: {
        int pushed_bytes = SlotSizeInBytes(slot);
        asm_->AllocateStackSpace(stack_decrement - pushed_bytes);
        liftoff::push(asm_, src.reg(), src.kind());
        break;
      }
      case LiftoffAssembler::VarState::kIntConst: {
        asm_->AllocateStackSpace(stack_decrement - kSystemPointerSize);
        asm_->li(kScratchReg, Operand(src.i32_const()));
        asm_->push(kScratchReg);
        break;
      }
    }
  }
}

}  // namespace wasm
}  // namespace internal
}  // namespace v8

#endif  // V8_WASM_BASELINE_RISCV64_LIFTOFF_ASSEMBLER_RISCV64_H_