1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/base/iterator.h"
6 #include "src/compiler/backend/instruction-selector-impl.h"
7 #include "src/compiler/node-matchers.h"
8 #include "src/compiler/node-properties.h"
9 #include "src/execution/ppc/frame-constants-ppc.h"
10 
11 namespace v8 {
12 namespace internal {
13 namespace compiler {
14 
15 enum ImmediateMode {
16   kInt16Imm,
17   kInt16Imm_Unsigned,
18   kInt16Imm_Negate,
19   kInt16Imm_4ByteAligned,
20   kShift32Imm,
21   kShift64Imm,
22   kNoImmediate
23 };
24 
25 // Adds PPC-specific methods for generating operands.
26 class PPCOperandGenerator final : public OperandGenerator {
27  public:
28   explicit PPCOperandGenerator(InstructionSelector* selector)
29       : OperandGenerator(selector) {}
30 
31   InstructionOperand UseOperand(Node* node, ImmediateMode mode) {
32     if (CanBeImmediate(node, mode)) {
33       return UseImmediate(node);
34     }
35     return UseRegister(node);
36   }
37 
38   bool CanBeImmediate(Node* node, ImmediateMode mode) {
39     int64_t value;
40     if (node->opcode() == IrOpcode::kInt32Constant)
41       value = OpParameter<int32_t>(node->op());
42     else if (node->opcode() == IrOpcode::kInt64Constant)
43       value = OpParameter<int64_t>(node->op());
44     else
45       return false;
46     return CanBeImmediate(value, mode);
47   }
48 
49   bool CanBeImmediate(int64_t value, ImmediateMode mode) {
50     switch (mode) {
51       case kInt16Imm:
52         return is_int16(value);
53       case kInt16Imm_Unsigned:
54         return is_uint16(value);
55       case kInt16Imm_Negate:
56         return is_int16(-value);
57       case kInt16Imm_4ByteAligned:
58         return is_int16(value) && !(value & 3);
59       case kShift32Imm:
60         return 0 <= value && value < 32;
61       case kShift64Imm:
62         return 0 <= value && value < 64;
63       case kNoImmediate:
64         return false;
65     }
66     return false;
67   }
68 };
69 
70 namespace {
71 
72 void VisitRR(InstructionSelector* selector, InstructionCode opcode,
73              Node* node) {
74   PPCOperandGenerator g(selector);
75   selector->Emit(opcode, g.DefineAsRegister(node),
76                  g.UseRegister(node->InputAt(0)));
77 }
78 
79 void VisitRRR(InstructionSelector* selector, InstructionCode opcode,
80               Node* node) {
81   PPCOperandGenerator g(selector);
82   selector->Emit(opcode, g.DefineAsRegister(node),
83                  g.UseRegister(node->InputAt(0)),
84                  g.UseRegister(node->InputAt(1)));
85 }
86 
87 void VisitRRO(InstructionSelector* selector, InstructionCode opcode, Node* node,
88               ImmediateMode operand_mode) {
89   PPCOperandGenerator g(selector);
90   selector->Emit(opcode, g.DefineAsRegister(node),
91                  g.UseRegister(node->InputAt(0)),
92                  g.UseOperand(node->InputAt(1), operand_mode));
93 }
94 
95 #if V8_TARGET_ARCH_PPC64
96 void VisitTryTruncateDouble(InstructionSelector* selector,
97                             InstructionCode opcode, Node* node) {
98   PPCOperandGenerator g(selector);
99   InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
100   InstructionOperand outputs[2];
101   size_t output_count = 0;
102   outputs[output_count++] = g.DefineAsRegister(node);
103 
104   Node* success_output = NodeProperties::FindProjection(node, 1);
105   if (success_output) {
106     outputs[output_count++] = g.DefineAsRegister(success_output);
107   }
108 
109   selector->Emit(opcode, output_count, outputs, 1, inputs);
110 }
111 #endif
112 
113 // Shared routine for multiple binary operations.
114 template <typename Matcher>
115 void VisitBinop(InstructionSelector* selector, Node* node,
116                 InstructionCode opcode, ImmediateMode operand_mode,
117                 FlagsContinuation* cont) {
118   PPCOperandGenerator g(selector);
119   Matcher m(node);
120   InstructionOperand inputs[4];
121   size_t input_count = 0;
122   InstructionOperand outputs[2];
123   size_t output_count = 0;
124 
125   inputs[input_count++] = g.UseRegister(m.left().node());
126   inputs[input_count++] = g.UseOperand(m.right().node(), operand_mode);
127 
128   if (cont->IsDeoptimize()) {
129     // If we can deoptimize as a result of the binop, we need to make sure that
130     // the deopt inputs are not overwritten by the binop result. One way
131     // to achieve that is to declare the output register as same-as-first.
132     outputs[output_count++] = g.DefineSameAsFirst(node);
133   } else {
134     outputs[output_count++] = g.DefineAsRegister(node);
135   }
136 
137   DCHECK_NE(0u, input_count);
138   DCHECK_NE(0u, output_count);
139   DCHECK_GE(arraysize(inputs), input_count);
140   DCHECK_GE(arraysize(outputs), output_count);
141 
142   selector->EmitWithContinuation(opcode, output_count, outputs, input_count,
143                                  inputs, cont);
144 }
145 
146 // Shared routine for multiple binary operations.
147 template <typename Matcher>
148 void VisitBinop(InstructionSelector* selector, Node* node,
149                 InstructionCode opcode, ImmediateMode operand_mode) {
150   FlagsContinuation cont;
151   VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont);
152 }
153 
154 }  // namespace
155 
156 void InstructionSelector::VisitStackSlot(Node* node) {
157   StackSlotRepresentation rep = StackSlotRepresentationOf(node->op());
158   int slot = frame_->AllocateSpillSlot(rep.size());
159   OperandGenerator g(this);
160 
161   Emit(kArchStackSlot, g.DefineAsRegister(node),
162        sequence()->AddImmediate(Constant(slot)), 0, nullptr);
163 }
164 
165 void InstructionSelector::VisitAbortCSAAssert(Node* node) {
166   PPCOperandGenerator g(this);
167   Emit(kArchAbortCSAAssert, g.NoOutput(), g.UseFixed(node->InputAt(0), r4));
168 }
169 
170 void InstructionSelector::VisitLoad(Node* node) {
171   LoadRepresentation load_rep = LoadRepresentationOf(node->op());
172   PPCOperandGenerator g(this);
173   Node* base = node->InputAt(0);
174   Node* offset = node->InputAt(1);
175   InstructionCode opcode = kArchNop;
176   ImmediateMode mode = kInt16Imm;
177   switch (load_rep.representation()) {
178     case MachineRepresentation::kFloat32:
179       opcode = kPPC_LoadFloat32;
180       break;
181     case MachineRepresentation::kFloat64:
182       opcode = kPPC_LoadDouble;
183       break;
184     case MachineRepresentation::kBit:  // Fall through.
185     case MachineRepresentation::kWord8:
186       opcode = load_rep.IsSigned() ? kPPC_LoadWordS8 : kPPC_LoadWordU8;
187       break;
188     case MachineRepresentation::kWord16:
189       opcode = load_rep.IsSigned() ? kPPC_LoadWordS16 : kPPC_LoadWordU16;
190       break;
191     case MachineRepresentation::kWord32:
192       opcode = kPPC_LoadWordU32;
193       break;
194     case MachineRepresentation::kTaggedSigned:   // Fall through.
195     case MachineRepresentation::kTaggedPointer:  // Fall through.
196     case MachineRepresentation::kTagged:         // Fall through.
197     case MachineRepresentation::kWord64:
198       opcode = kPPC_LoadWord64;
199       mode = kInt16Imm_4ByteAligned;
200       break;
201     case MachineRepresentation::kCompressedPointer:  // Fall through.
202     case MachineRepresentation::kCompressed:         // Fall through.
203     case MachineRepresentation::kSimd128:  // Fall through.
204     case MachineRepresentation::kNone:
205       UNREACHABLE();
206   }
207 
208   if (node->opcode() == IrOpcode::kPoisonedLoad &&
209       poisoning_level_ != PoisoningMitigationLevel::kDontPoison) {
210     opcode |= MiscField::encode(kMemoryAccessPoisoned);
211   }
212 
213   bool is_atomic = (node->opcode() == IrOpcode::kWord32AtomicLoad ||
214                     node->opcode() == IrOpcode::kWord64AtomicLoad);
215 
216   if (g.CanBeImmediate(offset, mode)) {
217     Emit(opcode | AddressingModeField::encode(kMode_MRI),
218          g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(offset),
219          g.UseImmediate(is_atomic));
220   } else if (g.CanBeImmediate(base, mode)) {
221     Emit(opcode | AddressingModeField::encode(kMode_MRI),
222          g.DefineAsRegister(node), g.UseRegister(offset), g.UseImmediate(base),
223          g.UseImmediate(is_atomic));
224   } else {
225     Emit(opcode | AddressingModeField::encode(kMode_MRR),
226          g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset),
227          g.UseImmediate(is_atomic));
228   }
229 }
230 
231 void InstructionSelector::VisitPoisonedLoad(Node* node) { VisitLoad(node); }
232 
233 void InstructionSelector::VisitProtectedLoad(Node* node) {
234   // TODO(eholk)
235   UNIMPLEMENTED();
236 }
237 
238 void InstructionSelector::VisitStore(Node* node) {
239   PPCOperandGenerator g(this);
240   Node* base = node->InputAt(0);
241   Node* offset = node->InputAt(1);
242   Node* value = node->InputAt(2);
243 
244   bool is_atomic = (node->opcode() == IrOpcode::kWord32AtomicStore ||
245                     node->opcode() == IrOpcode::kWord64AtomicStore);
246 
247   MachineRepresentation rep;
248   WriteBarrierKind write_barrier_kind = kNoWriteBarrier;
249 
250   if (is_atomic) {
251     rep = AtomicStoreRepresentationOf(node->op());
252   } else {
253     StoreRepresentation store_rep = StoreRepresentationOf(node->op());
254     write_barrier_kind = store_rep.write_barrier_kind();
255     rep = store_rep.representation();
256   }
257 
258   if (write_barrier_kind != kNoWriteBarrier &&
259       V8_LIKELY(!FLAG_disable_write_barriers)) {
260     DCHECK(CanBeTaggedPointer(rep));
261     AddressingMode addressing_mode;
262     InstructionOperand inputs[3];
263     size_t input_count = 0;
264     inputs[input_count++] = g.UseUniqueRegister(base);
265     // OutOfLineRecordWrite uses the offset in an 'add' instruction as well as
266     // for the store itself, so we must check compatibility with both.
267     if (g.CanBeImmediate(offset, kInt16Imm)
268 #if V8_TARGET_ARCH_PPC64
269         && g.CanBeImmediate(offset, kInt16Imm_4ByteAligned)
270 #endif
271             ) {
272       inputs[input_count++] = g.UseImmediate(offset);
273       addressing_mode = kMode_MRI;
274     } else {
275       inputs[input_count++] = g.UseUniqueRegister(offset);
276       addressing_mode = kMode_MRR;
277     }
278     inputs[input_count++] = g.UseUniqueRegister(value);
279     RecordWriteMode record_write_mode =
280         WriteBarrierKindToRecordWriteMode(write_barrier_kind);
281     InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
282     size_t const temp_count = arraysize(temps);
283     InstructionCode code = kArchStoreWithWriteBarrier;
284     code |= AddressingModeField::encode(addressing_mode);
285     code |= MiscField::encode(static_cast<int>(record_write_mode));
286     CHECK_EQ(is_atomic, false);
287     Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
288   } else {
289     ArchOpcode opcode = kArchNop;
290     ImmediateMode mode = kInt16Imm;
291     switch (rep) {
292       case MachineRepresentation::kFloat32:
293         opcode = kPPC_StoreFloat32;
294         break;
295       case MachineRepresentation::kFloat64:
296         opcode = kPPC_StoreDouble;
297         break;
298       case MachineRepresentation::kBit:  // Fall through.
299       case MachineRepresentation::kWord8:
300         opcode = kPPC_StoreWord8;
301         break;
302       case MachineRepresentation::kWord16:
303         opcode = kPPC_StoreWord16;
304         break;
305 #if !V8_TARGET_ARCH_PPC64
306       case MachineRepresentation::kTaggedSigned:   // Fall through.
307       case MachineRepresentation::kTaggedPointer:  // Fall through.
308       case MachineRepresentation::kTagged:         // Fall through.
309 #endif
310       case MachineRepresentation::kWord32:
311         opcode = kPPC_StoreWord32;
312         break;
313 #if V8_TARGET_ARCH_PPC64
314       case MachineRepresentation::kTaggedSigned:   // Fall through.
315       case MachineRepresentation::kTaggedPointer:  // Fall through.
316       case MachineRepresentation::kTagged:         // Fall through.
317       case MachineRepresentation::kWord64:
318         opcode = kPPC_StoreWord64;
319         mode = kInt16Imm_4ByteAligned;
320         break;
321 #else
322       case MachineRepresentation::kWord64:  // Fall through.
323 #endif
324       case MachineRepresentation::kCompressedPointer:  // Fall through.
325       case MachineRepresentation::kCompressed:         // Fall through.
326       case MachineRepresentation::kSimd128:  // Fall through.
327       case MachineRepresentation::kNone:
328         UNREACHABLE();
329         return;
330     }
331 
332     if (g.CanBeImmediate(offset, mode)) {
333       Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
334            g.UseRegister(base), g.UseImmediate(offset), g.UseRegister(value),
335            g.UseImmediate(is_atomic));
336     } else if (g.CanBeImmediate(base, mode)) {
337       Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
338            g.UseRegister(offset), g.UseImmediate(base), g.UseRegister(value),
339            g.UseImmediate(is_atomic));
340     } else {
341       Emit(opcode | AddressingModeField::encode(kMode_MRR), g.NoOutput(),
342            g.UseRegister(base), g.UseRegister(offset), g.UseRegister(value),
343            g.UseImmediate(is_atomic));
344     }
345   }
346 }
347 
348 void InstructionSelector::VisitProtectedStore(Node* node) {
349   // TODO(eholk)
350   UNIMPLEMENTED();
351 }
352 
353 // Architecture supports unaligned access, therefore VisitLoad is used instead
354 void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); }
355 
356 // Architecture supports unaligned access, therefore VisitStore is used instead
357 void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); }
358 
359 template <typename Matcher>
360 static void VisitLogical(InstructionSelector* selector, Node* node, Matcher* m,
361                          ArchOpcode opcode, bool left_can_cover,
362                          bool right_can_cover, ImmediateMode imm_mode) {
363   PPCOperandGenerator g(selector);
364 
365   // Map instruction to equivalent operation with inverted right input.
366   ArchOpcode inv_opcode = opcode;
367   switch (opcode) {
368     case kPPC_And:
369       inv_opcode = kPPC_AndComplement;
370       break;
371     case kPPC_Or:
372       inv_opcode = kPPC_OrComplement;
373       break;
374     default:
375       UNREACHABLE();
376   }
377 
378   // Select Logical(y, ~x) for Logical(Xor(x, -1), y).
379   if ((m->left().IsWord32Xor() || m->left().IsWord64Xor()) && left_can_cover) {
380     Matcher mleft(m->left().node());
381     if (mleft.right().Is(-1)) {
382       selector->Emit(inv_opcode, g.DefineAsRegister(node),
383                      g.UseRegister(m->right().node()),
384                      g.UseRegister(mleft.left().node()));
385       return;
386     }
387   }
388 
389   // Select Logical(x, ~y) for Logical(x, Xor(y, -1)).
390   if ((m->right().IsWord32Xor() || m->right().IsWord64Xor()) &&
391       right_can_cover) {
392     Matcher mright(m->right().node());
393     if (mright.right().Is(-1)) {
394       // TODO(all): support shifted operand on right.
395       selector->Emit(inv_opcode, g.DefineAsRegister(node),
396                      g.UseRegister(m->left().node()),
397                      g.UseRegister(mright.left().node()));
398       return;
399     }
400   }
401 
402   VisitBinop<Matcher>(selector, node, opcode, imm_mode);
403 }
404 
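// Helpers for recognizing masks usable by PPC rotate-and-mask instructions:
// they return true when |value| is a single contiguous run of set bits,
// storing the bit index (counted from the least-significant bit) of the run's
// highest set bit in *mb and of its lowest set bit in *me.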
405 static inline bool IsContiguousMask32(uint32_t value, int* mb, int* me) {
406   int mask_width = base::bits::CountPopulation(value);
407   int mask_msb = base::bits::CountLeadingZeros32(value);
408   int mask_lsb = base::bits::CountTrailingZeros32(value);
409   if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 32))
410     return false;
411   *mb = mask_lsb + mask_width - 1;
412   *me = mask_lsb;
413   return true;
414 }
415 
416 #if V8_TARGET_ARCH_PPC64
417 static inline bool IsContiguousMask64(uint64_t value, int* mb, int* me) {
418   int mask_width = base::bits::CountPopulation(value);
419   int mask_msb = base::bits::CountLeadingZeros64(value);
420   int mask_lsb = base::bits::CountTrailingZeros64(value);
421   if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 64))
422     return false;
423   *mb = mask_lsb + mask_width - 1;
424   *me = mask_lsb;
425   return true;
426 }
427 #endif
428 
429 // TODO(mbrandy): Absorb rotate-right into rlwinm?
430 void InstructionSelector::VisitWord32And(Node* node) {
431   PPCOperandGenerator g(this);
432   Int32BinopMatcher m(node);
433   int mb = 0;
434   int me = 0;
435   if (m.right().HasValue() && IsContiguousMask32(m.right().Value(), &mb, &me)) {
436     int sh = 0;
437     Node* left = m.left().node();
438     if ((m.left().IsWord32Shr() || m.left().IsWord32Shl()) &&
439         CanCover(node, left)) {
440       // Try to absorb left/right shift into rlwinm
441       Int32BinopMatcher mleft(m.left().node());
442       if (mleft.right().IsInRange(0, 31)) {
443         left = mleft.left().node();
444         sh = mleft.right().Value();
445         if (m.left().IsWord32Shr()) {
446           // Adjust the mask such that it doesn't include any rotated bits.
447           if (mb > 31 - sh) mb = 31 - sh;
448           sh = (32 - sh) & 0x1F;
449         } else {
450           // Adjust the mask such that it doesn't include any rotated bits.
451           if (me < sh) me = sh;
452         }
453       }
454     }
455     if (mb >= me) {
456       Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node), g.UseRegister(left),
457            g.TempImmediate(sh), g.TempImmediate(mb), g.TempImmediate(me));
458       return;
459     }
460   }
461   VisitLogical<Int32BinopMatcher>(
462       this, node, &m, kPPC_And, CanCover(node, m.left().node()),
463       CanCover(node, m.right().node()), kInt16Imm_Unsigned);
464 }
465 
466 #if V8_TARGET_ARCH_PPC64
467 // TODO(mbrandy): Absorb rotate-right into rldic?
468 void InstructionSelector::VisitWord64And(Node* node) {
469   PPCOperandGenerator g(this);
470   Int64BinopMatcher m(node);
471   int mb = 0;
472   int me = 0;
473   if (m.right().HasValue() && IsContiguousMask64(m.right().Value(), &mb, &me)) {
474     int sh = 0;
475     Node* left = m.left().node();
476     if ((m.left().IsWord64Shr() || m.left().IsWord64Shl()) &&
477         CanCover(node, left)) {
478       // Try to absorb left/right shift into rldic
479       Int64BinopMatcher mleft(m.left().node());
480       if (mleft.right().IsInRange(0, 63)) {
481         left = mleft.left().node();
482         sh = mleft.right().Value();
483         if (m.left().IsWord64Shr()) {
484           // Adjust the mask such that it doesn't include any rotated bits.
485           if (mb > 63 - sh) mb = 63 - sh;
486           sh = (64 - sh) & 0x3F;
487         } else {
488           // Adjust the mask such that it doesn't include any rotated bits.
489           if (me < sh) me = sh;
490         }
491       }
492     }
493     if (mb >= me) {
494       bool match = false;
495       ArchOpcode opcode;
496       int mask;
497       if (me == 0) {
498         match = true;
499         opcode = kPPC_RotLeftAndClearLeft64;
500         mask = mb;
501       } else if (mb == 63) {
502         match = true;
503         opcode = kPPC_RotLeftAndClearRight64;
504         mask = me;
505       } else if (sh && me <= sh && m.left().IsWord64Shl()) {
506         match = true;
507         opcode = kPPC_RotLeftAndClear64;
508         mask = mb;
509       }
510       if (match) {
511         Emit(opcode, g.DefineAsRegister(node), g.UseRegister(left),
512              g.TempImmediate(sh), g.TempImmediate(mask));
513         return;
514       }
515     }
516   }
517   VisitLogical<Int64BinopMatcher>(
518       this, node, &m, kPPC_And, CanCover(node, m.left().node()),
519       CanCover(node, m.right().node()), kInt16Imm_Unsigned);
520 }
521 #endif
522 
523 void InstructionSelector::VisitWord32Or(Node* node) {
524   Int32BinopMatcher m(node);
525   VisitLogical<Int32BinopMatcher>(
526       this, node, &m, kPPC_Or, CanCover(node, m.left().node()),
527       CanCover(node, m.right().node()), kInt16Imm_Unsigned);
528 }
529 
530 #if V8_TARGET_ARCH_PPC64
531 void InstructionSelector::VisitWord64Or(Node* node) {
532   Int64BinopMatcher m(node);
533   VisitLogical<Int64BinopMatcher>(
534       this, node, &m, kPPC_Or, CanCover(node, m.left().node()),
535       CanCover(node, m.right().node()), kInt16Imm_Unsigned);
536 }
537 #endif
538 
539 void InstructionSelector::VisitWord32Xor(Node* node) {
540   PPCOperandGenerator g(this);
541   Int32BinopMatcher m(node);
542   if (m.right().Is(-1)) {
543     Emit(kPPC_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
544   } else {
545     VisitBinop<Int32BinopMatcher>(this, node, kPPC_Xor, kInt16Imm_Unsigned);
546   }
547 }
548 
549 void InstructionSelector::VisitStackPointerGreaterThan(
550     Node* node, FlagsContinuation* cont) {
551   StackCheckKind kind = StackCheckKindOf(node->op());
552   InstructionCode opcode =
553       kArchStackPointerGreaterThan | MiscField::encode(static_cast<int>(kind));
554 
555   PPCOperandGenerator g(this);
556 
557   // No outputs.
558   InstructionOperand* const outputs = nullptr;
559   const int output_count = 0;
560 
561   // Applying an offset to this stack check requires a temp register. Offsets
562   // are only applied to the first stack check. If applying an offset, we must
563   // ensure the input and temp registers do not alias, thus kUniqueRegister.
564   InstructionOperand temps[] = {g.TempRegister()};
565   const int temp_count = (kind == StackCheckKind::kJSFunctionEntry) ? 1 : 0;
566   const auto register_mode = (kind == StackCheckKind::kJSFunctionEntry)
567                                  ? OperandGenerator::kUniqueRegister
568                                  : OperandGenerator::kRegister;
569 
570   Node* const value = node->InputAt(0);
571   InstructionOperand inputs[] = {g.UseRegisterWithMode(value, register_mode)};
572   static constexpr int input_count = arraysize(inputs);
573 
574   EmitWithContinuation(opcode, output_count, outputs, input_count, inputs,
575                        temp_count, temps, cont);
576 }
577 
578 #if V8_TARGET_ARCH_PPC64
579 void InstructionSelector::VisitWord64Xor(Node* node) {
580   PPCOperandGenerator g(this);
581   Int64BinopMatcher m(node);
582   if (m.right().Is(-1)) {
583     Emit(kPPC_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
584   } else {
585     VisitBinop<Int64BinopMatcher>(this, node, kPPC_Xor, kInt16Imm_Unsigned);
586   }
587 }
588 #endif
589 
590 void InstructionSelector::VisitWord32Shl(Node* node) {
591   PPCOperandGenerator g(this);
592   Int32BinopMatcher m(node);
593   if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
594     // Try to absorb logical-and into rlwinm
595     Int32BinopMatcher mleft(m.left().node());
596     int sh = m.right().Value();
597     int mb;
598     int me;
599     if (mleft.right().HasValue() &&
600         IsContiguousMask32(mleft.right().Value() << sh, &mb, &me)) {
601       // Adjust the mask such that it doesn't include any rotated bits.
602       if (me < sh) me = sh;
603       if (mb >= me) {
604         Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node),
605              g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
606              g.TempImmediate(mb), g.TempImmediate(me));
607         return;
608       }
609     }
610   }
611   VisitRRO(this, kPPC_ShiftLeft32, node, kShift32Imm);
612 }
613 
614 #if V8_TARGET_ARCH_PPC64
615 void InstructionSelector::VisitWord64Shl(Node* node) {
616   PPCOperandGenerator g(this);
617   Int64BinopMatcher m(node);
618   // TODO(mbrandy): eliminate left sign extension if right >= 32
619   if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
620     // Try to absorb logical-and into rldic
621     Int64BinopMatcher mleft(m.left().node());
622     int sh = m.right().Value();
623     int mb;
624     int me;
625     if (mleft.right().HasValue() &&
626         IsContiguousMask64(mleft.right().Value() << sh, &mb, &me)) {
627       // Adjust the mask such that it doesn't include any rotated bits.
628       if (me < sh) me = sh;
629       if (mb >= me) {
630         bool match = false;
631         ArchOpcode opcode;
632         int mask;
633         if (me == 0) {
634           match = true;
635           opcode = kPPC_RotLeftAndClearLeft64;
636           mask = mb;
637         } else if (mb == 63) {
638           match = true;
639           opcode = kPPC_RotLeftAndClearRight64;
640           mask = me;
641         } else if (sh && me <= sh) {
642           match = true;
643           opcode = kPPC_RotLeftAndClear64;
644           mask = mb;
645         }
646         if (match) {
647           Emit(opcode, g.DefineAsRegister(node),
648                g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
649                g.TempImmediate(mask));
650           return;
651         }
652       }
653     }
654   }
655   VisitRRO(this, kPPC_ShiftLeft64, node, kShift64Imm);
656 }
657 #endif
658 
659 void InstructionSelector::VisitWord32Shr(Node* node) {
660   PPCOperandGenerator g(this);
661   Int32BinopMatcher m(node);
662   if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
663     // Try to absorb logical-and into rlwinm
664     Int32BinopMatcher mleft(m.left().node());
665     int sh = m.right().Value();
666     int mb;
667     int me;
668     if (mleft.right().HasValue() &&
669         IsContiguousMask32((uint32_t)(mleft.right().Value()) >> sh, &mb, &me)) {
670       // Adjust the mask such that it doesn't include any rotated bits.
671       if (mb > 31 - sh) mb = 31 - sh;
672       sh = (32 - sh) & 0x1F;
673       if (mb >= me) {
674         Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node),
675              g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
676              g.TempImmediate(mb), g.TempImmediate(me));
677         return;
678       }
679     }
680   }
681   VisitRRO(this, kPPC_ShiftRight32, node, kShift32Imm);
682 }
683 
684 #if V8_TARGET_ARCH_PPC64
685 void InstructionSelector::VisitWord64Shr(Node* node) {
686   PPCOperandGenerator g(this);
687   Int64BinopMatcher m(node);
688   if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
689     // Try to absorb logical-and into rldic
690     Int64BinopMatcher mleft(m.left().node());
691     int sh = m.right().Value();
692     int mb;
693     int me;
694     if (mleft.right().HasValue() &&
695         IsContiguousMask64((uint64_t)(mleft.right().Value()) >> sh, &mb, &me)) {
696       // Adjust the mask such that it doesn't include any rotated bits.
697       if (mb > 63 - sh) mb = 63 - sh;
698       sh = (64 - sh) & 0x3F;
699       if (mb >= me) {
700         bool match = false;
701         ArchOpcode opcode;
702         int mask;
703         if (me == 0) {
704           match = true;
705           opcode = kPPC_RotLeftAndClearLeft64;
706           mask = mb;
707         } else if (mb == 63) {
708           match = true;
709           opcode = kPPC_RotLeftAndClearRight64;
710           mask = me;
711         }
712         if (match) {
713           Emit(opcode, g.DefineAsRegister(node),
714                g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
715                g.TempImmediate(mask));
716           return;
717         }
718       }
719     }
720   }
721   VisitRRO(this, kPPC_ShiftRight64, node, kShift64Imm);
722 }
723 #endif
724 
725 void InstructionSelector::VisitWord32Sar(Node* node) {
726   PPCOperandGenerator g(this);
727   Int32BinopMatcher m(node);
728   // Replace with sign extension for (x << K) >> K where K is 16 or 24.
729   if (CanCover(node, m.left().node()) && m.left().IsWord32Shl()) {
730     Int32BinopMatcher mleft(m.left().node());
731     if (mleft.right().Is(16) && m.right().Is(16)) {
732       Emit(kPPC_ExtendSignWord16, g.DefineAsRegister(node),
733            g.UseRegister(mleft.left().node()));
734       return;
735     } else if (mleft.right().Is(24) && m.right().Is(24)) {
736       Emit(kPPC_ExtendSignWord8, g.DefineAsRegister(node),
737            g.UseRegister(mleft.left().node()));
738       return;
739     }
740   }
741   VisitRRO(this, kPPC_ShiftRightAlg32, node, kShift32Imm);
742 }
743 
744 #if !V8_TARGET_ARCH_PPC64
745 void VisitPairBinop(InstructionSelector* selector, InstructionCode opcode,
746                     InstructionCode opcode2, Node* node) {
747   PPCOperandGenerator g(selector);
748 
749   Node* projection1 = NodeProperties::FindProjection(node, 1);
750   if (projection1) {
751     // We use UseUniqueRegister here to avoid register sharing with the output
752     // registers.
753     InstructionOperand inputs[] = {
754         g.UseRegister(node->InputAt(0)), g.UseUniqueRegister(node->InputAt(1)),
755         g.UseRegister(node->InputAt(2)), g.UseUniqueRegister(node->InputAt(3))};
756 
757     InstructionOperand outputs[] = {
758         g.DefineAsRegister(node),
759         g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
760 
761     selector->Emit(opcode, 2, outputs, 4, inputs);
762   } else {
763     // The high word of the result is not used, so we emit the standard 32 bit
764     // instruction.
765     selector->Emit(opcode2, g.DefineSameAsFirst(node),
766                    g.UseRegister(node->InputAt(0)),
767                    g.UseRegister(node->InputAt(2)));
768   }
769 }
770 
771 void InstructionSelector::VisitInt32PairAdd(Node* node) {
772   VisitPairBinop(this, kPPC_AddPair, kPPC_Add32, node);
773 }
774 
775 void InstructionSelector::VisitInt32PairSub(Node* node) {
776   VisitPairBinop(this, kPPC_SubPair, kPPC_Sub, node);
777 }
778 
779 void InstructionSelector::VisitInt32PairMul(Node* node) {
780   PPCOperandGenerator g(this);
781   Node* projection1 = NodeProperties::FindProjection(node, 1);
782   if (projection1) {
783     InstructionOperand inputs[] = {g.UseUniqueRegister(node->InputAt(0)),
784                                    g.UseUniqueRegister(node->InputAt(1)),
785                                    g.UseUniqueRegister(node->InputAt(2)),
786                                    g.UseUniqueRegister(node->InputAt(3))};
787 
788     InstructionOperand outputs[] = {
789         g.DefineAsRegister(node),
790         g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
791 
792     InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
793 
794     Emit(kPPC_MulPair, 2, outputs, 4, inputs, 2, temps);
795   } else {
796     // The high word of the result is not used, so we emit the standard 32 bit
797     // instruction.
798     Emit(kPPC_Mul32, g.DefineSameAsFirst(node), g.UseRegister(node->InputAt(0)),
799          g.UseRegister(node->InputAt(2)));
800   }
801 }
802 
803 namespace {
804 // Shared routine for multiple shift operations.
805 void VisitPairShift(InstructionSelector* selector, InstructionCode opcode,
806                     Node* node) {
807   PPCOperandGenerator g(selector);
808   // We use g.UseUniqueRegister here to guarantee that there is
809   // no register aliasing of input registers with output registers.
810   Int32Matcher m(node->InputAt(2));
811   InstructionOperand shift_operand;
812   if (m.HasValue()) {
813     shift_operand = g.UseImmediate(m.node());
814   } else {
815     shift_operand = g.UseUniqueRegister(m.node());
816   }
817 
818   InstructionOperand inputs[] = {g.UseUniqueRegister(node->InputAt(0)),
819                                  g.UseUniqueRegister(node->InputAt(1)),
820                                  shift_operand};
821 
822   Node* projection1 = NodeProperties::FindProjection(node, 1);
823 
824   InstructionOperand outputs[2];
825   InstructionOperand temps[1];
826   int32_t output_count = 0;
827   int32_t temp_count = 0;
828 
829   outputs[output_count++] = g.DefineAsRegister(node);
830   if (projection1) {
831     outputs[output_count++] = g.DefineAsRegister(projection1);
832   } else {
833     temps[temp_count++] = g.TempRegister();
834   }
835 
836   selector->Emit(opcode, output_count, outputs, 3, inputs, temp_count, temps);
837 }
838 }  // namespace
839 
840 void InstructionSelector::VisitWord32PairShl(Node* node) {
841   VisitPairShift(this, kPPC_ShiftLeftPair, node);
842 }
843 
844 void InstructionSelector::VisitWord32PairShr(Node* node) {
845   VisitPairShift(this, kPPC_ShiftRightPair, node);
846 }
847 
848 void InstructionSelector::VisitWord32PairSar(Node* node) {
849   VisitPairShift(this, kPPC_ShiftRightAlgPair, node);
850 }
851 #endif
852 
853 #if V8_TARGET_ARCH_PPC64
854 void InstructionSelector::VisitWord64Sar(Node* node) {
855   PPCOperandGenerator g(this);
856   Int64BinopMatcher m(node);
857   if (CanCover(m.node(), m.left().node()) && m.left().IsLoad() &&
858       m.right().Is(32)) {
859     // Just load and sign-extend the interesting 4 bytes instead. This happens,
860     // for example, when we're loading and untagging SMIs.
861     BaseWithIndexAndDisplacement64Matcher mleft(m.left().node(),
862                                                 AddressOption::kAllowAll);
863     if (mleft.matches() && mleft.index() == nullptr) {
864       int64_t offset = 0;
865       Node* displacement = mleft.displacement();
866       if (displacement != nullptr) {
867         Int64Matcher mdisplacement(displacement);
868         DCHECK(mdisplacement.HasValue());
869         offset = mdisplacement.Value();
870       }
871       offset = SmiWordOffset(offset);
872       if (g.CanBeImmediate(offset, kInt16Imm_4ByteAligned)) {
873         Emit(kPPC_LoadWordS32 | AddressingModeField::encode(kMode_MRI),
874              g.DefineAsRegister(node), g.UseRegister(mleft.base()),
875              g.TempImmediate(offset), g.UseImmediate(0));
876         return;
877       }
878     }
879   }
880   VisitRRO(this, kPPC_ShiftRightAlg64, node, kShift64Imm);
881 }
882 #endif
883 
884 // TODO(mbrandy): Absorb logical-and into rlwinm?
885 void InstructionSelector::VisitWord32Ror(Node* node) {
886   VisitRRO(this, kPPC_RotRight32, node, kShift32Imm);
887 }
888 
889 #if V8_TARGET_ARCH_PPC64
890 // TODO(mbrandy): Absorb logical-and into rldic?
891 void InstructionSelector::VisitWord64Ror(Node* node) {
892   VisitRRO(this, kPPC_RotRight64, node, kShift64Imm);
893 }
894 #endif
895 
896 void InstructionSelector::VisitWord32Clz(Node* node) {
897   PPCOperandGenerator g(this);
898   Emit(kPPC_Cntlz32, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
899 }
900 
901 #if V8_TARGET_ARCH_PPC64
902 void InstructionSelector::VisitWord64Clz(Node* node) {
903   PPCOperandGenerator g(this);
904   Emit(kPPC_Cntlz64, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
905 }
906 #endif
907 
908 void InstructionSelector::VisitWord32Popcnt(Node* node) {
909   PPCOperandGenerator g(this);
910   Emit(kPPC_Popcnt32, g.DefineAsRegister(node),
911        g.UseRegister(node->InputAt(0)));
912 }
913 
914 #if V8_TARGET_ARCH_PPC64
915 void InstructionSelector::VisitWord64Popcnt(Node* node) {
916   PPCOperandGenerator g(this);
917   Emit(kPPC_Popcnt64, g.DefineAsRegister(node),
918        g.UseRegister(node->InputAt(0)));
919 }
920 #endif
921 
922 void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }
923 
924 #if V8_TARGET_ARCH_PPC64
925 void InstructionSelector::VisitWord64Ctz(Node* node) { UNREACHABLE(); }
926 #endif
927 
928 void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }
929 
930 #if V8_TARGET_ARCH_PPC64
931 void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }
932 #endif
933 
934 void InstructionSelector::VisitWord64ReverseBytes(Node* node) {
935   PPCOperandGenerator g(this);
936   InstructionOperand temp[] = {g.TempRegister()};
937   Emit(kPPC_ByteRev64, g.DefineAsRegister(node),
938        g.UseUniqueRegister(node->InputAt(0)), 1, temp);
939 }
940 
941 void InstructionSelector::VisitWord32ReverseBytes(Node* node) {
942   PPCOperandGenerator g(this);
943   Emit(kPPC_ByteRev32, g.DefineAsRegister(node),
944        g.UseRegister(node->InputAt(0)));
945 }
946 
947 void InstructionSelector::VisitSimd128ReverseBytes(Node* node) {
948   // TODO(miladfar): Implement the ppc selector for reversing SIMD bytes.
949   // Check if the input node is a Load and do a Load Reverse at once.
950   UNIMPLEMENTED();
951 }
952 
953 void InstructionSelector::VisitInt32Add(Node* node) {
954   VisitBinop<Int32BinopMatcher>(this, node, kPPC_Add32, kInt16Imm);
955 }
956 
957 #if V8_TARGET_ARCH_PPC64
958 void InstructionSelector::VisitInt64Add(Node* node) {
959   VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add64, kInt16Imm);
960 }
961 #endif
962 
963 void InstructionSelector::VisitInt32Sub(Node* node) {
964   PPCOperandGenerator g(this);
965   Int32BinopMatcher m(node);
966   if (m.left().Is(0)) {
967     Emit(kPPC_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
968   } else {
969     VisitBinop<Int32BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate);
970   }
971 }
972 
973 #if V8_TARGET_ARCH_PPC64
974 void InstructionSelector::VisitInt64Sub(Node* node) {
975   PPCOperandGenerator g(this);
976   Int64BinopMatcher m(node);
977   if (m.left().Is(0)) {
978     Emit(kPPC_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
979   } else {
980     VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate);
981   }
982 }
983 #endif
984 
985 namespace {
986 
987 void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
988                   InstructionOperand left, InstructionOperand right,
989                   FlagsContinuation* cont);
990 void EmitInt32MulWithOverflow(InstructionSelector* selector, Node* node,
991                               FlagsContinuation* cont) {
992   PPCOperandGenerator g(selector);
993   Int32BinopMatcher m(node);
994   InstructionOperand result_operand = g.DefineAsRegister(node);
995   InstructionOperand high32_operand = g.TempRegister();
996   InstructionOperand temp_operand = g.TempRegister();
997   {
998     InstructionOperand outputs[] = {result_operand, high32_operand};
999     InstructionOperand inputs[] = {g.UseRegister(m.left().node()),
1000                                    g.UseRegister(m.right().node())};
1001     selector->Emit(kPPC_Mul32WithHigh32, 2, outputs, 2, inputs);
1002   }
1003   {
1004     InstructionOperand shift_31 = g.UseImmediate(31);
1005     InstructionOperand outputs[] = {temp_operand};
1006     InstructionOperand inputs[] = {result_operand, shift_31};
1007     selector->Emit(kPPC_ShiftRightAlg32, 1, outputs, 2, inputs);
1008   }
1009 
1010   VisitCompare(selector, kPPC_Cmp32, high32_operand, temp_operand, cont);
1011 }
1012 
1013 }  // namespace
1014 
1015 void InstructionSelector::VisitInt32Mul(Node* node) {
1016   VisitRRR(this, kPPC_Mul32, node);
1017 }
1018 
1019 #if V8_TARGET_ARCH_PPC64
1020 void InstructionSelector::VisitInt64Mul(Node* node) {
1021   VisitRRR(this, kPPC_Mul64, node);
1022 }
1023 #endif
1024 
1025 void InstructionSelector::VisitInt32MulHigh(Node* node) {
1026   PPCOperandGenerator g(this);
1027   Emit(kPPC_MulHigh32, g.DefineAsRegister(node),
1028        g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
1029 }
1030 
1031 void InstructionSelector::VisitUint32MulHigh(Node* node) {
1032   PPCOperandGenerator g(this);
1033   Emit(kPPC_MulHighU32, g.DefineAsRegister(node),
1034        g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
1035 }
1036 
1037 void InstructionSelector::VisitInt32Div(Node* node) {
1038   VisitRRR(this, kPPC_Div32, node);
1039 }
1040 
1041 #if V8_TARGET_ARCH_PPC64
1042 void InstructionSelector::VisitInt64Div(Node* node) {
1043   VisitRRR(this, kPPC_Div64, node);
1044 }
1045 #endif
1046 
1047 void InstructionSelector::VisitUint32Div(Node* node) {
1048   VisitRRR(this, kPPC_DivU32, node);
1049 }
1050 
1051 #if V8_TARGET_ARCH_PPC64
1052 void InstructionSelector::VisitUint64Div(Node* node) {
1053   VisitRRR(this, kPPC_DivU64, node);
1054 }
1055 #endif
1056 
1057 void InstructionSelector::VisitInt32Mod(Node* node) {
1058   VisitRRR(this, kPPC_Mod32, node);
1059 }
1060 
1061 #if V8_TARGET_ARCH_PPC64
1062 void InstructionSelector::VisitInt64Mod(Node* node) {
1063   VisitRRR(this, kPPC_Mod64, node);
1064 }
1065 #endif
1066 
1067 void InstructionSelector::VisitUint32Mod(Node* node) {
1068   VisitRRR(this, kPPC_ModU32, node);
1069 }
1070 
1071 #if V8_TARGET_ARCH_PPC64
1072 void InstructionSelector::VisitUint64Mod(Node* node) {
1073   VisitRRR(this, kPPC_ModU64, node);
1074 }
1075 #endif
1076 
1077 void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
1078   VisitRR(this, kPPC_Float32ToDouble, node);
1079 }
1080 
1081 void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
1082   VisitRR(this, kPPC_Int32ToFloat32, node);
1083 }
1084 
1085 void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
1086   VisitRR(this, kPPC_Uint32ToFloat32, node);
1087 }
1088 
1089 void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
1090   VisitRR(this, kPPC_Int32ToDouble, node);
1091 }
1092 
1093 void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
1094   VisitRR(this, kPPC_Uint32ToDouble, node);
1095 }
1096 
1097 void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
1098   VisitRR(this, kPPC_DoubleToInt32, node);
1099 }
1100 
1101 void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
1102   VisitRR(this, kPPC_DoubleToUint32, node);
1103 }
1104 
1105 void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
1106   VisitRR(this, kPPC_DoubleToUint32, node);
1107 }
1108 
1109 void InstructionSelector::VisitSignExtendWord8ToInt32(Node* node) {
1110   // TODO(mbrandy): inspect input to see if nop is appropriate.
1111   VisitRR(this, kPPC_ExtendSignWord8, node);
1112 }
1113 
1114 void InstructionSelector::VisitSignExtendWord16ToInt32(Node* node) {
1115   // TODO(mbrandy): inspect input to see if nop is appropriate.
1116   VisitRR(this, kPPC_ExtendSignWord16, node);
1117 }
1118 
1119 #if V8_TARGET_ARCH_PPC64
1120 void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
1121   VisitTryTruncateDouble(this, kPPC_DoubleToInt64, node);
1122 }
1123 
1124 void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
1125   VisitTryTruncateDouble(this, kPPC_DoubleToInt64, node);
1126 }
1127 
1128 void InstructionSelector::VisitTruncateFloat64ToInt64(Node* node) {
1129   VisitRR(this, kPPC_DoubleToInt64, node);
1130 }
1131 
1132 void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
1133   VisitTryTruncateDouble(this, kPPC_DoubleToUint64, node);
1134 }
1135 
1136 void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
1137   VisitTryTruncateDouble(this, kPPC_DoubleToUint64, node);
1138 }
1139 
1140 void InstructionSelector::VisitBitcastWord32ToWord64(Node* node) {
1141   DCHECK(SmiValuesAre31Bits());
1142   DCHECK(COMPRESS_POINTERS_BOOL);
1143   EmitIdentity(node);
1144 }
1145 
1146 void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
1147   // TODO(mbrandy): inspect input to see if nop is appropriate.
1148   VisitRR(this, kPPC_ExtendSignWord32, node);
1149 }
1150 
1151 void InstructionSelector::VisitSignExtendWord8ToInt64(Node* node) {
1152   // TODO(mbrandy): inspect input to see if nop is appropriate.
1153   VisitRR(this, kPPC_ExtendSignWord8, node);
1154 }
1155 
1156 void InstructionSelector::VisitSignExtendWord16ToInt64(Node* node) {
1157   // TODO(mbrandy): inspect input to see if nop is appropriate.
1158   VisitRR(this, kPPC_ExtendSignWord16, node);
1159 }
1160 
1161 void InstructionSelector::VisitSignExtendWord32ToInt64(Node* node) {
1162   // TODO(mbrandy): inspect input to see if nop is appropriate.
1163   VisitRR(this, kPPC_ExtendSignWord32, node);
1164 }
1165 
1166 void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
1167   // TODO(mbrandy): inspect input to see if nop is appropriate.
1168   VisitRR(this, kPPC_Uint32ToUint64, node);
1169 }
1170 
1171 void InstructionSelector::VisitChangeFloat64ToUint64(Node* node) {
1172   VisitRR(this, kPPC_DoubleToUint64, node);
1173 }
1174 
1175 void InstructionSelector::VisitChangeFloat64ToInt64(Node* node) {
1176   VisitRR(this, kPPC_DoubleToInt64, node);
1177 }
1178 #endif
1179 
1180 void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
1181   VisitRR(this, kPPC_DoubleToFloat32, node);
1182 }
1183 
1184 void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
1185   VisitRR(this, kArchTruncateDoubleToI, node);
1186 }
1187 
1188 void InstructionSelector::VisitRoundFloat64ToInt32(Node* node) {
1189   VisitRR(this, kPPC_DoubleToInt32, node);
1190 }
1191 
1192 void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
1193   VisitRR(this, kPPC_DoubleToInt32, node);
1194 }
1195 
1196 void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
1197   VisitRR(this, kPPC_DoubleToUint32, node);
1198 }
1199 
1200 #if V8_TARGET_ARCH_PPC64
1201 void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
1202   // TODO(mbrandy): inspect input to see if nop is appropriate.
1203   VisitRR(this, kPPC_Int64ToInt32, node);
1204 }
1205 
1206 void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
1207   VisitRR(this, kPPC_Int64ToFloat32, node);
1208 }
1209 
1210 void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
1211   VisitRR(this, kPPC_Int64ToDouble, node);
1212 }
1213 
1214 void InstructionSelector::VisitChangeInt64ToFloat64(Node* node) {
1215   VisitRR(this, kPPC_Int64ToDouble, node);
1216 }
1217 
1218 void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
1219   VisitRR(this, kPPC_Uint64ToFloat32, node);
1220 }
1221 
1222 void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
1223   VisitRR(this, kPPC_Uint64ToDouble, node);
1224 }
1225 #endif
1226 
1227 void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
1228   VisitRR(this, kPPC_BitcastFloat32ToInt32, node);
1229 }
1230 
1231 #if V8_TARGET_ARCH_PPC64
1232 void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
1233   VisitRR(this, kPPC_BitcastDoubleToInt64, node);
1234 }
1235 #endif
1236 
1237 void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
1238   VisitRR(this, kPPC_BitcastInt32ToFloat32, node);
1239 }
1240 
1241 #if V8_TARGET_ARCH_PPC64
1242 void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
1243   VisitRR(this, kPPC_BitcastInt64ToDouble, node);
1244 }
1245 #endif
1246 
1247 void InstructionSelector::VisitFloat32Add(Node* node) {
1248   VisitRRR(this, kPPC_AddDouble | MiscField::encode(1), node);
1249 }
1250 
1251 void InstructionSelector::VisitFloat64Add(Node* node) {
1252   // TODO(mbrandy): detect multiply-add
1253   VisitRRR(this, kPPC_AddDouble, node);
1254 }
1255 
1256 void InstructionSelector::VisitFloat32Sub(Node* node) {
1257   VisitRRR(this, kPPC_SubDouble | MiscField::encode(1), node);
1258 }
1259 
1260 void InstructionSelector::VisitFloat64Sub(Node* node) {
1261   // TODO(mbrandy): detect multiply-subtract
1262   VisitRRR(this, kPPC_SubDouble, node);
1263 }
1264 
1265 void InstructionSelector::VisitFloat32Mul(Node* node) {
1266   VisitRRR(this, kPPC_MulDouble | MiscField::encode(1), node);
1267 }
1268 
1269 void InstructionSelector::VisitFloat64Mul(Node* node) {
1270   // TODO(mbrandy): detect negate
1271   VisitRRR(this, kPPC_MulDouble, node);
1272 }
1273 
1274 void InstructionSelector::VisitFloat32Div(Node* node) {
1275   VisitRRR(this, kPPC_DivDouble | MiscField::encode(1), node);
1276 }
1277 
1278 void InstructionSelector::VisitFloat64Div(Node* node) {
1279   VisitRRR(this, kPPC_DivDouble, node);
1280 }
1281 
1282 void InstructionSelector::VisitFloat64Mod(Node* node) {
1283   PPCOperandGenerator g(this);
1284   Emit(kPPC_ModDouble, g.DefineAsFixed(node, d1),
1285        g.UseFixed(node->InputAt(0), d1), g.UseFixed(node->InputAt(1), d2))
1286       ->MarkAsCall();
1287 }
1288 
1289 void InstructionSelector::VisitFloat32Max(Node* node) {
1290   VisitRRR(this, kPPC_MaxDouble | MiscField::encode(1), node);
1291 }
1292 
1293 void InstructionSelector::VisitFloat64Max(Node* node) {
1294   VisitRRR(this, kPPC_MaxDouble, node);
1295 }
1296 
1297 void InstructionSelector::VisitFloat64SilenceNaN(Node* node) {
1298   VisitRR(this, kPPC_Float64SilenceNaN, node);
1299 }
1300 
1301 void InstructionSelector::VisitFloat32Min(Node* node) {
1302   VisitRRR(this, kPPC_MinDouble | MiscField::encode(1), node);
1303 }
1304 
1305 void InstructionSelector::VisitFloat64Min(Node* node) {
1306   VisitRRR(this, kPPC_MinDouble, node);
1307 }
1308 
1309 void InstructionSelector::VisitFloat32Abs(Node* node) {
1310   VisitRR(this, kPPC_AbsDouble | MiscField::encode(1), node);
1311 }
1312 
1313 void InstructionSelector::VisitFloat64Abs(Node* node) {
1314   VisitRR(this, kPPC_AbsDouble, node);
1315 }
1316 
1317 void InstructionSelector::VisitFloat32Sqrt(Node* node) {
1318   VisitRR(this, kPPC_SqrtDouble | MiscField::encode(1), node);
1319 }
1320 
1321 void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
1322                                                   InstructionCode opcode) {
1323   PPCOperandGenerator g(this);
1324   Emit(opcode, g.DefineAsFixed(node, d1), g.UseFixed(node->InputAt(0), d1))
1325       ->MarkAsCall();
1326 }
1327 
1328 void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
1329                                                    InstructionCode opcode) {
1330   PPCOperandGenerator g(this);
1331   Emit(opcode, g.DefineAsFixed(node, d1), g.UseFixed(node->InputAt(0), d1),
1332        g.UseFixed(node->InputAt(1), d2))
1333       ->MarkAsCall();
1334 }
1335 
1336 void InstructionSelector::VisitFloat64Sqrt(Node* node) {
1337   VisitRR(this, kPPC_SqrtDouble, node);
1338 }
1339 
1340 void InstructionSelector::VisitFloat32RoundDown(Node* node) {
1341   VisitRR(this, kPPC_FloorDouble | MiscField::encode(1), node);
1342 }
1343 
1344 void InstructionSelector::VisitFloat64RoundDown(Node* node) {
1345   VisitRR(this, kPPC_FloorDouble, node);
1346 }
1347 
1348 void InstructionSelector::VisitFloat32RoundUp(Node* node) {
1349   VisitRR(this, kPPC_CeilDouble | MiscField::encode(1), node);
1350 }
1351 
1352 void InstructionSelector::VisitFloat64RoundUp(Node* node) {
1353   VisitRR(this, kPPC_CeilDouble, node);
1354 }
1355 
1356 void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
1357   VisitRR(this, kPPC_TruncateDouble | MiscField::encode(1), node);
1358 }
1359 
1360 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
1361   VisitRR(this, kPPC_TruncateDouble, node);
1362 }
1363 
1364 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
1365   VisitRR(this, kPPC_RoundDouble, node);
1366 }
1367 
1368 void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
1369   UNREACHABLE();
1370 }
1371 
1372 void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
1373   UNREACHABLE();
1374 }
1375 
1376 void InstructionSelector::VisitFloat32Neg(Node* node) {
1377   VisitRR(this, kPPC_NegDouble, node);
1378 }
1379 
1380 void InstructionSelector::VisitFloat64Neg(Node* node) {
1381   VisitRR(this, kPPC_NegDouble, node);
1382 }
1383 
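// For <Op>WithOverflow nodes, bind the overflow projection (when present) to
// the kOverflow condition so it is consumed directly; otherwise emit the
// plain binop without a flags continuation.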
void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop<Int32BinopMatcher>(this, node, kPPC_AddWithOverflow32,
                                         kInt16Imm, &cont);
  }
  FlagsContinuation cont;
  VisitBinop<Int32BinopMatcher>(this, node, kPPC_AddWithOverflow32, kInt16Imm,
                                &cont);
}

void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop<Int32BinopMatcher>(this, node, kPPC_SubWithOverflow32,
                                         kInt16Imm_Negate, &cont);
  }
  FlagsContinuation cont;
  VisitBinop<Int32BinopMatcher>(this, node, kPPC_SubWithOverflow32,
                                kInt16Imm_Negate, &cont);
}

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add64, kInt16Imm,
                                         &cont);
  }
  FlagsContinuation cont;
  VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add64, kInt16Imm, &cont);
}

void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate,
                                         &cont);
  }
  FlagsContinuation cont;
  VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate, &cont);
}
#endif

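// Returns true if the comparison condition is unsigned and therefore needs a
// logical (unsigned) compare instruction.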
static bool CompareLogical(FlagsContinuation* cont) {
  switch (cont->condition()) {
    case kUnsignedLessThan:
    case kUnsignedGreaterThanOrEqual:
    case kUnsignedLessThanOrEqual:
    case kUnsignedGreaterThan:
      return true;
    default:
      return false;
  }
  UNREACHABLE();
}

namespace {

// Shared routine for multiple compare operations.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont) {
  selector->EmitWithContinuation(opcode, left, right, cont);
}

// Shared routine for multiple word compare operations.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont,
                      bool commutative, ImmediateMode immediate_mode) {
  PPCOperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  // Match immediates on left or right side of comparison.
  if (g.CanBeImmediate(right, immediate_mode)) {
    VisitCompare(selector, opcode, g.UseRegister(left), g.UseImmediate(right),
                 cont);
  } else if (g.CanBeImmediate(left, immediate_mode)) {
    if (!commutative) cont->Commute();
    VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left),
                 cont);
  } else {
    VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right),
                 cont);
  }
}

void VisitWord32Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
  VisitWordCompare(selector, node, kPPC_Cmp32, cont, false, mode);
}

#if V8_TARGET_ARCH_PPC64
void VisitWord64Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
  VisitWordCompare(selector, node, kPPC_Cmp64, cont, false, mode);
}
#endif

// Shared routine for multiple float32 compare operations.
void VisitFloat32Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  PPCOperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  VisitCompare(selector, kPPC_CmpDouble, g.UseRegister(left),
               g.UseRegister(right), cont);
}

// Shared routine for multiple float64 compare operations.
void VisitFloat64Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  PPCOperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  VisitCompare(selector, kPPC_CmpDouble, g.UseRegister(left),
               g.UseRegister(right), cont);
}

}  // namespace

// Shared routine for word comparisons against zero.
void InstructionSelector::VisitWordCompareZero(Node* user, Node* value,
                                               FlagsContinuation* cont) {
  // Try to combine with comparisons against 0 by simply inverting the branch.
  while (value->opcode() == IrOpcode::kWord32Equal && CanCover(user, value)) {
    Int32BinopMatcher m(value);
    if (!m.right().Is(0)) break;

    user = value;
    value = m.left().node();
    cont->Negate();
  }

  if (CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWord32Compare(this, value, cont);
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord32Compare(this, value, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord32Compare(this, value, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord32Compare(this, value, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord32Compare(this, value, cont);
#if V8_TARGET_ARCH_PPC64
      case IrOpcode::kWord64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWord64Compare(this, value, cont);
      case IrOpcode::kInt64LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord64Compare(this, value, cont);
      case IrOpcode::kInt64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord64Compare(this, value, cont);
      case IrOpcode::kUint64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord64Compare(this, value, cont);
      case IrOpcode::kUint64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord64Compare(this, value, cont);
#endif
      case IrOpcode::kFloat32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat32Compare(this, value, cont);
      case IrOpcode::kFloat32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitFloat32Compare(this, value, cont);
      case IrOpcode::kFloat32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitFloat32Compare(this, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat64Compare(this, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitFloat64Compare(this, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitFloat64Compare(this, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (ProjectionIndexOf(value->op()) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation>) is either nullptr, which means there's no use of
          // the actual value, or was already defined, which means it is
          // scheduled *AFTER* this branch.
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (result == nullptr || IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(
                    this, node, kPPC_AddWithOverflow32, kInt16Imm, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(
                    this, node, kPPC_SubWithOverflow32, kInt16Imm_Negate, cont);
              case IrOpcode::kInt32MulWithOverflow:
                cont->OverwriteAndNegateIfEqual(kNotEqual);
                return EmitInt32MulWithOverflow(this, node, cont);
#if V8_TARGET_ARCH_PPC64
              case IrOpcode::kInt64AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add64,
                                                     kInt16Imm, cont);
              case IrOpcode::kInt64SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub,
                                                     kInt16Imm_Negate, cont);
#endif
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kInt32Sub:
        return VisitWord32Compare(this, value, cont);
      case IrOpcode::kWord32And:
        // TODO(mbandy): opportunity for rlwinm?
        return VisitWordCompare(this, value, kPPC_Tst32, cont, true,
                                kInt16Imm_Unsigned);
// TODO(mbrandy): Handle?
// case IrOpcode::kInt32Add:
// case IrOpcode::kWord32Or:
// case IrOpcode::kWord32Xor:
// case IrOpcode::kWord32Sar:
// case IrOpcode::kWord32Shl:
// case IrOpcode::kWord32Shr:
// case IrOpcode::kWord32Ror:
#if V8_TARGET_ARCH_PPC64
      case IrOpcode::kInt64Sub:
        return VisitWord64Compare(this, value, cont);
      case IrOpcode::kWord64And:
        // TODO(mbandy): opportunity for rldic?
        return VisitWordCompare(this, value, kPPC_Tst64, cont, true,
                                kInt16Imm_Unsigned);
// TODO(mbrandy): Handle?
// case IrOpcode::kInt64Add:
// case IrOpcode::kWord64Or:
// case IrOpcode::kWord64Xor:
// case IrOpcode::kWord64Sar:
// case IrOpcode::kWord64Shl:
// case IrOpcode::kWord64Shr:
// case IrOpcode::kWord64Ror:
#endif
      case IrOpcode::kStackPointerGreaterThan:
        cont->OverwriteAndNegateIfEqual(kStackPointerGreaterThanCondition);
        return VisitStackPointerGreaterThan(value, cont);
      default:
        break;
    }
  }

  // Branch could not be combined with a compare, emit compare against 0.
  PPCOperandGenerator g(this);
  VisitCompare(this, kPPC_Cmp32, g.UseRegister(value), g.TempImmediate(0),
               cont);
}

void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  PPCOperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));

  // Emit either ArchTableSwitch or ArchBinarySearchSwitch.
  if (enable_switch_jump_table_ == kEnableSwitchJumpTable) {
    static const size_t kMaxTableSwitchValueRange = 2 << 16;
    size_t table_space_cost = 4 + sw.value_range();
    size_t table_time_cost = 3;
    size_t lookup_space_cost = 3 + 2 * sw.case_count();
    size_t lookup_time_cost = sw.case_count();
    if (sw.case_count() > 0 &&
        table_space_cost + 3 * table_time_cost <=
            lookup_space_cost + 3 * lookup_time_cost &&
        sw.min_value() > std::numeric_limits<int32_t>::min() &&
        sw.value_range() <= kMaxTableSwitchValueRange) {
      InstructionOperand index_operand = value_operand;
      if (sw.min_value()) {
        index_operand = g.TempRegister();
        Emit(kPPC_Sub, index_operand, value_operand,
             g.TempImmediate(sw.min_value()));
      }
      // Generate a table lookup.
      return EmitTableSwitch(sw, index_operand);
    }
  }

  // Generate a tree of conditional jumps.
  return EmitBinarySearchSwitch(sw, value_operand);
}

void InstructionSelector::VisitWord32Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitInt64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitUint64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}
#endif

void InstructionSelector::VisitInt32MulWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kNotEqual, ovf);
    return EmitInt32MulWithOverflow(this, node, &cont);
  }
  FlagsContinuation cont;
  EmitInt32MulWithOverflow(this, node, &cont);
}

void InstructionSelector::VisitFloat32Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat32Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitFloat32Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitFloat32Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat64Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitFloat64Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}

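// For C calls, arguments are poked into dedicated stack slots starting at
// kStackFrameExtraParamSlot; for all other calls they are pushed in reverse
// order, skipping alignment holes.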
void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* call_descriptor,
    Node* node) {
  PPCOperandGenerator g(this);

  // Prepare for C function call.
  if (call_descriptor->IsCFunctionCall()) {
    Emit(kArchPrepareCallCFunction | MiscField::encode(static_cast<int>(
                                         call_descriptor->ParameterCount())),
         0, nullptr, 0, nullptr);

    // Poke any stack arguments.
    int slot = kStackFrameExtraParamSlot;
    for (PushParameter input : (*arguments)) {
      if (input.node == nullptr) continue;
      Emit(kPPC_StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node),
           g.TempImmediate(slot));
      ++slot;
    }
  } else {
    // Push any stack arguments.
    for (PushParameter input : base::Reversed(*arguments)) {
      // Skip any alignment holes in pushed nodes.
      if (input.node == nullptr) continue;
      Emit(kPPC_Push, g.NoOutput(), g.UseRegister(input.node));
    }
  }
}

bool InstructionSelector::IsTailCallAddressImmediate() { return false; }

int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }

void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_DoubleExtractLowWord32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}

void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_DoubleExtractHighWord32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}

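// A Float64InsertLowWord32 whose covered input is a Float64InsertHighWord32
// (and vice versa) is combined into a single kPPC_DoubleConstruct that builds
// the double from both 32-bit halves.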
void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
  PPCOperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (left->opcode() == IrOpcode::kFloat64InsertHighWord32 &&
      CanCover(node, left)) {
    left = left->InputAt(1);
    Emit(kPPC_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(left),
         g.UseRegister(right));
    return;
  }
  Emit(kPPC_DoubleInsertLowWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.UseRegister(right));
}

void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
  PPCOperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (left->opcode() == IrOpcode::kFloat64InsertLowWord32 &&
      CanCover(node, left)) {
    left = left->InputAt(1);
    Emit(kPPC_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(right),
         g.UseRegister(left));
    return;
  }
  Emit(kPPC_DoubleInsertHighWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.UseRegister(right));
}

void InstructionSelector::VisitMemoryBarrier(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_Sync, g.NoOutput());
}

void InstructionSelector::VisitWord32AtomicLoad(Node* node) { VisitLoad(node); }

void InstructionSelector::VisitWord64AtomicLoad(Node* node) { VisitLoad(node); }

void InstructionSelector::VisitWord32AtomicStore(Node* node) {
  VisitStore(node);
}

void InstructionSelector::VisitWord64AtomicStore(Node* node) {
  VisitStore(node);
}

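// Shared routine for atomic exchange operations: base, index and value are
// placed in unique registers and the MRR addressing mode is used.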
void VisitAtomicExchange(InstructionSelector* selector, Node* node,
                         ArchOpcode opcode) {
  PPCOperandGenerator g(selector);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  AddressingMode addressing_mode = kMode_MRR;
  InstructionOperand inputs[3];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(base);
  inputs[input_count++] = g.UseUniqueRegister(index);
  inputs[input_count++] = g.UseUniqueRegister(value);
  InstructionOperand outputs[1];
  outputs[0] = g.UseUniqueRegister(node);
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  selector->Emit(code, 1, outputs, input_count, inputs);
}

void InstructionSelector::VisitWord32AtomicExchange(Node* node) {
  ArchOpcode opcode = kArchNop;
  MachineType type = AtomicOpType(node->op());
  if (type == MachineType::Int8()) {
    opcode = kWord32AtomicExchangeInt8;
  } else if (type == MachineType::Uint8()) {
    opcode = kPPC_AtomicExchangeUint8;
  } else if (type == MachineType::Int16()) {
    opcode = kWord32AtomicExchangeInt16;
  } else if (type == MachineType::Uint16()) {
    opcode = kPPC_AtomicExchangeUint16;
  } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    opcode = kPPC_AtomicExchangeWord32;
  } else {
    UNREACHABLE();
    return;
  }
  VisitAtomicExchange(this, node, opcode);
}

void InstructionSelector::VisitWord64AtomicExchange(Node* node) {
  ArchOpcode opcode = kArchNop;
  MachineType type = AtomicOpType(node->op());
  if (type == MachineType::Uint8()) {
    opcode = kPPC_AtomicExchangeUint8;
  } else if (type == MachineType::Uint16()) {
    opcode = kPPC_AtomicExchangeUint16;
  } else if (type == MachineType::Uint32()) {
    opcode = kPPC_AtomicExchangeWord32;
  } else if (type == MachineType::Uint64()) {
    opcode = kPPC_AtomicExchangeWord64;
  } else {
    UNREACHABLE();
    return;
  }
  VisitAtomicExchange(this, node, opcode);
}

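// Shared routine for atomic compare-and-exchange: base, index, expected and
// new value all go in unique registers; the previous memory value is the
// result.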
void VisitAtomicCompareExchange(InstructionSelector* selector, Node* node,
                                ArchOpcode opcode) {
  PPCOperandGenerator g(selector);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* old_value = node->InputAt(2);
  Node* new_value = node->InputAt(3);

  AddressingMode addressing_mode = kMode_MRR;
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);

  InstructionOperand inputs[4];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(base);
  inputs[input_count++] = g.UseUniqueRegister(index);
  inputs[input_count++] = g.UseUniqueRegister(old_value);
  inputs[input_count++] = g.UseUniqueRegister(new_value);

  InstructionOperand outputs[1];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);

  selector->Emit(code, output_count, outputs, input_count, inputs);
}

void InstructionSelector::VisitWord32AtomicCompareExchange(Node* node) {
  MachineType type = AtomicOpType(node->op());
  ArchOpcode opcode = kArchNop;
  if (type == MachineType::Int8()) {
    opcode = kWord32AtomicCompareExchangeInt8;
  } else if (type == MachineType::Uint8()) {
    opcode = kPPC_AtomicCompareExchangeUint8;
  } else if (type == MachineType::Int16()) {
    opcode = kWord32AtomicCompareExchangeInt16;
  } else if (type == MachineType::Uint16()) {
    opcode = kPPC_AtomicCompareExchangeUint16;
  } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    opcode = kPPC_AtomicCompareExchangeWord32;
  } else {
    UNREACHABLE();
    return;
  }
  VisitAtomicCompareExchange(this, node, opcode);
}

void InstructionSelector::VisitWord64AtomicCompareExchange(Node* node) {
  MachineType type = AtomicOpType(node->op());
  ArchOpcode opcode = kArchNop;
  if (type == MachineType::Uint8()) {
    opcode = kPPC_AtomicCompareExchangeUint8;
  } else if (type == MachineType::Uint16()) {
    opcode = kPPC_AtomicCompareExchangeUint16;
  } else if (type == MachineType::Uint32()) {
    opcode = kPPC_AtomicCompareExchangeWord32;
  } else if (type == MachineType::Uint64()) {
    opcode = kPPC_AtomicCompareExchangeWord64;
  } else {
    UNREACHABLE();
    return;
  }
  VisitAtomicCompareExchange(this, node, opcode);
}

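// Shared routine for atomic binary operations: the opcode is selected from
// the per-element-type candidates based on the node's AtomicOpType.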
void VisitAtomicBinaryOperation(InstructionSelector* selector, Node* node,
                                ArchOpcode int8_op, ArchOpcode uint8_op,
                                ArchOpcode int16_op, ArchOpcode uint16_op,
                                ArchOpcode int32_op, ArchOpcode uint32_op,
                                ArchOpcode int64_op, ArchOpcode uint64_op) {
  PPCOperandGenerator g(selector);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);
  MachineType type = AtomicOpType(node->op());

  ArchOpcode opcode = kArchNop;

  if (type == MachineType::Int8()) {
    opcode = int8_op;
  } else if (type == MachineType::Uint8()) {
    opcode = uint8_op;
  } else if (type == MachineType::Int16()) {
    opcode = int16_op;
  } else if (type == MachineType::Uint16()) {
    opcode = uint16_op;
  } else if (type == MachineType::Int32()) {
    opcode = int32_op;
  } else if (type == MachineType::Uint32()) {
    opcode = uint32_op;
  } else if (type == MachineType::Int64()) {
    opcode = int64_op;
  } else if (type == MachineType::Uint64()) {
    opcode = uint64_op;
  } else {
    UNREACHABLE();
    return;
  }

  AddressingMode addressing_mode = kMode_MRR;
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  InstructionOperand inputs[3];

  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(base);
  inputs[input_count++] = g.UseUniqueRegister(index);
  inputs[input_count++] = g.UseUniqueRegister(value);

  InstructionOperand outputs[1];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);

  selector->Emit(code, output_count, outputs, input_count, inputs);
}

void InstructionSelector::VisitWord32AtomicBinaryOperation(
    Node* node, ArchOpcode int8_op, ArchOpcode uint8_op, ArchOpcode int16_op,
    ArchOpcode uint16_op, ArchOpcode word32_op) {
  // Unused
  UNREACHABLE();
}

void InstructionSelector::VisitWord64AtomicBinaryOperation(
    Node* node, ArchOpcode uint8_op, ArchOpcode uint16_op, ArchOpcode uint32_op,
    ArchOpcode uint64_op) {
  // Unused
  UNREACHABLE();
}

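// Both the 32- and 64-bit atomic binops are expanded with the full set of
// element-type opcodes and dispatched through VisitAtomicBinaryOperation.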
#define VISIT_ATOMIC_BINOP(op)                                     \
  void InstructionSelector::VisitWord32Atomic##op(Node* node) {    \
    VisitAtomicBinaryOperation(                                    \
        this, node, kPPC_Atomic##op##Int8, kPPC_Atomic##op##Uint8, \
        kPPC_Atomic##op##Int16, kPPC_Atomic##op##Uint16,           \
        kPPC_Atomic##op##Int32, kPPC_Atomic##op##Uint32,           \
        kPPC_Atomic##op##Int64, kPPC_Atomic##op##Uint64);          \
  }                                                                \
  void InstructionSelector::VisitWord64Atomic##op(Node* node) {    \
    VisitAtomicBinaryOperation(                                    \
        this, node, kPPC_Atomic##op##Int8, kPPC_Atomic##op##Uint8, \
        kPPC_Atomic##op##Int16, kPPC_Atomic##op##Uint16,           \
        kPPC_Atomic##op##Int32, kPPC_Atomic##op##Uint32,           \
        kPPC_Atomic##op##Int64, kPPC_Atomic##op##Uint64);          \
  }
VISIT_ATOMIC_BINOP(Add)
VISIT_ATOMIC_BINOP(Sub)
VISIT_ATOMIC_BINOP(And)
VISIT_ATOMIC_BINOP(Or)
VISIT_ATOMIC_BINOP(Xor)
#undef VISIT_ATOMIC_BINOP

void InstructionSelector::VisitInt32AbsWithOverflow(Node* node) {
  UNREACHABLE();
}

void InstructionSelector::VisitInt64AbsWithOverflow(Node* node) {
  UNREACHABLE();
}

#define SIMD_VISIT_EXTRACT_LANE(Type, Sign)                              \
  void InstructionSelector::Visit##Type##ExtractLane##Sign(Node* node) { \
    UNIMPLEMENTED();                                                     \
  }
SIMD_VISIT_EXTRACT_LANE(F64x2, )
SIMD_VISIT_EXTRACT_LANE(F32x4, )
SIMD_VISIT_EXTRACT_LANE(I32x4, )
SIMD_VISIT_EXTRACT_LANE(I16x8, U)
SIMD_VISIT_EXTRACT_LANE(I16x8, S)
SIMD_VISIT_EXTRACT_LANE(I8x16, U)
SIMD_VISIT_EXTRACT_LANE(I8x16, S)
#undef SIMD_VISIT_EXTRACT_LANE

void InstructionSelector::VisitI32x4Splat(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4ReplaceLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Add(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Sub(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Shl(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4ShrS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Mul(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4MaxS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4MinS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Eq(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Ne(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4MinU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4MaxU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4ShrU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Neg(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4GtS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4GeS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4GtU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4GeU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Splat(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8ReplaceLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Shl(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8ShrS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8ShrU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Add(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8AddSaturateS(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8Sub(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8SubSaturateS(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8Mul(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8MinS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8MaxS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Eq(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Ne(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8AddSaturateU(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8SubSaturateU(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8MinU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8MaxU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Neg(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8GtS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8GeS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8GtU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8GeU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8RoundingAverageU(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16RoundingAverageU(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16Neg(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Splat(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16ReplaceLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Add(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16AddSaturateS(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16Sub(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16SubSaturateS(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16MinS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16MaxS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Eq(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Ne(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16GtS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16GeS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16AddSaturateU(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16SubSaturateU(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16MinU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16MaxU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16GtU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16GeU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128And(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128Or(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128Xor(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128Not(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128AndNot(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128Zero(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Eq(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Ne(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Lt(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Le(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Splat(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4ReplaceLane(Node* node) { UNIMPLEMENTED(); }

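// Results returned in caller frame slots are read back with kPPC_Peek;
// float results are marked so they get the right representation.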
void InstructionSelector::EmitPrepareResults(
    ZoneVector<PushParameter>* results, const CallDescriptor* call_descriptor,
    Node* node) {
  PPCOperandGenerator g(this);

  int reverse_slot = 0;
  for (PushParameter output : *results) {
    if (!output.location.IsCallerFrameSlot()) continue;
    // Skip any alignment holes in nodes.
    if (output.node != nullptr) {
      DCHECK(!call_descriptor->IsCFunctionCall());
      if (output.location.GetType() == MachineType::Float32()) {
        MarkAsFloat32(output.node);
      } else if (output.location.GetType() == MachineType::Float64()) {
        MarkAsFloat64(output.node);
      }
      Emit(kPPC_Peek, g.DefineAsRegister(output.node),
           g.UseImmediate(reverse_slot));
    }
    reverse_slot += output.location.GetSizeInPointers();
  }
}

void InstructionSelector::VisitF32x4Add(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Sub(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Mul(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Sqrt(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Div(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Min(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Max(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128Select(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Neg(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Abs(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4RecipSqrtApprox(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitF32x4RecipApprox(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4AddHoriz(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4AddHoriz(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8AddHoriz(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4SConvertI32x4(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitF32x4UConvertI32x4(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI32x4SConvertF32x4(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI32x4UConvertF32x4(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI32x4SConvertI16x8Low(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI32x4SConvertI16x8High(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI32x4UConvertI16x8Low(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI32x4UConvertI16x8High(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8SConvertI8x16Low(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8SConvertI8x16High(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8UConvertI8x16Low(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8UConvertI8x16High(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8SConvertI32x4(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitI16x8UConvertI32x4(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16SConvertI16x8(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16UConvertI16x8(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitS1x4AnyTrue(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS1x4AllTrue(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS1x8AnyTrue(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS1x8AllTrue(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS1x16AnyTrue(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS1x16AllTrue(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Shl(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16ShrS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16ShrU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Mul(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS8x16Shuffle(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS8x16Swizzle(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF64x2Splat(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF64x2ReplaceLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF64x2Abs(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF64x2Neg(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF64x2Sqrt(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF64x2Add(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF64x2Sub(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF64x2Mul(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF64x2Div(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF64x2Eq(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF64x2Ne(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF64x2Lt(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF64x2Le(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI64x2Neg(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI64x2Add(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI64x2Sub(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI64x2Shl(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI64x2ShrS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI64x2ShrU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI64x2Mul(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF64x2Min(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF64x2Max(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitLoadTransform(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Abs(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Abs(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Abs(Node* node) { UNIMPLEMENTED(); }

// static
MachineOperatorBuilder::Flags
InstructionSelector::SupportedMachineOperatorFlags() {
  return MachineOperatorBuilder::kFloat32RoundDown |
         MachineOperatorBuilder::kFloat64RoundDown |
         MachineOperatorBuilder::kFloat32RoundUp |
         MachineOperatorBuilder::kFloat64RoundUp |
         MachineOperatorBuilder::kFloat32RoundTruncate |
         MachineOperatorBuilder::kFloat64RoundTruncate |
         MachineOperatorBuilder::kFloat64RoundTiesAway |
         MachineOperatorBuilder::kWord32Popcnt |
         MachineOperatorBuilder::kWord64Popcnt;
  // We omit kWord32ShiftIsSafe as s[rl]w use 0x3F as a mask rather than 0x1F.
}

// static
MachineOperatorBuilder::AlignmentRequirements
InstructionSelector::AlignmentRequirements() {
  return MachineOperatorBuilder::AlignmentRequirements::
      FullUnalignedAccessSupport();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8