// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/raw-machine-assembler.h"

#include "src/base/small-vector.h"
#include "src/compiler/compiler-source-position-table.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/pipeline.h"
#include "src/compiler/scheduler.h"
#include "src/heap/factory-inl.h"

namespace v8 {
namespace internal {
namespace compiler {

RawMachineAssembler::RawMachineAssembler(
    Isolate* isolate, Graph* graph, CallDescriptor* call_descriptor,
    MachineRepresentation word, MachineOperatorBuilder::Flags flags,
    MachineOperatorBuilder::AlignmentRequirements alignment_requirements)
    : isolate_(isolate),
      graph_(graph),
      schedule_(zone()->New<Schedule>(zone())),
      source_positions_(zone()->New<SourcePositionTable>(graph)),
      machine_(zone(), word, flags, alignment_requirements),
      common_(zone()),
      simplified_(zone()),
      call_descriptor_(call_descriptor),
      target_parameter_(nullptr),
      parameters_(parameter_count(), zone()),
      current_block_(schedule()->start()) {
  int param_count = static_cast<int>(parameter_count());
  // Add an extra input for the JSFunction parameter to the start node.
  graph->SetStart(graph->NewNode(common_.Start(param_count + 1)));
  if (call_descriptor->IsJSFunctionCall()) {
    target_parameter_ = AddNode(
        common()->Parameter(Linkage::kJSCallClosureParamIndex), graph->start());
  }
  for (size_t i = 0; i < parameter_count(); ++i) {
    parameters_[i] =
        AddNode(common()->Parameter(static_cast<int>(i)), graph->start());
  }
  graph->SetEnd(graph->NewNode(common_.End(0)));
  source_positions_->AddDecorator();
}

void RawMachineAssembler::SetCurrentExternalSourcePosition(
    FileAndLine file_and_line) {
  int file_id =
      isolate()->LookupOrAddExternallyCompiledFilename(file_and_line.first);
  SourcePosition p = SourcePosition::External(file_and_line.second, file_id);
  DCHECK(p.ExternalLine() == file_and_line.second);
  source_positions()->SetCurrentPosition(p);
}

FileAndLine RawMachineAssembler::GetCurrentExternalSourcePosition() const {
  SourcePosition p = source_positions_->GetCurrentPosition();
  if (!p.IsKnown()) return {nullptr, -1};
  int file_id = p.ExternalFileId();
  const char* file_name = isolate()->GetExternallyCompiledFilename(file_id);
  int line = p.ExternalLine();
  return {file_name, line};
}

Node* RawMachineAssembler::NullConstant() {
  return HeapConstant(isolate()->factory()->null_value());
}

Node* RawMachineAssembler::UndefinedConstant() {
  return HeapConstant(isolate()->factory()->undefined_value());
}

Node* RawMachineAssembler::RelocatableIntPtrConstant(intptr_t value,
                                                     RelocInfo::Mode rmode) {
  return kSystemPointerSize == 8
             ? RelocatableInt64Constant(value, rmode)
             : RelocatableInt32Constant(static_cast<int>(value), rmode);
}

Node* RawMachineAssembler::OptimizedAllocate(
    Node* size, AllocationType allocation,
    AllowLargeObjects allow_large_objects) {
  return AddNode(
      simplified()->AllocateRaw(Type::Any(), allocation, allow_large_objects),
      size);
}

Schedule* RawMachineAssembler::ExportForTest() {
  // Compute the correct codegen order.
  DCHECK(schedule_->rpo_order()->empty());
  if (FLAG_trace_turbo_scheduler) {
    PrintF("--- RAW SCHEDULE -------------------------------------------\n");
    StdoutStream{} << *schedule_;
  }
  schedule_->EnsureCFGWellFormedness();
  Scheduler::ComputeSpecialRPO(zone(), schedule_);
  schedule_->PropagateDeferredMark();
  if (FLAG_trace_turbo_scheduler) {
    PrintF("--- EDGE SPLIT AND PROPAGATED DEFERRED SCHEDULE ------------\n");
    StdoutStream{} << *schedule_;
  }
  // Invalidate RawMachineAssembler.
  source_positions_->RemoveDecorator();
  Schedule* schedule = schedule_;
  schedule_ = nullptr;
  return schedule;
}

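// Exports the graph for the optimizing pipeline: the CFG is cleaned up and
// scheduled into special RPO, and the schedule is then folded back into the
// graph as explicit effect and control edges (see MakeReschedulable). The
// RawMachineAssembler must not be used afterwards.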
Graph* RawMachineAssembler::ExportForOptimization() {
  // Compute the correct codegen order.
  DCHECK(schedule_->rpo_order()->empty());
  if (FLAG_trace_turbo_scheduler) {
    PrintF("--- RAW SCHEDULE -------------------------------------------\n");
    StdoutStream{} << *schedule_;
  }
  schedule_->EnsureCFGWellFormedness();
  OptimizeControlFlow(schedule_, graph(), common());
  Scheduler::ComputeSpecialRPO(zone(), schedule_);
  if (FLAG_trace_turbo_scheduler) {
    PrintF("--- SCHEDULE BEFORE GRAPH CREATION -------------------------\n");
    StdoutStream{} << *schedule_;
  }
  MakeReschedulable();
  // Invalidate RawMachineAssembler.
  schedule_ = nullptr;
  return graph();
}

void RawMachineAssembler::OptimizeControlFlow(Schedule* schedule, Graph* graph,
                                              CommonOperatorBuilder* common) {
  for (bool changed = true; changed;) {
    changed = false;
    for (size_t i = 0; i < schedule->all_blocks()->size(); ++i) {
      BasicBlock* block = (*schedule->all_blocks())[i];
      if (block == nullptr) continue;

      // Short-circuit a goto if the succeeding block is not a control-flow
      // merge. This is not really useful on its own, since graph construction
      // has the same effect, but combining blocks improves the pattern match
      // on their structure below.
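      // For example, if B1 ends in a goto to B2 and B2 has no other
      // predecessors, B2's nodes, control and successors are folded into B1
      // and B2 is removed from the schedule.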
      if (block->control() == BasicBlock::kGoto) {
        DCHECK_EQ(block->SuccessorCount(), 1);
        BasicBlock* successor = block->SuccessorAt(0);
        if (successor->PredecessorCount() == 1) {
          DCHECK_EQ(successor->PredecessorAt(0), block);
          for (Node* node : *successor) {
            schedule->SetBlockForNode(nullptr, node);
            schedule->AddNode(block, node);
          }
          block->set_control(successor->control());
          Node* control_input = successor->control_input();
          block->set_control_input(control_input);
          if (control_input) {
            schedule->SetBlockForNode(block, control_input);
          }
          if (successor->deferred()) block->set_deferred(true);
          block->ClearSuccessors();
          schedule->MoveSuccessors(successor, block);
          schedule->ClearBlockById(successor->id());
          changed = true;
          --i;
          continue;
        }
      }
      // Block-cloning in the simple case where a block consists only of a phi
      // node and a branch on that phi. This just duplicates the branch block
      // for each predecessor, replacing the phi node with the corresponding phi
      // input.
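      // For example, a block containing only {p = Phi(a, b); Branch(p)} with
      // predecessors P0 and P1 is rewritten so that P0 ends in a cloned
      // Branch(a) and P1 in a cloned Branch(b), each with fresh IfTrue/IfFalse
      // blocks that jump to the original successors.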
      if (block->control() == BasicBlock::kBranch && block->NodeCount() == 1) {
        Node* phi = block->NodeAt(0);
        if (phi->opcode() != IrOpcode::kPhi) continue;
        Node* branch = block->control_input();
        DCHECK_EQ(branch->opcode(), IrOpcode::kBranch);
        if (NodeProperties::GetValueInput(branch, 0) != phi) continue;
        if (phi->UseCount() != 1) continue;
        DCHECK_EQ(phi->op()->ValueInputCount(), block->PredecessorCount());

        // Turn projection blocks into normal blocks.
        DCHECK_EQ(block->SuccessorCount(), 2);
        BasicBlock* true_block = block->SuccessorAt(0);
        BasicBlock* false_block = block->SuccessorAt(1);
        DCHECK_EQ(true_block->NodeAt(0)->opcode(), IrOpcode::kIfTrue);
        DCHECK_EQ(false_block->NodeAt(0)->opcode(), IrOpcode::kIfFalse);
        (*true_block->begin())->Kill();
        true_block->RemoveNode(true_block->begin());
        (*false_block->begin())->Kill();
        false_block->RemoveNode(false_block->begin());
        true_block->ClearPredecessors();
        false_block->ClearPredecessors();

        size_t arity = block->PredecessorCount();
        for (size_t j = 0; j < arity; ++j) {
          BasicBlock* predecessor = block->PredecessorAt(j);
          predecessor->ClearSuccessors();
          if (block->deferred()) predecessor->set_deferred(true);
          Node* branch_clone = graph->CloneNode(branch);
          int phi_input = static_cast<int>(j);
          NodeProperties::ReplaceValueInput(
              branch_clone, NodeProperties::GetValueInput(phi, phi_input), 0);
          BasicBlock* new_true_block = schedule->NewBasicBlock();
          BasicBlock* new_false_block = schedule->NewBasicBlock();
          new_true_block->AddNode(
              graph->NewNode(common->IfTrue(), branch_clone));
          new_false_block->AddNode(
              graph->NewNode(common->IfFalse(), branch_clone));
          schedule->AddGoto(new_true_block, true_block);
          schedule->AddGoto(new_false_block, false_block);
          DCHECK_EQ(predecessor->control(), BasicBlock::kGoto);
          predecessor->set_control(BasicBlock::kNone);
          schedule->AddBranch(predecessor, branch_clone, new_true_block,
                              new_false_block);
        }
        branch->Kill();
        schedule->ClearBlockById(block->id());
        changed = true;
        continue;
      }
    }
  }
}

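// Turns the already-built schedule back into a plain graph that the scheduler
// can process again: blocks are walked in RPO and explicit effect and control
// edges are threaded through their nodes, introducing Merge, EffectPhi and
// Loop nodes where blocks have multiple predecessors.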
void RawMachineAssembler::MakeReschedulable() {
  std::vector<Node*> block_final_control(schedule_->all_blocks_.size());
  std::vector<Node*> block_final_effect(schedule_->all_blocks_.size());

  struct LoopHeader {
    BasicBlock* block;
    Node* loop_node;
    Node* effect_phi;
  };
  std::vector<LoopHeader> loop_headers;

  // These are hoisted outside of the loop to avoid re-allocation.
  std::vector<Node*> merge_inputs;
  std::vector<Node*> effect_phi_inputs;

  for (BasicBlock* block : *schedule_->rpo_order()) {
    Node* current_control;
    Node* current_effect;
    if (block == schedule_->start()) {
      current_control = current_effect = graph()->start();
    } else if (block == schedule_->end()) {
      for (size_t i = 0; i < block->PredecessorCount(); ++i) {
        NodeProperties::MergeControlToEnd(
            graph(), common(), block->PredecessorAt(i)->control_input());
      }
    } else if (block->IsLoopHeader()) {
      // The graph()->start() inputs are just placeholders until we have
      // computed the real back-edges and restructured the control flow so
      // that the loop has exactly two predecessors.
      current_control = graph()->NewNode(common()->Loop(2), graph()->start(),
                                         graph()->start());
      current_effect =
          graph()->NewNode(common()->EffectPhi(2), graph()->start(),
                           graph()->start(), current_control);

      Node* terminate = graph()->NewNode(common()->Terminate(), current_effect,
                                         current_control);
      NodeProperties::MergeControlToEnd(graph(), common(), terminate);
      loop_headers.push_back(
          LoopHeader{block, current_control, current_effect});
    } else if (block->PredecessorCount() == 1) {
      BasicBlock* predecessor = block->PredecessorAt(0);
      DCHECK_LT(predecessor->rpo_number(), block->rpo_number());
      current_effect = block_final_effect[predecessor->id().ToSize()];
      current_control = block_final_control[predecessor->id().ToSize()];
    } else {
      // Create control merge nodes and effect phis for all predecessor blocks.
      merge_inputs.clear();
      effect_phi_inputs.clear();
      int predecessor_count = static_cast<int>(block->PredecessorCount());
      for (int i = 0; i < predecessor_count; ++i) {
        BasicBlock* predecessor = block->PredecessorAt(i);
        DCHECK_LT(predecessor->rpo_number(), block->rpo_number());
        merge_inputs.push_back(block_final_control[predecessor->id().ToSize()]);
        effect_phi_inputs.push_back(
            block_final_effect[predecessor->id().ToSize()]);
      }
      current_control = graph()->NewNode(common()->Merge(predecessor_count),
                                         static_cast<int>(merge_inputs.size()),
                                         merge_inputs.data());
      effect_phi_inputs.push_back(current_control);
      current_effect = graph()->NewNode(
          common()->EffectPhi(predecessor_count),
          static_cast<int>(effect_phi_inputs.size()), effect_phi_inputs.data());
    }

    auto update_current_control_and_effect = [&](Node* node) {
      bool existing_effect_and_control =
          IrOpcode::IsIfProjectionOpcode(node->opcode()) ||
          IrOpcode::IsPhiOpcode(node->opcode());
      if (node->op()->EffectInputCount() > 0) {
        DCHECK_EQ(1, node->op()->EffectInputCount());
        if (existing_effect_and_control) {
          NodeProperties::ReplaceEffectInput(node, current_effect);
        } else {
          node->AppendInput(graph()->zone(), current_effect);
        }
      }
      if (node->op()->ControlInputCount() > 0) {
        DCHECK_EQ(1, node->op()->ControlInputCount());
        if (existing_effect_and_control) {
          NodeProperties::ReplaceControlInput(node, current_control);
        } else {
          node->AppendInput(graph()->zone(), current_control);
        }
      }
      if (node->op()->EffectOutputCount() > 0) {
        DCHECK_EQ(1, node->op()->EffectOutputCount());
        current_effect = node;
      }
      if (node->op()->ControlOutputCount() > 0) {
        current_control = node;
      }
    };

    for (Node* node : *block) {
      update_current_control_and_effect(node);
    }
    if (block->deferred()) MarkControlDeferred(current_control);

    if (Node* block_terminator = block->control_input()) {
      update_current_control_and_effect(block_terminator);
    }

    block_final_effect[block->id().ToSize()] = current_effect;
    block_final_control[block->id().ToSize()] = current_control;
  }

  // Fix up loop backedges and restructure control flow so that loop nodes
  // have exactly two control predecessors.
  for (const LoopHeader& loop_header : loop_headers) {
    BasicBlock* block = loop_header.block;
    std::vector<BasicBlock*> loop_entries;
    std::vector<BasicBlock*> loop_backedges;
    for (size_t i = 0; i < block->PredecessorCount(); ++i) {
      BasicBlock* predecessor = block->PredecessorAt(i);
      if (block->LoopContains(predecessor)) {
        loop_backedges.push_back(predecessor);
      } else {
        DCHECK(loop_backedges.empty());
        loop_entries.push_back(predecessor);
      }
    }
    DCHECK(!loop_entries.empty());
    DCHECK(!loop_backedges.empty());

    int entrance_count = static_cast<int>(loop_entries.size());
    int backedge_count = static_cast<int>(loop_backedges.size());
    Node* control_loop_entry = CreateNodeFromPredecessors(
        loop_entries, block_final_control, common()->Merge(entrance_count), {});
    Node* control_backedge =
        CreateNodeFromPredecessors(loop_backedges, block_final_control,
                                   common()->Merge(backedge_count), {});
    Node* effect_loop_entry = CreateNodeFromPredecessors(
        loop_entries, block_final_effect, common()->EffectPhi(entrance_count),
        {control_loop_entry});
    Node* effect_backedge = CreateNodeFromPredecessors(
        loop_backedges, block_final_effect, common()->EffectPhi(backedge_count),
        {control_backedge});

    loop_header.loop_node->ReplaceInput(0, control_loop_entry);
    loop_header.loop_node->ReplaceInput(1, control_backedge);
    loop_header.effect_phi->ReplaceInput(0, effect_loop_entry);
    loop_header.effect_phi->ReplaceInput(1, effect_backedge);

    for (Node* node : *block) {
      if (node->opcode() == IrOpcode::kPhi) {
        MakePhiBinary(node, static_cast<int>(loop_entries.size()),
                      control_loop_entry, control_backedge);
      }
    }
  }
}

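// Returns the sidetable entry of the single predecessor, or a new node (e.g. a
// Merge or EffectPhi) combining the predecessors' sidetable entries followed
// by any additional inputs.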
Node* RawMachineAssembler::CreateNodeFromPredecessors(
    const std::vector<BasicBlock*>& predecessors,
    const std::vector<Node*>& sidetable, const Operator* op,
    const std::vector<Node*>& additional_inputs) {
  if (predecessors.size() == 1) {
    return sidetable[predecessors.front()->id().ToSize()];
  }
  std::vector<Node*> inputs;
  inputs.reserve(predecessors.size());
  for (BasicBlock* predecessor : predecessors) {
    inputs.push_back(sidetable[predecessor->id().ToSize()]);
  }
  for (Node* additional_input : additional_inputs) {
    inputs.push_back(additional_input);
  }
  return graph()->NewNode(op, static_cast<int>(inputs.size()), inputs.data());
}

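// Rewrites an n-input phi into a two-input phi whose inputs are themselves
// phis over the ranges [0, split_point) and [split_point, n). As an
// illustration, a four-input Phi(a, b, c, d) with split_point == 2 becomes
// Phi(Phi(a, b, left_control), Phi(c, d, right_control), control).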
void RawMachineAssembler::MakePhiBinary(Node* phi, int split_point,
                                        Node* left_control,
                                        Node* right_control) {
  int value_count = phi->op()->ValueInputCount();
  if (value_count == 2) return;
  DCHECK_LT(split_point, value_count);
  DCHECK_GT(split_point, 0);

  MachineRepresentation rep = PhiRepresentationOf(phi->op());
  int left_input_count = split_point;
  int right_input_count = value_count - split_point;

  Node* left_input;
  if (left_input_count == 1) {
    left_input = NodeProperties::GetValueInput(phi, 0);
  } else {
    std::vector<Node*> inputs;
    inputs.reserve(left_input_count);
    for (int i = 0; i < left_input_count; ++i) {
      inputs.push_back(NodeProperties::GetValueInput(phi, i));
    }
    inputs.push_back(left_control);
    left_input =
        graph()->NewNode(common()->Phi(rep, static_cast<int>(left_input_count)),
                         static_cast<int>(inputs.size()), inputs.data());
  }

  Node* right_input;
  if (right_input_count == 1) {
    right_input = NodeProperties::GetValueInput(phi, split_point);
  } else {
    std::vector<Node*> inputs;
    for (int i = split_point; i < value_count; ++i) {
      inputs.push_back(NodeProperties::GetValueInput(phi, i));
    }
    inputs.push_back(right_control);
    right_input = graph()->NewNode(
        common()->Phi(rep, static_cast<int>(right_input_count)),
        static_cast<int>(inputs.size()), inputs.data());
  }

  Node* control = NodeProperties::GetControlInput(phi);
  phi->TrimInputCount(3);
  phi->ReplaceInput(0, left_input);
  phi->ReplaceInput(1, right_input);
  phi->ReplaceInput(2, control);
  NodeProperties::ChangeOp(phi, common()->Phi(rep, 2));
}

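// Walks up the control chain from |control_node| and adjusts the hint of the
// responsible branch, IfValue or IfDefault projection so that the deferred
// block ends up on the unlikely path.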
void RawMachineAssembler::MarkControlDeferred(Node* control_node) {
  BranchHint new_branch_hint;
  Node* responsible_branch = nullptr;
  while (responsible_branch == nullptr) {
    switch (control_node->opcode()) {
      case IrOpcode::kIfException:
        // IfException projections are deferred by default.
        return;
      case IrOpcode::kIfSuccess:
        control_node = NodeProperties::GetControlInput(control_node);
        continue;
      case IrOpcode::kIfValue: {
        IfValueParameters parameters = IfValueParametersOf(control_node->op());
        if (parameters.hint() != BranchHint::kFalse) {
          NodeProperties::ChangeOp(
              control_node, common()->IfValue(parameters.value(),
                                              parameters.comparison_order(),
                                              BranchHint::kFalse));
        }
        return;
      }
      case IrOpcode::kIfDefault:
        if (BranchHintOf(control_node->op()) != BranchHint::kFalse) {
          NodeProperties::ChangeOp(control_node,
                                   common()->IfDefault(BranchHint::kFalse));
        }
        return;
      case IrOpcode::kIfTrue: {
        Node* branch = NodeProperties::GetControlInput(control_node);
        BranchHint hint = BranchHintOf(branch->op());
        if (hint == BranchHint::kTrue) {
          // The other possibility is also deferred, so the responsible branch
          // has to be further up the control chain.
          control_node = NodeProperties::GetControlInput(branch);
          continue;
        }
        new_branch_hint = BranchHint::kFalse;
        responsible_branch = branch;
        break;
      }
      case IrOpcode::kIfFalse: {
        Node* branch = NodeProperties::GetControlInput(control_node);
        BranchHint hint = BranchHintOf(branch->op());
        if (hint == BranchHint::kFalse) {
          // The other possibility is also deferred, so the responsible branch
          // has to be further up the control chain.
          control_node = NodeProperties::GetControlInput(branch);
          continue;
        }
        new_branch_hint = BranchHint::kTrue;
        responsible_branch = branch;
        break;
      }
      case IrOpcode::kMerge:
        for (int i = 0; i < control_node->op()->ControlInputCount(); ++i) {
          MarkControlDeferred(NodeProperties::GetControlInput(control_node, i));
        }
        return;
      case IrOpcode::kLoop:
        control_node = NodeProperties::GetControlInput(control_node, 0);
        continue;
      case IrOpcode::kBranch:
      case IrOpcode::kSwitch:
        UNREACHABLE();
      case IrOpcode::kStart:
        return;
      default:
        DCHECK_EQ(1, control_node->op()->ControlInputCount());
        control_node = NodeProperties::GetControlInput(control_node);
        continue;
    }
  }

  BranchHint hint = BranchHintOf(responsible_branch->op());
  if (hint == new_branch_hint) return;
  NodeProperties::ChangeOp(responsible_branch,
                           common()->Branch(new_branch_hint));
}

Node* RawMachineAssembler::TargetParameter() {
  DCHECK_NOT_NULL(target_parameter_);
  return target_parameter_;
}

Node* RawMachineAssembler::Parameter(size_t index) {
  DCHECK_LT(index, parameter_count());
  return parameters_[index];
}


void RawMachineAssembler::Goto(RawMachineLabel* label) {
  DCHECK(current_block_ != schedule()->end());
  schedule()->AddGoto(CurrentBlock(), Use(label));
  current_block_ = nullptr;
}


void RawMachineAssembler::Branch(Node* condition, RawMachineLabel* true_val,
                                 RawMachineLabel* false_val) {
  DCHECK(current_block_ != schedule()->end());
  Node* branch = MakeNode(common()->Branch(BranchHint::kNone), 1, &condition);
  BasicBlock* true_block = schedule()->NewBasicBlock();
  BasicBlock* false_block = schedule()->NewBasicBlock();
  schedule()->AddBranch(CurrentBlock(), branch, true_block, false_block);

  true_block->AddNode(MakeNode(common()->IfTrue(), 1, &branch));
  schedule()->AddGoto(true_block, Use(true_val));

  false_block->AddNode(MakeNode(common()->IfFalse(), 1, &branch));
  schedule()->AddGoto(false_block, Use(false_val));

  current_block_ = nullptr;
}

void RawMachineAssembler::Continuations(Node* call, RawMachineLabel* if_success,
                                        RawMachineLabel* if_exception) {
  DCHECK_NOT_NULL(schedule_);
  DCHECK_NOT_NULL(current_block_);
  schedule()->AddCall(CurrentBlock(), call, Use(if_success), Use(if_exception));
  current_block_ = nullptr;
}

void RawMachineAssembler::Switch(Node* index, RawMachineLabel* default_label,
                                 const int32_t* case_values,
                                 RawMachineLabel** case_labels,
                                 size_t case_count) {
  DCHECK_NE(schedule()->end(), current_block_);
  size_t succ_count = case_count + 1;
  Node* switch_node = MakeNode(common()->Switch(succ_count), 1, &index);
  BasicBlock** succ_blocks = zone()->NewArray<BasicBlock*>(succ_count);
  for (size_t i = 0; i < case_count; ++i) {
    int32_t case_value = case_values[i];
    BasicBlock* case_block = schedule()->NewBasicBlock();
    Node* case_node =
        graph()->NewNode(common()->IfValue(case_value), switch_node);
    schedule()->AddNode(case_block, case_node);
    schedule()->AddGoto(case_block, Use(case_labels[i]));
    succ_blocks[i] = case_block;
  }
  BasicBlock* default_block = schedule()->NewBasicBlock();
  Node* default_node = graph()->NewNode(common()->IfDefault(), switch_node);
  schedule()->AddNode(default_block, default_node);
  schedule()->AddGoto(default_block, Use(default_label));
  succ_blocks[case_count] = default_block;
  schedule()->AddSwitch(CurrentBlock(), switch_node, succ_blocks, succ_count);
  current_block_ = nullptr;
}

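// The first input of a Return node is the number of additional stack slots to
// pop on return. Plain returns pop nothing extra, hence Int32Constant(0); the
// PopAndReturn variants below thread the caller-provided pop count through
// instead.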
void RawMachineAssembler::Return(Node* value) {
  Node* values[] = {Int32Constant(0), value};
  Node* ret = MakeNode(common()->Return(1), 2, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::Return(Node* v1, Node* v2) {
  Node* values[] = {Int32Constant(0), v1, v2};
  Node* ret = MakeNode(common()->Return(2), 3, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::Return(Node* v1, Node* v2, Node* v3) {
  Node* values[] = {Int32Constant(0), v1, v2, v3};
  Node* ret = MakeNode(common()->Return(3), 4, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::Return(Node* v1, Node* v2, Node* v3, Node* v4) {
  Node* values[] = {Int32Constant(0), v1, v2, v3, v4};
  Node* ret = MakeNode(common()->Return(4), 5, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::Return(int count, Node* vs[]) {
  using Node_ptr = Node*;
  Node** values = new Node_ptr[count + 1];
  values[0] = Int32Constant(0);
  for (int i = 0; i < count; ++i) values[i + 1] = vs[i];
  Node* ret = MakeNode(common()->Return(count), count + 1, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
  delete[] values;
}

void RawMachineAssembler::PopAndReturn(Node* pop, Node* value) {
  // PopAndReturn is supposed to be used ONLY in CSA/Torque builtins for
  // dropping ALL JS arguments that are currently located on the stack.
  // The check below ensures that there are no directly accessible stack
  // parameters from the current builtin, which implies that the builtin with
  // JS calling convention (TFJ) was created with kDontAdaptArgumentsSentinel.
  // This simplifies semantics of this instruction because in case of presence
  // of directly accessible stack parameters it's impossible to distinguish
  // the following cases:
  // 1) stack parameter is included in JS arguments (and therefore it will be
  //    dropped as a part of 'pop' number of arguments),
  // 2) stack parameter is NOT included in JS arguments (and therefore it should
  //    be dropped in ADDITION to the 'pop' number of arguments).
  // Additionally, in order to simplify assembly code, PopAndReturn is also
  // not allowed in builtins with stub linkage and parameters on stack.
  CHECK_EQ(call_descriptor()->ParameterSlotCount(), 0);
  Node* values[] = {pop, value};
  Node* ret = MakeNode(common()->Return(1), 2, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2) {
  Node* values[] = {pop, v1, v2};
  Node* ret = MakeNode(common()->Return(2), 3, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2,
                                       Node* v3) {
  Node* values[] = {pop, v1, v2, v3};
  Node* ret = MakeNode(common()->Return(3), 4, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2, Node* v3,
                                       Node* v4) {
  Node* values[] = {pop, v1, v2, v3, v4};
  Node* ret = MakeNode(common()->Return(4), 5, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::AbortCSADcheck(Node* message) {
  AddNode(machine()->AbortCSADcheck(), message);
}

void RawMachineAssembler::DebugBreak() { AddNode(machine()->DebugBreak()); }

void RawMachineAssembler::Unreachable() {
  Node* ret = MakeNode(common()->Throw(), 0, nullptr);
  schedule()->AddThrow(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::Comment(const std::string& msg) {
  size_t length = msg.length() + 1;
  char* zone_buffer = zone()->NewArray<char>(length);
  MemCopy(zone_buffer, msg.c_str(), length);
  AddNode(machine()->Comment(zone_buffer));
}

void RawMachineAssembler::StaticAssert(Node* value, const char* source) {
  AddNode(common()->StaticAssert(source), value);
}

Node* RawMachineAssembler::CallN(CallDescriptor* call_descriptor,
                                 int input_count, Node* const* inputs) {
  DCHECK(!call_descriptor->NeedsFrameState());
  // +1 is for target.
  DCHECK_EQ(input_count, call_descriptor->ParameterCount() + 1);
  return AddNode(common()->Call(call_descriptor), input_count, inputs);
}

Node* RawMachineAssembler::CallNWithFrameState(CallDescriptor* call_descriptor,
                                               int input_count,
                                               Node* const* inputs) {
  DCHECK(call_descriptor->NeedsFrameState());
  // +2 is for target and frame state.
  DCHECK_EQ(input_count, call_descriptor->ParameterCount() + 2);
  return AddNode(common()->Call(call_descriptor), input_count, inputs);
}

void RawMachineAssembler::TailCallN(CallDescriptor* call_descriptor,
                                    int input_count, Node* const* inputs) {
  // +1 is for target.
  DCHECK_EQ(input_count, call_descriptor->ParameterCount() + 1);
  Node* tail_call =
      MakeNode(common()->TailCall(call_descriptor), input_count, inputs);
  schedule()->AddTailCall(CurrentBlock(), tail_call);
  current_block_ = nullptr;
}

namespace {

enum FunctionDescriptorMode { kHasFunctionDescriptor, kNoFunctionDescriptor };

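// Builds a MachineSignature for the C call, derives a simplified C call
// descriptor with the requested flags (caller-saved registers, no function
// descriptor), and emits a Call node whose first input is the C function
// address followed by the arguments.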
Node* CallCFunctionImpl(
    RawMachineAssembler* rasm, Node* function,
    base::Optional<MachineType> return_type,
    std::initializer_list<RawMachineAssembler::CFunctionArg> args,
    bool caller_saved_regs, SaveFPRegsMode mode,
    FunctionDescriptorMode no_function_descriptor) {
  static constexpr std::size_t kNumCArgs = 10;

  MachineSignature::Builder builder(rasm->zone(), return_type ? 1 : 0,
                                    args.size());
  if (return_type) {
    builder.AddReturn(*return_type);
  }
  for (const auto& arg : args) builder.AddParam(arg.first);

  bool caller_saved_fp_regs =
      caller_saved_regs && (mode == SaveFPRegsMode::kSave);
  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
  if (caller_saved_regs) flags |= CallDescriptor::kCallerSavedRegisters;
  if (caller_saved_fp_regs) flags |= CallDescriptor::kCallerSavedFPRegisters;
  if (no_function_descriptor) flags |= CallDescriptor::kNoFunctionDescriptor;
  auto call_descriptor =
      Linkage::GetSimplifiedCDescriptor(rasm->zone(), builder.Build(), flags);

  base::SmallVector<Node*, kNumCArgs> nodes(args.size() + 1);
  nodes[0] = function;
  std::transform(
      args.begin(), args.end(), std::next(nodes.begin()),
      [](const RawMachineAssembler::CFunctionArg& arg) { return arg.second; });

  auto common = rasm->common();
  return rasm->AddNode(common->Call(call_descriptor),
                       static_cast<int>(nodes.size()), nodes.begin());
}

}  // namespace

Node* RawMachineAssembler::CallCFunction(
    Node* function, base::Optional<MachineType> return_type,
    std::initializer_list<RawMachineAssembler::CFunctionArg> args) {
  return CallCFunctionImpl(this, function, return_type, args, false,
                           SaveFPRegsMode::kIgnore, kHasFunctionDescriptor);
}

Node* RawMachineAssembler::CallCFunctionWithoutFunctionDescriptor(
    Node* function, MachineType return_type,
    std::initializer_list<RawMachineAssembler::CFunctionArg> args) {
  return CallCFunctionImpl(this, function, return_type, args, false,
                           SaveFPRegsMode::kIgnore, kNoFunctionDescriptor);
}

Node* RawMachineAssembler::CallCFunctionWithCallerSavedRegisters(
    Node* function, MachineType return_type, SaveFPRegsMode mode,
    std::initializer_list<RawMachineAssembler::CFunctionArg> args) {
  return CallCFunctionImpl(this, function, return_type, args, true, mode,
                           kHasFunctionDescriptor);
}

BasicBlock* RawMachineAssembler::Use(RawMachineLabel* label) {
  label->used_ = true;
  return EnsureBlock(label);
}

BasicBlock* RawMachineAssembler::EnsureBlock(RawMachineLabel* label) {
  if (label->block_ == nullptr) {
    label->block_ = schedule()->NewBasicBlock();
  }
  return label->block_;
}

void RawMachineAssembler::Bind(RawMachineLabel* label) {
  DCHECK_NULL(current_block_);
  DCHECK(!label->bound_);
  label->bound_ = true;
  current_block_ = EnsureBlock(label);
  current_block_->set_deferred(label->deferred_);
}

#if DEBUG
void RawMachineAssembler::Bind(RawMachineLabel* label,
                               AssemblerDebugInfo info) {
  if (current_block_ != nullptr) {
    std::stringstream str;
    str << "Binding label without closing previous block:"
        << "\n#    label:          " << info
        << "\n#    previous block: " << *current_block_;
    FATAL("%s", str.str().c_str());
  }
  Bind(label);
  current_block_->set_debug_info(info);
}

void RawMachineAssembler::PrintCurrentBlock(std::ostream& os) {
  os << CurrentBlock();
}

void RawMachineAssembler::SetInitialDebugInformation(
    AssemblerDebugInfo debug_info) {
  CurrentBlock()->set_debug_info(debug_info);
}
#endif  // DEBUG

bool RawMachineAssembler::InsideBlock() { return current_block_ != nullptr; }

BasicBlock* RawMachineAssembler::CurrentBlock() {
  DCHECK(current_block_);
  return current_block_;
}

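// Note that graph()->start() is appended as a placeholder control input; when
// the graph is exported for optimization, MakeReschedulable replaces it with
// the block's actual Merge or Loop node.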
Node* RawMachineAssembler::Phi(MachineRepresentation rep, int input_count,
                               Node* const* inputs) {
  Node** buffer = zone()->NewArray<Node*>(input_count + 1);
  std::copy(inputs, inputs + input_count, buffer);
  buffer[input_count] = graph()->start();
  return AddNode(common()->Phi(rep, input_count), input_count + 1, buffer);
}

void RawMachineAssembler::AppendPhiInput(Node* phi, Node* new_input) {
  const Operator* op = phi->op();
  const Operator* new_op = common()->ResizeMergeOrPhi(op, phi->InputCount());
  phi->InsertInput(zone(), phi->InputCount() - 1, new_input);
  NodeProperties::ChangeOp(phi, new_op);
}

Node* RawMachineAssembler::AddNode(const Operator* op, int input_count,
                                   Node* const* inputs) {
  DCHECK_NOT_NULL(schedule_);
  DCHECK_NOT_NULL(current_block_);
  Node* node = MakeNode(op, input_count, inputs);
  schedule()->AddNode(CurrentBlock(), node);
  return node;
}

Node* RawMachineAssembler::MakeNode(const Operator* op, int input_count,
                                    Node* const* inputs) {
  // The raw machine assembler nodes do not have effect and control inputs,
  // so we disable checking input counts here.
  return graph()->NewNodeUnchecked(op, input_count, inputs);
}

RawMachineLabel::~RawMachineLabel() {
#if DEBUG
  if (bound_ == used_) return;
  std::stringstream str;
  if (bound_) {
    str << "A label has been bound but it's not used."
        << "\n#    label: " << *block_;
  } else {
    str << "A label has been used but it's not bound.";
  }
  FATAL("%s", str.str().c_str());
#endif  // DEBUG
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8