1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/compiler/raw-machine-assembler.h"
6
7 #include "src/base/small-vector.h"
8 #include "src/compiler/compiler-source-position-table.h"
9 #include "src/compiler/node-properties.h"
10 #include "src/compiler/pipeline.h"
11 #include "src/compiler/scheduler.h"
12 #include "src/heap/factory-inl.h"
13
14 namespace v8 {
15 namespace internal {
16 namespace compiler {
17
// Sets up a fresh schedule and source-position table in the zone, builds the
// graph's Start node (with one extra input slot for the JSFunction closure),
// materializes Parameter nodes for every formal parameter, and creates an
// empty End node. The decorator makes subsequently created nodes pick up the
// current source position automatically.
RawMachineAssembler::RawMachineAssembler(
    Isolate* isolate, Graph* graph, CallDescriptor* call_descriptor,
    MachineRepresentation word, MachineOperatorBuilder::Flags flags,
    MachineOperatorBuilder::AlignmentRequirements alignment_requirements,
    PoisoningMitigationLevel poisoning_level)
    : isolate_(isolate),
      graph_(graph),
      // Schedule and source positions are zone-allocated; they die with the
      // zone and are never explicitly deleted.
      schedule_(new (zone()) Schedule(zone())),
      source_positions_(new (zone()) SourcePositionTable(graph)),
      machine_(zone(), word, flags, alignment_requirements),
      common_(zone()),
      simplified_(zone()),
      call_descriptor_(call_descriptor),
      target_parameter_(nullptr),
      parameters_(parameter_count(), zone()),
      // Code emission begins in the schedule's start block.
      current_block_(schedule()->start()),
      poisoning_level_(poisoning_level) {
  int param_count = static_cast<int>(parameter_count());
  // Add an extra input for the JSFunction parameter to the start node.
  graph->SetStart(graph->NewNode(common_.Start(param_count + 1)));
  if (call_descriptor->IsJSFunctionCall()) {
    // JS calls additionally receive the closure as an implicit parameter.
    target_parameter_ = AddNode(
        common()->Parameter(Linkage::kJSCallClosureParamIndex), graph->start());
  }
  for (size_t i = 0; i < parameter_count(); ++i) {
    parameters_[i] =
        AddNode(common()->Parameter(static_cast<int>(i)), graph->start());
  }
  graph->SetEnd(graph->NewNode(common_.End(0)));
  source_positions_->AddDecorator();
}
49
SetSourcePosition(const char * file,int line)50 void RawMachineAssembler::SetSourcePosition(const char* file, int line) {
51 int file_id = isolate()->LookupOrAddExternallyCompiledFilename(file);
52 SourcePosition p = SourcePosition::External(line, file_id);
53 DCHECK(p.ExternalLine() == line);
54 source_positions()->SetCurrentPosition(p);
55 }
56
NullConstant()57 Node* RawMachineAssembler::NullConstant() {
58 return HeapConstant(isolate()->factory()->null_value());
59 }
60
UndefinedConstant()61 Node* RawMachineAssembler::UndefinedConstant() {
62 return HeapConstant(isolate()->factory()->undefined_value());
63 }
64
RelocatableIntPtrConstant(intptr_t value,RelocInfo::Mode rmode)65 Node* RawMachineAssembler::RelocatableIntPtrConstant(intptr_t value,
66 RelocInfo::Mode rmode) {
67 return kSystemPointerSize == 8
68 ? RelocatableInt64Constant(value, rmode)
69 : RelocatableInt32Constant(static_cast<int>(value), rmode);
70 }
71
OptimizedAllocate(Node * size,AllocationType allocation,AllowLargeObjects allow_large_objects)72 Node* RawMachineAssembler::OptimizedAllocate(
73 Node* size, AllocationType allocation,
74 AllowLargeObjects allow_large_objects) {
75 return AddNode(
76 simplified()->AllocateRaw(Type::Any(), allocation, allow_large_objects),
77 size);
78 }
79
// Finalizes and hands out the schedule for testing purposes. The phase
// order below is required: the CFG must be well-formed before special RPO
// is computed, and deferred marks are propagated on the RPO result. After
// this call the assembler is invalidated (schedule_ is nulled out).
Schedule* RawMachineAssembler::ExportForTest() {
  // Compute the correct codegen order.
  DCHECK(schedule_->rpo_order()->empty());
  if (FLAG_trace_turbo_scheduler) {
    PrintF("--- RAW SCHEDULE -------------------------------------------\n");
    StdoutStream{} << *schedule_;
  }
  schedule_->EnsureCFGWellFormedness();
  Scheduler::ComputeSpecialRPO(zone(), schedule_);
  schedule_->PropagateDeferredMark();
  if (FLAG_trace_turbo_scheduler) {
    PrintF("--- EDGE SPLIT AND PROPAGATED DEFERRED SCHEDULE ------------\n");
    StdoutStream{} << *schedule_;
  }
  // Invalidate RawMachineAssembler.
  source_positions_->RemoveDecorator();
  Schedule* schedule = schedule_;
  schedule_ = nullptr;
  return schedule;
}
100
// Converts the scheduled code back into a plain graph suitable for the
// optimizing pipeline: the CFG is normalized and peephole-optimized, RPO is
// computed, and MakeReschedulable re-attaches effect/control edges to the
// graph's nodes. Afterwards the assembler is invalidated and only the graph
// remains meaningful.
Graph* RawMachineAssembler::ExportForOptimization() {
  // Compute the correct codegen order.
  DCHECK(schedule_->rpo_order()->empty());
  if (FLAG_trace_turbo_scheduler) {
    PrintF("--- RAW SCHEDULE -------------------------------------------\n");
    StdoutStream{} << *schedule_;
  }
  schedule_->EnsureCFGWellFormedness();
  OptimizeControlFlow(schedule_, graph(), common());
  Scheduler::ComputeSpecialRPO(zone(), schedule_);
  if (FLAG_trace_turbo_scheduler) {
    PrintF("--- SCHEDULE BEFORE GRAPH CREATION -------------------------\n");
    StdoutStream{} << *schedule_;
  }
  MakeReschedulable();
  // Invalidate RawMachineAssembler.
  schedule_ = nullptr;
  return graph();
}
120
// Runs two peephole optimizations over the schedule's CFG until a fixed
// point is reached:
//   1. merge a block into its predecessor when it is the unique target of a
//      goto, and
//   2. clone a block that consists solely of a phi plus a branch on that phi
//      into each of its predecessors, substituting the matching phi input.
// New nodes are allocated in |graph|; removed blocks leave null slots in the
// schedule's all_blocks list.
void RawMachineAssembler::OptimizeControlFlow(Schedule* schedule, Graph* graph,
                                              CommonOperatorBuilder* common) {
  for (bool changed = true; changed;) {
    changed = false;
    for (size_t i = 0; i < schedule->all_blocks()->size(); ++i) {
      BasicBlock* block = (*schedule->all_blocks())[i];
      // Blocks cleared by ClearBlockById below show up as null entries.
      if (block == nullptr) continue;

      // Short-circuit a goto if the succeeding block is not a control-flow
      // merge. This is not really useful on its own since graph construction
      // has the same effect, but combining blocks improves the pattern-match
      // on their structure below.
      if (block->control() == BasicBlock::kGoto) {
        DCHECK_EQ(block->SuccessorCount(), 1);
        BasicBlock* successor = block->SuccessorAt(0);
        if (successor->PredecessorCount() == 1) {
          DCHECK_EQ(successor->PredecessorAt(0), block);
          // Move all of the successor's nodes into |block|, then take over
          // its control terminator and outgoing edges.
          for (Node* node : *successor) {
            schedule->SetBlockForNode(nullptr, node);
            schedule->AddNode(block, node);
          }
          block->set_control(successor->control());
          Node* control_input = successor->control_input();
          block->set_control_input(control_input);
          if (control_input) {
            schedule->SetBlockForNode(block, control_input);
          }
          // Deferredness is sticky: merging a deferred block taints us.
          if (successor->deferred()) block->set_deferred(true);
          block->ClearSuccessors();
          schedule->MoveSuccessors(successor, block);
          schedule->ClearBlockById(successor->id());
          changed = true;
          // Revisit the merged block; it may now match a pattern again.
          --i;
          continue;
        }
      }
      // Block-cloning in the simple case where a block consists only of a phi
      // node and a branch on that phi. This just duplicates the branch block
      // for each predecessor, replacing the phi node with the corresponding phi
      // input.
      if (block->control() == BasicBlock::kBranch && block->NodeCount() == 1) {
        Node* phi = block->NodeAt(0);
        if (phi->opcode() != IrOpcode::kPhi) continue;
        Node* branch = block->control_input();
        DCHECK_EQ(branch->opcode(), IrOpcode::kBranch);
        if (NodeProperties::GetValueInput(branch, 0) != phi) continue;
        // Only safe if the branch is the phi's sole use.
        if (phi->UseCount() != 1) continue;
        DCHECK_EQ(phi->op()->ValueInputCount(), block->PredecessorCount());

        // Turn projection blocks into normal blocks.
        DCHECK_EQ(block->SuccessorCount(), 2);
        BasicBlock* true_block = block->SuccessorAt(0);
        BasicBlock* false_block = block->SuccessorAt(1);
        DCHECK_EQ(true_block->NodeAt(0)->opcode(), IrOpcode::kIfTrue);
        DCHECK_EQ(false_block->NodeAt(0)->opcode(), IrOpcode::kIfFalse);
        (*true_block->begin())->Kill();
        true_block->RemoveNode(true_block->begin());
        (*false_block->begin())->Kill();
        false_block->RemoveNode(false_block->begin());
        true_block->ClearPredecessors();
        false_block->ClearPredecessors();

        size_t arity = block->PredecessorCount();
        for (size_t i = 0; i < arity; ++i) {
          // Give each predecessor its own copy of the branch, fed by the phi
          // input that corresponds to that predecessor.
          BasicBlock* predecessor = block->PredecessorAt(i);
          predecessor->ClearSuccessors();
          if (block->deferred()) predecessor->set_deferred(true);
          Node* branch_clone = graph->CloneNode(branch);
          int phi_input = static_cast<int>(i);
          NodeProperties::ReplaceValueInput(
              branch_clone, NodeProperties::GetValueInput(phi, phi_input), 0);
          BasicBlock* new_true_block = schedule->NewBasicBlock();
          BasicBlock* new_false_block = schedule->NewBasicBlock();
          new_true_block->AddNode(
              graph->NewNode(common->IfTrue(), branch_clone));
          new_false_block->AddNode(
              graph->NewNode(common->IfFalse(), branch_clone));
          schedule->AddGoto(new_true_block, true_block);
          schedule->AddGoto(new_false_block, false_block);
          DCHECK_EQ(predecessor->control(), BasicBlock::kGoto);
          predecessor->set_control(BasicBlock::kNone);
          schedule->AddBranch(predecessor, branch_clone, new_true_block,
                              new_false_block);
        }
        branch->Kill();
        schedule->ClearBlockById(block->id());
        changed = true;
        continue;
      }
    }
  }
}
213
// Re-attaches effect and control dependencies to the graph's nodes based on
// the block structure of schedule_, so the graph can be scheduled again from
// scratch by a later pipeline phase. Walks the blocks in RPO threading a
// current effect/control chain through each block's nodes, then fixes up
// loop headers so every Loop/EffectPhi has exactly two inputs (entry and
// back-edge).
void RawMachineAssembler::MakeReschedulable() {
  // Side tables indexed by basic-block id: the final control and effect
  // node produced by each block, consumed by its successors.
  std::vector<Node*> block_final_control(schedule_->all_blocks_.size());
  std::vector<Node*> block_final_effect(schedule_->all_blocks_.size());

  struct LoopHeader {
    BasicBlock* block;
    Node* loop_node;
    Node* effect_phi;
  };
  std::vector<LoopHeader> loop_headers;

  // These are hoisted outside of the loop to avoid re-allocation.
  std::vector<Node*> merge_inputs;
  std::vector<Node*> effect_phi_inputs;

  for (BasicBlock* block : *schedule_->rpo_order()) {
    Node* current_control;
    Node* current_effect;
    if (block == schedule_->start()) {
      current_control = current_effect = graph()->start();
    } else if (block == schedule_->end()) {
      // NOTE(review): on this path current_control/current_effect remain
      // uninitialized before the common tail below writes them into the
      // side tables; this appears to assume the end block has no nodes, no
      // terminator, and that its side-table entries are never read -- the
      // deferred-marking call below would also see an uninitialized value
      // if the end block were marked deferred. Confirm.
      for (size_t i = 0; i < block->PredecessorCount(); ++i) {
        NodeProperties::MergeControlToEnd(
            graph(), common(), block->PredecessorAt(i)->control_input());
      }
    } else if (block->IsLoopHeader()) {
      // The graph()->start() inputs are just placeholders until we computed the
      // real back-edges and re-structure the control flow so the loop has
      // exactly two predecessors.
      current_control = graph()->NewNode(common()->Loop(2), graph()->start(),
                                         graph()->start());
      current_effect =
          graph()->NewNode(common()->EffectPhi(2), graph()->start(),
                           graph()->start(), current_control);

      // Terminate keeps the loop alive in the graph even if it never exits.
      Node* terminate = graph()->NewNode(common()->Terminate(), current_effect,
                                         current_control);
      NodeProperties::MergeControlToEnd(graph(), common(), terminate);
      loop_headers.push_back(
          LoopHeader{block, current_control, current_effect});
    } else if (block->PredecessorCount() == 1) {
      // Straight-line control flow: simply continue the predecessor's
      // effect/control chain.
      BasicBlock* predecessor = block->PredecessorAt(0);
      DCHECK_LT(predecessor->rpo_number(), block->rpo_number());
      current_effect = block_final_effect[predecessor->id().ToSize()];
      current_control = block_final_control[predecessor->id().ToSize()];
    } else {
      // Create control merge nodes and effect phis for all predecessor blocks.
      merge_inputs.clear();
      effect_phi_inputs.clear();
      int predecessor_count = static_cast<int>(block->PredecessorCount());
      for (int i = 0; i < predecessor_count; ++i) {
        BasicBlock* predecessor = block->PredecessorAt(i);
        DCHECK_LT(predecessor->rpo_number(), block->rpo_number());
        merge_inputs.push_back(block_final_control[predecessor->id().ToSize()]);
        effect_phi_inputs.push_back(
            block_final_effect[predecessor->id().ToSize()]);
      }
      current_control = graph()->NewNode(common()->Merge(predecessor_count),
                                         static_cast<int>(merge_inputs.size()),
                                         merge_inputs.data());
      // An EffectPhi takes its controlling merge as an extra last input.
      effect_phi_inputs.push_back(current_control);
      current_effect = graph()->NewNode(
          common()->EffectPhi(predecessor_count),
          static_cast<int>(effect_phi_inputs.size()), effect_phi_inputs.data());
    }

    // Wires |node| into the running effect/control chain: existing phi or
    // projection inputs are replaced in place, other nodes get the inputs
    // appended; nodes producing effect/control advance the chain.
    auto update_current_control_and_effect = [&](Node* node) {
      bool existing_effect_and_control =
          IrOpcode::IsIfProjectionOpcode(node->opcode()) ||
          IrOpcode::IsPhiOpcode(node->opcode());
      if (node->op()->EffectInputCount() > 0) {
        DCHECK_EQ(1, node->op()->EffectInputCount());
        if (existing_effect_and_control) {
          NodeProperties::ReplaceEffectInput(node, current_effect);
        } else {
          node->AppendInput(graph()->zone(), current_effect);
        }
      }
      if (node->op()->ControlInputCount() > 0) {
        DCHECK_EQ(1, node->op()->ControlInputCount());
        if (existing_effect_and_control) {
          NodeProperties::ReplaceControlInput(node, current_control);
        } else {
          node->AppendInput(graph()->zone(), current_control);
        }
      }
      if (node->op()->EffectOutputCount() > 0) {
        DCHECK_EQ(1, node->op()->EffectOutputCount());
        current_effect = node;
      }
      if (node->op()->ControlOutputCount() > 0) {
        current_control = node;
      }
    };

    for (Node* node : *block) {
      update_current_control_and_effect(node);
    }
    if (block->deferred()) MarkControlDeferred(current_control);

    // The block terminator (branch, return, ...) is not part of the node
    // list and must be wired in separately.
    if (Node* block_terminator = block->control_input()) {
      update_current_control_and_effect(block_terminator);
    }

    block_final_effect[block->id().ToSize()] = current_effect;
    block_final_control[block->id().ToSize()] = current_control;
  }

  // Fix-up loop backedges and re-structure control flow so that loop nodes have
  // exactly two control predecessors.
  for (const LoopHeader& loop_header : loop_headers) {
    BasicBlock* block = loop_header.block;
    std::vector<BasicBlock*> loop_entries;
    std::vector<BasicBlock*> loop_backedges;
    for (size_t i = 0; i < block->PredecessorCount(); ++i) {
      BasicBlock* predecessor = block->PredecessorAt(i);
      if (block->LoopContains(predecessor)) {
        loop_backedges.push_back(predecessor);
      } else {
        // RPO lists all loop entries before any back-edge.
        DCHECK(loop_backedges.empty());
        loop_entries.push_back(predecessor);
      }
    }
    DCHECK(!loop_entries.empty());
    DCHECK(!loop_backedges.empty());

    // Collapse multiple entries (resp. back-edges) into a single merge and
    // effect phi each, so the loop header ends up binary.
    int entrance_count = static_cast<int>(loop_entries.size());
    int backedge_count = static_cast<int>(loop_backedges.size());
    Node* control_loop_entry = CreateNodeFromPredecessors(
        loop_entries, block_final_control, common()->Merge(entrance_count), {});
    Node* control_backedge =
        CreateNodeFromPredecessors(loop_backedges, block_final_control,
                                   common()->Merge(backedge_count), {});
    Node* effect_loop_entry = CreateNodeFromPredecessors(
        loop_entries, block_final_effect, common()->EffectPhi(entrance_count),
        {control_loop_entry});
    Node* effect_backedge = CreateNodeFromPredecessors(
        loop_backedges, block_final_effect, common()->EffectPhi(backedge_count),
        {control_backedge});

    // Replace the placeholder start() inputs installed above.
    loop_header.loop_node->ReplaceInput(0, control_loop_entry);
    loop_header.loop_node->ReplaceInput(1, control_backedge);
    loop_header.effect_phi->ReplaceInput(0, effect_loop_entry);
    loop_header.effect_phi->ReplaceInput(1, effect_backedge);

    // Value phis in the header must match the now-binary control structure.
    for (Node* node : *block) {
      if (node->opcode() == IrOpcode::kPhi) {
        MakePhiBinary(node, static_cast<int>(loop_entries.size()),
                      control_loop_entry, control_backedge);
      }
    }
  }
}
367
CreateNodeFromPredecessors(const std::vector<BasicBlock * > & predecessors,const std::vector<Node * > & sidetable,const Operator * op,const std::vector<Node * > & additional_inputs)368 Node* RawMachineAssembler::CreateNodeFromPredecessors(
369 const std::vector<BasicBlock*>& predecessors,
370 const std::vector<Node*>& sidetable, const Operator* op,
371 const std::vector<Node*>& additional_inputs) {
372 if (predecessors.size() == 1) {
373 return sidetable[predecessors.front()->id().ToSize()];
374 }
375 std::vector<Node*> inputs;
376 for (BasicBlock* predecessor : predecessors) {
377 inputs.push_back(sidetable[predecessor->id().ToSize()]);
378 }
379 for (Node* additional_input : additional_inputs) {
380 inputs.push_back(additional_input);
381 }
382 return graph()->NewNode(op, static_cast<int>(inputs.size()), inputs.data());
383 }
384
// Rewrites an n-ary |phi| into a binary phi whose two inputs summarize the
// value inputs before and after |split_point|. The first group (loop
// entries) is folded into a sub-phi controlled by |left_control|, the second
// group (back-edges) into one controlled by |right_control|. Used by
// MakeReschedulable after loop headers have been made binary.
void RawMachineAssembler::MakePhiBinary(Node* phi, int split_point,
                                        Node* left_control,
                                        Node* right_control) {
  int value_count = phi->op()->ValueInputCount();
  // Already binary; nothing to do.
  if (value_count == 2) return;
  DCHECK_LT(split_point, value_count);
  DCHECK_GT(split_point, 0);

  MachineRepresentation rep = PhiRepresentationOf(phi->op());
  int left_input_count = split_point;
  int right_input_count = value_count - split_point;

  Node* left_input;
  if (left_input_count == 1) {
    // A single value needs no sub-phi.
    left_input = NodeProperties::GetValueInput(phi, 0);
  } else {
    std::vector<Node*> inputs;
    for (int i = 0; i < left_input_count; ++i) {
      inputs.push_back(NodeProperties::GetValueInput(phi, i));
    }
    // Control input comes last, after the value inputs.
    inputs.push_back(left_control);
    left_input =
        graph()->NewNode(common()->Phi(rep, static_cast<int>(left_input_count)),
                         static_cast<int>(inputs.size()), inputs.data());
  }

  Node* right_input;
  if (right_input_count == 1) {
    right_input = NodeProperties::GetValueInput(phi, split_point);
  } else {
    std::vector<Node*> inputs;
    for (int i = split_point; i < value_count; ++i) {
      inputs.push_back(NodeProperties::GetValueInput(phi, i));
    }
    inputs.push_back(right_control);
    right_input = graph()->NewNode(
        common()->Phi(rep, static_cast<int>(right_input_count)),
        static_cast<int>(inputs.size()), inputs.data());
  }

  // Shrink the original phi to (left, right, control) and retag its
  // operator; the control input must be preserved as the last input.
  Node* control = NodeProperties::GetControlInput(phi);
  phi->TrimInputCount(3);
  phi->ReplaceInput(0, left_input);
  phi->ReplaceInput(1, right_input);
  phi->ReplaceInput(2, control);
  NodeProperties::ChangeOp(phi, common()->Phi(rep, 2));
}
432
// Propagates "deferred" (unlikely to execute) information from a scheduled
// block back onto branch hints in the graph. Walks up the control chain
// from |control_node| until it finds the branch responsible for entering
// the deferred code and flips its hint so the deferred side is predicted
// not-taken.
void RawMachineAssembler::MarkControlDeferred(Node* control_node) {
  BranchHint new_branch_hint;
  Node* responsible_branch = nullptr;
  while (responsible_branch == nullptr) {
    switch (control_node->opcode()) {
      case IrOpcode::kIfException:
        // IfException projections are deferred by default.
        return;
      case IrOpcode::kIfSuccess:
        control_node = NodeProperties::GetControlInput(control_node);
        continue;
      case IrOpcode::kIfValue: {
        // Switch cases carry their hint on the projection itself.
        IfValueParameters parameters = IfValueParametersOf(control_node->op());
        if (parameters.hint() != BranchHint::kFalse) {
          NodeProperties::ChangeOp(
              control_node, common()->IfValue(parameters.value(),
                                              parameters.comparison_order(),
                                              BranchHint::kFalse));
        }
        return;
      }
      case IrOpcode::kIfDefault:
        if (BranchHintOf(control_node->op()) != BranchHint::kFalse) {
          NodeProperties::ChangeOp(control_node,
                                   common()->IfDefault(BranchHint::kFalse));
        }
        return;
      case IrOpcode::kIfTrue: {
        Node* branch = NodeProperties::GetControlInput(control_node);
        BranchHint hint = BranchOperatorInfoOf(branch->op()).hint;
        if (hint == BranchHint::kTrue) {
          // The other possibility is also deferred, so the responsible branch
          // has to be before.
          control_node = NodeProperties::GetControlInput(branch);
          continue;
        }
        // Deferred code on the true side => predict false.
        new_branch_hint = BranchHint::kFalse;
        responsible_branch = branch;
        break;
      }
      case IrOpcode::kIfFalse: {
        Node* branch = NodeProperties::GetControlInput(control_node);
        BranchHint hint = BranchOperatorInfoOf(branch->op()).hint;
        if (hint == BranchHint::kFalse) {
          // The other possibility is also deferred, so the responsible branch
          // has to be before.
          control_node = NodeProperties::GetControlInput(branch);
          continue;
        }
        // Deferred code on the false side => predict true.
        new_branch_hint = BranchHint::kTrue;
        responsible_branch = branch;
        break;
      }
      case IrOpcode::kMerge:
        // All paths into a deferred merge are deferred; recurse into each.
        for (int i = 0; i < control_node->op()->ControlInputCount(); ++i) {
          MarkControlDeferred(NodeProperties::GetControlInput(control_node, i));
        }
        return;
      case IrOpcode::kLoop:
        // Follow the loop entry edge (input 0), not the back-edge.
        control_node = NodeProperties::GetControlInput(control_node, 0);
        continue;
      case IrOpcode::kBranch:
      case IrOpcode::kSwitch:
        // The walk only ever arrives at branches via their projections.
        UNREACHABLE();
      case IrOpcode::kStart:
        return;
      default:
        DCHECK_EQ(1, control_node->op()->ControlInputCount());
        control_node = NodeProperties::GetControlInput(control_node);
        continue;
    }
  }

  BranchOperatorInfo info = BranchOperatorInfoOf(responsible_branch->op());
  if (info.hint == new_branch_hint) return;
  NodeProperties::ChangeOp(
      responsible_branch,
      common()->Branch(new_branch_hint, info.is_safety_check));
}
512
TargetParameter()513 Node* RawMachineAssembler::TargetParameter() {
514 DCHECK_NOT_NULL(target_parameter_);
515 return target_parameter_;
516 }
517
Parameter(size_t index)518 Node* RawMachineAssembler::Parameter(size_t index) {
519 DCHECK_LT(index, parameter_count());
520 return parameters_[index];
521 }
522
523
Goto(RawMachineLabel * label)524 void RawMachineAssembler::Goto(RawMachineLabel* label) {
525 DCHECK(current_block_ != schedule()->end());
526 schedule()->AddGoto(CurrentBlock(), Use(label));
527 current_block_ = nullptr;
528 }
529
530
Branch(Node * condition,RawMachineLabel * true_val,RawMachineLabel * false_val)531 void RawMachineAssembler::Branch(Node* condition, RawMachineLabel* true_val,
532 RawMachineLabel* false_val) {
533 DCHECK(current_block_ != schedule()->end());
534 Node* branch = MakeNode(
535 common()->Branch(BranchHint::kNone, IsSafetyCheck::kNoSafetyCheck), 1,
536 &condition);
537 BasicBlock* true_block = schedule()->NewBasicBlock();
538 BasicBlock* false_block = schedule()->NewBasicBlock();
539 schedule()->AddBranch(CurrentBlock(), branch, true_block, false_block);
540
541 true_block->AddNode(MakeNode(common()->IfTrue(), 1, &branch));
542 schedule()->AddGoto(true_block, Use(true_val));
543
544 false_block->AddNode(MakeNode(common()->IfFalse(), 1, &branch));
545 schedule()->AddGoto(false_block, Use(false_val));
546
547 current_block_ = nullptr;
548 }
549
Continuations(Node * call,RawMachineLabel * if_success,RawMachineLabel * if_exception)550 void RawMachineAssembler::Continuations(Node* call, RawMachineLabel* if_success,
551 RawMachineLabel* if_exception) {
552 DCHECK_NOT_NULL(schedule_);
553 DCHECK_NOT_NULL(current_block_);
554 schedule()->AddCall(CurrentBlock(), call, Use(if_success), Use(if_exception));
555 current_block_ = nullptr;
556 }
557
Switch(Node * index,RawMachineLabel * default_label,const int32_t * case_values,RawMachineLabel ** case_labels,size_t case_count)558 void RawMachineAssembler::Switch(Node* index, RawMachineLabel* default_label,
559 const int32_t* case_values,
560 RawMachineLabel** case_labels,
561 size_t case_count) {
562 DCHECK_NE(schedule()->end(), current_block_);
563 size_t succ_count = case_count + 1;
564 Node* switch_node = MakeNode(common()->Switch(succ_count), 1, &index);
565 BasicBlock** succ_blocks = zone()->NewArray<BasicBlock*>(succ_count);
566 for (size_t index = 0; index < case_count; ++index) {
567 int32_t case_value = case_values[index];
568 BasicBlock* case_block = schedule()->NewBasicBlock();
569 Node* case_node =
570 graph()->NewNode(common()->IfValue(case_value), switch_node);
571 schedule()->AddNode(case_block, case_node);
572 schedule()->AddGoto(case_block, Use(case_labels[index]));
573 succ_blocks[index] = case_block;
574 }
575 BasicBlock* default_block = schedule()->NewBasicBlock();
576 Node* default_node = graph()->NewNode(common()->IfDefault(), switch_node);
577 schedule()->AddNode(default_block, default_node);
578 schedule()->AddGoto(default_block, Use(default_label));
579 succ_blocks[case_count] = default_block;
580 schedule()->AddSwitch(CurrentBlock(), switch_node, succ_blocks, succ_count);
581 current_block_ = nullptr;
582 }
583
Return(Node * value)584 void RawMachineAssembler::Return(Node* value) {
585 Node* values[] = {Int32Constant(0), value};
586 Node* ret = MakeNode(common()->Return(1), 2, values);
587 schedule()->AddReturn(CurrentBlock(), ret);
588 current_block_ = nullptr;
589 }
590
Return(Node * v1,Node * v2)591 void RawMachineAssembler::Return(Node* v1, Node* v2) {
592 Node* values[] = {Int32Constant(0), v1, v2};
593 Node* ret = MakeNode(common()->Return(2), 3, values);
594 schedule()->AddReturn(CurrentBlock(), ret);
595 current_block_ = nullptr;
596 }
597
Return(Node * v1,Node * v2,Node * v3)598 void RawMachineAssembler::Return(Node* v1, Node* v2, Node* v3) {
599 Node* values[] = {Int32Constant(0), v1, v2, v3};
600 Node* ret = MakeNode(common()->Return(3), 4, values);
601 schedule()->AddReturn(CurrentBlock(), ret);
602 current_block_ = nullptr;
603 }
604
Return(Node * v1,Node * v2,Node * v3,Node * v4)605 void RawMachineAssembler::Return(Node* v1, Node* v2, Node* v3, Node* v4) {
606 Node* values[] = {Int32Constant(0), v1, v2, v3, v4};
607 Node* ret = MakeNode(common()->Return(4), 5, values);
608 schedule()->AddReturn(CurrentBlock(), ret);
609 current_block_ = nullptr;
610 }
611
Return(int count,Node * vs[])612 void RawMachineAssembler::Return(int count, Node* vs[]) {
613 using Node_ptr = Node*;
614 Node** values = new Node_ptr[count + 1];
615 values[0] = Int32Constant(0);
616 for (int i = 0; i < count; ++i) values[i + 1] = vs[i];
617 Node* ret = MakeNode(common()->Return(count), count + 1, values);
618 schedule()->AddReturn(CurrentBlock(), ret);
619 current_block_ = nullptr;
620 delete[] values;
621 }
622
PopAndReturn(Node * pop,Node * value)623 void RawMachineAssembler::PopAndReturn(Node* pop, Node* value) {
624 Node* values[] = {pop, value};
625 Node* ret = MakeNode(common()->Return(1), 2, values);
626 schedule()->AddReturn(CurrentBlock(), ret);
627 current_block_ = nullptr;
628 }
629
PopAndReturn(Node * pop,Node * v1,Node * v2)630 void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2) {
631 Node* values[] = {pop, v1, v2};
632 Node* ret = MakeNode(common()->Return(2), 3, values);
633 schedule()->AddReturn(CurrentBlock(), ret);
634 current_block_ = nullptr;
635 }
636
PopAndReturn(Node * pop,Node * v1,Node * v2,Node * v3)637 void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2,
638 Node* v3) {
639 Node* values[] = {pop, v1, v2, v3};
640 Node* ret = MakeNode(common()->Return(3), 4, values);
641 schedule()->AddReturn(CurrentBlock(), ret);
642 current_block_ = nullptr;
643 }
644
PopAndReturn(Node * pop,Node * v1,Node * v2,Node * v3,Node * v4)645 void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2, Node* v3,
646 Node* v4) {
647 Node* values[] = {pop, v1, v2, v3, v4};
648 Node* ret = MakeNode(common()->Return(4), 5, values);
649 schedule()->AddReturn(CurrentBlock(), ret);
650 current_block_ = nullptr;
651 }
652
AbortCSAAssert(Node * message)653 void RawMachineAssembler::AbortCSAAssert(Node* message) {
654 AddNode(machine()->AbortCSAAssert(), message);
655 }
656
DebugBreak()657 void RawMachineAssembler::DebugBreak() { AddNode(machine()->DebugBreak()); }
658
Unreachable()659 void RawMachineAssembler::Unreachable() {
660 Node* ret = MakeNode(common()->Throw(), 0, nullptr);
661 schedule()->AddThrow(CurrentBlock(), ret);
662 current_block_ = nullptr;
663 }
664
Comment(const std::string & msg)665 void RawMachineAssembler::Comment(const std::string& msg) {
666 size_t length = msg.length() + 1;
667 char* zone_buffer = zone()->NewArray<char>(length);
668 MemCopy(zone_buffer, msg.c_str(), length);
669 AddNode(machine()->Comment(zone_buffer));
670 }
671
StaticAssert(Node * value)672 void RawMachineAssembler::StaticAssert(Node* value) {
673 AddNode(common()->StaticAssert(), value);
674 }
675
CallN(CallDescriptor * call_descriptor,int input_count,Node * const * inputs)676 Node* RawMachineAssembler::CallN(CallDescriptor* call_descriptor,
677 int input_count, Node* const* inputs) {
678 DCHECK(!call_descriptor->NeedsFrameState());
679 // +1 is for target.
680 DCHECK_EQ(input_count, call_descriptor->ParameterCount() + 1);
681 return AddNode(common()->Call(call_descriptor), input_count, inputs);
682 }
683
CallNWithFrameState(CallDescriptor * call_descriptor,int input_count,Node * const * inputs)684 Node* RawMachineAssembler::CallNWithFrameState(CallDescriptor* call_descriptor,
685 int input_count,
686 Node* const* inputs) {
687 DCHECK(call_descriptor->NeedsFrameState());
688 // +2 is for target and frame state.
689 DCHECK_EQ(input_count, call_descriptor->ParameterCount() + 2);
690 return AddNode(common()->Call(call_descriptor), input_count, inputs);
691 }
692
TailCallN(CallDescriptor * call_descriptor,int input_count,Node * const * inputs)693 void RawMachineAssembler::TailCallN(CallDescriptor* call_descriptor,
694 int input_count, Node* const* inputs) {
695 // +1 is for target.
696 DCHECK_EQ(input_count, call_descriptor->ParameterCount() + 1);
697 Node* tail_call =
698 MakeNode(common()->TailCall(call_descriptor), input_count, inputs);
699 schedule()->AddTailCall(CurrentBlock(), tail_call);
700 current_block_ = nullptr;
701 }
702
703 namespace {
704
705 enum FunctionDescriptorMode { kHasFunctionDescriptor, kNoFunctionDescriptor };
706
CallCFunctionImpl(RawMachineAssembler * rasm,Node * function,MachineType return_type,std::initializer_list<RawMachineAssembler::CFunctionArg> args,bool caller_saved_regs,SaveFPRegsMode mode,FunctionDescriptorMode no_function_descriptor)707 Node* CallCFunctionImpl(
708 RawMachineAssembler* rasm, Node* function, MachineType return_type,
709 std::initializer_list<RawMachineAssembler::CFunctionArg> args,
710 bool caller_saved_regs, SaveFPRegsMode mode,
711 FunctionDescriptorMode no_function_descriptor) {
712 static constexpr std::size_t kNumCArgs = 10;
713
714 MachineSignature::Builder builder(rasm->zone(), 1, args.size());
715 builder.AddReturn(return_type);
716 for (const auto& arg : args) builder.AddParam(arg.first);
717
718 bool caller_saved_fp_regs = caller_saved_regs && (mode == kSaveFPRegs);
719 CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
720 if (caller_saved_regs) flags |= CallDescriptor::kCallerSavedRegisters;
721 if (caller_saved_fp_regs) flags |= CallDescriptor::kCallerSavedFPRegisters;
722 if (no_function_descriptor) flags |= CallDescriptor::kNoFunctionDescriptor;
723 auto call_descriptor =
724 Linkage::GetSimplifiedCDescriptor(rasm->zone(), builder.Build(), flags);
725
726 base::SmallVector<Node*, kNumCArgs> nodes(args.size() + 1);
727 nodes[0] = function;
728 std::transform(
729 args.begin(), args.end(), std::next(nodes.begin()),
730 [](const RawMachineAssembler::CFunctionArg& arg) { return arg.second; });
731
732 auto common = rasm->common();
733 return rasm->AddNode(common->Call(call_descriptor),
734 static_cast<int>(nodes.size()), nodes.begin());
735 }
736
737 } // namespace
738
CallCFunction(Node * function,MachineType return_type,std::initializer_list<RawMachineAssembler::CFunctionArg> args)739 Node* RawMachineAssembler::CallCFunction(
740 Node* function, MachineType return_type,
741 std::initializer_list<RawMachineAssembler::CFunctionArg> args) {
742 return CallCFunctionImpl(this, function, return_type, args, false,
743 kDontSaveFPRegs, kHasFunctionDescriptor);
744 }
745
CallCFunctionWithoutFunctionDescriptor(Node * function,MachineType return_type,std::initializer_list<RawMachineAssembler::CFunctionArg> args)746 Node* RawMachineAssembler::CallCFunctionWithoutFunctionDescriptor(
747 Node* function, MachineType return_type,
748 std::initializer_list<RawMachineAssembler::CFunctionArg> args) {
749 return CallCFunctionImpl(this, function, return_type, args, false,
750 kDontSaveFPRegs, kNoFunctionDescriptor);
751 }
752
CallCFunctionWithCallerSavedRegisters(Node * function,MachineType return_type,SaveFPRegsMode mode,std::initializer_list<RawMachineAssembler::CFunctionArg> args)753 Node* RawMachineAssembler::CallCFunctionWithCallerSavedRegisters(
754 Node* function, MachineType return_type, SaveFPRegsMode mode,
755 std::initializer_list<RawMachineAssembler::CFunctionArg> args) {
756 return CallCFunctionImpl(this, function, return_type, args, true, mode,
757 kHasFunctionDescriptor);
758 }
759
Use(RawMachineLabel * label)760 BasicBlock* RawMachineAssembler::Use(RawMachineLabel* label) {
761 label->used_ = true;
762 return EnsureBlock(label);
763 }
764
EnsureBlock(RawMachineLabel * label)765 BasicBlock* RawMachineAssembler::EnsureBlock(RawMachineLabel* label) {
766 if (label->block_ == nullptr) {
767 label->block_ = schedule()->NewBasicBlock();
768 }
769 return label->block_;
770 }
771
Bind(RawMachineLabel * label)772 void RawMachineAssembler::Bind(RawMachineLabel* label) {
773 DCHECK_NULL(current_block_);
774 DCHECK(!label->bound_);
775 label->bound_ = true;
776 current_block_ = EnsureBlock(label);
777 current_block_->set_deferred(label->deferred_);
778 }
779
780 #if DEBUG
Bind(RawMachineLabel * label,AssemblerDebugInfo info)781 void RawMachineAssembler::Bind(RawMachineLabel* label,
782 AssemblerDebugInfo info) {
783 if (current_block_ != nullptr) {
784 std::stringstream str;
785 str << "Binding label without closing previous block:"
786 << "\n# label: " << info
787 << "\n# previous block: " << *current_block_;
788 FATAL("%s", str.str().c_str());
789 }
790 Bind(label);
791 current_block_->set_debug_info(info);
792 }
793
PrintCurrentBlock(std::ostream & os)794 void RawMachineAssembler::PrintCurrentBlock(std::ostream& os) {
795 os << CurrentBlock();
796 }
797
SetInitialDebugInformation(AssemblerDebugInfo debug_info)798 void RawMachineAssembler::SetInitialDebugInformation(
799 AssemblerDebugInfo debug_info) {
800 CurrentBlock()->set_debug_info(debug_info);
801 }
802 #endif // DEBUG
803
InsideBlock()804 bool RawMachineAssembler::InsideBlock() { return current_block_ != nullptr; }
805
CurrentBlock()806 BasicBlock* RawMachineAssembler::CurrentBlock() {
807 DCHECK(current_block_);
808 return current_block_;
809 }
810
Phi(MachineRepresentation rep,int input_count,Node * const * inputs)811 Node* RawMachineAssembler::Phi(MachineRepresentation rep, int input_count,
812 Node* const* inputs) {
813 Node** buffer = new (zone()->New(sizeof(Node*) * (input_count + 1)))
814 Node*[input_count + 1];
815 std::copy(inputs, inputs + input_count, buffer);
816 buffer[input_count] = graph()->start();
817 return AddNode(common()->Phi(rep, input_count), input_count + 1, buffer);
818 }
819
AppendPhiInput(Node * phi,Node * new_input)820 void RawMachineAssembler::AppendPhiInput(Node* phi, Node* new_input) {
821 const Operator* op = phi->op();
822 const Operator* new_op = common()->ResizeMergeOrPhi(op, phi->InputCount());
823 phi->InsertInput(zone(), phi->InputCount() - 1, new_input);
824 NodeProperties::ChangeOp(phi, new_op);
825 }
826
AddNode(const Operator * op,int input_count,Node * const * inputs)827 Node* RawMachineAssembler::AddNode(const Operator* op, int input_count,
828 Node* const* inputs) {
829 DCHECK_NOT_NULL(schedule_);
830 DCHECK_NOT_NULL(current_block_);
831 Node* node = MakeNode(op, input_count, inputs);
832 schedule()->AddNode(CurrentBlock(), node);
833 return node;
834 }
835
MakeNode(const Operator * op,int input_count,Node * const * inputs)836 Node* RawMachineAssembler::MakeNode(const Operator* op, int input_count,
837 Node* const* inputs) {
838 // The raw machine assembler nodes do not have effect and control inputs,
839 // so we disable checking input counts here.
840 return graph()->NewNodeUnchecked(op, input_count, inputs);
841 }
842
~RawMachineLabel()843 RawMachineLabel::~RawMachineLabel() {
844 #if DEBUG
845 if (bound_ == used_) return;
846 std::stringstream str;
847 if (bound_) {
848 str << "A label has been bound but it's not used."
849 << "\n# label: " << *block_;
850 } else {
851 str << "A label has been used but it's not bound.";
852 }
853 FATAL("%s", str.str().c_str());
854 #endif // DEBUG
855 }
856
857 } // namespace compiler
858 } // namespace internal
859 } // namespace v8
860