1 // Copyright (c) 2017 The Khronos Group Inc.
2 // Copyright (c) 2017 Valve Corporation
3 // Copyright (c) 2017 LunarG Inc.
4 // Copyright (c) 2018 Google Inc.
5 //
6 // Licensed under the Apache License, Version 2.0 (the "License");
7 // you may not use this file except in compliance with the License.
8 // You may obtain a copy of the License at
9 //
10 // http://www.apache.org/licenses/LICENSE-2.0
11 //
12 // Unless required by applicable law or agreed to in writing, software
13 // distributed under the License is distributed on an "AS IS" BASIS,
14 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 // See the License for the specific language governing permissions and
16 // limitations under the License.
17
#include "source/opt/dead_branch_elim_pass.h"

#include <list>
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>

#include "source/cfa.h"
#include "source/opt/ir_context.h"
#include "source/opt/iterator.h"
#include "source/opt/struct_cfg_analysis.h"
#include "source/util/make_unique.h"
29
30 namespace spvtools {
31 namespace opt {
32
namespace {

// In-operand indices of the true- and false-target labels of an
// OpBranchConditional instruction (in-operand 0 is the condition id).
const uint32_t kBranchCondTrueLabIdInIdx = 1;
const uint32_t kBranchCondFalseLabIdInIdx = 2;

}  // anonymous namespace
39
GetConstCondition(uint32_t condId,bool * condVal)40 bool DeadBranchElimPass::GetConstCondition(uint32_t condId, bool* condVal) {
41 bool condIsConst;
42 Instruction* cInst = get_def_use_mgr()->GetDef(condId);
43 switch (cInst->opcode()) {
44 case SpvOpConstantNull:
45 case SpvOpConstantFalse: {
46 *condVal = false;
47 condIsConst = true;
48 } break;
49 case SpvOpConstantTrue: {
50 *condVal = true;
51 condIsConst = true;
52 } break;
53 case SpvOpLogicalNot: {
54 bool negVal;
55 condIsConst =
56 GetConstCondition(cInst->GetSingleWordInOperand(0), &negVal);
57 if (condIsConst) *condVal = !negVal;
58 } break;
59 default: { condIsConst = false; } break;
60 }
61 return condIsConst;
62 }
63
GetConstInteger(uint32_t selId,uint32_t * selVal)64 bool DeadBranchElimPass::GetConstInteger(uint32_t selId, uint32_t* selVal) {
65 Instruction* sInst = get_def_use_mgr()->GetDef(selId);
66 uint32_t typeId = sInst->type_id();
67 Instruction* typeInst = get_def_use_mgr()->GetDef(typeId);
68 if (!typeInst || (typeInst->opcode() != SpvOpTypeInt)) return false;
69 // TODO(greg-lunarg): Support non-32 bit ints
70 if (typeInst->GetSingleWordInOperand(0) != 32) return false;
71 if (sInst->opcode() == SpvOpConstant) {
72 *selVal = sInst->GetSingleWordInOperand(0);
73 return true;
74 } else if (sInst->opcode() == SpvOpConstantNull) {
75 *selVal = 0;
76 return true;
77 }
78 return false;
79 }
80
AddBranch(uint32_t labelId,BasicBlock * bp)81 void DeadBranchElimPass::AddBranch(uint32_t labelId, BasicBlock* bp) {
82 assert(get_def_use_mgr()->GetDef(labelId) != nullptr);
83 std::unique_ptr<Instruction> newBranch(
84 new Instruction(context(), SpvOpBranch, 0, 0,
85 {{spv_operand_type_t::SPV_OPERAND_TYPE_ID, {labelId}}}));
86 context()->AnalyzeDefUse(&*newBranch);
87 context()->set_instr_block(&*newBranch, bp);
88 bp->AddInstruction(std::move(newBranch));
89 }
90
GetParentBlock(uint32_t id)91 BasicBlock* DeadBranchElimPass::GetParentBlock(uint32_t id) {
92 return context()->get_instr_block(get_def_use_mgr()->GetDef(id));
93 }
94
// Traverses |func| depth-first from its entry block, following only the
// successors that can actually execute, and records every reachable block in
// |live_blocks|. Conditional branches and switches whose condition/selector
// folds to a constant are queued and then rewritten (via SimplifyBranch) into
// single-target branches. Returns true if any branch was simplified.
bool DeadBranchElimPass::MarkLiveBlocks(
    Function* func, std::unordered_set<BasicBlock*>* live_blocks) {
  // Blocks whose terminator will be replaced, paired with the single live
  // target label. Simplification is deferred until the traversal completes.
  std::vector<std::pair<BasicBlock*, uint32_t>> conditions_to_simplify;
  std::unordered_set<BasicBlock*> blocks_with_backedge;
  std::vector<BasicBlock*> stack;
  stack.push_back(&*func->begin());
  bool modified = false;
  while (!stack.empty()) {
    BasicBlock* block = stack.back();
    stack.pop_back();

    // Live blocks doubles as visited set.
    if (!live_blocks->insert(block).second) continue;

    // If |block| is a loop header, record which blocks in its loop branch
    // back to the header, so their terminators are not removed below.
    uint32_t cont_id = block->ContinueBlockIdIfAny();
    if (cont_id != 0) {
      AddBlocksWithBackEdge(cont_id, block->id(), block->MergeBlockIdIfAny(),
                            &blocks_with_backedge);
    }

    Instruction* terminator = block->terminator();
    uint32_t live_lab_id = 0;
    // Check if the terminator has a single valid successor.
    if (terminator->opcode() == SpvOpBranchConditional) {
      bool condVal;
      if (GetConstCondition(terminator->GetSingleWordInOperand(0u), &condVal)) {
        live_lab_id = terminator->GetSingleWordInOperand(
            condVal ? kBranchCondTrueLabIdInIdx : kBranchCondFalseLabIdInIdx);
      }
    } else if (terminator->opcode() == SpvOpSwitch) {
      uint32_t sel_val;
      if (GetConstInteger(terminator->GetSingleWordInOperand(0u), &sel_val)) {
        // Search switch operands for selector value, set live_lab_id to
        // corresponding label, use default if not found.
        // In-operand layout: 0 = selector, 1 = default label, then
        // (case literal, case label) pairs; |icnt| tracks the operand index,
        // so even indices >= 2 are literals and odd indices are labels.
        uint32_t icnt = 0;
        uint32_t case_val;
        terminator->WhileEachInOperand(
            [&icnt, &case_val, &sel_val, &live_lab_id](const uint32_t* idp) {
              if (icnt == 1) {
                // Start with default label.
                live_lab_id = *idp;
              } else if (icnt > 1) {
                if (icnt % 2 == 0) {
                  case_val = *idp;
                } else {
                  if (case_val == sel_val) {
                    live_lab_id = *idp;
                    return false;
                  }
                }
              }
              ++icnt;
              return true;
            });
      }
    }

    // Don't simplify back edges unless it becomes a branch to the header. Every
    // loop must have exactly one back edge to the loop header, so we cannot
    // remove it.
    bool simplify = false;
    if (live_lab_id != 0) {
      if (!blocks_with_backedge.count(block)) {
        // This is not a back edge.
        simplify = true;
      } else {
        const auto& struct_cfg_analysis = context()->GetStructuredCFGAnalysis();
        uint32_t header_id = struct_cfg_analysis->ContainingLoop(block->id());
        if (live_lab_id == header_id) {
          // The new branch will be a branch to the header.
          simplify = true;
        }
      }
    }

    if (simplify) {
      conditions_to_simplify.push_back({block, live_lab_id});
      stack.push_back(GetParentBlock(live_lab_id));
    } else {
      // All successors are live.
      const auto* const_block = block;
      const_block->ForEachSuccessorLabel([&stack, this](const uint32_t label) {
        stack.push_back(GetParentBlock(label));
      });
    }
  }

  // Traverse |conditions_to_simplify| in reverse order. This is done so that
  // we simplify nested constructs before simplifying the constructs that
  // contain them.
  for (auto b = conditions_to_simplify.rbegin();
       b != conditions_to_simplify.rend(); ++b) {
    modified |= SimplifyBranch(b->first, b->second);
  }

  return modified;
}
192
SimplifyBranch(BasicBlock * block,uint32_t live_lab_id)193 bool DeadBranchElimPass::SimplifyBranch(BasicBlock* block,
194 uint32_t live_lab_id) {
195 Instruction* merge_inst = block->GetMergeInst();
196 Instruction* terminator = block->terminator();
197 if (merge_inst && merge_inst->opcode() == SpvOpSelectionMerge) {
198 if (merge_inst->NextNode()->opcode() == SpvOpSwitch &&
199 SwitchHasNestedBreak(block->id())) {
200 if (terminator->NumInOperands() == 2) {
201 // We cannot remove the branch, and it already has a single case, so no
202 // work to do.
203 return false;
204 }
205 // We have to keep the switch because it has a nest break, so we
206 // remove all cases except for the live one.
207 Instruction::OperandList new_operands;
208 new_operands.push_back(terminator->GetInOperand(0));
209 new_operands.push_back({SPV_OPERAND_TYPE_ID, {live_lab_id}});
210 terminator->SetInOperands(move(new_operands));
211 context()->UpdateDefUse(terminator);
212 } else {
213 // Check if the merge instruction is still needed because of a
214 // non-nested break from the construct. Move the merge instruction if
215 // it is still needed.
216 StructuredCFGAnalysis* cfg_analysis =
217 context()->GetStructuredCFGAnalysis();
218 Instruction* first_break = FindFirstExitFromSelectionMerge(
219 live_lab_id, merge_inst->GetSingleWordInOperand(0),
220 cfg_analysis->LoopMergeBlock(live_lab_id),
221 cfg_analysis->LoopContinueBlock(live_lab_id),
222 cfg_analysis->SwitchMergeBlock(live_lab_id));
223
224 AddBranch(live_lab_id, block);
225 context()->KillInst(terminator);
226 if (first_break == nullptr) {
227 context()->KillInst(merge_inst);
228 } else {
229 merge_inst->RemoveFromList();
230 first_break->InsertBefore(std::unique_ptr<Instruction>(merge_inst));
231 context()->set_instr_block(merge_inst,
232 context()->get_instr_block(first_break));
233 }
234 }
235 } else {
236 AddBranch(live_lab_id, block);
237 context()->KillInst(terminator);
238 }
239 return true;
240 }
241
MarkUnreachableStructuredTargets(const std::unordered_set<BasicBlock * > & live_blocks,std::unordered_set<BasicBlock * > * unreachable_merges,std::unordered_map<BasicBlock *,BasicBlock * > * unreachable_continues)242 void DeadBranchElimPass::MarkUnreachableStructuredTargets(
243 const std::unordered_set<BasicBlock*>& live_blocks,
244 std::unordered_set<BasicBlock*>* unreachable_merges,
245 std::unordered_map<BasicBlock*, BasicBlock*>* unreachable_continues) {
246 for (auto block : live_blocks) {
247 if (auto merge_id = block->MergeBlockIdIfAny()) {
248 BasicBlock* merge_block = GetParentBlock(merge_id);
249 if (!live_blocks.count(merge_block)) {
250 unreachable_merges->insert(merge_block);
251 }
252 if (auto cont_id = block->ContinueBlockIdIfAny()) {
253 BasicBlock* cont_block = GetParentBlock(cont_id);
254 if (!live_blocks.count(cont_block)) {
255 (*unreachable_continues)[cont_block] = block;
256 }
257 }
258 }
259 }
260 }
261
// Rewrites the OpPhi instructions of the live blocks in |func| so they only
// reference incoming edges that will survive dead-block removal. Edges from
// unreachable continue blocks are kept (with an undef incoming value where
// needed) because the back edge into a loop header must remain. A phi
// reduced to a single incoming value is replaced by that value and deleted.
// Returns true if any phi was changed or removed.
bool DeadBranchElimPass::FixPhiNodesInLiveBlocks(
    Function* func, const std::unordered_set<BasicBlock*>& live_blocks,
    const std::unordered_map<BasicBlock*, BasicBlock*>& unreachable_continues) {
  bool modified = false;
  for (auto& block : *func) {
    if (live_blocks.count(&block)) {
      for (auto iter = block.begin(); iter != block.end();) {
        // Phis appear only at the start of a block; stop at the first
        // non-phi instruction.
        if (iter->opcode() != SpvOpPhi) {
          break;
        }

        bool changed = false;
        bool backedge_added = false;
        Instruction* inst = &*iter;
        std::vector<Operand> operands;
        // Build a complete set of operands (not just input operands). Start
        // with type and result id operands.
        operands.push_back(inst->GetOperand(0u));
        operands.push_back(inst->GetOperand(1u));
        // Iterate through the incoming labels and determine which to keep
        // and/or modify. If there in an unreachable continue block, there will
        // be an edge from that block to the header. We need to keep it to
        // maintain the structured control flow. If the header has more that 2
        // incoming edges, then the OpPhi must have an entry for that edge.
        // However, if there is only one other incoming edge, the OpPhi can be
        // eliminated.
        // In-operand layout of OpPhi: (value, predecessor label) pairs, so
        // |i| indexes labels and |i - 1| the corresponding value.
        for (uint32_t i = 1; i < inst->NumInOperands(); i += 2) {
          BasicBlock* inc = GetParentBlock(inst->GetSingleWordInOperand(i));
          auto cont_iter = unreachable_continues.find(inc);
          if (cont_iter != unreachable_continues.end() &&
              cont_iter->second == &block && inst->NumInOperands() > 4) {
            if (get_def_use_mgr()
                    ->GetDef(inst->GetSingleWordInOperand(i - 1))
                    ->opcode() == SpvOpUndef) {
              // Already undef incoming value, no change necessary.
              operands.push_back(inst->GetInOperand(i - 1));
              operands.push_back(inst->GetInOperand(i));
              backedge_added = true;
            } else {
              // Replace incoming value with undef if this phi exists in the
              // loop header. Otherwise, this edge is not live since the
              // unreachable continue block will be replaced with an
              // unconditional branch to the header only.
              operands.emplace_back(
                  SPV_OPERAND_TYPE_ID,
                  std::initializer_list<uint32_t>{Type2Undef(inst->type_id())});
              operands.push_back(inst->GetInOperand(i));
              changed = true;
              backedge_added = true;
            }
          } else if (live_blocks.count(inc) && inc->IsSuccessor(&block)) {
            // Keep live incoming edge.
            operands.push_back(inst->GetInOperand(i - 1));
            operands.push_back(inst->GetInOperand(i));
          } else {
            // Remove incoming edge.
            changed = true;
          }
        }

        if (changed) {
          modified = true;
          uint32_t continue_id = block.ContinueBlockIdIfAny();
          if (!backedge_added && continue_id != 0 &&
              unreachable_continues.count(GetParentBlock(continue_id)) &&
              operands.size() > 4) {
            // Changed the backedge to branch from the continue block instead
            // of a successor of the continue block. Add an entry to the phi to
            // provide an undef for the continue block. Since the successor of
            // the continue must also be unreachable (dominated by the continue
            // block), any entry for the original backedge has been removed
            // from the phi operands.
            operands.emplace_back(
                SPV_OPERAND_TYPE_ID,
                std::initializer_list<uint32_t>{Type2Undef(inst->type_id())});
            operands.emplace_back(SPV_OPERAND_TYPE_ID,
                                  std::initializer_list<uint32_t>{continue_id});
          }

          // Either replace the phi with a single value or rebuild the phi out
          // of |operands|.
          //
          // We always have type and result id operands. So this phi has a
          // single source if there are two more operands beyond those.
          if (operands.size() == 4) {
            // First input data operands is at index 2.
            uint32_t replId = operands[2u].words[0];
            context()->KillNamesAndDecorates(inst->result_id());
            context()->ReplaceAllUsesWith(inst->result_id(), replId);
            iter = context()->KillInst(&*inst);
          } else {
            // We've rewritten the operands, so first instruct the def/use
            // manager to forget uses in the phi before we replace them. After
            // replacing operands update the def/use manager by re-analyzing
            // the used ids in this phi.
            get_def_use_mgr()->EraseUseRecordsOfOperandIds(inst);
            inst->ReplaceOperands(operands);
            get_def_use_mgr()->AnalyzeInstUse(inst);
            ++iter;
          }
        } else {
          ++iter;
        }
      }
    }
  }

  return modified;
}
371
// Deletes the blocks of |func| that are not in |live_blocks|, with two
// structured-CFG exceptions that are kept in a degenerate form instead of
// being erased: an unreachable continue block is reduced to a lone
// unconditional branch back to its loop header, and an unreachable merge
// block is reduced to a lone OpUnreachable. Returns true if anything was
// modified.
bool DeadBranchElimPass::EraseDeadBlocks(
    Function* func, const std::unordered_set<BasicBlock*>& live_blocks,
    const std::unordered_set<BasicBlock*>& unreachable_merges,
    const std::unordered_map<BasicBlock*, BasicBlock*>& unreachable_continues) {
  bool modified = false;
  for (auto ebi = func->begin(); ebi != func->end();) {
    if (unreachable_continues.count(&*ebi)) {
      // Note: |unreachable_continues| maps the continue block to its loop
      // header, so |cont_id| here is the header's id (the branch target).
      uint32_t cont_id = unreachable_continues.find(&*ebi)->second->id();
      // Skip the rewrite if the block is already in the desired form:
      // a single OpBranch to the header.
      if (ebi->begin() != ebi->tail() ||
          ebi->terminator()->opcode() != SpvOpBranch ||
          ebi->terminator()->GetSingleWordInOperand(0u) != cont_id) {
        // Make unreachable, but leave the label.
        KillAllInsts(&*ebi, false);
        // Add unconditional branch to header.
        assert(unreachable_continues.count(&*ebi));
        ebi->AddInstruction(MakeUnique<Instruction>(
            context(), SpvOpBranch, 0, 0,
            std::initializer_list<Operand>{{SPV_OPERAND_TYPE_ID, {cont_id}}}));
        get_def_use_mgr()->AnalyzeInstUse(&*ebi->tail());
        context()->set_instr_block(&*ebi->tail(), &*ebi);
        modified = true;
      }
      ++ebi;
    } else if (unreachable_merges.count(&*ebi)) {
      // Skip the rewrite if the block is already a bare OpUnreachable.
      if (ebi->begin() != ebi->tail() ||
          ebi->terminator()->opcode() != SpvOpUnreachable) {
        // Make unreachable, but leave the label.
        KillAllInsts(&*ebi, false);
        // Add unreachable terminator.
        ebi->AddInstruction(
            MakeUnique<Instruction>(context(), SpvOpUnreachable, 0, 0,
                                    std::initializer_list<Operand>{}));
        context()->AnalyzeUses(ebi->terminator());
        context()->set_instr_block(ebi->terminator(), &*ebi);
        modified = true;
      }
      ++ebi;
    } else if (!live_blocks.count(&*ebi)) {
      // Kill this block.
      KillAllInsts(&*ebi);
      ebi = ebi.Erase();
      modified = true;
    } else {
      ++ebi;
    }
  }

  return modified;
}
421
EliminateDeadBranches(Function * func)422 bool DeadBranchElimPass::EliminateDeadBranches(Function* func) {
423 if (func->IsDeclaration()) {
424 return false;
425 }
426
427 bool modified = false;
428 std::unordered_set<BasicBlock*> live_blocks;
429 modified |= MarkLiveBlocks(func, &live_blocks);
430
431 std::unordered_set<BasicBlock*> unreachable_merges;
432 std::unordered_map<BasicBlock*, BasicBlock*> unreachable_continues;
433 MarkUnreachableStructuredTargets(live_blocks, &unreachable_merges,
434 &unreachable_continues);
435 modified |= FixPhiNodesInLiveBlocks(func, live_blocks, unreachable_continues);
436 modified |= EraseDeadBlocks(func, live_blocks, unreachable_merges,
437 unreachable_continues);
438
439 return modified;
440 }
441
FixBlockOrder()442 void DeadBranchElimPass::FixBlockOrder() {
443 context()->BuildInvalidAnalyses(IRContext::kAnalysisCFG |
444 IRContext::kAnalysisDominatorAnalysis);
445 // Reorders blocks according to DFS of dominator tree.
446 ProcessFunction reorder_dominators = [this](Function* function) {
447 DominatorAnalysis* dominators = context()->GetDominatorAnalysis(function);
448 std::vector<BasicBlock*> blocks;
449 for (auto iter = dominators->GetDomTree().begin();
450 iter != dominators->GetDomTree().end(); ++iter) {
451 if (iter->id() != 0) {
452 blocks.push_back(iter->bb_);
453 }
454 }
455 for (uint32_t i = 1; i < blocks.size(); ++i) {
456 function->MoveBasicBlockToAfter(blocks[i]->id(), blocks[i - 1]);
457 }
458 return true;
459 };
460
461 // Reorders blocks according to structured order.
462 ProcessFunction reorder_structured = [this](Function* function) {
463 std::list<BasicBlock*> order;
464 context()->cfg()->ComputeStructuredOrder(function, &*function->begin(),
465 &order);
466 std::vector<BasicBlock*> blocks;
467 for (auto block : order) {
468 blocks.push_back(block);
469 }
470 for (uint32_t i = 1; i < blocks.size(); ++i) {
471 function->MoveBasicBlockToAfter(blocks[i]->id(), blocks[i - 1]);
472 }
473 return true;
474 };
475
476 // Structured order is more intuitive so use it where possible.
477 if (context()->get_feature_mgr()->HasCapability(SpvCapabilityShader)) {
478 context()->ProcessReachableCallTree(reorder_structured);
479 } else {
480 context()->ProcessReachableCallTree(reorder_dominators);
481 }
482 }
483
Process()484 Pass::Status DeadBranchElimPass::Process() {
485 // Do not process if module contains OpGroupDecorate. Additional
486 // support required in KillNamesAndDecorates().
487 // TODO(greg-lunarg): Add support for OpGroupDecorate
488 for (auto& ai : get_module()->annotations())
489 if (ai.opcode() == SpvOpGroupDecorate) return Status::SuccessWithoutChange;
490 // Process all entry point functions
491 ProcessFunction pfn = [this](Function* fp) {
492 return EliminateDeadBranches(fp);
493 };
494 bool modified = context()->ProcessReachableCallTree(pfn);
495 if (modified) FixBlockOrder();
496 return modified ? Status::SuccessWithChange : Status::SuccessWithoutChange;
497 }
498
// Starting from |start_block_id|, walks forward through the control flow of
// the selection construct whose merge block is |merge_block_id|, looking for
// the first branch that conditionally exits the construct without being in a
// nested construct or being a construct header itself. |loop_merge_id|,
// |loop_continue_id| and |switch_merge_id| are the corresponding targets of
// the enclosing loop/switch constructs for |start_block_id| (as reported by
// StructuredCFGAnalysis), used to recognize branches that leave those
// constructs instead. Returns the exiting branch instruction, or nullptr if
// the walk reaches the merge region without finding one.
Instruction* DeadBranchElimPass::FindFirstExitFromSelectionMerge(
    uint32_t start_block_id, uint32_t merge_block_id, uint32_t loop_merge_id,
    uint32_t loop_continue_id, uint32_t switch_merge_id) {
  // To find the "first" exit, we follow branches looking for a conditional
  // branch that is not in a nested construct and is not the header of a new
  // construct. We follow the control flow from |start_block_id| to find the
  // first one.

  while (start_block_id != merge_block_id && start_block_id != loop_merge_id &&
         start_block_id != loop_continue_id) {
    BasicBlock* start_block = context()->get_instr_block(start_block_id);
    Instruction* branch = start_block->terminator();
    uint32_t next_block_id = 0;
    switch (branch->opcode()) {
      case SpvOpBranchConditional:
        // If |start_block| is itself a construct header, skip over the whole
        // nested construct by continuing at its merge block.
        next_block_id = start_block->MergeBlockIdIfAny();
        if (next_block_id == 0) {
          // If a possible target is the |loop_merge_id| or |loop_continue_id|,
          // which are not the current merge node, then we continue the search
          // with the other target.
          for (uint32_t i = 1; i < 3; i++) {
            if (branch->GetSingleWordInOperand(i) == loop_merge_id &&
                loop_merge_id != merge_block_id) {
              next_block_id = branch->GetSingleWordInOperand(3 - i);
              break;
            }
            if (branch->GetSingleWordInOperand(i) == loop_continue_id &&
                loop_continue_id != merge_block_id) {
              next_block_id = branch->GetSingleWordInOperand(3 - i);
              break;
            }
            if (branch->GetSingleWordInOperand(i) == switch_merge_id &&
                switch_merge_id != merge_block_id) {
              next_block_id = branch->GetSingleWordInOperand(3 - i);
              break;
            }
          }

          if (next_block_id == 0) {
            // Neither target leaves an enclosing construct: this is the
            // first conditional exit from the selection.
            return branch;
          }
        }
        break;
      case SpvOpSwitch:
        next_block_id = start_block->MergeBlockIdIfAny();
        if (next_block_id == 0) {
          // A switch with no merge instructions can have at most 5 targets:
          // a. |merge_block_id|
          // b. |loop_merge_id|
          // c. |loop_continue_id|
          // d. |switch_merge_id|
          // e. 1 block inside the current region.
          //
          // Note that because this is a switch, |merge_block_id| must equal
          // |switch_merge_id|.
          //
          // This leads to a number of cases of what to do.
          //
          // 1. Does not jump to a block inside of the current construct. In
          // this case, there is not conditional break, so we should return
          // |nullptr|.
          //
          // 2. Jumps to |merge_block_id| and a block inside the current
          // construct. In this case, this branch conditionally break to the
          // end of the current construct, so return the current branch.
          //
          // 3. Otherwise, this branch may break, but not to the current merge
          // block. So we continue with the block that is inside the loop.
          bool found_break = false;
          for (uint32_t i = 1; i < branch->NumInOperands(); i += 2) {
            uint32_t target = branch->GetSingleWordInOperand(i);
            if (target == merge_block_id) {
              found_break = true;
            } else if (target != loop_merge_id && target != loop_continue_id) {
              next_block_id = branch->GetSingleWordInOperand(i);
            }
          }

          if (next_block_id == 0) {
            // Case 1.
            return nullptr;
          }

          if (found_break) {
            // Case 2.
            return branch;
          }

          // The fall through is case 3.
        }
        break;
      case SpvOpBranch:
        // Need to check if this is the header of a loop nested in the
        // selection construct.
        next_block_id = start_block->MergeBlockIdIfAny();
        if (next_block_id == 0) {
          next_block_id = branch->GetSingleWordInOperand(0);
        }
        break;
      default:
        // Any other terminator (return, kill, unreachable, ...) ends the
        // walk without finding a conditional exit.
        return nullptr;
    }
    start_block_id = next_block_id;
  }
  return nullptr;
}
605
AddBlocksWithBackEdge(uint32_t cont_id,uint32_t header_id,uint32_t merge_id,std::unordered_set<BasicBlock * > * blocks_with_back_edges)606 void DeadBranchElimPass::AddBlocksWithBackEdge(
607 uint32_t cont_id, uint32_t header_id, uint32_t merge_id,
608 std::unordered_set<BasicBlock*>* blocks_with_back_edges) {
609 std::unordered_set<uint32_t> visited;
610 visited.insert(cont_id);
611 visited.insert(header_id);
612 visited.insert(merge_id);
613
614 std::vector<uint32_t> work_list;
615 work_list.push_back(cont_id);
616
617 while (!work_list.empty()) {
618 uint32_t bb_id = work_list.back();
619 work_list.pop_back();
620
621 BasicBlock* bb = context()->get_instr_block(bb_id);
622
623 bool has_back_edge = false;
624 bb->ForEachSuccessorLabel([header_id, &visited, &work_list,
625 &has_back_edge](uint32_t* succ_label_id) {
626 if (visited.insert(*succ_label_id).second) {
627 work_list.push_back(*succ_label_id);
628 }
629 if (*succ_label_id == header_id) {
630 has_back_edge = true;
631 }
632 });
633
634 if (has_back_edge) {
635 blocks_with_back_edges->insert(bb);
636 }
637 }
638 }
639
SwitchHasNestedBreak(uint32_t switch_header_id)640 bool DeadBranchElimPass::SwitchHasNestedBreak(uint32_t switch_header_id) {
641 std::vector<BasicBlock*> block_in_construct;
642 BasicBlock* start_block = context()->get_instr_block(switch_header_id);
643 uint32_t merge_block_id = start_block->MergeBlockIdIfAny();
644
645 StructuredCFGAnalysis* cfg_analysis = context()->GetStructuredCFGAnalysis();
646 return !get_def_use_mgr()->WhileEachUser(
647 merge_block_id,
648 [this, cfg_analysis, switch_header_id](Instruction* inst) {
649 if (!inst->IsBranch()) {
650 return true;
651 }
652
653 BasicBlock* bb = context()->get_instr_block(inst);
654 if (bb->id() == switch_header_id) {
655 return true;
656 }
657 return (cfg_analysis->ContainingConstruct(inst) == switch_header_id &&
658 bb->GetMergeInst() == nullptr);
659 });
660 }
661
662 } // namespace opt
663 } // namespace spvtools
664