1 //===- MachineVerifier.cpp - Machine Code Verifier ------------------------===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // Pass to verify generated machine code. The following is checked:
11 //
12 // Operand counts: All explicit operands must be present.
13 //
14 // Register classes: All physical and virtual register operands must be
15 // compatible with the register class required by the instruction descriptor.
16 //
17 // Register live intervals: Registers must be defined only once, and must be
18 // defined before use.
19 //
20 // The machine code verifier is enabled from LLVMTargetMachine.cpp with the
21 // command-line option -verify-machineinstrs, or by defining the environment
22 // variable LLVM_VERIFY_MACHINEINSTRS to the name of a file that will receive
23 // the verifier errors.
24 //===----------------------------------------------------------------------===//
25
26 #include "llvm/ADT/BitVector.h"
27 #include "llvm/ADT/DenseMap.h"
28 #include "llvm/ADT/DenseSet.h"
29 #include "llvm/ADT/DepthFirstIterator.h"
30 #include "llvm/ADT/STLExtras.h"
31 #include "llvm/ADT/SetOperations.h"
32 #include "llvm/ADT/SmallPtrSet.h"
33 #include "llvm/ADT/SmallVector.h"
34 #include "llvm/ADT/StringRef.h"
35 #include "llvm/ADT/Twine.h"
36 #include "llvm/Analysis/EHPersonalities.h"
37 #include "llvm/CodeGen/GlobalISel/RegisterBank.h"
38 #include "llvm/CodeGen/LiveInterval.h"
39 #include "llvm/CodeGen/LiveIntervals.h"
40 #include "llvm/CodeGen/LiveStacks.h"
41 #include "llvm/CodeGen/LiveVariables.h"
42 #include "llvm/CodeGen/MachineBasicBlock.h"
43 #include "llvm/CodeGen/MachineFrameInfo.h"
44 #include "llvm/CodeGen/MachineFunction.h"
45 #include "llvm/CodeGen/MachineFunctionPass.h"
46 #include "llvm/CodeGen/MachineInstr.h"
47 #include "llvm/CodeGen/MachineInstrBundle.h"
48 #include "llvm/CodeGen/MachineMemOperand.h"
49 #include "llvm/CodeGen/MachineOperand.h"
50 #include "llvm/CodeGen/MachineRegisterInfo.h"
51 #include "llvm/CodeGen/PseudoSourceValue.h"
52 #include "llvm/CodeGen/SlotIndexes.h"
53 #include "llvm/CodeGen/StackMaps.h"
54 #include "llvm/CodeGen/TargetInstrInfo.h"
55 #include "llvm/CodeGen/TargetOpcodes.h"
56 #include "llvm/CodeGen/TargetRegisterInfo.h"
57 #include "llvm/CodeGen/TargetSubtargetInfo.h"
58 #include "llvm/IR/BasicBlock.h"
59 #include "llvm/IR/Function.h"
60 #include "llvm/IR/InlineAsm.h"
61 #include "llvm/IR/Instructions.h"
62 #include "llvm/MC/LaneBitmask.h"
63 #include "llvm/MC/MCAsmInfo.h"
64 #include "llvm/MC/MCInstrDesc.h"
65 #include "llvm/MC/MCRegisterInfo.h"
66 #include "llvm/MC/MCTargetOptions.h"
67 #include "llvm/Pass.h"
68 #include "llvm/Support/Casting.h"
69 #include "llvm/Support/ErrorHandling.h"
70 #include "llvm/Support/LowLevelTypeImpl.h"
71 #include "llvm/Support/MathExtras.h"
72 #include "llvm/Support/raw_ostream.h"
73 #include "llvm/Target/TargetMachine.h"
74 #include <algorithm>
75 #include <cassert>
76 #include <cstddef>
77 #include <cstdint>
78 #include <iterator>
79 #include <string>
80 #include <utility>
81
82 using namespace llvm;
83
84 namespace {
85
struct MachineVerifier {
  MachineVerifier(Pass *pass, const char *b) : PASS(pass), Banner(b) {}

  /// Run all checks on MF and return the number of errors found.
  unsigned verify(MachineFunction &MF);

  /// The pass we run as part of, if any; used to query available analyses
  /// (LiveIntervals, LiveVariables, LiveStacks, SlotIndexes).
  Pass *const PASS;
  /// Optional banner printed before the first reported error.
  const char *Banner;

  // Cached per-function pointers, set up at the start of verify().
  const MachineFunction *MF;
  const TargetMachine *TM;
  const TargetInstrInfo *TII;
  const TargetRegisterInfo *TRI;
  const MachineRegisterInfo *MRI;

  /// Number of errors reported so far; returned from verify().
  unsigned foundErrors;

  // Avoid querying the MachineFunctionProperties for each operand.
  bool isFunctionRegBankSelected;
  bool isFunctionSelected;

  using RegVector = SmallVector<unsigned, 16>;
  using RegMaskVector = SmallVector<const uint32_t *, 4>;
  using RegSet = DenseSet<unsigned>;
  using RegMap = DenseMap<unsigned, const MachineInstr *>;
  using BlockSet = SmallPtrSet<const MachineBasicBlock *, 8>;

  /// First unpredicated terminator seen in the current block, used to flag
  /// non-terminators that appear after it.
  const MachineInstr *FirstTerminator;
  /// All blocks of the current function, for CFG membership checks.
  BlockSet FunctionBlocks;

  /// Reserved registers for the current function (frozen copy or recomputed).
  BitVector regsReserved;
  /// Physical registers currently live while walking a block.
  RegSet regsLive;
  /// Registers defined/killed/marked dead by the instruction being visited.
  RegVector regsDefined, regsDead, regsKilled;
  /// Register masks seen on the instruction being visited.
  RegMaskVector regMasks;

  /// Slot index of the last instruction visited; used to check ordering.
  SlotIndex lastIndex;

  // Add Reg and any sub-registers to RV
  void addRegWithSubRegs(RegVector &RV, unsigned Reg) {
    RV.push_back(Reg);
    if (TargetRegisterInfo::isPhysicalRegister(Reg))
      for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
        RV.push_back(*SubRegs);
  }

  /// Per-basic-block liveness bookkeeping used by the dataflow checks.
  struct BBInfo {
    // Is this MBB reachable from the MF entry point?
    bool reachable = false;

    // Vregs that must be live in because they are used without being
    // defined. Map value is the user.
    RegMap vregsLiveIn;

    // Regs killed in MBB. They may be defined again, and will then be in both
    // regsKilled and regsLiveOut.
    RegSet regsKilled;

    // Regs defined in MBB and live out. Note that vregs passing through may
    // be live out without being mentioned here.
    RegSet regsLiveOut;

    // Vregs that pass through MBB untouched. This set is disjoint from
    // regsKilled and regsLiveOut.
    RegSet vregsPassed;

    // Vregs that must pass through MBB because they are needed by a successor
    // block. This set is disjoint from regsLiveOut.
    RegSet vregsRequired;

    // Set versions of block's predecessor and successor lists.
    BlockSet Preds, Succs;

    BBInfo() = default;

    // Add register to vregsPassed if it belongs there. Return true if
    // anything changed.
    bool addPassed(unsigned Reg) {
      if (!TargetRegisterInfo::isVirtualRegister(Reg))
        return false;
      if (regsKilled.count(Reg) || regsLiveOut.count(Reg))
        return false;
      return vregsPassed.insert(Reg).second;
    }

    // Same for a full set.
    bool addPassed(const RegSet &RS) {
      bool changed = false;
      for (RegSet::const_iterator I = RS.begin(), E = RS.end(); I != E; ++I)
        if (addPassed(*I))
          changed = true;
      return changed;
    }

    // Add register to vregsRequired if it belongs there. Return true if
    // anything changed.
    bool addRequired(unsigned Reg) {
      if (!TargetRegisterInfo::isVirtualRegister(Reg))
        return false;
      if (regsLiveOut.count(Reg))
        return false;
      return vregsRequired.insert(Reg).second;
    }

    // Same for a full set.
    bool addRequired(const RegSet &RS) {
      bool changed = false;
      for (RegSet::const_iterator I = RS.begin(), E = RS.end(); I != E; ++I)
        if (addRequired(*I))
          changed = true;
      return changed;
    }

    // Same for a full map.
    bool addRequired(const RegMap &RM) {
      bool changed = false;
      for (RegMap::const_iterator I = RM.begin(), E = RM.end(); I != E; ++I)
        if (addRequired(I->first))
          changed = true;
      return changed;
    }

    // Live-out registers are either in regsLiveOut or vregsPassed.
    bool isLiveOut(unsigned Reg) const {
      return regsLiveOut.count(Reg) || vregsPassed.count(Reg);
    }
  };

  // Extra register info per MBB.
  DenseMap<const MachineBasicBlock*, BBInfo> MBBInfoMap;

  // Is Reg in the reserved-register set computed for this function?
  bool isReserved(unsigned Reg) {
    return Reg < regsReserved.size() && regsReserved.test(Reg);
  }

  // Is Reg a physical register that the allocator may assign (in an
  // allocatable class and not reserved)?
  bool isAllocatable(unsigned Reg) const {
    return Reg < TRI->getNumRegs() && TRI->isInAllocatableClass(Reg) &&
           !regsReserved.test(Reg);
  }

  // Analysis information if available
  LiveVariables *LiveVars;
  LiveIntervals *LiveInts;
  LiveStacks *LiveStks;
  SlotIndexes *Indexes;

  // Visitor hooks, called in order by verify() as it walks the function,
  // its blocks, bundles, instructions, and operands.
  void visitMachineFunctionBefore();
  void visitMachineBasicBlockBefore(const MachineBasicBlock *MBB);
  void visitMachineBundleBefore(const MachineInstr *MI);
  void visitMachineInstrBefore(const MachineInstr *MI);
  void visitMachineOperand(const MachineOperand *MO, unsigned MONum);
  void visitMachineInstrAfter(const MachineInstr *MI);
  void visitMachineBundleAfter(const MachineInstr *MI);
  void visitMachineBasicBlockAfter(const MachineBasicBlock *MBB);
  void visitMachineFunctionAfter();

  // Error reporting. Each overload prints the message plus increasingly
  // specific context (function, block, instruction, operand).
  void report(const char *msg, const MachineFunction *MF);
  void report(const char *msg, const MachineBasicBlock *MBB);
  void report(const char *msg, const MachineInstr *MI);
  void report(const char *msg, const MachineOperand *MO, unsigned MONum,
              LLT MOVRegType = LLT{});

  // Supplemental context lines printed after a report() call.
  void report_context(const LiveInterval &LI) const;
  void report_context(const LiveRange &LR, unsigned VRegUnit,
                      LaneBitmask LaneMask) const;
  void report_context(const LiveRange::Segment &S) const;
  void report_context(const VNInfo &VNI) const;
  void report_context(SlotIndex Pos) const;
  void report_context_liverange(const LiveRange &LR) const;
  void report_context_lanemask(LaneBitmask LaneMask) const;
  void report_context_vreg(unsigned VReg) const;
  void report_context_vreg_regunit(unsigned VRegOrUnit) const;

  void verifyInlineAsm(const MachineInstr *MI);

  void checkLiveness(const MachineOperand *MO, unsigned MONum);
  void checkLivenessAtUse(const MachineOperand *MO, unsigned MONum,
                          SlotIndex UseIdx, const LiveRange &LR, unsigned VRegOrUnit,
                          LaneBitmask LaneMask = LaneBitmask::getNone());
  void checkLivenessAtDef(const MachineOperand *MO, unsigned MONum,
                          SlotIndex DefIdx, const LiveRange &LR, unsigned VRegOrUnit,
                          LaneBitmask LaneMask = LaneBitmask::getNone());

  void markReachable(const MachineBasicBlock *MBB);
  void calcRegsPassed();
  void checkPHIOps(const MachineBasicBlock &MBB);

  void calcRegsRequired();
  void verifyLiveVariables();
  void verifyLiveIntervals();
  void verifyLiveInterval(const LiveInterval&);
  void verifyLiveRangeValue(const LiveRange&, const VNInfo*, unsigned,
                            LaneBitmask);
  void verifyLiveRangeSegment(const LiveRange&,
                              const LiveRange::const_iterator I, unsigned,
                              LaneBitmask);
  void verifyLiveRange(const LiveRange&, unsigned,
                       LaneBitmask LaneMask = LaneBitmask::getNone());

  void verifyStackFrame();

  void verifySlotIndexes() const;
  void verifyProperties(const MachineFunction &MF);
};
287
struct MachineVerifierPass : public MachineFunctionPass {
  static char ID; // Pass ID, replacement for typeid

  // Banner forwarded to MachineVerifier and printed before the first error.
  const std::string Banner;

  MachineVerifierPass(std::string banner = std::string())
      : MachineFunctionPass(ID), Banner(std::move(banner)) {
    initializeMachineVerifierPassPass(*PassRegistry::getPassRegistry());
  }

  // The verifier only inspects the function, so all analyses are preserved.
  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesAll();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  // Run the verifier; abort compilation if any errors were found.
  // Always returns false because the function is never modified.
  bool runOnMachineFunction(MachineFunction &MF) override {
    unsigned FoundErrors = MachineVerifier(this, Banner.c_str()).verify(MF);
    if (FoundErrors)
      report_fatal_error("Found "+Twine(FoundErrors)+" machine code errors.");
    return false;
  }
};
310
311 } // end anonymous namespace
312
char MachineVerifierPass::ID = 0;

INITIALIZE_PASS(MachineVerifierPass, "machineverifier",
                "Verify generated machine code", false, false)

// Factory used by LLVMTargetMachine when -verify-machineinstrs is given.
FunctionPass *llvm::createMachineVerifierPass(const std::string &Banner) {
  return new MachineVerifierPass(Banner);
}
321
verify(Pass * p,const char * Banner,bool AbortOnErrors) const322 bool MachineFunction::verify(Pass *p, const char *Banner, bool AbortOnErrors)
323 const {
324 MachineFunction &MF = const_cast<MachineFunction&>(*this);
325 unsigned FoundErrors = MachineVerifier(p, Banner).verify(MF);
326 if (AbortOnErrors && FoundErrors)
327 report_fatal_error("Found "+Twine(FoundErrors)+" machine code errors.");
328 return FoundErrors == 0;
329 }
330
verifySlotIndexes() const331 void MachineVerifier::verifySlotIndexes() const {
332 if (Indexes == nullptr)
333 return;
334
335 // Ensure the IdxMBB list is sorted by slot indexes.
336 SlotIndex Last;
337 for (SlotIndexes::MBBIndexIterator I = Indexes->MBBIndexBegin(),
338 E = Indexes->MBBIndexEnd(); I != E; ++I) {
339 assert(!Last.isValid() || I->first > Last);
340 Last = I->first;
341 }
342 }
343
verifyProperties(const MachineFunction & MF)344 void MachineVerifier::verifyProperties(const MachineFunction &MF) {
345 // If a pass has introduced virtual registers without clearing the
346 // NoVRegs property (or set it without allocating the vregs)
347 // then report an error.
348 if (MF.getProperties().hasProperty(
349 MachineFunctionProperties::Property::NoVRegs) &&
350 MRI->getNumVirtRegs())
351 report("Function has NoVRegs property but there are VReg operands", &MF);
352 }
353
// Main driver: cache per-function state, pick up available analyses, then
// walk every block/bundle/instruction/operand, calling the visit* hooks in
// order. Returns the number of errors found.
unsigned MachineVerifier::verify(MachineFunction &MF) {
  foundErrors = 0;

  this->MF = &MF;
  TM = &MF.getTarget();
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getSubtarget().getRegisterInfo();
  MRI = &MF.getRegInfo();

  // If GlobalISel failed and fell back, the RegBankSelected/Selected
  // properties are not trustworthy; treat both as unset.
  const bool isFunctionFailedISel = MF.getProperties().hasProperty(
      MachineFunctionProperties::Property::FailedISel);
  isFunctionRegBankSelected =
      !isFunctionFailedISel &&
      MF.getProperties().hasProperty(
          MachineFunctionProperties::Property::RegBankSelected);
  isFunctionSelected = !isFunctionFailedISel &&
                       MF.getProperties().hasProperty(
                           MachineFunctionProperties::Property::Selected);
  LiveVars = nullptr;
  LiveInts = nullptr;
  LiveStks = nullptr;
  Indexes = nullptr;
  if (PASS) {
    LiveInts = PASS->getAnalysisIfAvailable<LiveIntervals>();
    // We don't want to verify LiveVariables if LiveIntervals is available.
    if (!LiveInts)
      LiveVars = PASS->getAnalysisIfAvailable<LiveVariables>();
    LiveStks = PASS->getAnalysisIfAvailable<LiveStacks>();
    Indexes = PASS->getAnalysisIfAvailable<SlotIndexes>();
  }

  verifySlotIndexes();

  verifyProperties(MF);

  visitMachineFunctionBefore();
  for (MachineFunction::const_iterator MFI = MF.begin(), MFE = MF.end();
       MFI!=MFE; ++MFI) {
    visitMachineBasicBlockBefore(&*MFI);
    // Keep track of the current bundle header.
    const MachineInstr *CurBundle = nullptr;
    // Do we expect the next instruction to be part of the same bundle?
    bool InBundle = false;

    for (MachineBasicBlock::const_instr_iterator MBBI = MFI->instr_begin(),
           MBBE = MFI->instr_end(); MBBI != MBBE; ++MBBI) {
      if (MBBI->getParent() != &*MFI) {
        report("Bad instruction parent pointer", &*MFI);
        errs() << "Instruction: " << *MBBI;
        continue;
      }

      // Check for consistent bundle flags: BundledWithPred on this
      // instruction must agree with BundledWithSucc on the previous one.
      if (InBundle && !MBBI->isBundledWithPred())
        report("Missing BundledPred flag, "
               "BundledSucc was set on predecessor",
               &*MBBI);
      if (!InBundle && MBBI->isBundledWithPred())
        report("BundledPred flag is set, "
               "but BundledSucc not set on predecessor",
               &*MBBI);

      // Is this a bundle header?
      if (!MBBI->isInsideBundle()) {
        if (CurBundle)
          visitMachineBundleAfter(CurBundle);
        CurBundle = &*MBBI;
        visitMachineBundleBefore(CurBundle);
      } else if (!CurBundle)
        report("No bundle header", &*MBBI);
      visitMachineInstrBefore(&*MBBI);
      for (unsigned I = 0, E = MBBI->getNumOperands(); I != E; ++I) {
        const MachineInstr &MI = *MBBI;
        const MachineOperand &Op = MI.getOperand(I);
        if (Op.getParent() != &MI) {
          // Make sure to use correct addOperand / RemoveOperand / ChangeTo
          // functions when replacing operands of a MachineInstr.
          report("Instruction has operand with wrong parent set", &MI);
        }

        visitMachineOperand(&Op, I);
      }

      visitMachineInstrAfter(&*MBBI);

      // Was this the last bundled instruction?
      InBundle = MBBI->isBundledWithSucc();
    }
    if (CurBundle)
      visitMachineBundleAfter(CurBundle);
    if (InBundle)
      report("BundledSucc flag set on last instruction in block", &MFI->back());
    visitMachineBasicBlockAfter(&*MFI);
  }
  visitMachineFunctionAfter();

  // Clean up.
  regsLive.clear();
  regsDefined.clear();
  regsDead.clear();
  regsKilled.clear();
  regMasks.clear();
  MBBInfoMap.clear();

  return foundErrors;
}
460
report(const char * msg,const MachineFunction * MF)461 void MachineVerifier::report(const char *msg, const MachineFunction *MF) {
462 assert(MF);
463 errs() << '\n';
464 if (!foundErrors++) {
465 if (Banner)
466 errs() << "# " << Banner << '\n';
467 if (LiveInts != nullptr)
468 LiveInts->print(errs());
469 else
470 MF->print(errs(), Indexes);
471 }
472 errs() << "*** Bad machine code: " << msg << " ***\n"
473 << "- function: " << MF->getName() << "\n";
474 }
475
report(const char * msg,const MachineBasicBlock * MBB)476 void MachineVerifier::report(const char *msg, const MachineBasicBlock *MBB) {
477 assert(MBB);
478 report(msg, MBB->getParent());
479 errs() << "- basic block: " << printMBBReference(*MBB) << ' '
480 << MBB->getName() << " (" << (const void *)MBB << ')';
481 if (Indexes)
482 errs() << " [" << Indexes->getMBBStartIdx(MBB)
483 << ';' << Indexes->getMBBEndIdx(MBB) << ')';
484 errs() << '\n';
485 }
486
report(const char * msg,const MachineInstr * MI)487 void MachineVerifier::report(const char *msg, const MachineInstr *MI) {
488 assert(MI);
489 report(msg, MI->getParent());
490 errs() << "- instruction: ";
491 if (Indexes && Indexes->hasIndex(*MI))
492 errs() << Indexes->getInstructionIndex(*MI) << '\t';
493 MI->print(errs(), /*SkipOpers=*/true);
494 }
495
report(const char * msg,const MachineOperand * MO,unsigned MONum,LLT MOVRegType)496 void MachineVerifier::report(const char *msg, const MachineOperand *MO,
497 unsigned MONum, LLT MOVRegType) {
498 assert(MO);
499 report(msg, MO->getParent());
500 errs() << "- operand " << MONum << ": ";
501 MO->print(errs(), MOVRegType, TRI);
502 errs() << "\n";
503 }
504
// The report_context* helpers print one extra "- key: value" line after a
// report() call, adding liveness-related context to the last error.

void MachineVerifier::report_context(SlotIndex Pos) const {
  errs() << "- at: " << Pos << '\n';
}

void MachineVerifier::report_context(const LiveInterval &LI) const {
  errs() << "- interval: " << LI << '\n';
}

// Print a live range together with the vreg/regunit it belongs to and,
// when non-empty, the lane mask being checked.
void MachineVerifier::report_context(const LiveRange &LR, unsigned VRegUnit,
                                     LaneBitmask LaneMask) const {
  report_context_liverange(LR);
  report_context_vreg_regunit(VRegUnit);
  if (LaneMask.any())
    report_context_lanemask(LaneMask);
}

void MachineVerifier::report_context(const LiveRange::Segment &S) const {
  errs() << "- segment: " << S << '\n';
}

void MachineVerifier::report_context(const VNInfo &VNI) const {
  errs() << "- ValNo: " << VNI.id << " (def " << VNI.def << ")\n";
}

void MachineVerifier::report_context_liverange(const LiveRange &LR) const {
  errs() << "- liverange: " << LR << '\n';
}

void MachineVerifier::report_context_vreg(unsigned VReg) const {
  errs() << "- v. register: " << printReg(VReg, TRI) << '\n';
}

// VRegOrUnit may encode either a virtual register or a register unit;
// dispatch on which it is and print accordingly.
void MachineVerifier::report_context_vreg_regunit(unsigned VRegOrUnit) const {
  if (TargetRegisterInfo::isVirtualRegister(VRegOrUnit)) {
    report_context_vreg(VRegOrUnit);
  } else {
    errs() << "- regunit: " << printRegUnit(VRegOrUnit, TRI) << '\n';
  }
}

void MachineVerifier::report_context_lanemask(LaneBitmask LaneMask) const {
  errs() << "- lanemask: " << PrintLaneMask(LaneMask) << '\n';
}
548
markReachable(const MachineBasicBlock * MBB)549 void MachineVerifier::markReachable(const MachineBasicBlock *MBB) {
550 BBInfo &MInfo = MBBInfoMap[MBB];
551 if (!MInfo.reachable) {
552 MInfo.reachable = true;
553 for (MachineBasicBlock::const_succ_iterator SuI = MBB->succ_begin(),
554 SuE = MBB->succ_end(); SuI != SuE; ++SuI)
555 markReachable(*SuI);
556 }
557 }
558
// Per-function setup: compute the reserved-register set, mark reachable
// blocks, build block/pred/succ sets for CFG checks, and verify the
// register use lists and stack frame.
void MachineVerifier::visitMachineFunctionBefore() {
  lastIndex = SlotIndex();
  // Use the frozen reserved set when register allocation has locked it in;
  // otherwise ask the target to recompute it.
  regsReserved = MRI->reservedRegsFrozen() ? MRI->getReservedRegs()
                                           : TRI->getReservedRegs(*MF);

  if (!MF->empty())
    markReachable(&MF->front());

  // Build a set of the basic blocks in the function.
  FunctionBlocks.clear();
  for (const auto &MBB : *MF) {
    FunctionBlocks.insert(&MBB);
    BBInfo &MInfo = MBBInfoMap[&MBB];

    // Set insertion drops duplicates, so a size mismatch with the raw list
    // means the pred/succ list contains a block more than once.
    MInfo.Preds.insert(MBB.pred_begin(), MBB.pred_end());
    if (MInfo.Preds.size() != MBB.pred_size())
      report("MBB has duplicate entries in its predecessor list.", &MBB);

    MInfo.Succs.insert(MBB.succ_begin(), MBB.succ_end());
    if (MInfo.Succs.size() != MBB.succ_size())
      report("MBB has duplicate entries in its successor list.", &MBB);
  }

  // Check that the register use lists are sane.
  MRI->verifyUseLists();

  if (!MF->empty())
    verifyStackFrame();
}
588
589 // Does iterator point to a and b as the first two elements?
matchPair(MachineBasicBlock::const_succ_iterator i,const MachineBasicBlock * a,const MachineBasicBlock * b)590 static bool matchPair(MachineBasicBlock::const_succ_iterator i,
591 const MachineBasicBlock *a, const MachineBasicBlock *b) {
592 if (*i == a)
593 return *++i == b;
594 if (*i == b)
595 return *++i == a;
596 return false;
597 }
598
599 void
visitMachineBasicBlockBefore(const MachineBasicBlock * MBB)600 MachineVerifier::visitMachineBasicBlockBefore(const MachineBasicBlock *MBB) {
601 FirstTerminator = nullptr;
602
603 if (!MF->getProperties().hasProperty(
604 MachineFunctionProperties::Property::NoPHIs) && MRI->tracksLiveness()) {
605 // If this block has allocatable physical registers live-in, check that
606 // it is an entry block or landing pad.
607 for (const auto &LI : MBB->liveins()) {
608 if (isAllocatable(LI.PhysReg) && !MBB->isEHPad() &&
609 MBB->getIterator() != MBB->getParent()->begin()) {
610 report("MBB has allocatable live-in, but isn't entry or landing-pad.", MBB);
611 }
612 }
613 }
614
615 // Count the number of landing pad successors.
616 SmallPtrSet<MachineBasicBlock*, 4> LandingPadSuccs;
617 for (MachineBasicBlock::const_succ_iterator I = MBB->succ_begin(),
618 E = MBB->succ_end(); I != E; ++I) {
619 if ((*I)->isEHPad())
620 LandingPadSuccs.insert(*I);
621 if (!FunctionBlocks.count(*I))
622 report("MBB has successor that isn't part of the function.", MBB);
623 if (!MBBInfoMap[*I].Preds.count(MBB)) {
624 report("Inconsistent CFG", MBB);
625 errs() << "MBB is not in the predecessor list of the successor "
626 << printMBBReference(*(*I)) << ".\n";
627 }
628 }
629
630 // Check the predecessor list.
631 for (MachineBasicBlock::const_pred_iterator I = MBB->pred_begin(),
632 E = MBB->pred_end(); I != E; ++I) {
633 if (!FunctionBlocks.count(*I))
634 report("MBB has predecessor that isn't part of the function.", MBB);
635 if (!MBBInfoMap[*I].Succs.count(MBB)) {
636 report("Inconsistent CFG", MBB);
637 errs() << "MBB is not in the successor list of the predecessor "
638 << printMBBReference(*(*I)) << ".\n";
639 }
640 }
641
642 const MCAsmInfo *AsmInfo = TM->getMCAsmInfo();
643 const BasicBlock *BB = MBB->getBasicBlock();
644 const Function &F = MF->getFunction();
645 if (LandingPadSuccs.size() > 1 &&
646 !(AsmInfo &&
647 AsmInfo->getExceptionHandlingType() == ExceptionHandling::SjLj &&
648 BB && isa<SwitchInst>(BB->getTerminator())) &&
649 !isScopedEHPersonality(classifyEHPersonality(F.getPersonalityFn())))
650 report("MBB has more than one landing pad successor", MBB);
651
652 // Call AnalyzeBranch. If it succeeds, there several more conditions to check.
653 MachineBasicBlock *TBB = nullptr, *FBB = nullptr;
654 SmallVector<MachineOperand, 4> Cond;
655 if (!TII->analyzeBranch(*const_cast<MachineBasicBlock *>(MBB), TBB, FBB,
656 Cond)) {
657 // Ok, AnalyzeBranch thinks it knows what's going on with this block. Let's
658 // check whether its answers match up with reality.
659 if (!TBB && !FBB) {
660 // Block falls through to its successor.
661 MachineFunction::const_iterator MBBI = MBB->getIterator();
662 ++MBBI;
663 if (MBBI == MF->end()) {
664 // It's possible that the block legitimately ends with a noreturn
665 // call or an unreachable, in which case it won't actually fall
666 // out the bottom of the function.
667 } else if (MBB->succ_size() == LandingPadSuccs.size()) {
668 // It's possible that the block legitimately ends with a noreturn
669 // call or an unreachable, in which case it won't actuall fall
670 // out of the block.
671 } else if (MBB->succ_size() != 1+LandingPadSuccs.size()) {
672 report("MBB exits via unconditional fall-through but doesn't have "
673 "exactly one CFG successor!", MBB);
674 } else if (!MBB->isSuccessor(&*MBBI)) {
675 report("MBB exits via unconditional fall-through but its successor "
676 "differs from its CFG successor!", MBB);
677 }
678 if (!MBB->empty() && MBB->back().isBarrier() &&
679 !TII->isPredicated(MBB->back())) {
680 report("MBB exits via unconditional fall-through but ends with a "
681 "barrier instruction!", MBB);
682 }
683 if (!Cond.empty()) {
684 report("MBB exits via unconditional fall-through but has a condition!",
685 MBB);
686 }
687 } else if (TBB && !FBB && Cond.empty()) {
688 // Block unconditionally branches somewhere.
689 // If the block has exactly one successor, that happens to be a
690 // landingpad, accept it as valid control flow.
691 if (MBB->succ_size() != 1+LandingPadSuccs.size() &&
692 (MBB->succ_size() != 1 || LandingPadSuccs.size() != 1 ||
693 *MBB->succ_begin() != *LandingPadSuccs.begin())) {
694 report("MBB exits via unconditional branch but doesn't have "
695 "exactly one CFG successor!", MBB);
696 } else if (!MBB->isSuccessor(TBB)) {
697 report("MBB exits via unconditional branch but the CFG "
698 "successor doesn't match the actual successor!", MBB);
699 }
700 if (MBB->empty()) {
701 report("MBB exits via unconditional branch but doesn't contain "
702 "any instructions!", MBB);
703 } else if (!MBB->back().isBarrier()) {
704 report("MBB exits via unconditional branch but doesn't end with a "
705 "barrier instruction!", MBB);
706 } else if (!MBB->back().isTerminator()) {
707 report("MBB exits via unconditional branch but the branch isn't a "
708 "terminator instruction!", MBB);
709 }
710 } else if (TBB && !FBB && !Cond.empty()) {
711 // Block conditionally branches somewhere, otherwise falls through.
712 MachineFunction::const_iterator MBBI = MBB->getIterator();
713 ++MBBI;
714 if (MBBI == MF->end()) {
715 report("MBB conditionally falls through out of function!", MBB);
716 } else if (MBB->succ_size() == 1) {
717 // A conditional branch with only one successor is weird, but allowed.
718 if (&*MBBI != TBB)
719 report("MBB exits via conditional branch/fall-through but only has "
720 "one CFG successor!", MBB);
721 else if (TBB != *MBB->succ_begin())
722 report("MBB exits via conditional branch/fall-through but the CFG "
723 "successor don't match the actual successor!", MBB);
724 } else if (MBB->succ_size() != 2) {
725 report("MBB exits via conditional branch/fall-through but doesn't have "
726 "exactly two CFG successors!", MBB);
727 } else if (!matchPair(MBB->succ_begin(), TBB, &*MBBI)) {
728 report("MBB exits via conditional branch/fall-through but the CFG "
729 "successors don't match the actual successors!", MBB);
730 }
731 if (MBB->empty()) {
732 report("MBB exits via conditional branch/fall-through but doesn't "
733 "contain any instructions!", MBB);
734 } else if (MBB->back().isBarrier()) {
735 report("MBB exits via conditional branch/fall-through but ends with a "
736 "barrier instruction!", MBB);
737 } else if (!MBB->back().isTerminator()) {
738 report("MBB exits via conditional branch/fall-through but the branch "
739 "isn't a terminator instruction!", MBB);
740 }
741 } else if (TBB && FBB) {
742 // Block conditionally branches somewhere, otherwise branches
743 // somewhere else.
744 if (MBB->succ_size() == 1) {
745 // A conditional branch with only one successor is weird, but allowed.
746 if (FBB != TBB)
747 report("MBB exits via conditional branch/branch through but only has "
748 "one CFG successor!", MBB);
749 else if (TBB != *MBB->succ_begin())
750 report("MBB exits via conditional branch/branch through but the CFG "
751 "successor don't match the actual successor!", MBB);
752 } else if (MBB->succ_size() != 2) {
753 report("MBB exits via conditional branch/branch but doesn't have "
754 "exactly two CFG successors!", MBB);
755 } else if (!matchPair(MBB->succ_begin(), TBB, FBB)) {
756 report("MBB exits via conditional branch/branch but the CFG "
757 "successors don't match the actual successors!", MBB);
758 }
759 if (MBB->empty()) {
760 report("MBB exits via conditional branch/branch but doesn't "
761 "contain any instructions!", MBB);
762 } else if (!MBB->back().isBarrier()) {
763 report("MBB exits via conditional branch/branch but doesn't end with a "
764 "barrier instruction!", MBB);
765 } else if (!MBB->back().isTerminator()) {
766 report("MBB exits via conditional branch/branch but the branch "
767 "isn't a terminator instruction!", MBB);
768 }
769 if (Cond.empty()) {
770 report("MBB exits via conditinal branch/branch but there's no "
771 "condition!", MBB);
772 }
773 } else {
774 report("AnalyzeBranch returned invalid data!", MBB);
775 }
776 }
777
778 regsLive.clear();
779 if (MRI->tracksLiveness()) {
780 for (const auto &LI : MBB->liveins()) {
781 if (!TargetRegisterInfo::isPhysicalRegister(LI.PhysReg)) {
782 report("MBB live-in list contains non-physical register", MBB);
783 continue;
784 }
785 for (MCSubRegIterator SubRegs(LI.PhysReg, TRI, /*IncludeSelf=*/true);
786 SubRegs.isValid(); ++SubRegs)
787 regsLive.insert(*SubRegs);
788 }
789 }
790
791 const MachineFrameInfo &MFI = MF->getFrameInfo();
792 BitVector PR = MFI.getPristineRegs(*MF);
793 for (unsigned I : PR.set_bits()) {
794 for (MCSubRegIterator SubRegs(I, TRI, /*IncludeSelf=*/true);
795 SubRegs.isValid(); ++SubRegs)
796 regsLive.insert(*SubRegs);
797 }
798
799 regsKilled.clear();
800 regsDefined.clear();
801
802 if (Indexes)
803 lastIndex = Indexes->getMBBStartIdx(MBB);
804 }
805
806 // This function gets called for all bundle headers, including normal
807 // stand-alone unbundled instructions.
visitMachineBundleBefore(const MachineInstr * MI)808 void MachineVerifier::visitMachineBundleBefore(const MachineInstr *MI) {
809 if (Indexes && Indexes->hasIndex(*MI)) {
810 SlotIndex idx = Indexes->getInstructionIndex(*MI);
811 if (!(idx > lastIndex)) {
812 report("Instruction index out of order", MI);
813 errs() << "Last instruction was at " << lastIndex << '\n';
814 }
815 lastIndex = idx;
816 }
817
818 // Ensure non-terminators don't follow terminators.
819 // Ignore predicated terminators formed by if conversion.
820 // FIXME: If conversion shouldn't need to violate this rule.
821 if (MI->isTerminator() && !TII->isPredicated(*MI)) {
822 if (!FirstTerminator)
823 FirstTerminator = MI;
824 } else if (FirstTerminator) {
825 report("Non-terminator instruction after the first terminator", MI);
826 errs() << "First terminator was:\t" << *FirstTerminator;
827 }
828 }
829
830 // The operands on an INLINEASM instruction must follow a template.
831 // Verify that the flag operands make sense.
verifyInlineAsm(const MachineInstr * MI)832 void MachineVerifier::verifyInlineAsm(const MachineInstr *MI) {
833 // The first two operands on INLINEASM are the asm string and global flags.
834 if (MI->getNumOperands() < 2) {
835 report("Too few operands on inline asm", MI);
836 return;
837 }
838 if (!MI->getOperand(0).isSymbol())
839 report("Asm string must be an external symbol", MI);
840 if (!MI->getOperand(1).isImm())
841 report("Asm flags must be an immediate", MI);
842 // Allowed flags are Extra_HasSideEffects = 1, Extra_IsAlignStack = 2,
843 // Extra_AsmDialect = 4, Extra_MayLoad = 8, and Extra_MayStore = 16,
844 // and Extra_IsConvergent = 32.
845 if (!isUInt<6>(MI->getOperand(1).getImm()))
846 report("Unknown asm flags", &MI->getOperand(1), 1);
847
848 static_assert(InlineAsm::MIOp_FirstOperand == 2, "Asm format changed");
849
850 unsigned OpNo = InlineAsm::MIOp_FirstOperand;
851 unsigned NumOps;
852 for (unsigned e = MI->getNumOperands(); OpNo < e; OpNo += NumOps) {
853 const MachineOperand &MO = MI->getOperand(OpNo);
854 // There may be implicit ops after the fixed operands.
855 if (!MO.isImm())
856 break;
857 NumOps = 1 + InlineAsm::getNumOperandRegisters(MO.getImm());
858 }
859
860 if (OpNo > MI->getNumOperands())
861 report("Missing operands in last group", MI);
862
863 // An optional MDNode follows the groups.
864 if (OpNo < MI->getNumOperands() && MI->getOperand(OpNo).isMetadata())
865 ++OpNo;
866
867 // All trailing operands must be implicit registers.
868 for (unsigned e = MI->getNumOperands(); OpNo < e; ++OpNo) {
869 const MachineOperand &MO = MI->getOperand(OpNo);
870 if (!MO.isReg() || !MO.isImplicit())
871 report("Expected implicit register after groups", &MO, OpNo);
872 }
873 }
874
visitMachineInstrBefore(const MachineInstr * MI)875 void MachineVerifier::visitMachineInstrBefore(const MachineInstr *MI) {
876 const MCInstrDesc &MCID = MI->getDesc();
877 if (MI->getNumOperands() < MCID.getNumOperands()) {
878 report("Too few operands", MI);
879 errs() << MCID.getNumOperands() << " operands expected, but "
880 << MI->getNumOperands() << " given.\n";
881 }
882
883 if (MI->isPHI() && MF->getProperties().hasProperty(
884 MachineFunctionProperties::Property::NoPHIs))
885 report("Found PHI instruction with NoPHIs property set", MI);
886
887 // Check the tied operands.
888 if (MI->isInlineAsm())
889 verifyInlineAsm(MI);
890
891 // Check the MachineMemOperands for basic consistency.
892 for (MachineInstr::mmo_iterator I = MI->memoperands_begin(),
893 E = MI->memoperands_end();
894 I != E; ++I) {
895 if ((*I)->isLoad() && !MI->mayLoad())
896 report("Missing mayLoad flag", MI);
897 if ((*I)->isStore() && !MI->mayStore())
898 report("Missing mayStore flag", MI);
899 }
900
901 // Debug values must not have a slot index.
902 // Other instructions must have one, unless they are inside a bundle.
903 if (LiveInts) {
904 bool mapped = !LiveInts->isNotInMIMap(*MI);
905 if (MI->isDebugInstr()) {
906 if (mapped)
907 report("Debug instruction has a slot index", MI);
908 } else if (MI->isInsideBundle()) {
909 if (mapped)
910 report("Instruction inside bundle has a slot index", MI);
911 } else {
912 if (!mapped)
913 report("Missing slot index", MI);
914 }
915 }
916
917 if (isPreISelGenericOpcode(MCID.getOpcode())) {
918 if (isFunctionSelected)
919 report("Unexpected generic instruction in a Selected function", MI);
920
921 // Check types.
922 SmallVector<LLT, 4> Types;
923 for (unsigned I = 0; I < MCID.getNumOperands(); ++I) {
924 if (!MCID.OpInfo[I].isGenericType())
925 continue;
926 // Generic instructions specify type equality constraints between some of
927 // their operands. Make sure these are consistent.
928 size_t TypeIdx = MCID.OpInfo[I].getGenericTypeIndex();
929 Types.resize(std::max(TypeIdx + 1, Types.size()));
930
931 const MachineOperand *MO = &MI->getOperand(I);
932 LLT OpTy = MRI->getType(MO->getReg());
933 // Don't report a type mismatch if there is no actual mismatch, only a
934 // type missing, to reduce noise:
935 if (OpTy.isValid()) {
936 // Only the first valid type for a type index will be printed: don't
937 // overwrite it later so it's always clear which type was expected:
938 if (!Types[TypeIdx].isValid())
939 Types[TypeIdx] = OpTy;
940 else if (Types[TypeIdx] != OpTy)
941 report("Type mismatch in generic instruction", MO, I, OpTy);
942 } else {
943 // Generic instructions must have types attached to their operands.
944 report("Generic instruction is missing a virtual register type", MO, I);
945 }
946 }
947
948 // Generic opcodes must not have physical register operands.
949 for (unsigned I = 0; I < MI->getNumOperands(); ++I) {
950 const MachineOperand *MO = &MI->getOperand(I);
951 if (MO->isReg() && TargetRegisterInfo::isPhysicalRegister(MO->getReg()))
952 report("Generic instruction cannot have physical register", MO, I);
953 }
954 }
955
956 StringRef ErrorInfo;
957 if (!TII->verifyInstruction(*MI, ErrorInfo))
958 report(ErrorInfo.data(), MI);
959
960 // Verify properties of various specific instruction types
961 switch(MI->getOpcode()) {
962 default:
963 break;
964 case TargetOpcode::G_LOAD:
965 case TargetOpcode::G_STORE:
966 // Generic loads and stores must have a single MachineMemOperand
967 // describing that access.
968 if (!MI->hasOneMemOperand())
969 report("Generic instruction accessing memory must have one mem operand",
970 MI);
971 break;
972 case TargetOpcode::G_PHI: {
973 LLT DstTy = MRI->getType(MI->getOperand(0).getReg());
974 if (!DstTy.isValid() ||
975 !std::all_of(MI->operands_begin() + 1, MI->operands_end(),
976 [this, &DstTy](const MachineOperand &MO) {
977 if (!MO.isReg())
978 return true;
979 LLT Ty = MRI->getType(MO.getReg());
980 if (!Ty.isValid() || (Ty != DstTy))
981 return false;
982 return true;
983 }))
984 report("Generic Instruction G_PHI has operands with incompatible/missing "
985 "types",
986 MI);
987 break;
988 }
989 case TargetOpcode::G_SEXT:
990 case TargetOpcode::G_ZEXT:
991 case TargetOpcode::G_ANYEXT:
992 case TargetOpcode::G_TRUNC:
993 case TargetOpcode::G_FPEXT:
994 case TargetOpcode::G_FPTRUNC: {
995 // Number of operands and presense of types is already checked (and
996 // reported in case of any issues), so no need to report them again. As
997 // we're trying to report as many issues as possible at once, however, the
998 // instructions aren't guaranteed to have the right number of operands or
999 // types attached to them at this point
1000 assert(MCID.getNumOperands() == 2 && "Expected 2 operands G_*{EXT,TRUNC}");
1001 if (MI->getNumOperands() < MCID.getNumOperands())
1002 break;
1003 LLT DstTy = MRI->getType(MI->getOperand(0).getReg());
1004 LLT SrcTy = MRI->getType(MI->getOperand(1).getReg());
1005 if (!DstTy.isValid() || !SrcTy.isValid())
1006 break;
1007
1008 LLT DstElTy = DstTy.isVector() ? DstTy.getElementType() : DstTy;
1009 LLT SrcElTy = SrcTy.isVector() ? SrcTy.getElementType() : SrcTy;
1010 if (DstElTy.isPointer() || SrcElTy.isPointer())
1011 report("Generic extend/truncate can not operate on pointers", MI);
1012
1013 if (DstTy.isVector() != SrcTy.isVector()) {
1014 report("Generic extend/truncate must be all-vector or all-scalar", MI);
1015 // Generally we try to report as many issues as possible at once, but in
1016 // this case it's not clear what should we be comparing the size of the
1017 // scalar with: the size of the whole vector or its lane. Instead of
1018 // making an arbitrary choice and emitting not so helpful message, let's
1019 // avoid the extra noise and stop here.
1020 break;
1021 }
1022 if (DstTy.isVector() && DstTy.getNumElements() != SrcTy.getNumElements())
1023 report("Generic vector extend/truncate must preserve number of lanes",
1024 MI);
1025 unsigned DstSize = DstElTy.getSizeInBits();
1026 unsigned SrcSize = SrcElTy.getSizeInBits();
1027 switch (MI->getOpcode()) {
1028 default:
1029 if (DstSize <= SrcSize)
1030 report("Generic extend has destination type no larger than source", MI);
1031 break;
1032 case TargetOpcode::G_TRUNC:
1033 case TargetOpcode::G_FPTRUNC:
1034 if (DstSize >= SrcSize)
1035 report("Generic truncate has destination type no smaller than source",
1036 MI);
1037 break;
1038 }
1039 break;
1040 }
1041 case TargetOpcode::COPY: {
1042 if (foundErrors)
1043 break;
1044 const MachineOperand &DstOp = MI->getOperand(0);
1045 const MachineOperand &SrcOp = MI->getOperand(1);
1046 LLT DstTy = MRI->getType(DstOp.getReg());
1047 LLT SrcTy = MRI->getType(SrcOp.getReg());
1048 if (SrcTy.isValid() && DstTy.isValid()) {
1049 // If both types are valid, check that the types are the same.
1050 if (SrcTy != DstTy) {
1051 report("Copy Instruction is illegal with mismatching types", MI);
1052 errs() << "Def = " << DstTy << ", Src = " << SrcTy << "\n";
1053 }
1054 }
1055 if (SrcTy.isValid() || DstTy.isValid()) {
1056 // If one of them have valid types, let's just check they have the same
1057 // size.
1058 unsigned SrcSize = TRI->getRegSizeInBits(SrcOp.getReg(), *MRI);
1059 unsigned DstSize = TRI->getRegSizeInBits(DstOp.getReg(), *MRI);
1060 assert(SrcSize && "Expecting size here");
1061 assert(DstSize && "Expecting size here");
1062 if (SrcSize != DstSize)
1063 if (!DstOp.getSubReg() && !SrcOp.getSubReg()) {
1064 report("Copy Instruction is illegal with mismatching sizes", MI);
1065 errs() << "Def Size = " << DstSize << ", Src Size = " << SrcSize
1066 << "\n";
1067 }
1068 }
1069 break;
1070 }
1071 case TargetOpcode::STATEPOINT:
1072 if (!MI->getOperand(StatepointOpers::IDPos).isImm() ||
1073 !MI->getOperand(StatepointOpers::NBytesPos).isImm() ||
1074 !MI->getOperand(StatepointOpers::NCallArgsPos).isImm())
1075 report("meta operands to STATEPOINT not constant!", MI);
1076 break;
1077
1078 auto VerifyStackMapConstant = [&](unsigned Offset) {
1079 if (!MI->getOperand(Offset).isImm() ||
1080 MI->getOperand(Offset).getImm() != StackMaps::ConstantOp ||
1081 !MI->getOperand(Offset + 1).isImm())
1082 report("stack map constant to STATEPOINT not well formed!", MI);
1083 };
1084 const unsigned VarStart = StatepointOpers(MI).getVarIdx();
1085 VerifyStackMapConstant(VarStart + StatepointOpers::CCOffset);
1086 VerifyStackMapConstant(VarStart + StatepointOpers::FlagsOffset);
1087 VerifyStackMapConstant(VarStart + StatepointOpers::NumDeoptOperandsOffset);
1088
1089 // TODO: verify we have properly encoded deopt arguments
1090 };
1091 }
1092
1093 void
visitMachineOperand(const MachineOperand * MO,unsigned MONum)1094 MachineVerifier::visitMachineOperand(const MachineOperand *MO, unsigned MONum) {
1095 const MachineInstr *MI = MO->getParent();
1096 const MCInstrDesc &MCID = MI->getDesc();
1097 unsigned NumDefs = MCID.getNumDefs();
1098 if (MCID.getOpcode() == TargetOpcode::PATCHPOINT)
1099 NumDefs = (MONum == 0 && MO->isReg()) ? NumDefs : 0;
1100
1101 // The first MCID.NumDefs operands must be explicit register defines
1102 if (MONum < NumDefs) {
1103 const MCOperandInfo &MCOI = MCID.OpInfo[MONum];
1104 if (!MO->isReg())
1105 report("Explicit definition must be a register", MO, MONum);
1106 else if (!MO->isDef() && !MCOI.isOptionalDef())
1107 report("Explicit definition marked as use", MO, MONum);
1108 else if (MO->isImplicit())
1109 report("Explicit definition marked as implicit", MO, MONum);
1110 } else if (MONum < MCID.getNumOperands()) {
1111 const MCOperandInfo &MCOI = MCID.OpInfo[MONum];
1112 // Don't check if it's the last operand in a variadic instruction. See,
1113 // e.g., LDM_RET in the arm back end.
1114 if (MO->isReg() &&
1115 !(MI->isVariadic() && MONum == MCID.getNumOperands()-1)) {
1116 if (MO->isDef() && !MCOI.isOptionalDef())
1117 report("Explicit operand marked as def", MO, MONum);
1118 if (MO->isImplicit())
1119 report("Explicit operand marked as implicit", MO, MONum);
1120 }
1121
1122 int TiedTo = MCID.getOperandConstraint(MONum, MCOI::TIED_TO);
1123 if (TiedTo != -1) {
1124 if (!MO->isReg())
1125 report("Tied use must be a register", MO, MONum);
1126 else if (!MO->isTied())
1127 report("Operand should be tied", MO, MONum);
1128 else if (unsigned(TiedTo) != MI->findTiedOperandIdx(MONum))
1129 report("Tied def doesn't match MCInstrDesc", MO, MONum);
1130 else if (TargetRegisterInfo::isPhysicalRegister(MO->getReg())) {
1131 const MachineOperand &MOTied = MI->getOperand(TiedTo);
1132 if (!MOTied.isReg())
1133 report("Tied counterpart must be a register", &MOTied, TiedTo);
1134 else if (TargetRegisterInfo::isPhysicalRegister(MOTied.getReg()) &&
1135 MO->getReg() != MOTied.getReg())
1136 report("Tied physical registers must match.", &MOTied, TiedTo);
1137 }
1138 } else if (MO->isReg() && MO->isTied())
1139 report("Explicit operand should not be tied", MO, MONum);
1140 } else {
1141 // ARM adds %reg0 operands to indicate predicates. We'll allow that.
1142 if (MO->isReg() && !MO->isImplicit() && !MI->isVariadic() && MO->getReg())
1143 report("Extra explicit operand on non-variadic instruction", MO, MONum);
1144 }
1145
1146 switch (MO->getType()) {
1147 case MachineOperand::MO_Register: {
1148 const unsigned Reg = MO->getReg();
1149 if (!Reg)
1150 return;
1151 if (MRI->tracksLiveness() && !MI->isDebugValue())
1152 checkLiveness(MO, MONum);
1153
1154 // Verify the consistency of tied operands.
1155 if (MO->isTied()) {
1156 unsigned OtherIdx = MI->findTiedOperandIdx(MONum);
1157 const MachineOperand &OtherMO = MI->getOperand(OtherIdx);
1158 if (!OtherMO.isReg())
1159 report("Must be tied to a register", MO, MONum);
1160 if (!OtherMO.isTied())
1161 report("Missing tie flags on tied operand", MO, MONum);
1162 if (MI->findTiedOperandIdx(OtherIdx) != MONum)
1163 report("Inconsistent tie links", MO, MONum);
1164 if (MONum < MCID.getNumDefs()) {
1165 if (OtherIdx < MCID.getNumOperands()) {
1166 if (-1 == MCID.getOperandConstraint(OtherIdx, MCOI::TIED_TO))
1167 report("Explicit def tied to explicit use without tie constraint",
1168 MO, MONum);
1169 } else {
1170 if (!OtherMO.isImplicit())
1171 report("Explicit def should be tied to implicit use", MO, MONum);
1172 }
1173 }
1174 }
1175
1176 // Verify two-address constraints after leaving SSA form.
1177 unsigned DefIdx;
1178 if (!MRI->isSSA() && MO->isUse() &&
1179 MI->isRegTiedToDefOperand(MONum, &DefIdx) &&
1180 Reg != MI->getOperand(DefIdx).getReg())
1181 report("Two-address instruction operands must be identical", MO, MONum);
1182
1183 // Check register classes.
1184 unsigned SubIdx = MO->getSubReg();
1185
1186 if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
1187 if (SubIdx) {
1188 report("Illegal subregister index for physical register", MO, MONum);
1189 return;
1190 }
1191 if (MONum < MCID.getNumOperands()) {
1192 if (const TargetRegisterClass *DRC =
1193 TII->getRegClass(MCID, MONum, TRI, *MF)) {
1194 if (!DRC->contains(Reg)) {
1195 report("Illegal physical register for instruction", MO, MONum);
1196 errs() << printReg(Reg, TRI) << " is not a "
1197 << TRI->getRegClassName(DRC) << " register.\n";
1198 }
1199 }
1200 }
1201 if (MO->isRenamable()) {
1202 if (MRI->isReserved(Reg)) {
1203 report("isRenamable set on reserved register", MO, MONum);
1204 return;
1205 }
1206 }
1207 if (MI->isDebugValue() && MO->isUse() && !MO->isDebug()) {
1208 report("Use-reg is not IsDebug in a DBG_VALUE", MO, MONum);
1209 return;
1210 }
1211 } else {
1212 // Virtual register.
1213 const TargetRegisterClass *RC = MRI->getRegClassOrNull(Reg);
1214 if (!RC) {
1215 // This is a generic virtual register.
1216
1217 // If we're post-Select, we can't have gvregs anymore.
1218 if (isFunctionSelected) {
1219 report("Generic virtual register invalid in a Selected function",
1220 MO, MONum);
1221 return;
1222 }
1223
1224 // The gvreg must have a type and it must not have a SubIdx.
1225 LLT Ty = MRI->getType(Reg);
1226 if (!Ty.isValid()) {
1227 report("Generic virtual register must have a valid type", MO,
1228 MONum);
1229 return;
1230 }
1231
1232 const RegisterBank *RegBank = MRI->getRegBankOrNull(Reg);
1233
1234 // If we're post-RegBankSelect, the gvreg must have a bank.
1235 if (!RegBank && isFunctionRegBankSelected) {
1236 report("Generic virtual register must have a bank in a "
1237 "RegBankSelected function",
1238 MO, MONum);
1239 return;
1240 }
1241
1242 // Make sure the register fits into its register bank if any.
1243 if (RegBank && Ty.isValid() &&
1244 RegBank->getSize() < Ty.getSizeInBits()) {
1245 report("Register bank is too small for virtual register", MO,
1246 MONum);
1247 errs() << "Register bank " << RegBank->getName() << " too small("
1248 << RegBank->getSize() << ") to fit " << Ty.getSizeInBits()
1249 << "-bits\n";
1250 return;
1251 }
1252 if (SubIdx) {
1253 report("Generic virtual register does not subregister index", MO,
1254 MONum);
1255 return;
1256 }
1257
1258 // If this is a target specific instruction and this operand
1259 // has register class constraint, the virtual register must
1260 // comply to it.
1261 if (!isPreISelGenericOpcode(MCID.getOpcode()) &&
1262 MONum < MCID.getNumOperands() &&
1263 TII->getRegClass(MCID, MONum, TRI, *MF)) {
1264 report("Virtual register does not match instruction constraint", MO,
1265 MONum);
1266 errs() << "Expect register class "
1267 << TRI->getRegClassName(
1268 TII->getRegClass(MCID, MONum, TRI, *MF))
1269 << " but got nothing\n";
1270 return;
1271 }
1272
1273 break;
1274 }
1275 if (SubIdx) {
1276 const TargetRegisterClass *SRC =
1277 TRI->getSubClassWithSubReg(RC, SubIdx);
1278 if (!SRC) {
1279 report("Invalid subregister index for virtual register", MO, MONum);
1280 errs() << "Register class " << TRI->getRegClassName(RC)
1281 << " does not support subreg index " << SubIdx << "\n";
1282 return;
1283 }
1284 if (RC != SRC) {
1285 report("Invalid register class for subregister index", MO, MONum);
1286 errs() << "Register class " << TRI->getRegClassName(RC)
1287 << " does not fully support subreg index " << SubIdx << "\n";
1288 return;
1289 }
1290 }
1291 if (MONum < MCID.getNumOperands()) {
1292 if (const TargetRegisterClass *DRC =
1293 TII->getRegClass(MCID, MONum, TRI, *MF)) {
1294 if (SubIdx) {
1295 const TargetRegisterClass *SuperRC =
1296 TRI->getLargestLegalSuperClass(RC, *MF);
1297 if (!SuperRC) {
1298 report("No largest legal super class exists.", MO, MONum);
1299 return;
1300 }
1301 DRC = TRI->getMatchingSuperRegClass(SuperRC, DRC, SubIdx);
1302 if (!DRC) {
1303 report("No matching super-reg register class.", MO, MONum);
1304 return;
1305 }
1306 }
1307 if (!RC->hasSuperClassEq(DRC)) {
1308 report("Illegal virtual register for instruction", MO, MONum);
1309 errs() << "Expected a " << TRI->getRegClassName(DRC)
1310 << " register, but got a " << TRI->getRegClassName(RC)
1311 << " register\n";
1312 }
1313 }
1314 }
1315 }
1316 break;
1317 }
1318
1319 case MachineOperand::MO_RegisterMask:
1320 regMasks.push_back(MO->getRegMask());
1321 break;
1322
1323 case MachineOperand::MO_MachineBasicBlock:
1324 if (MI->isPHI() && !MO->getMBB()->isSuccessor(MI->getParent()))
1325 report("PHI operand is not in the CFG", MO, MONum);
1326 break;
1327
1328 case MachineOperand::MO_FrameIndex:
1329 if (LiveStks && LiveStks->hasInterval(MO->getIndex()) &&
1330 LiveInts && !LiveInts->isNotInMIMap(*MI)) {
1331 int FI = MO->getIndex();
1332 LiveInterval &LI = LiveStks->getInterval(FI);
1333 SlotIndex Idx = LiveInts->getInstructionIndex(*MI);
1334
1335 bool stores = MI->mayStore();
1336 bool loads = MI->mayLoad();
1337 // For a memory-to-memory move, we need to check if the frame
1338 // index is used for storing or loading, by inspecting the
1339 // memory operands.
1340 if (stores && loads) {
1341 for (auto *MMO : MI->memoperands()) {
1342 const PseudoSourceValue *PSV = MMO->getPseudoValue();
1343 if (PSV == nullptr) continue;
1344 const FixedStackPseudoSourceValue *Value =
1345 dyn_cast<FixedStackPseudoSourceValue>(PSV);
1346 if (Value == nullptr) continue;
1347 if (Value->getFrameIndex() != FI) continue;
1348
1349 if (MMO->isStore())
1350 loads = false;
1351 else
1352 stores = false;
1353 break;
1354 }
1355 if (loads == stores)
1356 report("Missing fixed stack memoperand.", MI);
1357 }
1358 if (loads && !LI.liveAt(Idx.getRegSlot(true))) {
1359 report("Instruction loads from dead spill slot", MO, MONum);
1360 errs() << "Live stack: " << LI << '\n';
1361 }
1362 if (stores && !LI.liveAt(Idx.getRegSlot())) {
1363 report("Instruction stores to dead spill slot", MO, MONum);
1364 errs() << "Live stack: " << LI << '\n';
1365 }
1366 }
1367 break;
1368
1369 default:
1370 break;
1371 }
1372 }
1373
checkLivenessAtUse(const MachineOperand * MO,unsigned MONum,SlotIndex UseIdx,const LiveRange & LR,unsigned VRegOrUnit,LaneBitmask LaneMask)1374 void MachineVerifier::checkLivenessAtUse(const MachineOperand *MO,
1375 unsigned MONum, SlotIndex UseIdx, const LiveRange &LR, unsigned VRegOrUnit,
1376 LaneBitmask LaneMask) {
1377 LiveQueryResult LRQ = LR.Query(UseIdx);
1378 // Check if we have a segment at the use, note however that we only need one
1379 // live subregister range, the others may be dead.
1380 if (!LRQ.valueIn() && LaneMask.none()) {
1381 report("No live segment at use", MO, MONum);
1382 report_context_liverange(LR);
1383 report_context_vreg_regunit(VRegOrUnit);
1384 report_context(UseIdx);
1385 }
1386 if (MO->isKill() && !LRQ.isKill()) {
1387 report("Live range continues after kill flag", MO, MONum);
1388 report_context_liverange(LR);
1389 report_context_vreg_regunit(VRegOrUnit);
1390 if (LaneMask.any())
1391 report_context_lanemask(LaneMask);
1392 report_context(UseIdx);
1393 }
1394 }
1395
checkLivenessAtDef(const MachineOperand * MO,unsigned MONum,SlotIndex DefIdx,const LiveRange & LR,unsigned VRegOrUnit,LaneBitmask LaneMask)1396 void MachineVerifier::checkLivenessAtDef(const MachineOperand *MO,
1397 unsigned MONum, SlotIndex DefIdx, const LiveRange &LR, unsigned VRegOrUnit,
1398 LaneBitmask LaneMask) {
1399 if (const VNInfo *VNI = LR.getVNInfoAt(DefIdx)) {
1400 assert(VNI && "NULL valno is not allowed");
1401 if (VNI->def != DefIdx) {
1402 report("Inconsistent valno->def", MO, MONum);
1403 report_context_liverange(LR);
1404 report_context_vreg_regunit(VRegOrUnit);
1405 if (LaneMask.any())
1406 report_context_lanemask(LaneMask);
1407 report_context(*VNI);
1408 report_context(DefIdx);
1409 }
1410 } else {
1411 report("No live segment at def", MO, MONum);
1412 report_context_liverange(LR);
1413 report_context_vreg_regunit(VRegOrUnit);
1414 if (LaneMask.any())
1415 report_context_lanemask(LaneMask);
1416 report_context(DefIdx);
1417 }
1418 // Check that, if the dead def flag is present, LiveInts agree.
1419 if (MO->isDead()) {
1420 LiveQueryResult LRQ = LR.Query(DefIdx);
1421 if (!LRQ.isDeadDef()) {
1422 // In case of physregs we can have a non-dead definition on another
1423 // operand.
1424 bool otherDef = false;
1425 if (!TargetRegisterInfo::isVirtualRegister(VRegOrUnit)) {
1426 const MachineInstr &MI = *MO->getParent();
1427 for (const MachineOperand &MO : MI.operands()) {
1428 if (!MO.isReg() || !MO.isDef() || MO.isDead())
1429 continue;
1430 unsigned Reg = MO.getReg();
1431 for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
1432 if (*Units == VRegOrUnit) {
1433 otherDef = true;
1434 break;
1435 }
1436 }
1437 }
1438 }
1439
1440 if (!otherDef) {
1441 report("Live range continues after dead def flag", MO, MONum);
1442 report_context_liverange(LR);
1443 report_context_vreg_regunit(VRegOrUnit);
1444 if (LaneMask.any())
1445 report_context_lanemask(LaneMask);
1446 }
1447 }
1448 }
1449 }
1450
// Check liveness invariants for one register operand, consulting whichever
// analyses are available (LiveVariables, LiveIntervals, and the verifier's
// own regsLive tracking). Reads are validated against live-in values and
// kill flags; defs are validated against live segments and dead flags, and
// recorded for the bundle-level bookkeeping in visitMachineBundleAfter().
void MachineVerifier::checkLiveness(const MachineOperand *MO, unsigned MONum) {
  const MachineInstr *MI = MO->getParent();
  const unsigned Reg = MO->getReg();

  // Both use and def operands can read a register.
  if (MO->readsReg()) {
    if (MO->isKill())
      addRegWithSubRegs(regsKilled, Reg);

    // Check that LiveVars knows this kill.
    if (LiveVars && TargetRegisterInfo::isVirtualRegister(Reg) &&
        MO->isKill()) {
      LiveVariables::VarInfo &VI = LiveVars->getVarInfo(Reg);
      if (!is_contained(VI.Kills, MI))
        report("Kill missing from LiveVariables", MO, MONum);
    }

    // Check LiveInts liveness and kill.
    if (LiveInts && !LiveInts->isNotInMIMap(*MI)) {
      SlotIndex UseIdx = LiveInts->getInstructionIndex(*MI);
      // Check the cached regunit intervals.
      if (TargetRegisterInfo::isPhysicalRegister(Reg) && !isReserved(Reg)) {
        for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
          // Reserved units are exempt from liveness checking.
          if (MRI->isReservedRegUnit(*Units))
            continue;
          if (const LiveRange *LR = LiveInts->getCachedRegUnit(*Units))
            checkLivenessAtUse(MO, MONum, UseIdx, *LR, *Units);
        }
      }

      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        if (LiveInts->hasInterval(Reg)) {
          // This is a virtual register interval.
          const LiveInterval &LI = LiveInts->getInterval(Reg);
          checkLivenessAtUse(MO, MONum, UseIdx, LI, Reg);

          if (LI.hasSubRanges() && !MO->isDef()) {
            unsigned SubRegIdx = MO->getSubReg();
            // Lanes this operand actually reads: the subreg's lanes, or all
            // lanes of the vreg when there is no subregister index.
            LaneBitmask MOMask = SubRegIdx != 0
                               ? TRI->getSubRegIndexLaneMask(SubRegIdx)
                               : MRI->getMaxLaneMaskForVReg(Reg);
            LaneBitmask LiveInMask;
            for (const LiveInterval::SubRange &SR : LI.subranges()) {
              if ((MOMask & SR.LaneMask).none())
                continue;
              checkLivenessAtUse(MO, MONum, UseIdx, SR, Reg, SR.LaneMask);
              LiveQueryResult LRQ = SR.Query(UseIdx);
              if (LRQ.valueIn())
                LiveInMask |= SR.LaneMask;
            }
            // At least parts of the register has to be live at the use.
            if ((LiveInMask & MOMask).none()) {
              report("No live subrange at use", MO, MONum);
              report_context(LI);
              report_context(UseIdx);
            }
          }
        } else {
          report("Virtual register has no live interval", MO, MONum);
        }
      }
    }

    // Use of a dead register.
    if (!regsLive.count(Reg)) {
      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        // Reserved registers may be used even when 'dead'.
        bool Bad = !isReserved(Reg);
        // We are fine if just any subregister has a defined value.
        if (Bad) {
          for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid();
               ++SubRegs) {
            if (regsLive.count(*SubRegs)) {
              Bad = false;
              break;
            }
          }
        }
        // If there is an additional implicit-use of a super register we stop
        // here. By definition we are fine if the super register is not
        // (completely) dead, if the complete super register is dead we will
        // get a report for its operand.
        if (Bad) {
          for (const MachineOperand &MOP : MI->uses()) {
            if (!MOP.isReg())
              continue;
            if (!MOP.isImplicit())
              continue;
            for (MCSubRegIterator SubRegs(MOP.getReg(), TRI); SubRegs.isValid();
                 ++SubRegs) {
              if (*SubRegs == Reg) {
                Bad = false;
                break;
              }
            }
          }
        }
        if (Bad)
          report("Using an undefined physical register", MO, MONum);
      } else if (MRI->def_empty(Reg)) {
        report("Reading virtual register without a def", MO, MONum);
      } else {
        BBInfo &MInfo = MBBInfoMap[MI->getParent()];
        // We don't know which virtual registers are live in, so only complain
        // if vreg was killed in this MBB. Otherwise keep track of vregs that
        // must be live in. PHI instructions are handled separately.
        if (MInfo.regsKilled.count(Reg))
          report("Using a killed virtual register", MO, MONum);
        else if (!MI->isPHI())
          MInfo.vregsLiveIn.insert(std::make_pair(Reg, MI));
      }
    }
  }

  if (MO->isDef()) {
    // Register defined.
    // TODO: verify that earlyclobber ops are not used.
    if (MO->isDead())
      addRegWithSubRegs(regsDead, Reg);
    else
      addRegWithSubRegs(regsDefined, Reg);

    // Verify SSA form.
    if (MRI->isSSA() && TargetRegisterInfo::isVirtualRegister(Reg) &&
        std::next(MRI->def_begin(Reg)) != MRI->def_end())
      report("Multiple virtual register defs in SSA form", MO, MONum);

    // Check LiveInts for a live segment, but only for virtual registers.
    if (LiveInts && !LiveInts->isNotInMIMap(*MI)) {
      SlotIndex DefIdx = LiveInts->getInstructionIndex(*MI);
      // Early-clobber defs become live at the early-clobber slot.
      DefIdx = DefIdx.getRegSlot(MO->isEarlyClobber());

      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        if (LiveInts->hasInterval(Reg)) {
          const LiveInterval &LI = LiveInts->getInterval(Reg);
          checkLivenessAtDef(MO, MONum, DefIdx, LI, Reg);

          if (LI.hasSubRanges()) {
            unsigned SubRegIdx = MO->getSubReg();
            // Lanes this def actually writes (see the analogous code above).
            LaneBitmask MOMask = SubRegIdx != 0
              ? TRI->getSubRegIndexLaneMask(SubRegIdx)
              : MRI->getMaxLaneMaskForVReg(Reg);
            for (const LiveInterval::SubRange &SR : LI.subranges()) {
              if ((SR.LaneMask & MOMask).none())
                continue;
              checkLivenessAtDef(MO, MONum, DefIdx, SR, Reg, SR.LaneMask);
            }
          }
        } else {
          report("Virtual register has no Live interval", MO, MONum);
        }
      }
    }
  }
}
1606
// Post-instruction hook. Intentionally empty: all per-instruction checks are
// done in visitMachineInstrBefore() and the operand/bundle visitors.
void MachineVerifier::visitMachineInstrAfter(const MachineInstr *MI) {}
1608
1609 // This function gets called after visiting all instructions in a bundle. The
1610 // argument points to the bundle header.
1611 // Normal stand-alone instructions are also considered 'bundles', and this
1612 // function is called for all of them.
visitMachineBundleAfter(const MachineInstr * MI)1613 void MachineVerifier::visitMachineBundleAfter(const MachineInstr *MI) {
1614 BBInfo &MInfo = MBBInfoMap[MI->getParent()];
1615 set_union(MInfo.regsKilled, regsKilled);
1616 set_subtract(regsLive, regsKilled); regsKilled.clear();
1617 // Kill any masked registers.
1618 while (!regMasks.empty()) {
1619 const uint32_t *Mask = regMasks.pop_back_val();
1620 for (RegSet::iterator I = regsLive.begin(), E = regsLive.end(); I != E; ++I)
1621 if (TargetRegisterInfo::isPhysicalRegister(*I) &&
1622 MachineOperand::clobbersPhysReg(Mask, *I))
1623 regsDead.push_back(*I);
1624 }
1625 set_subtract(regsLive, regsDead); regsDead.clear();
1626 set_union(regsLive, regsDefined); regsDefined.clear();
1627 }
1628
1629 void
visitMachineBasicBlockAfter(const MachineBasicBlock * MBB)1630 MachineVerifier::visitMachineBasicBlockAfter(const MachineBasicBlock *MBB) {
1631 MBBInfoMap[MBB].regsLiveOut = regsLive;
1632 regsLive.clear();
1633
1634 if (Indexes) {
1635 SlotIndex stop = Indexes->getMBBEndIdx(MBB);
1636 if (!(stop > lastIndex)) {
1637 report("Block ends before last instruction index", MBB);
1638 errs() << "Block ends at " << stop
1639 << " last instruction was at " << lastIndex << '\n';
1640 }
1641 lastIndex = stop;
1642 }
1643 }
1644
1645 // Calculate the largest possible vregsPassed sets. These are the registers that
1646 // can pass through an MBB live, but may not be live every time. It is assumed
1647 // that all vregsPassed sets are empty before the call.
calcRegsPassed()1648 void MachineVerifier::calcRegsPassed() {
1649 // First push live-out regs to successors' vregsPassed. Remember the MBBs that
1650 // have any vregsPassed.
1651 SmallPtrSet<const MachineBasicBlock*, 8> todo;
1652 for (const auto &MBB : *MF) {
1653 BBInfo &MInfo = MBBInfoMap[&MBB];
1654 if (!MInfo.reachable)
1655 continue;
1656 for (MachineBasicBlock::const_succ_iterator SuI = MBB.succ_begin(),
1657 SuE = MBB.succ_end(); SuI != SuE; ++SuI) {
1658 BBInfo &SInfo = MBBInfoMap[*SuI];
1659 if (SInfo.addPassed(MInfo.regsLiveOut))
1660 todo.insert(*SuI);
1661 }
1662 }
1663
1664 // Iteratively push vregsPassed to successors. This will converge to the same
1665 // final state regardless of DenseSet iteration order.
1666 while (!todo.empty()) {
1667 const MachineBasicBlock *MBB = *todo.begin();
1668 todo.erase(MBB);
1669 BBInfo &MInfo = MBBInfoMap[MBB];
1670 for (MachineBasicBlock::const_succ_iterator SuI = MBB->succ_begin(),
1671 SuE = MBB->succ_end(); SuI != SuE; ++SuI) {
1672 if (*SuI == MBB)
1673 continue;
1674 BBInfo &SInfo = MBBInfoMap[*SuI];
1675 if (SInfo.addPassed(MInfo.vregsPassed))
1676 todo.insert(*SuI);
1677 }
1678 }
1679 }
1680
1681 // Calculate the set of virtual registers that must be passed through each basic
1682 // block in order to satisfy the requirements of successor blocks. This is very
1683 // similar to calcRegsPassed, only backwards.
calcRegsRequired()1684 void MachineVerifier::calcRegsRequired() {
1685 // First push live-in regs to predecessors' vregsRequired.
1686 SmallPtrSet<const MachineBasicBlock*, 8> todo;
1687 for (const auto &MBB : *MF) {
1688 BBInfo &MInfo = MBBInfoMap[&MBB];
1689 for (MachineBasicBlock::const_pred_iterator PrI = MBB.pred_begin(),
1690 PrE = MBB.pred_end(); PrI != PrE; ++PrI) {
1691 BBInfo &PInfo = MBBInfoMap[*PrI];
1692 if (PInfo.addRequired(MInfo.vregsLiveIn))
1693 todo.insert(*PrI);
1694 }
1695 }
1696
1697 // Iteratively push vregsRequired to predecessors. This will converge to the
1698 // same final state regardless of DenseSet iteration order.
1699 while (!todo.empty()) {
1700 const MachineBasicBlock *MBB = *todo.begin();
1701 todo.erase(MBB);
1702 BBInfo &MInfo = MBBInfoMap[MBB];
1703 for (MachineBasicBlock::const_pred_iterator PrI = MBB->pred_begin(),
1704 PrE = MBB->pred_end(); PrI != PrE; ++PrI) {
1705 if (*PrI == MBB)
1706 continue;
1707 BBInfo &SInfo = MBBInfoMap[*PrI];
1708 if (SInfo.addRequired(MInfo.vregsRequired))
1709 todo.insert(*PrI);
1710 }
1711 }
1712 }
1713
// Check PHI instructions at the beginning of MBB. It is assumed that
// calcRegsPassed has been run so BBInfo::isLiveOut is valid.
void MachineVerifier::checkPHIOps(const MachineBasicBlock &MBB) {
  BBInfo &MInfo = MBBInfoMap[&MBB];

  // Predecessor blocks already named by the current PHI's operands; reset for
  // each PHI so the "missing predecessor" check below is per-instruction.
  SmallPtrSet<const MachineBasicBlock*, 8> seen;
  for (const MachineInstr &Phi : MBB) {
    // PHIs are grouped at the top of the block; stop at the first non-PHI.
    if (!Phi.isPHI())
      break;
    seen.clear();

    // Operand 0 must be a flag-free virtual-register def.
    const MachineOperand &MODef = Phi.getOperand(0);
    if (!MODef.isReg() || !MODef.isDef()) {
      report("Expected first PHI operand to be a register def", &MODef, 0);
      continue;
    }
    if (MODef.isTied() || MODef.isImplicit() || MODef.isInternalRead() ||
        MODef.isEarlyClobber() || MODef.isDebug())
      report("Unexpected flag on PHI operand", &MODef, 0);
    unsigned DefReg = MODef.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(DefReg))
      report("Expected first PHI operand to be a virtual register", &MODef, 0);

    // The remaining operands come in (register, predecessor-MBB) pairs.
    for (unsigned I = 1, E = Phi.getNumOperands(); I != E; I += 2) {
      const MachineOperand &MO0 = Phi.getOperand(I);
      if (!MO0.isReg()) {
        report("Expected PHI operand to be a register", &MO0, I);
        continue;
      }
      if (MO0.isImplicit() || MO0.isInternalRead() || MO0.isEarlyClobber() ||
          MO0.isDebug() || MO0.isTied())
        report("Unexpected flag on PHI operand", &MO0, I);

      const MachineOperand &MO1 = Phi.getOperand(I + 1);
      if (!MO1.isMBB()) {
        report("Expected PHI operand to be a basic block", &MO1, I + 1);
        continue;
      }

      // The named block must actually be a CFG predecessor of MBB.
      const MachineBasicBlock &Pre = *MO1.getMBB();
      if (!Pre.isSuccessor(&MBB)) {
        report("PHI input is not a predecessor block", &MO1, I + 1);
        continue;
      }

      // Liveness is only meaningful for reachable blocks. An undef input is
      // exempt from the live-out requirement, as is an unreachable
      // predecessor.
      if (MInfo.reachable) {
        seen.insert(&Pre);
        BBInfo &PrInfo = MBBInfoMap[&Pre];
        if (!MO0.isUndef() && PrInfo.reachable &&
            !PrInfo.isLiveOut(MO0.getReg()))
          report("PHI operand is not live-out from predecessor", &MO0, I);
      }
    }

    // Did we see all predecessors?
    if (MInfo.reachable) {
      for (MachineBasicBlock *Pred : MBB.predecessors()) {
        if (!seen.count(Pred)) {
          report("Missing PHI operand", &Phi);
          errs() << printMBBReference(*Pred)
                 << " is a predecessor according to the CFG.\n";
        }
      }
    }
  }
}
1780
visitMachineFunctionAfter()1781 void MachineVerifier::visitMachineFunctionAfter() {
1782 calcRegsPassed();
1783
1784 for (const MachineBasicBlock &MBB : *MF)
1785 checkPHIOps(MBB);
1786
1787 // Now check liveness info if available
1788 calcRegsRequired();
1789
1790 // Check for killed virtual registers that should be live out.
1791 for (const auto &MBB : *MF) {
1792 BBInfo &MInfo = MBBInfoMap[&MBB];
1793 for (RegSet::iterator
1794 I = MInfo.vregsRequired.begin(), E = MInfo.vregsRequired.end(); I != E;
1795 ++I)
1796 if (MInfo.regsKilled.count(*I)) {
1797 report("Virtual register killed in block, but needed live out.", &MBB);
1798 errs() << "Virtual register " << printReg(*I)
1799 << " is used after the block.\n";
1800 }
1801 }
1802
1803 if (!MF->empty()) {
1804 BBInfo &MInfo = MBBInfoMap[&MF->front()];
1805 for (RegSet::iterator
1806 I = MInfo.vregsRequired.begin(), E = MInfo.vregsRequired.end(); I != E;
1807 ++I) {
1808 report("Virtual register defs don't dominate all uses.", MF);
1809 report_context_vreg(*I);
1810 }
1811 }
1812
1813 if (LiveVars)
1814 verifyLiveVariables();
1815 if (LiveInts)
1816 verifyLiveIntervals();
1817 }
1818
verifyLiveVariables()1819 void MachineVerifier::verifyLiveVariables() {
1820 assert(LiveVars && "Don't call verifyLiveVariables without LiveVars");
1821 for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
1822 unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
1823 LiveVariables::VarInfo &VI = LiveVars->getVarInfo(Reg);
1824 for (const auto &MBB : *MF) {
1825 BBInfo &MInfo = MBBInfoMap[&MBB];
1826
1827 // Our vregsRequired should be identical to LiveVariables' AliveBlocks
1828 if (MInfo.vregsRequired.count(Reg)) {
1829 if (!VI.AliveBlocks.test(MBB.getNumber())) {
1830 report("LiveVariables: Block missing from AliveBlocks", &MBB);
1831 errs() << "Virtual register " << printReg(Reg)
1832 << " must be live through the block.\n";
1833 }
1834 } else {
1835 if (VI.AliveBlocks.test(MBB.getNumber())) {
1836 report("LiveVariables: Block should not be in AliveBlocks", &MBB);
1837 errs() << "Virtual register " << printReg(Reg)
1838 << " is not needed live through the block.\n";
1839 }
1840 }
1841 }
1842 }
1843 }
1844
verifyLiveIntervals()1845 void MachineVerifier::verifyLiveIntervals() {
1846 assert(LiveInts && "Don't call verifyLiveIntervals without LiveInts");
1847 for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
1848 unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
1849
1850 // Spilling and splitting may leave unused registers around. Skip them.
1851 if (MRI->reg_nodbg_empty(Reg))
1852 continue;
1853
1854 if (!LiveInts->hasInterval(Reg)) {
1855 report("Missing live interval for virtual register", MF);
1856 errs() << printReg(Reg, TRI) << " still has defs or uses\n";
1857 continue;
1858 }
1859
1860 const LiveInterval &LI = LiveInts->getInterval(Reg);
1861 assert(Reg == LI.reg && "Invalid reg to interval mapping");
1862 verifyLiveInterval(LI);
1863 }
1864
1865 // Verify all the cached regunit intervals.
1866 for (unsigned i = 0, e = TRI->getNumRegUnits(); i != e; ++i)
1867 if (const LiveRange *LR = LiveInts->getCachedRegUnit(i))
1868 verifyLiveRange(*LR, i);
1869 }
1870
verifyLiveRangeValue(const LiveRange & LR,const VNInfo * VNI,unsigned Reg,LaneBitmask LaneMask)1871 void MachineVerifier::verifyLiveRangeValue(const LiveRange &LR,
1872 const VNInfo *VNI, unsigned Reg,
1873 LaneBitmask LaneMask) {
1874 if (VNI->isUnused())
1875 return;
1876
1877 const VNInfo *DefVNI = LR.getVNInfoAt(VNI->def);
1878
1879 if (!DefVNI) {
1880 report("Value not live at VNInfo def and not marked unused", MF);
1881 report_context(LR, Reg, LaneMask);
1882 report_context(*VNI);
1883 return;
1884 }
1885
1886 if (DefVNI != VNI) {
1887 report("Live segment at def has different VNInfo", MF);
1888 report_context(LR, Reg, LaneMask);
1889 report_context(*VNI);
1890 return;
1891 }
1892
1893 const MachineBasicBlock *MBB = LiveInts->getMBBFromIndex(VNI->def);
1894 if (!MBB) {
1895 report("Invalid VNInfo definition index", MF);
1896 report_context(LR, Reg, LaneMask);
1897 report_context(*VNI);
1898 return;
1899 }
1900
1901 if (VNI->isPHIDef()) {
1902 if (VNI->def != LiveInts->getMBBStartIdx(MBB)) {
1903 report("PHIDef VNInfo is not defined at MBB start", MBB);
1904 report_context(LR, Reg, LaneMask);
1905 report_context(*VNI);
1906 }
1907 return;
1908 }
1909
1910 // Non-PHI def.
1911 const MachineInstr *MI = LiveInts->getInstructionFromIndex(VNI->def);
1912 if (!MI) {
1913 report("No instruction at VNInfo def index", MBB);
1914 report_context(LR, Reg, LaneMask);
1915 report_context(*VNI);
1916 return;
1917 }
1918
1919 if (Reg != 0) {
1920 bool hasDef = false;
1921 bool isEarlyClobber = false;
1922 for (ConstMIBundleOperands MOI(*MI); MOI.isValid(); ++MOI) {
1923 if (!MOI->isReg() || !MOI->isDef())
1924 continue;
1925 if (TargetRegisterInfo::isVirtualRegister(Reg)) {
1926 if (MOI->getReg() != Reg)
1927 continue;
1928 } else {
1929 if (!TargetRegisterInfo::isPhysicalRegister(MOI->getReg()) ||
1930 !TRI->hasRegUnit(MOI->getReg(), Reg))
1931 continue;
1932 }
1933 if (LaneMask.any() &&
1934 (TRI->getSubRegIndexLaneMask(MOI->getSubReg()) & LaneMask).none())
1935 continue;
1936 hasDef = true;
1937 if (MOI->isEarlyClobber())
1938 isEarlyClobber = true;
1939 }
1940
1941 if (!hasDef) {
1942 report("Defining instruction does not modify register", MI);
1943 report_context(LR, Reg, LaneMask);
1944 report_context(*VNI);
1945 }
1946
1947 // Early clobber defs begin at USE slots, but other defs must begin at
1948 // DEF slots.
1949 if (isEarlyClobber) {
1950 if (!VNI->def.isEarlyClobber()) {
1951 report("Early clobber def must be at an early-clobber slot", MBB);
1952 report_context(LR, Reg, LaneMask);
1953 report_context(*VNI);
1954 }
1955 } else if (!VNI->def.isRegister()) {
1956 report("Non-PHI, non-early clobber def must be at a register slot", MBB);
1957 report_context(LR, Reg, LaneMask);
1958 report_context(*VNI);
1959 }
1960 }
1961 }
1962
// Verify a single live segment: its endpoints must map to real program
// points, it must end in a way consistent with its last instruction (kill,
// dead def, or redefinition), and its value must be live out of every
// predecessor of each block it is live into.
void MachineVerifier::verifyLiveRangeSegment(const LiveRange &LR,
                                             const LiveRange::const_iterator I,
                                             unsigned Reg, LaneBitmask LaneMask)
{
  const LiveRange::Segment &S = *I;
  const VNInfo *VNI = S.valno;
  assert(VNI && "Live segment has no valno");

  // The segment's value number must belong to this live range.
  if (VNI->id >= LR.getNumValNums() || VNI != LR.getValNumInfo(VNI->id)) {
    report("Foreign valno in live segment", MF);
    report_context(LR, Reg, LaneMask);
    report_context(S);
    report_context(*VNI);
  }

  if (VNI->isUnused()) {
    report("Live segment valno is marked unused", MF);
    report_context(LR, Reg, LaneMask);
    report_context(S);
  }

  const MachineBasicBlock *MBB = LiveInts->getMBBFromIndex(S.start);
  if (!MBB) {
    report("Bad start of live segment, no basic block", MF);
    report_context(LR, Reg, LaneMask);
    report_context(S);
    return;
  }
  // A segment either starts where its value is defined, or continues a value
  // that was live into the block.
  SlotIndex MBBStartIdx = LiveInts->getMBBStartIdx(MBB);
  if (S.start != MBBStartIdx && S.start != VNI->def) {
    report("Live segment must begin at MBB entry or valno def", MBB);
    report_context(LR, Reg, LaneMask);
    report_context(S);
  }

  const MachineBasicBlock *EndMBB =
    LiveInts->getMBBFromIndex(S.end.getPrevSlot());
  if (!EndMBB) {
    report("Bad end of live segment, no basic block", MF);
    report_context(LR, Reg, LaneMask);
    report_context(S);
    return;
  }

  // No more checks for live-out segments.
  if (S.end == LiveInts->getMBBEndIdx(EndMBB))
    return;

  // RegUnit intervals are allowed dead phis.
  if (!TargetRegisterInfo::isVirtualRegister(Reg) && VNI->isPHIDef() &&
      S.start == VNI->def && S.end == VNI->def.getDeadSlot())
    return;

  // The live segment is ending inside EndMBB
  const MachineInstr *MI =
    LiveInts->getInstructionFromIndex(S.end.getPrevSlot());
  if (!MI) {
    report("Live segment doesn't end at a valid instruction", EndMBB);
    report_context(LR, Reg, LaneMask);
    report_context(S);
    return;
  }

  // The block slot must refer to a basic block boundary.
  if (S.end.isBlock()) {
    report("Live segment ends at B slot of an instruction", EndMBB);
    report_context(LR, Reg, LaneMask);
    report_context(S);
  }

  if (S.end.isDead()) {
    // Segment ends on the dead slot.
    // That means there must be a dead def.
    if (!SlotIndex::isSameInstr(S.start, S.end)) {
      report("Live segment ending at dead slot spans instructions", EndMBB);
      report_context(LR, Reg, LaneMask);
      report_context(S);
    }
  }

  // A live segment can only end at an early-clobber slot if it is being
  // redefined by an early-clobber def.
  if (S.end.isEarlyClobber()) {
    if (I+1 == LR.end() || (I+1)->start != S.end) {
      report("Live segment ending at early clobber slot must be "
             "redefined by an EC def in the same instruction", EndMBB);
      report_context(LR, Reg, LaneMask);
      report_context(S);
    }
  }

  // The following checks only apply to virtual registers. Physreg liveness
  // is too weird to check.
  if (TargetRegisterInfo::isVirtualRegister(Reg)) {
    // A live segment can end with either a redefinition, a kill flag on a
    // use, or a dead flag on a def.
    bool hasRead = false;
    bool hasSubRegDef = false;
    bool hasDeadDef = false;
    for (ConstMIBundleOperands MOI(*MI); MOI.isValid(); ++MOI) {
      if (!MOI->isReg() || MOI->getReg() != Reg)
        continue;
      unsigned Sub = MOI->getSubReg();
      LaneBitmask SLM = Sub != 0 ? TRI->getSubRegIndexLaneMask(Sub)
                                 : LaneBitmask::getAll();
      if (MOI->isDef()) {
        if (Sub != 0) {
          hasSubRegDef = true;
          // An operand %0:sub0 reads %0:sub1..n. Invert the lane
          // mask for subregister defs. Read-undef defs will be handled by
          // readsReg below.
          SLM = ~SLM;
        }
        if (MOI->isDead())
          hasDeadDef = true;
      }
      // Ignore operands whose lanes don't intersect the subrange we check.
      if (LaneMask.any() && (LaneMask & SLM).none())
        continue;
      if (MOI->readsReg())
        hasRead = true;
    }
    if (S.end.isDead()) {
      // Make sure that the corresponding machine operand for a "dead" live
      // range has the dead flag. We cannot perform this check for subregister
      // liveranges as partially dead values are allowed.
      if (LaneMask.none() && !hasDeadDef) {
        report("Instruction ending live segment on dead slot has no dead flag",
               MI);
        report_context(LR, Reg, LaneMask);
        report_context(S);
      }
    } else {
      if (!hasRead) {
        // When tracking subregister liveness, the main range must start new
        // values on partial register writes, even if there is no read.
        if (!MRI->shouldTrackSubRegLiveness(Reg) || LaneMask.any() ||
            !hasSubRegDef) {
          report("Instruction ending live segment doesn't read the register",
                 MI);
          report_context(LR, Reg, LaneMask);
          report_context(S);
        }
      }
    }
  }

  // Now check all the basic blocks in this live segment.
  MachineFunction::const_iterator MFI = MBB->getIterator();
  // Is this live segment the beginning of a non-PHIDef VN?
  if (S.start == VNI->def && !VNI->isPHIDef()) {
    // Not live-in to any blocks.
    if (MBB == EndMBB)
      return;
    // Skip this block.
    ++MFI;
  }
  // Walk each block the segment is live into and check its predecessors.
  while (true) {
    assert(LiveInts->isLiveInToMBB(LR, &*MFI));
    // We don't know how to track physregs into a landing pad.
    if (!TargetRegisterInfo::isVirtualRegister(Reg) &&
        MFI->isEHPad()) {
      if (&*MFI == EndMBB)
        break;
      ++MFI;
      continue;
    }

    // Is VNI a PHI-def in the current block?
    bool IsPHI = VNI->isPHIDef() &&
      VNI->def == LiveInts->getMBBStartIdx(&*MFI);

    // Check that VNI is live-out of all predecessors.
    for (MachineBasicBlock::const_pred_iterator PI = MFI->pred_begin(),
         PE = MFI->pred_end(); PI != PE; ++PI) {
      SlotIndex PEnd = LiveInts->getMBBEndIdx(*PI);
      const VNInfo *PVNI = LR.getVNInfoBefore(PEnd);

      // All predecessors must have a live-out value. However for a phi
      // instruction with subregister intervals
      // only one of the subregisters (not necessarily the current one) needs to
      // be defined.
      if (!PVNI && (LaneMask.none() || !IsPHI) ) {
        report("Register not marked live out of predecessor", *PI);
        report_context(LR, Reg, LaneMask);
        report_context(*VNI);
        errs() << " live into " << printMBBReference(*MFI) << '@'
               << LiveInts->getMBBStartIdx(&*MFI) << ", not live before "
               << PEnd << '\n';
        continue;
      }

      // Only PHI-defs can take different predecessor values.
      if (!IsPHI && PVNI != VNI) {
        report("Different value live out of predecessor", *PI);
        report_context(LR, Reg, LaneMask);
        errs() << "Valno #" << PVNI->id << " live out of "
               << printMBBReference(*(*PI)) << '@' << PEnd << "\nValno #"
               << VNI->id << " live into " << printMBBReference(*MFI) << '@'
               << LiveInts->getMBBStartIdx(&*MFI) << '\n';
      }
    }
    if (&*MFI == EndMBB)
      break;
    ++MFI;
  }
}
2169
verifyLiveRange(const LiveRange & LR,unsigned Reg,LaneBitmask LaneMask)2170 void MachineVerifier::verifyLiveRange(const LiveRange &LR, unsigned Reg,
2171 LaneBitmask LaneMask) {
2172 for (const VNInfo *VNI : LR.valnos)
2173 verifyLiveRangeValue(LR, VNI, Reg, LaneMask);
2174
2175 for (LiveRange::const_iterator I = LR.begin(), E = LR.end(); I != E; ++I)
2176 verifyLiveRangeSegment(LR, I, Reg, LaneMask);
2177 }
2178
verifyLiveInterval(const LiveInterval & LI)2179 void MachineVerifier::verifyLiveInterval(const LiveInterval &LI) {
2180 unsigned Reg = LI.reg;
2181 assert(TargetRegisterInfo::isVirtualRegister(Reg));
2182 verifyLiveRange(LI, Reg);
2183
2184 LaneBitmask Mask;
2185 LaneBitmask MaxMask = MRI->getMaxLaneMaskForVReg(Reg);
2186 for (const LiveInterval::SubRange &SR : LI.subranges()) {
2187 if ((Mask & SR.LaneMask).any()) {
2188 report("Lane masks of sub ranges overlap in live interval", MF);
2189 report_context(LI);
2190 }
2191 if ((SR.LaneMask & ~MaxMask).any()) {
2192 report("Subrange lanemask is invalid", MF);
2193 report_context(LI);
2194 }
2195 if (SR.empty()) {
2196 report("Subrange must not be empty", MF);
2197 report_context(SR, LI.reg, SR.LaneMask);
2198 }
2199 Mask |= SR.LaneMask;
2200 verifyLiveRange(SR, LI.reg, SR.LaneMask);
2201 if (!LI.covers(SR)) {
2202 report("A Subrange is not covered by the main range", MF);
2203 report_context(LI);
2204 }
2205 }
2206
2207 // Check the LI only has one connected component.
2208 ConnectedVNInfoEqClasses ConEQ(*LiveInts);
2209 unsigned NumComp = ConEQ.Classify(LI);
2210 if (NumComp > 1) {
2211 report("Multiple connected components in live interval", MF);
2212 report_context(LI);
2213 for (unsigned comp = 0; comp != NumComp; ++comp) {
2214 errs() << comp << ": valnos";
2215 for (LiveInterval::const_vni_iterator I = LI.vni_begin(),
2216 E = LI.vni_end(); I!=E; ++I)
2217 if (comp == ConEQ.getEqClass(*I))
2218 errs() << ' ' << (*I)->id;
2219 errs() << '\n';
2220 }
2221 }
2222 }
2223
namespace {

/// Call-frame state at the boundaries of a basic block.
///
/// FrameSetup and FrameDestroy can have zero adjustment, so a single integer
/// cannot tell whether a FrameSetup or a FrameDestroy was last seen when the
/// value is zero. Pair each adjustment value with a flag recording whether a
/// FrameSetup is still open.
struct StackStateOfBB {
  StackStateOfBB() = default;
  StackStateOfBB(int EntryVal, int ExitVal, bool EntrySetup, bool ExitSetup)
      : EntryValue(EntryVal), ExitValue(ExitVal), EntryIsSetup(EntrySetup),
        ExitIsSetup(ExitSetup) {}

  // Stack adjustment on block entry/exit. Can be negative, which means we
  // are setting up a frame.
  int EntryValue = 0;
  int ExitValue = 0;
  // Whether a FrameSetup is pending (not yet destroyed) on entry/exit.
  bool EntryIsSetup = false;
  bool ExitIsSetup = false;
};

} // end anonymous namespace
2244
/// Make sure on every path through the CFG, a FrameSetup <n> is always followed
/// by a FrameDestroy <n>, stack adjustments are identical on all
/// CFG edges to a merge point, and frame is destroyed at end of a return block.
void MachineVerifier::verifyStackFrame() {
  unsigned FrameSetupOpcode = TII->getCallFrameSetupOpcode();
  unsigned FrameDestroyOpcode = TII->getCallFrameDestroyOpcode();
  // ~0u means the target defines no such pseudo-opcode; nothing to verify.
  if (FrameSetupOpcode == ~0u && FrameDestroyOpcode == ~0u)
    return;

  // Per-block entry/exit stack state, indexed by block number.
  SmallVector<StackStateOfBB, 8> SPState;
  SPState.resize(MF->getNumBlockIDs());
  df_iterator_default_set<const MachineBasicBlock*> Reachable;

  // Visit the MBBs in DFS order.
  for (df_ext_iterator<const MachineFunction *,
                       df_iterator_default_set<const MachineBasicBlock *>>
       DFI = df_ext_begin(MF, Reachable), DFE = df_ext_end(MF, Reachable);
       DFI != DFE; ++DFI) {
    const MachineBasicBlock *MBB = *DFI;

    StackStateOfBB BBState;
    // Check the exit state of the DFS stack predecessor.
    // The entry state of this block is inherited from the previous block on
    // the current DFS path (the entry block starts from the default state).
    if (DFI.getPathLength() >= 2) {
      const MachineBasicBlock *StackPred = DFI.getPath(DFI.getPathLength() - 2);
      assert(Reachable.count(StackPred) &&
             "DFS stack predecessor is already visited.\n");
      BBState.EntryValue = SPState[StackPred->getNumber()].ExitValue;
      BBState.EntryIsSetup = SPState[StackPred->getNumber()].ExitIsSetup;
      BBState.ExitValue = BBState.EntryValue;
      BBState.ExitIsSetup = BBState.EntryIsSetup;
    }

    // Update stack state by checking contents of MBB.
    for (const auto &I : *MBB) {
      if (I.getOpcode() == FrameSetupOpcode) {
        // Nested FrameSetups are not allowed.
        if (BBState.ExitIsSetup)
          report("FrameSetup is after another FrameSetup", &I);
        BBState.ExitValue -= TII->getFrameTotalSize(I);
        BBState.ExitIsSetup = true;
      }

      if (I.getOpcode() == FrameDestroyOpcode) {
        int Size = TII->getFrameTotalSize(I);
        if (!BBState.ExitIsSetup)
          report("FrameDestroy is not after a FrameSetup", &I);
        // The destroy size must match the pending setup's adjustment
        // (compared as a magnitude, since ExitValue goes negative on setup).
        int AbsSPAdj = BBState.ExitValue < 0 ? -BBState.ExitValue :
                                               BBState.ExitValue;
        if (BBState.ExitIsSetup && AbsSPAdj != Size) {
          report("FrameDestroy <n> is after FrameSetup <m>", &I);
          errs() << "FrameDestroy <" << Size << "> is after FrameSetup <"
              << AbsSPAdj << ">.\n";
        }
        BBState.ExitValue += Size;
        BBState.ExitIsSetup = false;
      }
    }
    SPState[MBB->getNumber()] = BBState;

    // Make sure the exit state of any predecessor is consistent with the entry
    // state.
    for (MachineBasicBlock::const_pred_iterator I = MBB->pred_begin(),
         E = MBB->pred_end(); I != E; ++I) {
      if (Reachable.count(*I) &&
          (SPState[(*I)->getNumber()].ExitValue != BBState.EntryValue ||
           SPState[(*I)->getNumber()].ExitIsSetup != BBState.EntryIsSetup)) {
        report("The exit stack state of a predecessor is inconsistent.", MBB);
        errs() << "Predecessor " << printMBBReference(*(*I))
               << " has exit state (" << SPState[(*I)->getNumber()].ExitValue
               << ", " << SPState[(*I)->getNumber()].ExitIsSetup << "), while "
               << printMBBReference(*MBB) << " has entry state ("
               << BBState.EntryValue << ", " << BBState.EntryIsSetup << ").\n";
      }
    }

    // Make sure the entry state of any successor is consistent with the exit
    // state.
    for (MachineBasicBlock::const_succ_iterator I = MBB->succ_begin(),
         E = MBB->succ_end(); I != E; ++I) {
      if (Reachable.count(*I) &&
          (SPState[(*I)->getNumber()].EntryValue != BBState.ExitValue ||
           SPState[(*I)->getNumber()].EntryIsSetup != BBState.ExitIsSetup)) {
        report("The entry stack state of a successor is inconsistent.", MBB);
        errs() << "Successor " << printMBBReference(*(*I))
               << " has entry state (" << SPState[(*I)->getNumber()].EntryValue
               << ", " << SPState[(*I)->getNumber()].EntryIsSetup << "), while "
               << printMBBReference(*MBB) << " has exit state ("
               << BBState.ExitValue << ", " << BBState.ExitIsSetup << ").\n";
      }
    }

    // Make sure a basic block with return ends with zero stack adjustment.
    if (!MBB->empty() && MBB->back().isReturn()) {
      if (BBState.ExitIsSetup)
        report("A return block ends with a FrameSetup.", MBB);
      if (BBState.ExitValue)
        report("A return block ends with a nonzero stack adjustment.", MBB);
    }
  }
}
2344