//===- RegisterScavenging.cpp - Machine register scavenging ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file implements the machine register scavenger. It can provide
/// information, such as unused registers, at any point in a machine basic
/// block. It also provides a mechanism to make registers available by evicting
/// them to spill slots.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/RegisterScavenging.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/LiveRegUnits.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/InitializePasses.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <iterator>
#include <limits>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "reg-scavenging"

STATISTIC(NumScavengedRegs, "Number of frame index regs scavenged");

void RegScavenger::setRegUsed(Register Reg, LaneBitmask LaneMask) {
  LiveUnits.addRegMasked(Reg, LaneMask);
}

void RegScavenger::init(MachineBasicBlock &MBB) {
  MachineFunction &MF = *MBB.getParent();
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getSubtarget().getRegisterInfo();
  MRI = &MF.getRegInfo();
  LiveUnits.init(*TRI);

  assert((NumRegUnits == 0 || NumRegUnits == TRI->getNumRegUnits()) &&
         "Target changed?");

  // Self-initialize.
  if (!this->MBB) {
    NumRegUnits = TRI->getNumRegUnits();
    KillRegUnits.resize(NumRegUnits);
    DefRegUnits.resize(NumRegUnits);
    TmpRegUnits.resize(NumRegUnits);
  }
  this->MBB = &MBB;

  for (ScavengedInfo &SI : Scavenged) {
    SI.Reg = 0;
    SI.Restore = nullptr;
  }

  Tracking = false;
}

void RegScavenger::enterBasicBlock(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveIns(MBB);
}

void RegScavenger::enterBasicBlockEnd(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveOuts(MBB);

  // Move the internal iterator to the last instruction of the block.
  if (!MBB.empty()) {
    MBBI = std::prev(MBB.end());
    Tracking = true;
  }
}

void RegScavenger::addRegUnits(BitVector &BV, MCRegister Reg) {
  for (MCRegUnit Unit : TRI->regunits(Reg))
    BV.set(Unit);
}

void RegScavenger::removeRegUnits(BitVector &BV, MCRegister Reg) {
  for (MCRegUnit Unit : TRI->regunits(Reg))
    BV.reset(Unit);
}

void RegScavenger::determineKillsAndDefs() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  MachineInstr &MI = *MBBI;
  assert(!MI.isDebugInstr() && "Debug values have no kills or defs");

  // Find out which registers are early clobbered, killed, defined, and marked
  // def-dead in this instruction.
  KillRegUnits.reset();
  DefRegUnits.reset();
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isRegMask()) {
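      // A register mask clobbers a register unit if it clobbers any of the
      // unit's root registers.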
      TmpRegUnits.reset();
      for (unsigned RU = 0, RUEnd = TRI->getNumRegUnits(); RU != RUEnd; ++RU) {
        for (MCRegUnitRootIterator RURI(RU, TRI); RURI.isValid(); ++RURI) {
          if (MO.clobbersPhysReg(*RURI)) {
            TmpRegUnits.set(RU);
            break;
          }
        }
      }

      // Apply the mask.
      KillRegUnits |= TmpRegUnits;
    }
    if (!MO.isReg())
      continue;
    if (!MO.getReg().isPhysical() || isReserved(MO.getReg()))
      continue;
    MCRegister Reg = MO.getReg().asMCReg();

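    // Kills and dead defs free their register units after this instruction;
    // live defs occupy them.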
    if (MO.isUse()) {
      // Ignore undef uses.
      if (MO.isUndef())
        continue;
      if (MO.isKill())
        addRegUnits(KillRegUnits, Reg);
    } else {
      assert(MO.isDef());
      if (MO.isDead())
        addRegUnits(KillRegUnits, Reg);
      else
        addRegUnits(DefRegUnits, Reg);
    }
  }
}

void RegScavenger::forward() {
  // Move ptr forward.
  if (!Tracking) {
    MBBI = MBB->begin();
    Tracking = true;
  } else {
    assert(MBBI != MBB->end() && "Already past the end of the basic block!");
    MBBI = std::next(MBBI);
  }
  assert(MBBI != MBB->end() && "Already at the end of the basic block!");

  MachineInstr &MI = *MBBI;

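  // Expire scavenged registers whose restore point is this instruction; their
  // scavenging slots become available again.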
  for (ScavengedInfo &I : Scavenged) {
    if (I.Restore != &MI)
      continue;

    I.Reg = 0;
    I.Restore = nullptr;
  }

  if (MI.isDebugOrPseudoInstr())
    return;

  determineKillsAndDefs();

  // Verify uses and defs.
#ifndef NDEBUG
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg())
      continue;
    Register Reg = MO.getReg();
    if (!Reg.isPhysical() || isReserved(Reg))
      continue;
    if (MO.isUse()) {
      if (MO.isUndef())
        continue;
      if (!isRegUsed(Reg)) {
        // Check if it's partial live: e.g.
        // D0 = insert_subreg undef D0, S0
        // ... D0
        // The problem is the insert_subreg could be eliminated. The use of
        // D0 is using a partially undef value. This is not *incorrect* since
        // S1 can be freely clobbered.
        // Ideally we would like a way to model this, but leaving the
        // insert_subreg around causes both correctness and performance issues.
        if (none_of(TRI->subregs(Reg),
                    [&](MCPhysReg SR) { return isRegUsed(SR); }) &&
            none_of(TRI->superregs(Reg),
                    [&](MCPhysReg SR) { return isRegUsed(SR); })) {
          MBB->getParent()->verify(nullptr, "In Register Scavenger");
          llvm_unreachable("Using an undefined register!");
        }
      }
    } else {
      assert(MO.isDef());
#if 0
      // FIXME: Enable this once we've figured out how to correctly transfer
      // implicit kills during codegen passes like the coalescer.
      assert((KillRegs.test(Reg) || isUnused(Reg) ||
              isLiveInButUnusedBefore(Reg, MI, MBB, TRI, MRI)) &&
             "Re-defining a live register!");
#endif
    }
  }
#endif // NDEBUG

  // Commit the changes.
  setUnused(KillRegUnits);
  setUsed(DefRegUnits);
}

void RegScavenger::backward() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  const MachineInstr &MI = *MBBI;
  LiveUnits.stepBackward(MI);

  // Expire scavenge spill frameindex uses.
  for (ScavengedInfo &I : Scavenged) {
    if (I.Restore == &MI) {
      I.Reg = 0;
      I.Restore = nullptr;
    }
  }

  if (MBBI == MBB->begin()) {
    MBBI = MachineBasicBlock::iterator(nullptr);
    Tracking = false;
  } else
    --MBBI;
}

bool RegScavenger::isRegUsed(Register Reg, bool includeReserved) const {
  if (isReserved(Reg))
    return includeReserved;
  return !LiveUnits.available(Reg);
}

Register RegScavenger::FindUnusedReg(const TargetRegisterClass *RC) const {
  for (Register Reg : *RC) {
    if (!isRegUsed(Reg)) {
      LLVM_DEBUG(dbgs() << "Scavenger found unused reg: " << printReg(Reg, TRI)
                        << "\n");
      return Reg;
    }
  }
  return 0;
}

BitVector RegScavenger::getRegsAvailable(const TargetRegisterClass *RC) {
  BitVector Mask(TRI->getNumRegs());
  for (Register Reg : *RC)
    if (!isRegUsed(Reg))
      Mask.set(Reg);
  return Mask;
}

/// Given the register units \p LiveOut that are live at position \p From,
/// search backwards to find a register that is part of \p AllocationOrder and
/// not used/clobbered until the point \p To. If there are multiple candidates,
/// continue searching and pick the one that is not used/clobbered for the
/// longest time.
/// Returns the register and the earliest position we know it to be free, or
/// the position MBB.end() if no register is available.
static std::pair<MCPhysReg, MachineBasicBlock::iterator>
findSurvivorBackwards(const MachineRegisterInfo &MRI,
    MachineBasicBlock::iterator From, MachineBasicBlock::iterator To,
    const LiveRegUnits &LiveOut, ArrayRef<MCPhysReg> AllocationOrder,
    bool RestoreAfter) {
  bool FoundTo = false;
  MCPhysReg Survivor = 0;
  MachineBasicBlock::iterator Pos;
  MachineBasicBlock &MBB = *From->getParent();
  unsigned InstrLimit = 25;
  unsigned InstrCountDown = InstrLimit;
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  LiveRegUnits Used(TRI);

  assert(From->getParent() == To->getParent() &&
         "Target instruction is in other than current basic block, use "
         "enterBasicBlockEnd first");

  for (MachineBasicBlock::iterator I = From;; --I) {
    const MachineInstr &MI = *I;

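    // Remember every register unit this instruction reads, writes or clobbers;
    // a surviving register must stay out of this set.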
    Used.accumulate(MI);

    if (I == To) {
      // See if one of the registers in RC wasn't used so far.
      for (MCPhysReg Reg : AllocationOrder) {
        if (!MRI.isReserved(Reg) && Used.available(Reg) &&
            LiveOut.available(Reg))
          return std::make_pair(Reg, MBB.end());
      }
      // Otherwise we will continue up to InstrLimit instructions to find
      // the register that is not defined/used for the longest time.
      FoundTo = true;
      Pos = To;
      // Note: It was fine so far to start our search at From; however, now
      // that we have to spill and can only place the restore after From, add
      // the regs used/defined by std::next(From) to the set.
      if (RestoreAfter)
        Used.accumulate(*std::next(From));
    }
    if (FoundTo) {
      // Don't search into FrameSetup instructions if we started from a
      // non-FrameSetup instruction. Otherwise, the spill position may end up
      // before the FrameSetup instructions.
      if (!From->getFlag(MachineInstr::FrameSetup) &&
          MI.getFlag(MachineInstr::FrameSetup))
        break;

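      // If no survivor has been chosen yet, or the current one has been used
      // in the meantime, pick a fresh register that is still free; give up if
      // none is left.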
      if (Survivor == 0 || !Used.available(Survivor)) {
        MCPhysReg AvailableReg = 0;
        for (MCPhysReg Reg : AllocationOrder) {
          if (!MRI.isReserved(Reg) && Used.available(Reg)) {
            AvailableReg = Reg;
            break;
          }
        }
        if (AvailableReg == 0)
          break;
        Survivor = AvailableReg;
      }
      if (--InstrCountDown == 0)
        break;

      // Keep searching when we find a vreg, since the spilled register will
      // also be useful for that other vreg later.
      bool FoundVReg = false;
      for (const MachineOperand &MO : MI.operands()) {
        if (MO.isReg() && MO.getReg().isVirtual()) {
          FoundVReg = true;
          break;
        }
      }
      if (FoundVReg) {
        InstrCountDown = InstrLimit;
        Pos = I;
      }
      if (I == MBB.begin())
        break;
    }
    assert(I != MBB.begin() && "Did not find target instruction while "
                               "iterating backwards");
  }

  return std::make_pair(Survivor, Pos);
}

static unsigned getFrameIndexOperandNum(MachineInstr &MI) {
  unsigned i = 0;
  while (!MI.getOperand(i).isFI()) {
    ++i;
    assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!");
  }
  return i;
}

RegScavenger::ScavengedInfo &
RegScavenger::spill(Register Reg, const TargetRegisterClass &RC, int SPAdj,
                    MachineBasicBlock::iterator Before,
                    MachineBasicBlock::iterator &UseMI) {
  // Find an available scavenging slot with size and alignment matching
  // the requirements of the class RC.
  const MachineFunction &MF = *Before->getMF();
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned NeedSize = TRI->getSpillSize(RC);
  Align NeedAlign = TRI->getSpillAlign(RC);

  unsigned SI = Scavenged.size(), Diff = std::numeric_limits<unsigned>::max();
  int FIB = MFI.getObjectIndexBegin(), FIE = MFI.getObjectIndexEnd();
  for (unsigned I = 0; I < Scavenged.size(); ++I) {
    if (Scavenged[I].Reg != 0)
      continue;
    // Verify that this slot is valid for this register.
    int FI = Scavenged[I].FrameIndex;
    if (FI < FIB || FI >= FIE)
      continue;
    unsigned S = MFI.getObjectSize(FI);
    Align A = MFI.getObjectAlign(FI);
    if (NeedSize > S || NeedAlign > A)
      continue;
    // Avoid wasting slots with large size and/or large alignment. Pick one
    // that is the best fit for this register class (in street metric).
    // Picking a larger slot than necessary could happen if a slot for a
    // larger register is reserved before a slot for a smaller one. When
    // trying to spill a smaller register, the large slot would be found
    // first, thus making it impossible to spill the larger register later.
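    // A smaller D means a tighter fit: the combined slack in size and
    // alignment over what RC actually needs.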
    unsigned D = (S - NeedSize) + (A.value() - NeedAlign.value());
    if (D < Diff) {
      SI = I;
      Diff = D;
    }
  }

  if (SI == Scavenged.size()) {
    // We need to scavenge a register but have no spill slot; the target
    // must know how to do it (if not, we'll assert below).
    Scavenged.push_back(ScavengedInfo(FIE));
  }

  // Avoid infinite regress
  Scavenged[SI].Reg = Reg;

  // If the target knows how to save/restore the register, let it do so;
  // otherwise, use the emergency stack spill slot.
  if (!TRI->saveScavengerRegister(*MBB, Before, UseMI, &RC, Reg)) {
    // Spill the scavenged register before \p Before.
    int FI = Scavenged[SI].FrameIndex;
    if (FI < FIB || FI >= FIE) {
      report_fatal_error(Twine("Error while trying to spill ") +
                         TRI->getName(Reg) + " from class " +
                         TRI->getRegClassName(&RC) +
                         ": Cannot scavenge register without an emergency "
                         "spill slot!");
    }
    TII->storeRegToStackSlot(*MBB, Before, Reg, true, FI, &RC, TRI, Register());
    MachineBasicBlock::iterator II = std::prev(Before);

    unsigned FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);

    // Restore the scavenged register before its use (or first terminator).
    TII->loadRegFromStackSlot(*MBB, UseMI, Reg, FI, &RC, TRI, Register());
    II = std::prev(UseMI);

    FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);
  }
  return Scavenged[SI];
}

Register RegScavenger::scavengeRegisterBackwards(const TargetRegisterClass &RC,
                                                 MachineBasicBlock::iterator To,
                                                 bool RestoreAfter, int SPAdj,
                                                 bool AllowSpill) {
  const MachineBasicBlock &MBB = *To->getParent();
  const MachineFunction &MF = *MBB.getParent();

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  ArrayRef<MCPhysReg> AllocationOrder = RC.getRawAllocationOrder(MF);
  std::pair<MCPhysReg, MachineBasicBlock::iterator> P =
      findSurvivorBackwards(*MRI, MBBI, To, LiveUnits, AllocationOrder,
                            RestoreAfter);
  MCPhysReg Reg = P.first;
  MachineBasicBlock::iterator SpillBefore = P.second;
  // Found an available register?
  if (Reg != 0 && SpillBefore == MBB.end()) {
    LLVM_DEBUG(dbgs() << "Scavenged free register: " << printReg(Reg, TRI)
               << '\n');
    return Reg;
  }

  if (!AllowSpill)
    return 0;

  assert(Reg != 0 && "No register left to scavenge!");

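  // Restore the old value of the scavenged register after MBBI, or after
  // std::next(MBBI) if it must also stay reserved through that instruction.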
  MachineBasicBlock::iterator ReloadAfter =
    RestoreAfter ? std::next(MBBI) : MBBI;
  MachineBasicBlock::iterator ReloadBefore = std::next(ReloadAfter);
  if (ReloadBefore != MBB.end())
    LLVM_DEBUG(dbgs() << "Reload before: " << *ReloadBefore << '\n');
  ScavengedInfo &Scavenged = spill(Reg, RC, SPAdj, SpillBefore, ReloadBefore);
  Scavenged.Restore = &*std::prev(SpillBefore);
  LiveUnits.removeReg(Reg);
  LLVM_DEBUG(dbgs() << "Scavenged register with spill: " << printReg(Reg, TRI)
             << " until " << *SpillBefore);
  return Reg;
}

/// Allocate a register for the virtual register \p VReg. The last use of
/// \p VReg is around the current position of the register scavenger \p RS.
/// \p ReserveAfter controls whether the scavenged register needs to be
/// reserved after the current instruction; otherwise it will only be reserved
/// before the current instruction.
static Register scavengeVReg(MachineRegisterInfo &MRI, RegScavenger &RS,
                             Register VReg, bool ReserveAfter) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
#ifndef NDEBUG
  // Verify that all definitions and uses are in the same basic block.
  const MachineBasicBlock *CommonMBB = nullptr;
  // Real definition for the reg, re-definitions are not considered.
  const MachineInstr *RealDef = nullptr;
  for (MachineOperand &MO : MRI.reg_nodbg_operands(VReg)) {
    MachineBasicBlock *MBB = MO.getParent()->getParent();
    if (CommonMBB == nullptr)
      CommonMBB = MBB;
    assert(MBB == CommonMBB && "All defs+uses must be in the same basic block");
    if (MO.isDef()) {
      const MachineInstr &MI = *MO.getParent();
      if (!MI.readsRegister(VReg, &TRI)) {
        assert((!RealDef || RealDef == &MI) &&
               "Can have at most one definition which is not a redefinition");
        RealDef = &MI;
      }
    }
  }
  assert(RealDef != nullptr && "Must have at least 1 Def");
#endif

  // We should only have one definition of the register. However, to
  // accommodate the requirements of two-address code we also allow definitions
  // in subsequent instructions provided they also read the register. That way
  // we get a single contiguous lifetime.
  //
  // Definitions in MRI.def_begin() are unordered; search for the first.
  MachineRegisterInfo::def_iterator FirstDef = llvm::find_if(
      MRI.def_operands(VReg), [VReg, &TRI](const MachineOperand &MO) {
        return !MO.getParent()->readsRegister(VReg, &TRI);
      });
  assert(FirstDef != MRI.def_end() &&
         "Must have one definition that does not redefine vreg");
  MachineInstr &DefMI = *FirstDef->getParent();

  // The register scavenger will report a free register, inserting an emergency
  // spill/reload if necessary.
  int SPAdj = 0;
  const TargetRegisterClass &RC = *MRI.getRegClass(VReg);
  Register SReg = RS.scavengeRegisterBackwards(RC, DefMI.getIterator(),
                                               ReserveAfter, SPAdj);
  MRI.replaceRegWith(VReg, SReg);
  ++NumScavengedRegs;
  return SReg;
}

/// Allocate (scavenge) vregs inside a single basic block.
/// Returns true if the target spill callback created new vregs and a 2nd pass
/// is necessary.
static bool scavengeFrameVirtualRegsInBlock(MachineRegisterInfo &MRI,
                                            RegScavenger &RS,
                                            MachineBasicBlock &MBB) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  RS.enterBasicBlockEnd(MBB);

  unsigned InitialNumVirtRegs = MRI.getNumVirtRegs();
  bool NextInstructionReadsVReg = false;
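  // Walk the block backwards. At each position, first assign vregs read by
  // the already-visited instruction *std::next(I) (their register must stay
  // reserved past it), then assign vregs defined by *I.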
  for (MachineBasicBlock::iterator I = MBB.end(); I != MBB.begin(); ) {
    --I;
    // Move RegScavenger to the position between *I and *std::next(I).
    RS.backward(I);

    // Look for unassigned vregs in the uses of *std::next(I).
    if (NextInstructionReadsVReg) {
      MachineBasicBlock::iterator N = std::next(I);
      const MachineInstr &NMI = *N;
      for (const MachineOperand &MO : NMI.operands()) {
        if (!MO.isReg())
          continue;
        Register Reg = MO.getReg();
        // We only care about virtual registers and ignore virtual registers
        // created by the target callbacks in the process (those will be handled
        // in a scavenging round).
        if (!Reg.isVirtual() ||
            Register::virtReg2Index(Reg) >= InitialNumVirtRegs)
          continue;
        if (!MO.readsReg())
          continue;

        Register SReg = scavengeVReg(MRI, RS, Reg, true);
        N->addRegisterKilled(SReg, &TRI, false);
        RS.setRegUsed(SReg);
      }
    }

    // Look for unassigned vregs in the defs of *I.
    NextInstructionReadsVReg = false;
    const MachineInstr &MI = *I;
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg())
        continue;
      Register Reg = MO.getReg();
      // Only vregs, no newly created vregs (see above).
      if (!Reg.isVirtual() ||
          Register::virtReg2Index(Reg) >= InitialNumVirtRegs)
        continue;
      // We have to look at all operands anyway so we can precalculate here
      // whether there is a reading operand. This allows us to skip the use
      // step in the next iteration if there was none.
      assert(!MO.isInternalRead() && "Cannot assign inside bundles");
      assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
      if (MO.readsReg()) {
        NextInstructionReadsVReg = true;
      }
      if (MO.isDef()) {
        Register SReg = scavengeVReg(MRI, RS, Reg, false);
        I->addRegisterDead(SReg, &TRI, false);
      }
    }
  }
#ifndef NDEBUG
  for (const MachineOperand &MO : MBB.front().operands()) {
    if (!MO.isReg() || !MO.getReg().isVirtual())
      continue;
    assert(!MO.isInternalRead() && "Cannot assign inside bundles");
    assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
    assert(!MO.readsReg() && "Vreg use in first instruction not allowed");
  }
#endif

  return MRI.getNumVirtRegs() != InitialNumVirtRegs;
}

void llvm::scavengeFrameVirtualRegs(MachineFunction &MF, RegScavenger &RS) {
  // FIXME: Iterating over the instruction stream is unnecessary. We can simply
  // iterate over the vreg use list, which at this point only contains machine
  // operands for which eliminateFrameIndex needs a new scratch reg.
  MachineRegisterInfo &MRI = MF.getRegInfo();
  // Shortcut.
  if (MRI.getNumVirtRegs() == 0) {
    MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
    return;
  }

  // Run through the instructions and find any virtual registers.
  for (MachineBasicBlock &MBB : MF) {
    if (MBB.empty())
      continue;

    bool Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
    if (Again) {
      LLVM_DEBUG(dbgs() << "Warning: Required two scavenging passes for block "
                        << MBB.getName() << '\n');
      Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
      // The target required a 2nd run (because it created new vregs while
      // spilling). Refuse to do another pass to keep compile time in check.
      if (Again)
        report_fatal_error("Incomplete scavenging after 2nd pass");
    }
  }

  MRI.clearVirtRegs();
  MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
}

namespace {

/// This class runs register scavenging independently of the
/// PrologEpilogInserter. It is used for testing.
class ScavengerTest : public MachineFunctionPass {
public:
  static char ID;

  ScavengerTest() : MachineFunctionPass(ID) {}

  bool runOnMachineFunction(MachineFunction &MF) override {
    const TargetSubtargetInfo &STI = MF.getSubtarget();
    const TargetFrameLowering &TFL = *STI.getFrameLowering();

    RegScavenger RS;
    // Let's hope that calling those outside of PrologEpilogInserter works
    // well enough to initialize the scavenger with some emergency spill slots
    // for the target.
    BitVector SavedRegs;
    TFL.determineCalleeSaves(MF, SavedRegs, &RS);
    TFL.processFunctionBeforeFrameFinalized(MF, &RS);

    // Let's scavenge the current function
    scavengeFrameVirtualRegs(MF, RS);
    return true;
  }
};

} // end anonymous namespace

char ScavengerTest::ID;

INITIALIZE_PASS(ScavengerTest, "scavenger-test",
                "Scavenge virtual registers inside basic blocks", false, false)