1 //===- RegisterScavenging.cpp - Machine register scavenging ---------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 /// \file
10 /// This file implements the machine register scavenger. It can provide
11 /// information, such as unused registers, at any point in a machine basic
12 /// block. It also provides a mechanism to make registers available by evicting
13 /// them to spill slots.
14 //
15 //===----------------------------------------------------------------------===//
16 
17 #include "llvm/CodeGen/RegisterScavenging.h"
18 #include "llvm/ADT/ArrayRef.h"
19 #include "llvm/ADT/BitVector.h"
20 #include "llvm/ADT/SmallVector.h"
21 #include "llvm/ADT/Statistic.h"
22 #include "llvm/CodeGen/LiveRegUnits.h"
23 #include "llvm/CodeGen/MachineBasicBlock.h"
24 #include "llvm/CodeGen/MachineFrameInfo.h"
25 #include "llvm/CodeGen/MachineFunction.h"
26 #include "llvm/CodeGen/MachineFunctionPass.h"
27 #include "llvm/CodeGen/MachineInstr.h"
28 #include "llvm/CodeGen/MachineOperand.h"
29 #include "llvm/CodeGen/MachineRegisterInfo.h"
30 #include "llvm/CodeGen/TargetFrameLowering.h"
31 #include "llvm/CodeGen/TargetInstrInfo.h"
32 #include "llvm/CodeGen/TargetRegisterInfo.h"
33 #include "llvm/CodeGen/TargetSubtargetInfo.h"
34 #include "llvm/InitializePasses.h"
35 #include "llvm/MC/MCRegisterInfo.h"
36 #include "llvm/Pass.h"
37 #include "llvm/Support/Debug.h"
38 #include "llvm/Support/ErrorHandling.h"
39 #include "llvm/Support/raw_ostream.h"
40 #include <cassert>
41 #include <iterator>
42 #include <limits>
43 #include <utility>
44 
45 using namespace llvm;
46 
47 #define DEBUG_TYPE "reg-scavenging"
48 
49 STATISTIC(NumScavengedRegs, "Number of frame index regs scavenged");
50 
/// Mark the lanes in \p LaneMask of register \p Reg as live.
void RegScavenger::setRegUsed(Register Reg, LaneBitmask LaneMask) {
  LiveUnits.addRegMasked(Reg, LaneMask);
}
54 
55 void RegScavenger::init(MachineBasicBlock &MBB) {
56   MachineFunction &MF = *MBB.getParent();
57   TII = MF.getSubtarget().getInstrInfo();
58   TRI = MF.getSubtarget().getRegisterInfo();
59   MRI = &MF.getRegInfo();
60   LiveUnits.init(*TRI);
61 
62   assert((NumRegUnits == 0 || NumRegUnits == TRI->getNumRegUnits()) &&
63          "Target changed?");
64 
65   // Self-initialize.
66   if (!this->MBB) {
67     NumRegUnits = TRI->getNumRegUnits();
68     KillRegUnits.resize(NumRegUnits);
69     DefRegUnits.resize(NumRegUnits);
70     TmpRegUnits.resize(NumRegUnits);
71   }
72   this->MBB = &MBB;
73 
74   for (ScavengedInfo &SI : Scavenged) {
75     SI.Reg = 0;
76     SI.Restore = nullptr;
77   }
78 
79   Tracking = false;
80 }
81 
/// Start tracking liveness from the beginning of basic block \p MBB.
void RegScavenger::enterBasicBlock(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveIns(MBB);
}
86 
87 void RegScavenger::enterBasicBlockEnd(MachineBasicBlock &MBB) {
88   init(MBB);
89   LiveUnits.addLiveOuts(MBB);
90 
91   // Move internal iterator at the last instruction of the block.
92   if (!MBB.empty()) {
93     MBBI = std::prev(MBB.end());
94     Tracking = true;
95   }
96 }
97 
98 void RegScavenger::addRegUnits(BitVector &BV, MCRegister Reg) {
99   for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
100     BV.set(*RUI);
101 }
102 
103 void RegScavenger::removeRegUnits(BitVector &BV, MCRegister Reg) {
104   for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
105     BV.reset(*RUI);
106 }
107 
/// Compute which register units are killed and which are defined by the
/// instruction the internal iterator currently points at; the results are
/// stored in the KillRegUnits and DefRegUnits bitvectors.
void RegScavenger::determineKillsAndDefs() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  MachineInstr &MI = *MBBI;
  assert(!MI.isDebugInstr() && "Debug values have no kills or defs");

  // Find out which registers are early clobbered, killed, defined, and marked
  // def-dead in this instruction.
  KillRegUnits.reset();
  DefRegUnits.reset();
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isRegMask()) {
      // A regmask kills a register unit if any of the unit's root registers
      // is clobbered by the mask.
      TmpRegUnits.reset();
      for (unsigned RU = 0, RUEnd = TRI->getNumRegUnits(); RU != RUEnd; ++RU) {
        for (MCRegUnitRootIterator RURI(RU, TRI); RURI.isValid(); ++RURI) {
          if (MO.clobbersPhysReg(*RURI)) {
            TmpRegUnits.set(RU);
            break;
          }
        }
      }

      // Apply the mask.
      KillRegUnits |= TmpRegUnits;
    }
    // Only explicit physical, non-reserved register operands matter below.
    if (!MO.isReg())
      continue;
    if (!MO.getReg().isPhysical() || isReserved(MO.getReg()))
      continue;
    MCRegister Reg = MO.getReg().asMCReg();

    if (MO.isUse()) {
      // Ignore undef uses.
      if (MO.isUndef())
        continue;
      if (MO.isKill())
        addRegUnits(KillRegUnits, Reg);
    } else {
      assert(MO.isDef());
      // A dead def frees the register; a live def occupies it.
      if (MO.isDead())
        addRegUnits(KillRegUnits, Reg);
      else
        addRegUnits(DefRegUnits, Reg);
    }
  }
}
154 
/// Advance the internal iterator by one instruction, updating liveness to
/// reflect the state after that instruction.
void RegScavenger::forward() {
  // Move ptr forward.
  if (!Tracking) {
    MBBI = MBB->begin();
    Tracking = true;
  } else {
    assert(MBBI != MBB->end() && "Already past the end of the basic block!");
    MBBI = std::next(MBBI);
  }
  assert(MBBI != MBB->end() && "Already at the end of the basic block!");

  MachineInstr &MI = *MBBI;

  // Expire scavenged registers whose restore point is this instruction.
  for (ScavengedInfo &I : Scavenged) {
    if (I.Restore != &MI)
      continue;

    I.Reg = 0;
    I.Restore = nullptr;
  }

  // Debug and pseudo instructions do not affect liveness.
  if (MI.isDebugOrPseudoInstr())
    return;

  determineKillsAndDefs();

  // Verify uses and defs.
#ifndef NDEBUG
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg())
      continue;
    Register Reg = MO.getReg();
    if (!Reg.isPhysical() || isReserved(Reg))
      continue;
    if (MO.isUse()) {
      if (MO.isUndef())
        continue;
      if (!isRegUsed(Reg)) {
        // Check if it's partial live: e.g.
        // D0 = insert_subreg undef D0, S0
        // ... D0
        // The problem is the insert_subreg could be eliminated. The use of
        // D0 is using a partially undef value. This is not *incorrect* since
        // S1 can be freely clobbered.
        // Ideally we would like a way to model this, but leaving the
        // insert_subreg around causes both correctness and performance issues.
        bool SubUsed = false;
        for (const MCPhysReg &SubReg : TRI->subregs(Reg))
          if (isRegUsed(SubReg)) {
            SubUsed = true;
            break;
          }
        bool SuperUsed = false;
        for (MCSuperRegIterator SR(Reg, TRI); SR.isValid(); ++SR) {
          if (isRegUsed(*SR)) {
            SuperUsed = true;
            break;
          }
        }
        if (!SubUsed && !SuperUsed) {
          MBB->getParent()->verify(nullptr, "In Register Scavenger");
          llvm_unreachable("Using an undefined register!");
        }
        (void)SubUsed;
        (void)SuperUsed;
      }
    } else {
      assert(MO.isDef());
#if 0
      // FIXME: Enable this once we've figured out how to correctly transfer
      // implicit kills during codegen passes like the coalescer.
      assert((KillRegs.test(Reg) || isUnused(Reg) ||
              isLiveInButUnusedBefore(Reg, MI, MBB, TRI, MRI)) &&
             "Re-defining a live register!");
#endif
    }
  }
#endif // NDEBUG

  // Commit the changes.
  setUnused(KillRegUnits);
  setUsed(DefRegUnits);
}
238 
239 void RegScavenger::backward() {
240   assert(Tracking && "Must be tracking to determine kills and defs");
241 
242   const MachineInstr &MI = *MBBI;
243   LiveUnits.stepBackward(MI);
244 
245   // Expire scavenge spill frameindex uses.
246   for (ScavengedInfo &I : Scavenged) {
247     if (I.Restore == &MI) {
248       I.Reg = 0;
249       I.Restore = nullptr;
250     }
251   }
252 
253   if (MBBI == MBB->begin()) {
254     MBBI = MachineBasicBlock::iterator(nullptr);
255     Tracking = false;
256   } else
257     --MBBI;
258 }
259 
260 bool RegScavenger::isRegUsed(Register Reg, bool includeReserved) const {
261   if (isReserved(Reg))
262     return includeReserved;
263   return !LiveUnits.available(Reg);
264 }
265 
266 Register RegScavenger::FindUnusedReg(const TargetRegisterClass *RC) const {
267   for (Register Reg : *RC) {
268     if (!isRegUsed(Reg)) {
269       LLVM_DEBUG(dbgs() << "Scavenger found unused reg: " << printReg(Reg, TRI)
270                         << "\n");
271       return Reg;
272     }
273   }
274   return 0;
275 }
276 
277 BitVector RegScavenger::getRegsAvailable(const TargetRegisterClass *RC) {
278   BitVector Mask(TRI->getNumRegs());
279   for (Register Reg : *RC)
280     if (!isRegUsed(Reg))
281       Mask.set(Reg);
282   return Mask;
283 }
284 
/// Search forward from \p StartMI (up to \p InstrLimit instructions, or the
/// first terminator) for the register in \p Candidates that stays untouched
/// the longest, pruning candidates clobbered along the way. On return,
/// \p UseMI is the position where a scavenged register would have to be
/// restored. Returns the surviving register.
Register RegScavenger::findSurvivorReg(MachineBasicBlock::iterator StartMI,
                                       BitVector &Candidates,
                                       unsigned InstrLimit,
                                       MachineBasicBlock::iterator &UseMI) {
  int Survivor = Candidates.find_first();
  assert(Survivor > 0 && "No candidates for scavenging");

  MachineBasicBlock::iterator ME = MBB->getFirstTerminator();
  assert(StartMI != ME && "MI already at terminator");
  MachineBasicBlock::iterator RestorePointMI = StartMI;
  MachineBasicBlock::iterator MI = StartMI;

  bool inVirtLiveRange = false;
  for (++MI; InstrLimit > 0 && MI != ME; ++MI, --InstrLimit) {
    if (MI->isDebugOrPseudoInstr()) {
      ++InstrLimit; // Don't count debug instructions
      continue;
    }
    bool isVirtKillInsn = false;
    bool isVirtDefInsn = false;
    // Remove any candidates touched by instruction.
    for (const MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        Candidates.clearBitsNotInMask(MO.getRegMask());
      if (!MO.isReg() || MO.isUndef() || !MO.getReg())
        continue;
      if (MO.getReg().isVirtual()) {
        if (MO.isDef())
          isVirtDefInsn = true;
        else if (MO.isKill())
          isVirtKillInsn = true;
        continue;
      }
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
    }
    // If we're not in a virtual reg's live range, this is a valid
    // restore point.
    if (!inVirtLiveRange) RestorePointMI = MI;

    // Update whether we're in the live range of a virtual register
    if (isVirtKillInsn) inVirtLiveRange = false;
    if (isVirtDefInsn) inVirtLiveRange = true;

    // Was our survivor untouched by this instruction?
    if (Candidates.test(Survivor))
      continue;

    // All candidates gone?
    if (Candidates.none())
      break;

    // The current survivor was clobbered; switch to another live candidate.
    Survivor = Candidates.find_first();
  }
  // If we ran off the end, that's where we want to restore.
  if (MI == ME) RestorePointMI = ME;
  assert(RestorePointMI != StartMI &&
         "No available scavenger restore location!");

  // We ran out of candidates, so stop the search.
  UseMI = RestorePointMI;
  return Survivor;
}
348 
349 /// Given the bitvector \p Available of free register units at position
350 /// \p From. Search backwards to find a register that is part of \p
351 /// Candidates and not used/clobbered until the point \p To. If there is
352 /// multiple candidates continue searching and pick the one that is not used/
353 /// clobbered for the longest time.
354 /// Returns the register and the earliest position we know it to be free or
355 /// the position MBB.end() if no register is available.
356 static std::pair<MCPhysReg, MachineBasicBlock::iterator>
357 findSurvivorBackwards(const MachineRegisterInfo &MRI,
358     MachineBasicBlock::iterator From, MachineBasicBlock::iterator To,
359     const LiveRegUnits &LiveOut, ArrayRef<MCPhysReg> AllocationOrder,
360     bool RestoreAfter) {
361   bool FoundTo = false;
362   MCPhysReg Survivor = 0;
363   MachineBasicBlock::iterator Pos;
364   MachineBasicBlock &MBB = *From->getParent();
365   unsigned InstrLimit = 25;
366   unsigned InstrCountDown = InstrLimit;
367   const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
368   LiveRegUnits Used(TRI);
369 
370   assert(From->getParent() == To->getParent() &&
371          "Target instruction is in other than current basic block, use "
372          "enterBasicBlockEnd first");
373 
374   for (MachineBasicBlock::iterator I = From;; --I) {
375     const MachineInstr &MI = *I;
376 
377     Used.accumulate(MI);
378 
379     if (I == To) {
380       // See if one of the registers in RC wasn't used so far.
381       for (MCPhysReg Reg : AllocationOrder) {
382         if (!MRI.isReserved(Reg) && Used.available(Reg) &&
383             LiveOut.available(Reg))
384           return std::make_pair(Reg, MBB.end());
385       }
386       // Otherwise we will continue up to InstrLimit instructions to find
387       // the register which is not defined/used for the longest time.
388       FoundTo = true;
389       Pos = To;
390       // Note: It was fine so far to start our search at From, however now that
391       // we have to spill, and can only place the restore after From then
392       // add the regs used/defed by std::next(From) to the set.
393       if (RestoreAfter)
394         Used.accumulate(*std::next(From));
395     }
396     if (FoundTo) {
397       // Don't search to FrameSetup instructions if we were searching from
398       // Non-FrameSetup instructions. Otherwise, the spill position may point
399       // before FrameSetup instructions.
400       if (!From->getFlag(MachineInstr::FrameSetup) &&
401           MI.getFlag(MachineInstr::FrameSetup))
402         break;
403 
404       if (Survivor == 0 || !Used.available(Survivor)) {
405         MCPhysReg AvilableReg = 0;
406         for (MCPhysReg Reg : AllocationOrder) {
407           if (!MRI.isReserved(Reg) && Used.available(Reg)) {
408             AvilableReg = Reg;
409             break;
410           }
411         }
412         if (AvilableReg == 0)
413           break;
414         Survivor = AvilableReg;
415       }
416       if (--InstrCountDown == 0)
417         break;
418 
419       // Keep searching when we find a vreg since the spilled register will
420       // be usefull for this other vreg as well later.
421       bool FoundVReg = false;
422       for (const MachineOperand &MO : MI.operands()) {
423         if (MO.isReg() && MO.getReg().isVirtual()) {
424           FoundVReg = true;
425           break;
426         }
427       }
428       if (FoundVReg) {
429         InstrCountDown = InstrLimit;
430         Pos = I;
431       }
432       if (I == MBB.begin())
433         break;
434     }
435     assert(I != MBB.begin() && "Did not find target instruction while "
436                                "iterating backwards");
437   }
438 
439   return std::make_pair(Survivor, Pos);
440 }
441 
442 static unsigned getFrameIndexOperandNum(MachineInstr &MI) {
443   unsigned i = 0;
444   while (!MI.getOperand(i).isFI()) {
445     ++i;
446     assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!");
447   }
448   return i;
449 }
450 
/// Spill \p Reg (of class \p RC) so it becomes available for scavenging. The
/// store is inserted before \p Before and the reload before \p UseMI, and the
/// frame indices of both new instructions are eliminated immediately. If the
/// target's saveScavengerRegister hook handles the save/restore itself, no
/// stack slot is touched. Returns the ScavengedInfo entry tracking the spill.
RegScavenger::ScavengedInfo &
RegScavenger::spill(Register Reg, const TargetRegisterClass &RC, int SPAdj,
                    MachineBasicBlock::iterator Before,
                    MachineBasicBlock::iterator &UseMI) {
  // Find an available scavenging slot with size and alignment matching
  // the requirements of the class RC.
  const MachineFunction &MF = *Before->getMF();
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned NeedSize = TRI->getSpillSize(RC);
  Align NeedAlign = TRI->getSpillAlign(RC);

  unsigned SI = Scavenged.size(), Diff = std::numeric_limits<unsigned>::max();
  int FIB = MFI.getObjectIndexBegin(), FIE = MFI.getObjectIndexEnd();
  for (unsigned I = 0; I < Scavenged.size(); ++I) {
    if (Scavenged[I].Reg != 0)
      continue;
    // Verify that this slot is valid for this register.
    int FI = Scavenged[I].FrameIndex;
    if (FI < FIB || FI >= FIE)
      continue;
    unsigned S = MFI.getObjectSize(FI);
    Align A = MFI.getObjectAlign(FI);
    if (NeedSize > S || NeedAlign > A)
      continue;
    // Avoid wasting slots with large size and/or large alignment. Pick one
    // that is the best fit for this register class (in street metric).
    // Picking a larger slot than necessary could happen if a slot for a
    // larger register is reserved before a slot for a smaller one. When
    // trying to spill a smaller register, the large slot would be found
    // first, thus making it impossible to spill the larger register later.
    unsigned D = (S - NeedSize) + (A.value() - NeedAlign.value());
    if (D < Diff) {
      SI = I;
      Diff = D;
    }
  }

  if (SI == Scavenged.size()) {
    // We need to scavenge a register but have no spill slot, the target
    // must know how to do it (if not, we'll assert below).
    Scavenged.push_back(ScavengedInfo(FIE));
  }

  // Avoid infinite regress
  Scavenged[SI].Reg = Reg;

  // If the target knows how to save/restore the register, let it do so;
  // otherwise, use the emergency stack spill slot.
  if (!TRI->saveScavengerRegister(*MBB, Before, UseMI, &RC, Reg)) {
    // Spill the scavenged register before \p Before.
    int FI = Scavenged[SI].FrameIndex;
    if (FI < FIB || FI >= FIE) {
      report_fatal_error(Twine("Error while trying to spill ") +
                         TRI->getName(Reg) + " from class " +
                         TRI->getRegClassName(&RC) +
                         ": Cannot scavenge register without an emergency "
                         "spill slot!");
    }
    TII->storeRegToStackSlot(*MBB, Before, Reg, true, FI, &RC, TRI, Register());
    MachineBasicBlock::iterator II = std::prev(Before);

    // The newly inserted store references the slot via a frame index that
    // must itself be eliminated right away.
    unsigned FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);

    // Restore the scavenged register before its use (or first terminator).
    TII->loadRegFromStackSlot(*MBB, UseMI, Reg, FI, &RC, TRI, Register());
    II = std::prev(UseMI);

    FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);
  }
  return Scavenged[SI];
}
524 
/// Make a register of class \p RC available around instruction \p I, spilling
/// a live register to the emergency slot when necessary (and \p AllowSpill
/// permits it). Returns the scavenged register, or 0 when spilling is
/// disallowed and no register is free.
Register RegScavenger::scavengeRegister(const TargetRegisterClass *RC,
                                        MachineBasicBlock::iterator I,
                                        int SPAdj, bool AllowSpill) {
  MachineInstr &MI = *I;
  const MachineFunction &MF = *MI.getMF();
  // Consider all allocatable registers in the register class initially
  BitVector Candidates = TRI->getAllocatableSet(MF, RC);

  // Exclude all the registers being used by the instruction.
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isReg() && MO.getReg() != 0 && !(MO.isUse() && MO.isUndef()) &&
        !MO.getReg().isVirtual())
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
  }

  // If we have already scavenged some registers, remove them from the
  // candidates. If we end up recursively calling eliminateFrameIndex, we don't
  // want to be clobbering previously scavenged registers or their associated
  // stack slots.
  for (ScavengedInfo &SI : Scavenged) {
    if (SI.Reg) {
      if (isRegUsed(SI.Reg)) {
        LLVM_DEBUG(
          dbgs() << "Removing " << printReg(SI.Reg, TRI) <<
          " from scavenging candidates since it was already scavenged\n");
        for (MCRegAliasIterator AI(SI.Reg, TRI, true); AI.isValid(); ++AI)
          Candidates.reset(*AI);
      }
    }
  }

  // Try to find a register that's unused if there is one, as then we won't
  // have to spill.
  BitVector Available = getRegsAvailable(RC);
  Available &= Candidates;
  if (Available.any())
    Candidates = Available;

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  Register SReg = findSurvivorReg(I, Candidates, 25, UseMI);

  // If we found an unused register there is no reason to spill it.
  if (!isRegUsed(SReg)) {
    LLVM_DEBUG(dbgs() << "Scavenged register: " << printReg(SReg, TRI) << "\n");
    return SReg;
  }

  if (!AllowSpill)
    return 0;

#ifndef NDEBUG
  for (ScavengedInfo &SI : Scavenged) {
    assert(SI.Reg != SReg && "scavenged a previously scavenged register");
  }
#endif

  // Spill the survivor and arrange for it to be reloaded right before UseMI.
  ScavengedInfo &Scavenged = spill(SReg, *RC, SPAdj, I, UseMI);
  Scavenged.Restore = &*std::prev(UseMI);

  LLVM_DEBUG(dbgs() << "Scavenged register (with spill): "
                    << printReg(SReg, TRI) << "\n");

  return SReg;
}
591 
/// Find a free register of class \p RC over the range from the scavenger's
/// current position back to \p To, inserting an emergency spill/reload when
/// every candidate is live. \p RestoreAfter requests that the register also
/// stay usable after the current instruction. Returns the register, or 0 when
/// \p AllowSpill is false and nothing is free.
Register RegScavenger::scavengeRegisterBackwards(const TargetRegisterClass &RC,
                                                 MachineBasicBlock::iterator To,
                                                 bool RestoreAfter, int SPAdj,
                                                 bool AllowSpill) {
  const MachineBasicBlock &MBB = *To->getParent();
  const MachineFunction &MF = *MBB.getParent();

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  ArrayRef<MCPhysReg> AllocationOrder = RC.getRawAllocationOrder(MF);
  std::pair<MCPhysReg, MachineBasicBlock::iterator> P =
      findSurvivorBackwards(*MRI, MBBI, To, LiveUnits, AllocationOrder,
                            RestoreAfter);
  MCPhysReg Reg = P.first;
  MachineBasicBlock::iterator SpillBefore = P.second;
  // Found an available register?
  if (Reg != 0 && SpillBefore == MBB.end()) {
    LLVM_DEBUG(dbgs() << "Scavenged free register: " << printReg(Reg, TRI)
               << '\n');
    return Reg;
  }

  if (!AllowSpill)
    return 0;

  assert(Reg != 0 && "No register left to scavenge!");

  // The reload goes after the instruction the register is needed for; when
  // RestoreAfter is set that is the instruction after the current one.
  MachineBasicBlock::iterator ReloadAfter =
    RestoreAfter ? std::next(MBBI) : MBBI;
  MachineBasicBlock::iterator ReloadBefore = std::next(ReloadAfter);
  if (ReloadBefore != MBB.end())
    LLVM_DEBUG(dbgs() << "Reload before: " << *ReloadBefore << '\n');
  ScavengedInfo &Scavenged = spill(Reg, RC, SPAdj, SpillBefore, ReloadBefore);
  Scavenged.Restore = &*std::prev(SpillBefore);
  LiveUnits.removeReg(Reg);
  LLVM_DEBUG(dbgs() << "Scavenged register with spill: " << printReg(Reg, TRI)
             << " until " << *SpillBefore);
  return Reg;
}
631 
/// Allocate a register for the virtual register \p VReg. The last use of
/// \p VReg is around the current position of the register scavenger \p RS.
/// \p ReserveAfter controls whether the scavenged register needs to be reserved
/// after the current instruction, otherwise it will only be reserved before the
/// current instruction.
static Register scavengeVReg(MachineRegisterInfo &MRI, RegScavenger &RS,
                             Register VReg, bool ReserveAfter) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
#ifndef NDEBUG
  // Verify that all definitions and uses are in the same basic block.
  const MachineBasicBlock *CommonMBB = nullptr;
  // Real definition for the reg, re-definitions are not considered.
  const MachineInstr *RealDef = nullptr;
  for (MachineOperand &MO : MRI.reg_nodbg_operands(VReg)) {
    MachineBasicBlock *MBB = MO.getParent()->getParent();
    if (CommonMBB == nullptr)
      CommonMBB = MBB;
    assert(MBB == CommonMBB && "All defs+uses must be in the same basic block");
    if (MO.isDef()) {
      const MachineInstr &MI = *MO.getParent();
      if (!MI.readsRegister(VReg, &TRI)) {
        assert((!RealDef || RealDef == &MI) &&
               "Can have at most one definition which is not a redefinition");
        RealDef = &MI;
      }
    }
  }
  assert(RealDef != nullptr && "Must have at least 1 Def");
#endif

  // We should only have one definition of the register. However to accommodate
  // the requirements of two address code we also allow definitions in
  // subsequent instructions provided they also read the register. That way
  // we get a single contiguous lifetime.
  //
  // Definitions in MRI.def_begin() are unordered, search for the first.
  MachineRegisterInfo::def_iterator FirstDef = llvm::find_if(
      MRI.def_operands(VReg), [VReg, &TRI](const MachineOperand &MO) {
        return !MO.getParent()->readsRegister(VReg, &TRI);
      });
  assert(FirstDef != MRI.def_end() &&
         "Must have one definition that does not redefine vreg");
  MachineInstr &DefMI = *FirstDef->getParent();

  // The register scavenger will report a free register inserting an emergency
  // spill/reload if necessary.
  int SPAdj = 0;
  const TargetRegisterClass &RC = *MRI.getRegClass(VReg);
  Register SReg = RS.scavengeRegisterBackwards(RC, DefMI.getIterator(),
                                               ReserveAfter, SPAdj);
  // Rewrite every occurrence of the vreg to the scavenged physical register.
  MRI.replaceRegWith(VReg, SReg);
  ++NumScavengedRegs;
  return SReg;
}
686 
/// Allocate (scavenge) vregs inside a single basic block.
/// Returns true if the target spill callback created new vregs and a 2nd pass
/// is necessary.
static bool scavengeFrameVirtualRegsInBlock(MachineRegisterInfo &MRI,
                                            RegScavenger &RS,
                                            MachineBasicBlock &MBB) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  RS.enterBasicBlockEnd(MBB);

  // Vregs numbered at or above this were created by the target spill
  // callbacks during this pass; they are left for a later scavenging round.
  unsigned InitialNumVirtRegs = MRI.getNumVirtRegs();
  bool NextInstructionReadsVReg = false;
  for (MachineBasicBlock::iterator I = MBB.end(); I != MBB.begin(); ) {
    --I;
    // Move RegScavenger to the position between *I and *std::next(I).
    RS.backward(I);

    // Look for unassigned vregs in the uses of *std::next(I).
    if (NextInstructionReadsVReg) {
      MachineBasicBlock::iterator N = std::next(I);
      const MachineInstr &NMI = *N;
      for (const MachineOperand &MO : NMI.operands()) {
        if (!MO.isReg())
          continue;
        Register Reg = MO.getReg();
        // We only care about virtual registers and ignore virtual registers
        // created by the target callbacks in the process (those will be handled
        // in a scavenging round).
        if (!Reg.isVirtual() ||
            Register::virtReg2Index(Reg) >= InitialNumVirtRegs)
          continue;
        if (!MO.readsReg())
          continue;

        Register SReg = scavengeVReg(MRI, RS, Reg, true);
        N->addRegisterKilled(SReg, &TRI, false);
        RS.setRegUsed(SReg);
      }
    }

    // Look for unassigned vregs in the defs of *I.
    NextInstructionReadsVReg = false;
    const MachineInstr &MI = *I;
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg())
        continue;
      Register Reg = MO.getReg();
      // Only vregs, no newly created vregs (see above).
      if (!Reg.isVirtual() ||
          Register::virtReg2Index(Reg) >= InitialNumVirtRegs)
        continue;
      // We have to look at all operands anyway so we can precalculate here
      // whether there is a reading operand. This allows use to skip the use
      // step in the next iteration if there was none.
      assert(!MO.isInternalRead() && "Cannot assign inside bundles");
      assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
      if (MO.readsReg()) {
        NextInstructionReadsVReg = true;
      }
      if (MO.isDef()) {
        Register SReg = scavengeVReg(MRI, RS, Reg, false);
        I->addRegisterDead(SReg, &TRI, false);
      }
    }
  }
#ifndef NDEBUG
  // The first instruction may not read a vreg: there is no earlier point at
  // which a scavenged register could have been reloaded.
  for (const MachineOperand &MO : MBB.front().operands()) {
    if (!MO.isReg() || !MO.getReg().isVirtual())
      continue;
    assert(!MO.isInternalRead() && "Cannot assign inside bundles");
    assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
    assert(!MO.readsReg() && "Vreg use in first instruction not allowed");
  }
#endif

  return MRI.getNumVirtRegs() != InitialNumVirtRegs;
}
763 
/// Replace all remaining virtual registers in \p MF with scavenged physical
/// registers, block by block, then mark the function as having no vregs.
void llvm::scavengeFrameVirtualRegs(MachineFunction &MF, RegScavenger &RS) {
  // FIXME: Iterating over the instruction stream is unnecessary. We can simply
  // iterate over the vreg use list, which at this point only contains machine
  // operands for which eliminateFrameIndex need a new scratch reg.
  MachineRegisterInfo &MRI = MF.getRegInfo();
  // Shortcut.
  if (MRI.getNumVirtRegs() == 0) {
    MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
    return;
  }

  // Run through the instructions and find any virtual registers.
  for (MachineBasicBlock &MBB : MF) {
    if (MBB.empty())
      continue;

    bool Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
    if (Again) {
      LLVM_DEBUG(dbgs() << "Warning: Required two scavenging passes for block "
                        << MBB.getName() << '\n');
      Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
      // The target required a 2nd run (because it created new vregs while
      // spilling). Refuse to do another pass to keep compiletime in check.
      if (Again)
        report_fatal_error("Incomplete scavenging after 2nd pass");
    }
  }

  MRI.clearVirtRegs();
  MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
}
795 
796 namespace {
797 
/// This class runs register scavenging independently of the
/// PrologEpilogInserter. It is used for testing.
class ScavengerTest : public MachineFunctionPass {
public:
  static char ID;

  ScavengerTest() : MachineFunctionPass(ID) {}

  // Scavenge all frame virtual registers in \p MF after setting up emergency
  // spill slots via the target frame lowering hooks.
  bool runOnMachineFunction(MachineFunction &MF) override {
    const TargetSubtargetInfo &STI = MF.getSubtarget();
    const TargetFrameLowering &TFL = *STI.getFrameLowering();

    RegScavenger RS;
    // Let's hope that calling those outside of PrologEpilogueInserter works
    // well enough to initialize the scavenger with some emergency spillslots
    // for the target.
    BitVector SavedRegs;
    TFL.determineCalleeSaves(MF, SavedRegs, &RS);
    TFL.processFunctionBeforeFrameFinalized(MF, &RS);

    // Let's scavenge the current function
    scavengeFrameVirtualRegs(MF, RS);
    return true;
  }
};
823 
824 } // end anonymous namespace
825 
// Pass identification: the address of ID serves as the unique pass ID.
char ScavengerTest::ID;

// Register the test pass under the "scavenger-test" command-line name.
INITIALIZE_PASS(ScavengerTest, "scavenger-test",
                "Scavenge virtual registers inside basic blocks", false, false)
830