//===------ VirtualInstruction.cpp ------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Tools for determining which instructions are within a statement and the
// nature of their operands.
//
//===----------------------------------------------------------------------===//

#include "polly/Support/VirtualInstruction.h"

using namespace polly;
using namespace llvm;

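// Determine the nature of the llvm::Use @p U within the SCoP: which statement
// uses the value and whether the operand is a constant, basic block,
// synthesizable, hoisted, read-only, intra-statement or inter-statement use.
// A minimal usage sketch (the names S, LI and UI are illustrative only, not
// defined in this file):
//
//   for (Use &Op : UI->operands()) {
//     VirtualUse VUse = VirtualUse::create(&S, Op, &LI, /*Virtual=*/true);
//     if (VUse.getKind() == VirtualUse::Inter) {
//       // The operand's value is written by another statement.
//     }
//   }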
VirtualUse VirtualUse::create(Scop *S, const Use &U, LoopInfo *LI,
                              bool Virtual) {
  auto *UserBB = getUseBlock(U);
  Loop *UserScope = LI->getLoopFor(UserBB);
  Instruction *UI = dyn_cast<Instruction>(U.getUser());
  ScopStmt *UserStmt = S->getStmtFor(UI);

  // Uses by PHI nodes always read values written by other statements, except
  // when the PHI is within a region statement.
  if (PHINode *PHI = dyn_cast<PHINode>(UI)) {
    // Handle PHIs in the exit block.
    if (S->getRegion().getExit() == PHI->getParent())
      return VirtualUse(UserStmt, U.get(), Inter, nullptr, nullptr);

    if (UserStmt->getEntryBlock() != PHI->getParent())
      return VirtualUse(UserStmt, U.get(), Intra, nullptr, nullptr);

    // The MemoryAccess is expected to be set if @p Virtual is true.
    MemoryAccess *IncomingMA = nullptr;
    if (Virtual) {
      if (const ScopArrayInfo *SAI =
              S->getScopArrayInfoOrNull(PHI, MemoryKind::PHI)) {
        IncomingMA = S->getPHIRead(SAI);
        assert(IncomingMA->getStatement() == UserStmt);
      }
    }

    return VirtualUse(UserStmt, U.get(), Inter, nullptr, IncomingMA);
  }

  return create(S, UserStmt, UserScope, U.get(), Virtual);
}

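// Classify the use of @p Val from within @p UserStmt (with surrounding loop
// @p UserScope). This overload is also used for values without a direct
// llvm::Use, e.g. the incoming values of PHI writes; a pruned user is
// represented by UserStmt == nullptr.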
VirtualUse VirtualUse::create(Scop *S, ScopStmt *UserStmt, Loop *UserScope,
                              Value *Val, bool Virtual) {
  assert(!isa<StoreInst>(Val) && "a StoreInst cannot be used");

  if (isa<BasicBlock>(Val))
    return VirtualUse(UserStmt, Val, Block, nullptr, nullptr);

  if (isa<llvm::Constant>(Val) || isa<MetadataAsValue>(Val))
    return VirtualUse(UserStmt, Val, Constant, nullptr, nullptr);

  // Is the value synthesizable? If the user has been pruned
  // (UserStmt == nullptr), it is either not used anywhere or is synthesizable.
  // We assume it is synthesizable, which in practice has the same effect.
  auto *SE = S->getSE();
  if (SE->isSCEVable(Val->getType())) {
    auto *ScevExpr = SE->getSCEVAtScope(Val, UserScope);
    if (!UserStmt || canSynthesize(Val, *UserStmt->getParent(), SE, UserScope))
      return VirtualUse(UserStmt, Val, Synthesizable, ScevExpr, nullptr);
  }

  // FIXME: Inconsistency between lookupInvariantEquivClass and
  // getRequiredInvariantLoads. Querying one of them should be enough.
  auto &RIL = S->getRequiredInvariantLoads();
  if (S->lookupInvariantEquivClass(Val) || RIL.count(dyn_cast<LoadInst>(Val)))
    return VirtualUse(UserStmt, Val, Hoisted, nullptr, nullptr);

  // ReadOnly uses may have MemoryAccesses that we want to associate with the
  // use. This is why we already look for a MemoryAccess here.
  MemoryAccess *InputMA = nullptr;
  if (UserStmt && Virtual)
    InputMA = UserStmt->lookupValueReadOf(Val);

  // Uses are read-only if they have been defined before the SCoP, i.e., they
  // cannot be written to inside the SCoP. Arguments are defined before any
  // instructions, hence also before the SCoP. If the user has been pruned
  // (UserStmt == nullptr) and is not SCEVable, assume it is read-only as it is
  // neither an intra- nor an inter-use.
  if (!UserStmt || isa<Argument>(Val))
    return VirtualUse(UserStmt, Val, ReadOnly, nullptr, InputMA);

  auto *Inst = cast<Instruction>(Val);
  if (!S->contains(Inst))
    return VirtualUse(UserStmt, Val, ReadOnly, nullptr, InputMA);

  // A use is inter-statement if either it is defined in another statement, or
  // there is a MemoryAccess that reads its value which has been written by
  // another statement.
  if (InputMA || (!Virtual && UserStmt != S->getStmtFor(Inst)))
    return VirtualUse(UserStmt, Val, Inter, nullptr, InputMA);

  return VirtualUse(UserStmt, Val, Intra, nullptr, nullptr);
}

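// Print a human-readable description of this use. If @p Reproducible is set,
// avoid printing data that may change between runs, such as pointer values.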
void VirtualUse::print(raw_ostream &OS, bool Reproducible) const {
  OS << "User: [" << User->getBaseName() << "] ";
  switch (Kind) {
  case VirtualUse::Constant:
    OS << "Constant Op:";
    break;
  case VirtualUse::Block:
    OS << "BasicBlock Op:";
    break;
  case VirtualUse::Synthesizable:
    OS << "Synthesizable Op:";
    break;
  case VirtualUse::Hoisted:
    OS << "Hoisted load Op:";
    break;
  case VirtualUse::ReadOnly:
    OS << "Read-Only Op:";
    break;
  case VirtualUse::Intra:
    OS << "Intra Op:";
    break;
  case VirtualUse::Inter:
    OS << "Inter Op:";
    break;
  }

  if (Val) {
    OS << ' ';
    if (Reproducible)
      OS << '"' << Val->getName() << '"';
    else
      Val->print(OS, true);
  }
  if (ScevExpr) {
    OS << ' ';
    ScevExpr->print(OS);
  }
  if (InputMA && !Reproducible)
    OS << ' ' << InputMA;
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void VirtualUse::dump() const {
  print(errs(), false);
  errs() << '\n';
}
#endif

void VirtualInstruction::print(raw_ostream &OS, bool Reproducible) const {
  if (!Stmt || !Inst) {
    OS << "[null VirtualInstruction]";
    return;
  }

  OS << "[" << Stmt->getBaseName() << "]";
  Inst->print(OS, !Reproducible);
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void VirtualInstruction::dump() const {
  print(errs(), false);
  errs() << '\n';
}
#endif

/// Return true if @p Inst cannot be removed, even if it is nowhere referenced.
static bool isRoot(const Instruction *Inst) {
  // A store is handled by its MemoryAccess. A load must be reached from the
  // roots in order to be marked as used.
  if (isa<LoadInst>(Inst) || isa<StoreInst>(Inst))
    return false;

  // Terminator instructions (in region statements) are required for control
  // flow.
  if (Inst->isTerminator())
    return true;

  // Writes to memory must be honored.
  if (Inst->mayWriteToMemory())
    return true;

  return false;
}

/// Return true for a MemoryAccess that cannot be removed because it represents
/// an llvm::Value that is used after the SCoP.
static bool isEscaping(MemoryAccess *MA) {
  assert(MA->isOriginalValueKind());
  Scop *S = MA->getStatement()->getParent();
  return S->isEscaping(cast<Instruction>(MA->getAccessValue()));
}

/// Add non-removable virtual instructions in @p Stmt to @p RootInsts.
static void
addInstructionRoots(ScopStmt *Stmt,
                    SmallVectorImpl<VirtualInstruction> &RootInsts) {
  if (!Stmt->isBlockStmt()) {
    // In region statements the terminator instruction and all instructions
    // that are not in the entry block cannot be eliminated and consequently
    // must be roots.
    RootInsts.emplace_back(Stmt,
                           Stmt->getRegion()->getEntry()->getTerminator());
    for (BasicBlock *BB : Stmt->getRegion()->blocks())
      if (Stmt->getRegion()->getEntry() != BB)
        for (Instruction &Inst : *BB)
          RootInsts.emplace_back(Stmt, &Inst);
    return;
  }

  for (Instruction *Inst : Stmt->getInstructions())
    if (isRoot(Inst))
      RootInsts.emplace_back(Stmt, Inst);
}

/// Add non-removable memory accesses in @p Stmt to @p RootAccs.
///
/// @param Local If true, all writes are assumed to escape. markAndSweep
///              algorithms can use this to restrict themselves to a single
///              ScopStmt without the risk of removing definitions required by
///              other statements.
///              If false, only writes for SCoP-escaping values are roots. This
///              is global mode, where such writes must be reached from their
///              uses in order to be marked.
static void addAccessRoots(ScopStmt *Stmt,
                           SmallVectorImpl<MemoryAccess *> &RootAccs,
                           bool Local) {
  for (auto *MA : *Stmt) {
    if (!MA->isWrite())
      continue;

    // Writes to arrays are always used.
    if (MA->isLatestArrayKind())
      RootAccs.push_back(MA);

    // Value writes are roots if they are escaping.
    else if (MA->isLatestValueKind()) {
      if (Local || isEscaping(MA))
        RootAccs.push_back(MA);
    }

    // Exit PHIs are, by definition, escaping.
    else if (MA->isLatestExitPHIKind())
      RootAccs.push_back(MA);

    // PHI writes are only roots if we are not also visiting the statement
    // containing the PHINode (i.e., in local mode).
    else if (Local && MA->isLatestPHIKind())
      RootAccs.push_back(MA);
  }
}

/// Determine all instruction and access roots.
static void addRoots(ScopStmt *Stmt,
                     SmallVectorImpl<VirtualInstruction> &RootInsts,
                     SmallVectorImpl<MemoryAccess *> &RootAccs, bool Local) {
  addInstructionRoots(Stmt, RootInsts);
  addAccessRoots(Stmt, RootAccs, Local);
}

/// Mark accesses and instructions as used if they are reachable from a root,
/// walking the operand trees.
///
/// @param S              The SCoP to walk.
/// @param LI             The LoopInfo Analysis.
/// @param RootInsts      List of root instructions.
/// @param RootAccs       List of root accesses.
/// @param UsedInsts[out] Receives all reachable instructions, including the
///                       roots.
/// @param UsedAccs[out]  Receives all reachable accesses, including the roots.
/// @param OnlyLocal      If non-nullptr, restricts walking to a single
///                       statement.
static void walkReachable(Scop *S, LoopInfo *LI,
                          ArrayRef<VirtualInstruction> RootInsts,
                          ArrayRef<MemoryAccess *> RootAccs,
                          DenseSet<VirtualInstruction> &UsedInsts,
                          DenseSet<MemoryAccess *> &UsedAccs,
                          ScopStmt *OnlyLocal = nullptr) {
  UsedInsts.clear();
  UsedAccs.clear();

  SmallVector<VirtualInstruction, 32> WorklistInsts;
  SmallVector<MemoryAccess *, 32> WorklistAccs;

  WorklistInsts.append(RootInsts.begin(), RootInsts.end());
  WorklistAccs.append(RootAccs.begin(), RootAccs.end());

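  // Put the source of a use onto the matching worklist: the defining
  // instruction for intra-statement uses, the reading MemoryAccess for
  // inter-statement and modeled read-only uses; all other kinds need no
  // further processing.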
  auto AddToWorklist = [&](VirtualUse VUse) {
    switch (VUse.getKind()) {
    case VirtualUse::Block:
    case VirtualUse::Constant:
    case VirtualUse::Synthesizable:
    case VirtualUse::Hoisted:
      break;
    case VirtualUse::ReadOnly:
      // Read-only scalars only have MemoryAccesses if ModelReadOnlyScalars is
      // enabled.
      if (!VUse.getMemoryAccess())
        break;
      LLVM_FALLTHROUGH;
    case VirtualUse::Inter:
      assert(VUse.getMemoryAccess());
      WorklistAccs.push_back(VUse.getMemoryAccess());
      break;
    case VirtualUse::Intra:
      WorklistInsts.emplace_back(VUse.getUser(),
                                 cast<Instruction>(VUse.getValue()));
      break;
    }
  };

  while (true) {
    // We have two worklists to process: the instruction worklist is processed
    // only when the MemoryAccess worklist is empty.

    while (!WorklistAccs.empty()) {
      auto *Acc = WorklistAccs.pop_back_val();

      ScopStmt *Stmt = Acc->getStatement();
      if (OnlyLocal && Stmt != OnlyLocal)
        continue;

      auto Inserted = UsedAccs.insert(Acc);
      if (!Inserted.second)
        continue;

      if (Acc->isRead()) {
        const ScopArrayInfo *SAI = Acc->getScopArrayInfo();

        if (Acc->isLatestValueKind()) {
          MemoryAccess *DefAcc = S->getValueDef(SAI);

          // Accesses to read-only values do not have a definition.
          if (DefAcc)
            WorklistAccs.push_back(DefAcc);
        }

        if (Acc->isLatestAnyPHIKind()) {
          auto IncomingMAs = S->getPHIIncomings(SAI);
          WorklistAccs.append(IncomingMAs.begin(), IncomingMAs.end());
        }
      }

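      // For a write, the value being written (and, for PHI writes, all
      // incoming values) is used; an array write additionally uses its
      // access instruction.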
      if (Acc->isWrite()) {
        if (Acc->isOriginalValueKind() ||
            (Acc->isOriginalArrayKind() && Acc->getAccessValue())) {
          Loop *Scope = Stmt->getSurroundingLoop();
          VirtualUse VUse =
              VirtualUse::create(S, Stmt, Scope, Acc->getAccessValue(), true);
          AddToWorklist(VUse);
        }

        if (Acc->isOriginalAnyPHIKind()) {
          for (auto Incoming : Acc->getIncoming()) {
            VirtualUse VUse = VirtualUse::create(
                S, Stmt, LI->getLoopFor(Incoming.first), Incoming.second, true);
            AddToWorklist(VUse);
          }
        }

        if (Acc->isOriginalArrayKind())
          WorklistInsts.emplace_back(Stmt, Acc->getAccessInstruction());
      }
    }

    // If both worklists are empty, stop walking.
    if (WorklistInsts.empty())
      break;

    VirtualInstruction VInst = WorklistInsts.pop_back_val();
    ScopStmt *Stmt = VInst.getStmt();
    Instruction *Inst = VInst.getInstruction();

    // Do not process statements other than the local one.
    if (OnlyLocal && Stmt != OnlyLocal)
      continue;

    auto InsertResult = UsedInsts.insert(VInst);
    if (!InsertResult.second)
      continue;

    // Add all operands to the worklists.
    PHINode *PHI = dyn_cast<PHINode>(Inst);
    if (PHI && PHI->getParent() == Stmt->getEntryBlock()) {
      if (MemoryAccess *PHIRead = Stmt->lookupPHIReadOf(PHI))
        WorklistAccs.push_back(PHIRead);
    } else {
      for (VirtualUse VUse : VInst.operands())
        AddToWorklist(VUse);
    }

    // If there is an array access, also add its MemoryAccesses to the
    // worklist.
    const MemoryAccessList *Accs = Stmt->lookupArrayAccessesFor(Inst);
    if (!Accs)
      continue;

    for (MemoryAccess *Acc : *Accs)
      WorklistAccs.push_back(Acc);
  }
}

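// Entry point of the mark-and-sweep reachability analysis: collect the roots
// of either a single statement (OnlyLocal) or all statements of the SCoP, then
// walk the operand trees to find everything that is transitively used.
// A minimal usage sketch (variable names are illustrative only):
//
//   DenseSet<VirtualInstruction> UsedInsts;
//   DenseSet<MemoryAccess *> UsedAccs;
//   markReachable(&S, &LI, UsedInsts, UsedAccs);
//   bool IsDead = !UsedAccs.count(MA); // Unreachable accesses can be removed.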
void polly::markReachable(Scop *S, LoopInfo *LI,
                          DenseSet<VirtualInstruction> &UsedInsts,
                          DenseSet<MemoryAccess *> &UsedAccs,
                          ScopStmt *OnlyLocal) {
  SmallVector<VirtualInstruction, 32> RootInsts;
  SmallVector<MemoryAccess *, 32> RootAccs;

  if (OnlyLocal) {
    addRoots(OnlyLocal, RootInsts, RootAccs, true);
  } else {
    for (auto &Stmt : *S)
      addRoots(&Stmt, RootInsts, RootAccs, false);
  }

  walkReachable(S, LI, RootInsts, RootAccs, UsedInsts, UsedAccs, OnlyLocal);
}