//===--- LoopUnrolling.cpp - Unroll loops -----------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// This file contains functions which are used to decide whether a loop is
/// worth unrolling. Moreover, these functions manage the stack of loops which
/// is tracked by the ProgramState.
///
//===----------------------------------------------------------------------===//

#include "clang/ASTMatchers/ASTMatchers.h"
#include "clang/ASTMatchers/ASTMatchFinder.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/LoopUnrolling.h"
#include <optional>

using namespace clang;
using namespace ento;
using namespace clang::ast_matchers;

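// Upper bound on the number of simulated steps allowed for a completely
// unrolled loop. For nested unrolled loops the step counts multiply (see
// updateLoopStack below), so their product must also stay within this bound.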
static const int MAXIMUM_STEP_UNROLLED = 128;

namespace {
struct LoopState {
private:
  enum Kind { Normal, Unrolled } K;
  const Stmt *LoopStmt;
  const LocationContext *LCtx;
  unsigned maxStep;
  LoopState(Kind InK, const Stmt *S, const LocationContext *L, unsigned N)
      : K(InK), LoopStmt(S), LCtx(L), maxStep(N) {}

public:
  static LoopState getNormal(const Stmt *S, const LocationContext *L,
                             unsigned N) {
    return LoopState(Normal, S, L, N);
  }
  static LoopState getUnrolled(const Stmt *S, const LocationContext *L,
                               unsigned N) {
    return LoopState(Unrolled, S, L, N);
  }
  bool isUnrolled() const { return K == Unrolled; }
  unsigned getMaxStep() const { return maxStep; }
  const Stmt *getLoopStmt() const { return LoopStmt; }
  const LocationContext *getLocationContext() const { return LCtx; }
  bool operator==(const LoopState &X) const {
    return K == X.K && LoopStmt == X.LoopStmt;
  }
  void Profile(llvm::FoldingSetNodeID &ID) const {
    ID.AddInteger(K);
    ID.AddPointer(LoopStmt);
    ID.AddPointer(LCtx);
    ID.AddInteger(maxStep);
  }
};
} // namespace

// The tracked stack of loops. The stack indicates which loops the currently
// simulated element is contained in. The loops are marked depending on whether
// we decided to unroll them.
// TODO: The loop stack should not need to be in the program state since it is
// lexical in nature. Instead, the stack of loops should be tracked in the
// LocationContext.
REGISTER_LIST_WITH_PROGRAMSTATE(LoopStack, LoopState)

namespace clang {
namespace ento {

static bool isLoopStmt(const Stmt *S) {
  return isa_and_nonnull<ForStmt, WhileStmt, DoStmt>(S);
}

ProgramStateRef processLoopEnd(const Stmt *LoopStmt, ProgramStateRef State) {
  auto LS = State->get<LoopStack>();
  if (!LS.isEmpty() && LS.getHead().getLoopStmt() == LoopStmt)
    State = State->set<LoopStack>(LS.getTail());
  return State;
}

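// Matches a loop condition of the form '<counter> <op> <literal>' (in either
// operand order), where <op> is one of <, >, <=, >= or !=, and binds the
// counter variable, its reference and the literal bound. For illustration, in
// a hypothetical loop such as
//   for (int i = 0; i < 10; ++i) { ... }
// the condition 'i < 10' matches, with 'i' bound to BindName/RefName and '10'
// bound to "boundNum".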
static internal::Matcher<Stmt> simpleCondition(StringRef BindName,
                                               StringRef RefName) {
  return binaryOperator(
             anyOf(hasOperatorName("<"), hasOperatorName(">"),
                   hasOperatorName("<="), hasOperatorName(">="),
                   hasOperatorName("!=")),
             hasEitherOperand(ignoringParenImpCasts(
                 declRefExpr(to(varDecl(hasType(isInteger())).bind(BindName)))
                     .bind(RefName))),
             hasEitherOperand(
                 ignoringParenImpCasts(integerLiteral().bind("boundNum"))))
      .bind("conditionOperator");
}

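// Matches statements that may change the given variable: a '++' or '--'
// applied to it, or any assignment with it on the left-hand side. For
// illustration, with a counter 'i' this matches hypothetical statements such
// as '++i', 'i--', 'i = n' or 'i += 2'.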
static internal::Matcher<Stmt>
changeIntBoundNode(internal::Matcher<Decl> VarNodeMatcher) {
  return anyOf(
      unaryOperator(anyOf(hasOperatorName("--"), hasOperatorName("++")),
                    hasUnaryOperand(ignoringParenImpCasts(
                        declRefExpr(to(varDecl(VarNodeMatcher)))))),
      binaryOperator(isAssignmentOperator(),
                     hasLHS(ignoringParenImpCasts(
                         declRefExpr(to(varDecl(VarNodeMatcher)))))));
}

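// Matches calls that pass the given variable to a non-const reference
// parameter, through which the callee could modify it. For illustration, a
// hypothetical 'void step(int &v);' called as 'step(i)' matches for the
// counter 'i'.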
static internal::Matcher<Stmt>
callByRef(internal::Matcher<Decl> VarNodeMatcher) {
  return callExpr(forEachArgumentWithParam(
      declRefExpr(to(varDecl(VarNodeMatcher))),
      parmVarDecl(hasType(references(qualType(unless(isConstQualified())))))));
}

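// Matches declarations that bind a reference to the given variable, e.g. the
// hypothetical 'int &r = i;' or 'int &r{i};' for the counter 'i'; later writes
// through such a reference would not be seen by the matchers above.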
static internal::Matcher<Stmt>
assignedToRef(internal::Matcher<Decl> VarNodeMatcher) {
  return declStmt(hasDescendant(varDecl(
      allOf(hasType(referenceType()),
            hasInitializer(anyOf(
                initListExpr(has(declRefExpr(to(varDecl(VarNodeMatcher))))),
                declRefExpr(to(varDecl(VarNodeMatcher)))))))));
}

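// Matches expressions that take the address of the given variable, e.g. the
// hypothetical 'int *p = &i;' for the counter 'i'.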
static internal::Matcher<Stmt>
getAddrTo(internal::Matcher<Decl> VarNodeMatcher) {
  return unaryOperator(
      hasOperatorName("&"),
      hasUnaryOperand(declRefExpr(hasDeclaration(VarNodeMatcher))));
}

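// Matches loop bodies containing anything that could break the simple counting
// pattern: a goto, switch or return statement, or any of the counter-mutating
// or counter-escaping constructs matched above. For illustration, a
// hypothetical body such as
//   for (int i = 0; i < 10; ++i) { if (flag) return; }
// is considered suspicious, so the loop is not unrolled.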
static internal::Matcher<Stmt> hasSuspiciousStmt(StringRef NodeName) {
  return hasDescendant(stmt(
      anyOf(gotoStmt(), switchStmt(), returnStmt(),
            // Escaping and unknown mutation of the loop counter are handled
            // by excluding assignments to, address-of operators on, and
            // pass-by-ref function calls involving the loop counter from
            // the body.
            changeIntBoundNode(equalsBoundNode(std::string(NodeName))),
            callByRef(equalsBoundNode(std::string(NodeName))),
            getAddrTo(equalsBoundNode(std::string(NodeName))),
            assignedToRef(equalsBoundNode(std::string(NodeName))))));
}

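// Matches the canonical counting 'for' loops that are candidates for complete
// unrolling: the counter is initialized to an integer literal, compared
// against an integer literal, changed only by a simple increment or decrement,
// and the body contains no suspicious statement. For illustration, a
// hypothetical
//   for (int i = 0; i < 10; ++i) { sum += data[i]; }
// matches, binding the loop, the counter and the init and bound literals.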
static internal::Matcher<Stmt> forLoopMatcher() {
  return forStmt(
             hasCondition(simpleCondition("initVarName", "initVarRef")),
             // Initialization should match the form: 'int i = 6' or 'i = 42'.
             hasLoopInit(
                 anyOf(declStmt(hasSingleDecl(
                           varDecl(allOf(hasInitializer(ignoringParenImpCasts(
                                             integerLiteral().bind("initNum"))),
                                         equalsBoundNode("initVarName"))))),
                       binaryOperator(hasLHS(declRefExpr(to(varDecl(
                                          equalsBoundNode("initVarName"))))),
                                      hasRHS(ignoringParenImpCasts(
                                          integerLiteral().bind("initNum")))))),
             // The increment should be a simple increment or decrement
             // operator applied to the counter.
             hasIncrement(unaryOperator(
                 anyOf(hasOperatorName("++"), hasOperatorName("--")),
                 hasUnaryOperand(declRefExpr(
                     to(varDecl(allOf(equalsBoundNode("initVarName"),
                                      hasType(isInteger())))))))),
             unless(hasBody(hasSuspiciousStmt("initVarName"))))
      .bind("forLoop");
}

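// Returns true if the variable referenced by DR is captured by reference by
// the enclosing lambda. For illustration, in a hypothetical
//   int i = 0; [&i] { for (i = 0; i < 10; ++i) { ... } }();
// the counter 'i' is captured by reference inside the lambda's call operator.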
static bool isCapturedByReference(ExplodedNode *N, const DeclRefExpr *DR) {

  // Get the lambda CXXRecordDecl.
  assert(DR->refersToEnclosingVariableOrCapture());
  const LocationContext *LocCtxt = N->getLocationContext();
  const Decl *D = LocCtxt->getDecl();
  const auto *MD = cast<CXXMethodDecl>(D);
  assert(MD && MD->getParent()->isLambda() &&
         "Captured variable should only be seen while evaluating a lambda");
  const CXXRecordDecl *LambdaCXXRec = MD->getParent();

  // Look up the fields of the lambda.
  llvm::DenseMap<const ValueDecl *, FieldDecl *> LambdaCaptureFields;
  FieldDecl *LambdaThisCaptureField;
  LambdaCXXRec->getCaptureFields(LambdaCaptureFields, LambdaThisCaptureField);

  // Check if the counter is captured by reference.
  const VarDecl *VD = cast<VarDecl>(DR->getDecl()->getCanonicalDecl());
  assert(VD);
  const FieldDecl *FD = LambdaCaptureFields[VD];
  assert(FD && "Captured variable without a corresponding field");
  return FD->getType()->isReferenceType();
}

// A loop counter is considered escaped if:
// case 1: It is a global variable.
// case 2: It is a reference parameter or a reference capture.
// case 3: It is assigned to a non-const reference variable or parameter.
// case 4: It has its address taken.
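// For illustration, hypothetical examples of each case for a counter 'i':
//   case 1: 'i' is declared at global scope.
//   case 2: 'void f(int &i) { for (i = 0; i < 10; ++i) {} }'
//   case 3: 'int &r = i;' appears before the loop.
//   case 4: 'int *p = &i;' appears before the loop.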
static bool isPossiblyEscaped(ExplodedNode *N, const DeclRefExpr *DR) {
  const VarDecl *VD = cast<VarDecl>(DR->getDecl()->getCanonicalDecl());
  assert(VD);
  // Case 1:
  if (VD->hasGlobalStorage())
    return true;

  const bool IsRefParamOrCapture =
      isa<ParmVarDecl>(VD) || DR->refersToEnclosingVariableOrCapture();
  // Case 2:
  if ((DR->refersToEnclosingVariableOrCapture() &&
       isCapturedByReference(N, DR)) ||
      (IsRefParamOrCapture && VD->getType()->isReferenceType()))
    return true;

  while (!N->pred_empty()) {
    // FIXME: getStmtForDiagnostics() does nasty things in order to provide
    // a valid statement for body farms; do we need this behavior here?
    const Stmt *S = N->getStmtForDiagnostics();
    if (!S) {
      N = N->getFirstPred();
      continue;
    }

    if (const DeclStmt *DS = dyn_cast<DeclStmt>(S)) {
      for (const Decl *D : DS->decls()) {
        // Once we reach the declaration of VD, we can return.
        if (D->getCanonicalDecl() == VD)
          return false;
      }
    }
    // Check for pass-by-ref function calls and address-of operators on VD,
    // and for references initialized with VD.
    ASTContext &ASTCtx =
        N->getLocationContext()->getAnalysisDeclContext()->getASTContext();
    // Cases 3 and 4:
    auto Match =
        match(stmt(anyOf(callByRef(equalsNode(VD)), getAddrTo(equalsNode(VD)),
                         assignedToRef(equalsNode(VD)))),
              *S, ASTCtx);
    if (!Match.empty())
      return true;

    N = N->getFirstPred();
  }

  // Reference parameters and captures are declared outside this function, so
  // their declaration will not be found here.
  if (IsRefParamOrCapture)
    return false;

  llvm_unreachable("Reached root without finding the declaration of VD");
}

bool shouldCompletelyUnroll(const Stmt *LoopStmt, ASTContext &ASTCtx,
                            ExplodedNode *Pred, unsigned &maxStep) {

  if (!isLoopStmt(LoopStmt))
    return false;

  // TODO: Match the cases where the bound is not a concrete literal but an
  // integer with known value.
  auto Matches = match(forLoopMatcher(), *LoopStmt, ASTCtx);
  if (Matches.empty())
    return false;

  const auto *CounterVarRef = Matches[0].getNodeAs<DeclRefExpr>("initVarRef");
  llvm::APInt BoundNum =
      Matches[0].getNodeAs<IntegerLiteral>("boundNum")->getValue();
  llvm::APInt InitNum =
      Matches[0].getNodeAs<IntegerLiteral>("initNum")->getValue();
  auto CondOp = Matches[0].getNodeAs<BinaryOperator>("conditionOperator");
  if (InitNum.getBitWidth() != BoundNum.getBitWidth()) {
    InitNum = InitNum.zext(BoundNum.getBitWidth());
    BoundNum = BoundNum.zext(InitNum.getBitWidth());
  }

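  // The number of simulated steps is the distance between the initial value
  // and the bound; inclusive comparisons add one extra step. For illustration,
  // a hypothetical 'for (int i = 0; i <= 10; ++i)' yields maxStep == 11, while
  // 'i < 10' with the same init yields maxStep == 10.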
  if (CondOp->getOpcode() == BO_GE || CondOp->getOpcode() == BO_LE)
    maxStep = (BoundNum - InitNum + 1).abs().getZExtValue();
  else
    maxStep = (BoundNum - InitNum).abs().getZExtValue();

  // Check that the loop counter has not escaped before reaching the loop.
  return !isPossiblyEscaped(Pred, CounterVarRef);
}

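// Returns true if the analyzer has passed a branching point (a node with more
// than one successor) on the current path since the last visit of LoopStmt.
// In that case updateLoopStack() falls back to normal, bounded simulation of
// the unrolled loop.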
bool madeNewBranch(ExplodedNode *N, const Stmt *LoopStmt) {
  const Stmt *S = nullptr;
  while (!N->pred_empty()) {
    if (N->succ_size() > 1)
      return true;

    ProgramPoint P = N->getLocation();
    if (std::optional<BlockEntrance> BE = P.getAs<BlockEntrance>())
      S = BE->getBlock()->getTerminatorStmt();

    if (S == LoopStmt)
      return false;

    N = N->getFirstPred();
  }

  llvm_unreachable("Reached root without encountering the previous step");
}

// updateLoopStack is called on every basic block, so it needs to be fast.
ProgramStateRef updateLoopStack(const Stmt *LoopStmt, ASTContext &ASTCtx,
                                ExplodedNode *Pred, unsigned maxVisitOnPath) {
  auto State = Pred->getState();
  auto LCtx = Pred->getLocationContext();

  if (!isLoopStmt(LoopStmt))
    return State;

  auto LS = State->get<LoopStack>();
  if (!LS.isEmpty() && LoopStmt == LS.getHead().getLoopStmt() &&
      LCtx == LS.getHead().getLocationContext()) {
    if (LS.getHead().isUnrolled() && madeNewBranch(Pred, LoopStmt)) {
      State = State->set<LoopStack>(LS.getTail());
      State = State->add<LoopStack>(
          LoopState::getNormal(LoopStmt, LCtx, maxVisitOnPath));
    }
    return State;
  }
  unsigned maxStep;
  if (!shouldCompletelyUnroll(LoopStmt, ASTCtx, Pred, maxStep)) {
    State = State->add<LoopStack>(
        LoopState::getNormal(LoopStmt, LCtx, maxVisitOnPath));
    return State;
  }

  unsigned outerStep = (LS.isEmpty() ? 1 : LS.getHead().getMaxStep());

  unsigned innerMaxStep = maxStep * outerStep;
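  // Step counts of nested unrolled loops multiply. For illustration, if a
  // hypothetical outer loop is unrolled with 16 steps and the inner loop has
  // 16 steps of its own, the product of 256 exceeds MAXIMUM_STEP_UNROLLED and
  // the inner loop is simulated as a normal (bounded) loop instead.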
  if (innerMaxStep > MAXIMUM_STEP_UNROLLED)
    State = State->add<LoopStack>(
        LoopState::getNormal(LoopStmt, LCtx, maxVisitOnPath));
  else
    State = State->add<LoopStack>(
        LoopState::getUnrolled(LoopStmt, LCtx, innerMaxStep));
  return State;
}

bool isUnrolledState(ProgramStateRef State) {
  auto LS = State->get<LoopStack>();
  if (LS.isEmpty() || !LS.getHead().isUnrolled())
    return false;
  return true;
}
} // namespace ento
} // namespace clang