10b57cec5SDimitry Andric //===- CodeMetrics.cpp - Code cost measurements ---------------------------===//
20b57cec5SDimitry Andric //
30b57cec5SDimitry Andric // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
40b57cec5SDimitry Andric // See https://llvm.org/LICENSE.txt for license information.
50b57cec5SDimitry Andric // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
60b57cec5SDimitry Andric //
70b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
80b57cec5SDimitry Andric //
90b57cec5SDimitry Andric // This file implements code cost measurement utilities.
100b57cec5SDimitry Andric //
110b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
120b57cec5SDimitry Andric 
130b57cec5SDimitry Andric #include "llvm/Analysis/CodeMetrics.h"
145ffd83dbSDimitry Andric #include "llvm/ADT/SmallPtrSet.h"
150b57cec5SDimitry Andric #include "llvm/Analysis/AssumptionCache.h"
160b57cec5SDimitry Andric #include "llvm/Analysis/LoopInfo.h"
170b57cec5SDimitry Andric #include "llvm/Analysis/TargetTransformInfo.h"
180b57cec5SDimitry Andric #include "llvm/IR/Function.h"
190b57cec5SDimitry Andric #include "llvm/Support/Debug.h"
20fe6060f1SDimitry Andric #include "llvm/Support/InstructionCost.h"
210b57cec5SDimitry Andric 
220b57cec5SDimitry Andric #define DEBUG_TYPE "code-metrics"
230b57cec5SDimitry Andric 
240b57cec5SDimitry Andric using namespace llvm;
250b57cec5SDimitry Andric 
260b57cec5SDimitry Andric static void
appendSpeculatableOperands(const Value * V,SmallPtrSetImpl<const Value * > & Visited,SmallVectorImpl<const Value * > & Worklist)270b57cec5SDimitry Andric appendSpeculatableOperands(const Value *V,
280b57cec5SDimitry Andric                            SmallPtrSetImpl<const Value *> &Visited,
290b57cec5SDimitry Andric                            SmallVectorImpl<const Value *> &Worklist) {
300b57cec5SDimitry Andric   const User *U = dyn_cast<User>(V);
310b57cec5SDimitry Andric   if (!U)
320b57cec5SDimitry Andric     return;
330b57cec5SDimitry Andric 
340b57cec5SDimitry Andric   for (const Value *Operand : U->operands())
350b57cec5SDimitry Andric     if (Visited.insert(Operand).second)
36349cc55cSDimitry Andric       if (const auto *I = dyn_cast<Instruction>(Operand))
37349cc55cSDimitry Andric         if (!I->mayHaveSideEffects() && !I->isTerminator())
38349cc55cSDimitry Andric           Worklist.push_back(I);
390b57cec5SDimitry Andric }
400b57cec5SDimitry Andric 
completeEphemeralValues(SmallPtrSetImpl<const Value * > & Visited,SmallVectorImpl<const Value * > & Worklist,SmallPtrSetImpl<const Value * > & EphValues)410b57cec5SDimitry Andric static void completeEphemeralValues(SmallPtrSetImpl<const Value *> &Visited,
420b57cec5SDimitry Andric                                     SmallVectorImpl<const Value *> &Worklist,
430b57cec5SDimitry Andric                                     SmallPtrSetImpl<const Value *> &EphValues) {
440b57cec5SDimitry Andric   // Note: We don't speculate PHIs here, so we'll miss instruction chains kept
450b57cec5SDimitry Andric   // alive only by ephemeral values.
460b57cec5SDimitry Andric 
470b57cec5SDimitry Andric   // Walk the worklist using an index but without caching the size so we can
480b57cec5SDimitry Andric   // append more entries as we process the worklist. This forms a queue without
490b57cec5SDimitry Andric   // quadratic behavior by just leaving processed nodes at the head of the
500b57cec5SDimitry Andric   // worklist forever.
510b57cec5SDimitry Andric   for (int i = 0; i < (int)Worklist.size(); ++i) {
520b57cec5SDimitry Andric     const Value *V = Worklist[i];
530b57cec5SDimitry Andric 
540b57cec5SDimitry Andric     assert(Visited.count(V) &&
550b57cec5SDimitry Andric            "Failed to add a worklist entry to our visited set!");
560b57cec5SDimitry Andric 
570b57cec5SDimitry Andric     // If all uses of this value are ephemeral, then so is this value.
580b57cec5SDimitry Andric     if (!all_of(V->users(), [&](const User *U) { return EphValues.count(U); }))
590b57cec5SDimitry Andric       continue;
600b57cec5SDimitry Andric 
610b57cec5SDimitry Andric     EphValues.insert(V);
620b57cec5SDimitry Andric     LLVM_DEBUG(dbgs() << "Ephemeral Value: " << *V << "\n");
630b57cec5SDimitry Andric 
640b57cec5SDimitry Andric     // Append any more operands to consider.
650b57cec5SDimitry Andric     appendSpeculatableOperands(V, Visited, Worklist);
660b57cec5SDimitry Andric   }
670b57cec5SDimitry Andric }
680b57cec5SDimitry Andric 
690b57cec5SDimitry Andric // Find all ephemeral values.
collectEphemeralValues(const Loop * L,AssumptionCache * AC,SmallPtrSetImpl<const Value * > & EphValues)700b57cec5SDimitry Andric void CodeMetrics::collectEphemeralValues(
710b57cec5SDimitry Andric     const Loop *L, AssumptionCache *AC,
720b57cec5SDimitry Andric     SmallPtrSetImpl<const Value *> &EphValues) {
730b57cec5SDimitry Andric   SmallPtrSet<const Value *, 32> Visited;
740b57cec5SDimitry Andric   SmallVector<const Value *, 16> Worklist;
750b57cec5SDimitry Andric 
760b57cec5SDimitry Andric   for (auto &AssumeVH : AC->assumptions()) {
770b57cec5SDimitry Andric     if (!AssumeVH)
780b57cec5SDimitry Andric       continue;
790b57cec5SDimitry Andric     Instruction *I = cast<Instruction>(AssumeVH);
800b57cec5SDimitry Andric 
810b57cec5SDimitry Andric     // Filter out call sites outside of the loop so we don't do a function's
820b57cec5SDimitry Andric     // worth of work for each of its loops (and, in the common case, ephemeral
830b57cec5SDimitry Andric     // values in the loop are likely due to @llvm.assume calls in the loop).
840b57cec5SDimitry Andric     if (!L->contains(I->getParent()))
850b57cec5SDimitry Andric       continue;
860b57cec5SDimitry Andric 
870b57cec5SDimitry Andric     if (EphValues.insert(I).second)
880b57cec5SDimitry Andric       appendSpeculatableOperands(I, Visited, Worklist);
890b57cec5SDimitry Andric   }
900b57cec5SDimitry Andric 
910b57cec5SDimitry Andric   completeEphemeralValues(Visited, Worklist, EphValues);
920b57cec5SDimitry Andric }
930b57cec5SDimitry Andric 
collectEphemeralValues(const Function * F,AssumptionCache * AC,SmallPtrSetImpl<const Value * > & EphValues)940b57cec5SDimitry Andric void CodeMetrics::collectEphemeralValues(
950b57cec5SDimitry Andric     const Function *F, AssumptionCache *AC,
960b57cec5SDimitry Andric     SmallPtrSetImpl<const Value *> &EphValues) {
970b57cec5SDimitry Andric   SmallPtrSet<const Value *, 32> Visited;
980b57cec5SDimitry Andric   SmallVector<const Value *, 16> Worklist;
990b57cec5SDimitry Andric 
1000b57cec5SDimitry Andric   for (auto &AssumeVH : AC->assumptions()) {
1010b57cec5SDimitry Andric     if (!AssumeVH)
1020b57cec5SDimitry Andric       continue;
1030b57cec5SDimitry Andric     Instruction *I = cast<Instruction>(AssumeVH);
1040b57cec5SDimitry Andric     assert(I->getParent()->getParent() == F &&
1050b57cec5SDimitry Andric            "Found assumption for the wrong function!");
1060b57cec5SDimitry Andric 
1070b57cec5SDimitry Andric     if (EphValues.insert(I).second)
1080b57cec5SDimitry Andric       appendSpeculatableOperands(I, Visited, Worklist);
1090b57cec5SDimitry Andric   }
1100b57cec5SDimitry Andric 
1110b57cec5SDimitry Andric   completeEphemeralValues(Visited, Worklist, EphValues);
1120b57cec5SDimitry Andric }
1130b57cec5SDimitry Andric 
/// Fill in the current structure with information gleaned from the specified
/// block.
///
/// Counters accumulate across calls (NumBlocks, NumInsts, NumCalls,
/// NumInlineCandidates, NumVectorInsts, NumRets) and the boolean flags
/// (isRecursive, usesDynamicAlloca, notDuplicatable, convergent) are only
/// ever set, never cleared, by this function.
///
/// \param BB            The basic block to analyze.
/// \param TTI           Queried for call lowering (isLoweredToCall) and for
///                      each instruction's code-size cost.
/// \param EphValues     Values known to be ephemeral; skipped entirely.
/// \param PrepareForLTO When true, calls are liberally counted as inline
///                      candidates regardless of linkage/use count.
void CodeMetrics::analyzeBasicBlock(
    const BasicBlock *BB, const TargetTransformInfo &TTI,
    const SmallPtrSetImpl<const Value *> &EphValues, bool PrepareForLTO) {
  ++NumBlocks;
  // Snapshot the running total so this block's own cost can be recorded at
  // the end.
  InstructionCost NumInstsBeforeThisBB = NumInsts;
  for (const Instruction &I : *BB) {
    // Skip ephemeral values.
    if (EphValues.count(&I))
      continue;

    // Special handling for calls.
    if (const auto *Call = dyn_cast<CallBase>(&I)) {
      if (const Function *F = Call->getCalledFunction()) {
        bool IsLoweredToCall = TTI.isLoweredToCall(F);
        // If a function is both internal and has a single use, then it is
        // extremely likely to get inlined in the future (it was probably
        // exposed by an interleaved devirtualization pass).
        // When preparing for LTO, liberally consider calls as inline
        // candidates.
        if (!Call->isNoInline() && IsLoweredToCall &&
            ((F->hasInternalLinkage() && F->hasOneLiveUse()) ||
             PrepareForLTO)) {
          ++NumInlineCandidates;
        }

        // If this call is to function itself, then the function is recursive.
        // Inlining it into other functions is a bad idea, because this is
        // basically just a form of loop peeling, and our metrics aren't useful
        // for that case.
        if (F == BB->getParent())
          isRecursive = true;

        // Only count calls that actually lower to a call instruction;
        // intrinsics that don't are excluded.
        if (IsLoweredToCall)
          ++NumCalls;
      } else {
        // We don't want inline asm to count as a call - that would prevent loop
        // unrolling. The argument setup cost is still real, though.
        if (!Call->isInlineAsm())
          ++NumCalls;
      }
    }

    if (const AllocaInst *AI = dyn_cast<AllocaInst>(&I)) {
      // Any alloca that isn't static (entry-block, constant-size) flags the
      // function as using dynamic stack allocation.
      if (!AI->isStaticAlloca())
        this->usesDynamicAlloca = true;
    }

    if (isa<ExtractElementInst>(I) || I.getType()->isVectorTy())
      ++NumVectorInsts;

    // A token-typed value live out of this block makes the block
    // non-duplicatable.
    if (I.getType()->isTokenTy() && I.isUsedOutsideOfBlock(BB))
      notDuplicatable = true;

    if (const CallInst *CI = dyn_cast<CallInst>(&I)) {
      if (CI->cannotDuplicate())
        notDuplicatable = true;
      if (CI->isConvergent())
        convergent = true;
    }

    if (const InvokeInst *InvI = dyn_cast<InvokeInst>(&I))
      if (InvI->cannotDuplicate())
        notDuplicatable = true;

    // Charge the target's code-size cost for this instruction.
    NumInsts += TTI.getInstructionCost(&I, TargetTransformInfo::TCK_CodeSize);
  }

  if (isa<ReturnInst>(BB->getTerminator()))
    ++NumRets;

  // We never want to inline functions that contain an indirectbr.  This is
  // incorrect because all the blockaddress's (in static global initializers
  // for example) would be referring to the original function, and this indirect
  // jump would jump from the inlined copy of the function into the original
  // function which is extremely undefined behavior.
  // FIXME: This logic isn't really right; we can safely inline functions
  // with indirectbr's as long as no other function or global references the
  // blockaddress of a block within the current function.  And as a QOI issue,
  // if someone is using a blockaddress without an indirectbr, and that
  // reference somehow ends up in another function or global, we probably
  // don't want to inline this function.
  notDuplicatable |= isa<IndirectBrInst>(BB->getTerminator());

  // Remember NumInsts for this BB.
  InstructionCost NumInstsThisBB = NumInsts - NumInstsBeforeThisBB;
  NumBBInsts[BB] = NumInstsThisBB;
}
203