//===- TypeMetadataUtils.cpp - Utilities related to type metadata ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains functions that make it easier to manipulate type metadata
// for devirtualization.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"

using namespace llvm;

// Search for virtual calls that call FPtr and add them to DevirtCalls.
static void
findCallsAtConstantOffset(SmallVectorImpl<DevirtCallSite> &DevirtCalls,
                          bool *HasNonCallUses, Value *FPtr, uint64_t Offset,
                          const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : FPtr->uses()) {
    Instruction *User = cast<Instruction>(U.getUser());
    // Ignore this instruction if it is not dominated by the type intrinsic
    // being analyzed. Otherwise we may transform a call sharing the same
    // vtable pointer incorrectly. Specifically, this situation can arise
    // after indirect call promotion and inlining, where we may have uses
    // of the vtable pointer guarded by a function pointer check, and a fallback
    // indirect call.
    if (!DT.dominates(CI, User))
      continue;
    if (isa<BitCastInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, HasNonCallUses, User, Offset, CI,
                                DT);
    } else if (auto *Call = dyn_cast<CallInst>(User)) {
      DevirtCalls.push_back({Offset, Call});
    } else if (auto *II = dyn_cast<InvokeInst>(User)) {
      DevirtCalls.push_back({Offset, II});
    } else if (HasNonCallUses) {
      *HasNonCallUses = true;
    }
  }
}

// Search for virtual calls that load from VPtr and add them to DevirtCalls.
static void findLoadCallsAtConstantOffset(
    const Module *M, SmallVectorImpl<DevirtCallSite> &DevirtCalls, Value *VPtr,
    int64_t Offset, const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : VPtr->uses()) {
    Value *User = U.getUser();
    if (isa<BitCastInst>(User)) {
      findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset, CI, DT);
    } else if (isa<LoadInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, nullptr, User, Offset, CI, DT);
    } else if (auto *GEP = dyn_cast<GetElementPtrInst>(User)) {
      // Take into account the GEP offset.
      if (VPtr == GEP->getPointerOperand() && GEP->hasAllConstantIndices()) {
        SmallVector<Value *, 8> Indices(GEP->op_begin() + 1, GEP->op_end());
        int64_t GEPOffset = M->getDataLayout().getIndexedOffsetInType(
            GEP->getSourceElementType(), Indices);
        findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset + GEPOffset,
                                      CI, DT);
      }
    }
  }
}

void llvm::findDevirtualizableCallsForTypeTest(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<CallInst *> &Assumes, const CallInst *CI,
    DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() == Intrinsic::type_test);

  const Module *M = CI->getParent()->getParent()->getParent();

  // Find llvm.assume intrinsics for this llvm.type.test call.
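  // The pattern being looked for is, roughly (a sketch of typical frontend
  // output; the value names and the type identifier are illustrative):
  //   %vtable = load i8*, i8** %obj
  //   %p = call i1 @llvm.type.test(i8* %vtable, metadata !"_ZTS1A")
  //   call void @llvm.assume(i1 %p)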
  for (const Use &CIU : CI->uses()) {
    if (auto *AssumeCI = dyn_cast<CallInst>(CIU.getUser())) {
      Function *F = AssumeCI->getCalledFunction();
      if (F && F->getIntrinsicID() == Intrinsic::assume)
        Assumes.push_back(AssumeCI);
    }
  }

  // If we found any, search for virtual calls based on %p and add them to
  // DevirtCalls.
  if (!Assumes.empty())
    findLoadCallsAtConstantOffset(
        M, DevirtCalls, CI->getArgOperand(0)->stripPointerCasts(), 0, CI, DT);
}

void llvm::findDevirtualizableCallsForTypeCheckedLoad(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<Instruction *> &LoadedPtrs,
    SmallVectorImpl<Instruction *> &Preds, bool &HasNonCallUses,
    const CallInst *CI, DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() ==
         Intrinsic::type_checked_load);

  auto *Offset = dyn_cast<ConstantInt>(CI->getArgOperand(1));
  if (!Offset) {
    HasNonCallUses = true;
    return;
  }

  // The intrinsic returns a {pointer, i1} pair: element 0 is the loaded
  // function pointer and element 1 is the result of the type check.
  for (const Use &U : CI->uses()) {
    auto *CIU = U.getUser();
    if (auto *EVI = dyn_cast<ExtractValueInst>(CIU)) {
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 0) {
        LoadedPtrs.push_back(EVI);
        continue;
      }
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 1) {
        Preds.push_back(EVI);
        continue;
      }
    }
    HasNonCallUses = true;
  }

  for (Value *LoadedPtr : LoadedPtrs)
    findCallsAtConstantOffset(DevirtCalls, &HasNonCallUses, LoadedPtr,
                              Offset->getZExtValue(), CI, DT);
}

// Descend through a constant aggregate (e.g. a vtable initializer) to find
// the pointer stored at byte offset Offset, or null if there is none.
Constant *llvm::getPointerAtOffset(Constant *I, uint64_t Offset, Module &M) {
  if (I->getType()->isPointerTy()) {
    if (Offset == 0)
      return I;
    return nullptr;
  }

  const DataLayout &DL = M.getDataLayout();

  if (auto *C = dyn_cast<ConstantStruct>(I)) {
    const StructLayout *SL = DL.getStructLayout(C->getType());
    if (Offset >= SL->getSizeInBytes())
      return nullptr;

    unsigned Op = SL->getElementContainingOffset(Offset);
    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset - SL->getElementOffset(Op), M);
  }
  if (auto *C = dyn_cast<ConstantArray>(I)) {
    ArrayType *VTableTy = C->getType();
    uint64_t ElemSize = DL.getTypeAllocSize(VTableTy->getElementType());

    unsigned Op = Offset / ElemSize;
    if (Op >= C->getNumOperands())
      return nullptr;

    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset % ElemSize, M);
  }
  return nullptr;
}
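
// Worked example for getPointerAtOffset (a sketch; the vtable layout and the
// names are illustrative, assuming 64-bit pointers): given an initializer
//   { [3 x i8*] } { [3 x i8*] [i8* null, i8* null,
//                              i8* bitcast (void ()* @f to i8*)] }
// a lookup at Offset = 16 selects the sole struct element (the array),
// divides by the 8-byte element size to pick operand 2, and recurses into the
// i8* constant for @f, which is returned because the remaining offset is 0.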