/openbsd/gnu/llvm/llvm/lib/Analysis/

MemoryLocation.cpp
    95  MemoryLocation MemoryLocation::getForSource(const MemTransferInst *MTI) {  in getForSource()
    99  MemoryLocation MemoryLocation::getForSource(const AtomicMemTransferInst *MTI) {  in getForSource()
   103  MemoryLocation MemoryLocation::getForSource(const AnyMemTransferInst *MTI) {  in getForSource()
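These three getForSource() overloads map the source operand of a memcpy/memmove (plain, atomic, or either) to a MemoryLocation for alias queries. A minimal sketch of how a caller might use the result, assuming an AAResults reference supplied by an enclosing pass; the helper name is illustrative, not from the listing:

    #include "llvm/Analysis/AliasAnalysis.h"
    #include "llvm/Analysis/MemoryLocation.h"
    #include "llvm/IR/IntrinsicInst.h"

    using namespace llvm;

    // Sketch: may this load read the bytes that a memcpy/memmove copies
    // from? `AA` is assumed to be provided by the enclosing pass.
    static bool loadMayReadCopySource(AAResults &AA, LoadInst &Load,
                                      const MemTransferInst &MTI) {
      MemoryLocation Src = MemoryLocation::getForSource(&MTI);
      return isModOrRefSet(AA.getModRefInfo(&Load, Src));
    }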
AliasSetTracker.cpp
   394  void AliasSetTracker::add(AnyMemTransferInst *MTI) {  in add()
   438  if (AnyMemTransferInst *MTI = dyn_cast<AnyMemTransferInst>(I))  in add()  local
StackSafetyAnalysis.cpp
   319  if (const auto *MTI = dyn_cast<MemTransferInst>(MI)) {  in getMemIntrinsicAccessRange()  local
   463  if (const auto *MTI = dyn_cast<MemTransferInst>(MI)) {  in analyzeAllUses()  local
LazyValueInfo.cpp
   656  if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(MI))  in AddNonNullPointersByInstruction()  local
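Most hits in this directory share one idiom: given a MemIntrinsic, dyn_cast to MemTransferInst to separate memcpy/memmove (which also read a source) from memset (which only writes). A sketch of that recurring shape, with an illustrative function name:

    #include "llvm/IR/IntrinsicInst.h"

    using namespace llvm;

    // Sketch of the recurring pattern above: memcpy/memmove carry a source
    // pointer in addition to the destination, memset does not, so passes
    // branch on the dyn_cast before touching the source operand.
    static void visitMemIntrinsic(MemIntrinsic *MI) {
      if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(MI)) {
        // memcpy/memmove: a read of Src and a write of Dst.
        Value *Src = MTI->getRawSource();
        Value *Dst = MTI->getRawDest();
        (void)Src;
        (void)Dst;
      } else {
        // memset (and its variants): write-only.
      }
    }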
/openbsd/gnu/llvm/llvm/lib/Transforms/Utils/

VNCoercion.cpp
   375  MemTransferInst *MTI = cast<MemTransferInst>(MI);  in analyzeLoadFromClobberingMemInst()  local
   562  MemTransferInst *MTI = cast<MemTransferInst>(SrcInst);  in getMemInstValueForLoad()  local
   586  MemTransferInst *MTI = cast<MemTransferInst>(SrcInst);  in getConstantMemInstValueForLoad()  local
GlobalStatus.cpp
   162  } else if (const MemTransferInst *MTI = dyn_cast<MemTransferInst>(I)) {  in analyzeGlobalAux()  local
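Unlike the dyn_cast sites elsewhere in this listing, the VNCoercion hits use cast<MemTransferInst>: by the time those lines run, the callers have already classified the instruction, so cast<> merely asserts the type instead of testing it. A small sketch of that contract; the helper name is hypothetical:

    #include "llvm/IR/IntrinsicInst.h"

    using namespace llvm;

    // Sketch: cast<> asserts rather than tests. The caller has already
    // established that SrcInst is a memcpy/memmove, so a failure here is
    // a compiler bug, not a legal input.
    static Value *sourceOfKnownTransfer(Instruction *SrcInst) {
      MemTransferInst *MTI = cast<MemTransferInst>(SrcInst);
      return MTI->getSource(); // raw source with pointer casts stripped
    }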
/openbsd/gnu/llvm/llvm/lib/Transforms/Scalar/

AlignmentFromAssumptions.cpp
   293  if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(MI)) {  in processAssumption()  local
InferAddressSpaces.cpp
   478  if (auto *MTI = dyn_cast<MemTransferInst>(MI))  in collectFlatAddressExpressions()  local
  1029  } else if (auto *MTI = dyn_cast<MemTransferInst>(MI)) {  in handleMemIntrinsicPtrUse()  local
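The hit at line 1029 sits where InferAddressSpaces rewrites the pointer operands of mem intrinsics after tracing a flat pointer to a specific address space. Because the replacement pointer has a different type, the transfer has to be rebuilt rather than patched in place. A sketch under that assumption, with OldV/NewV standing in for pass-computed values:

    #include "llvm/IR/IRBuilder.h"
    #include "llvm/IR/IntrinsicInst.h"

    using namespace llvm;

    // Sketch: rebuild the transfer with the substituted operand(s).
    // OldV/NewV are assumed to come from the pass. A full version would
    // use CreateMemMove when the original was llvm.memmove.
    static void rewriteTransferOperands(MemTransferInst *MTI, Value *OldV,
                                        Value *NewV) {
      Value *Src = MTI->getRawSource() == OldV ? NewV : MTI->getRawSource();
      Value *Dst = MTI->getRawDest() == OldV ? NewV : MTI->getRawDest();
      IRBuilder<> B(MTI);
      B.CreateMemCpy(Dst, MTI->getDestAlign(), Src, MTI->getSourceAlign(),
                     MTI->getLength(), MTI->isVolatile());
      MTI->eraseFromParent();
    }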
/openbsd/gnu/llvm/llvm/lib/CodeGen/

SafeStack.cpp
   259  if (auto MTI = dyn_cast<MemTransferInst>(MI)) {  in IsMemIntrinsicSafe()  local
CodeGenPrepare.cpp
  2264  if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(MI)) {  in optimizeCallInst()  local
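The CodeGenPrepare hit raises the recorded alignment of a mem transfer once a larger alignment has been proven for its operands, letting the backend pick wider loads and stores. A sketch of that adjustment, assuming NewAlign was established elsewhere by the caller:

    #include "llvm/IR/IntrinsicInst.h"
    #include "llvm/Support/Alignment.h"

    using namespace llvm;

    // Sketch: record a proven, larger alignment on the intrinsic.
    // NewAlign is assumed to have been justified by the caller.
    static void raiseTransferAlignment(MemTransferInst *MTI, Align NewAlign) {
      if (MTI->getDestAlign().valueOrOne() < NewAlign)
        MTI->setDestAlignment(NewAlign);
      if (MTI->getSourceAlign().valueOrOne() < NewAlign)
        MTI->setSourceAlignment(NewAlign);
    }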
/openbsd/gnu/llvm/llvm/lib/Target/AMDGPU/

AMDGPUPromoteAlloca.cpp
   589  if (const MemTransferInst *MTI = dyn_cast<MemTransferInst>(Inst)) {  in tryPromoteAllocaToVector()  local
/openbsd/gnu/llvm/llvm/lib/Transforms/InstCombine/

InstCombineCalls.cpp
  1222  if (AnyMemTransferInst *MTI = dyn_cast<AnyMemTransferInst>(MI)) {  in visitCallInst()  local
  1230  if (auto *MTI = dyn_cast<AnyMemTransferInst>(MI)) {  in visitCallInst()  local
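InstCombine matches AnyMemTransferInst, the base class covering both the ordinary llvm.memcpy/llvm.memmove intrinsics and their element-wise atomic counterparts, so one visitor clause handles all of them. A minimal sketch of a query written against that base class; the predicate name is illustrative:

    #include "llvm/IR/Constants.h"
    #include "llvm/IR/IntrinsicInst.h"

    using namespace llvm;

    // Sketch: AnyMemTransferInst matches llvm.memcpy/llvm.memmove as well
    // as llvm.memcpy.element.unordered.atomic and friends, so one clause
    // covers both families.
    static bool hasConstantLength(const Instruction *I) {
      if (const auto *MTI = dyn_cast<AnyMemTransferInst>(I))
        return isa<ConstantInt>(MTI->getLength());
      return false;
    }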
/openbsd/gnu/llvm/llvm/lib/Transforms/Instrumentation/

HWAddressSanitizer.cpp
   934  if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(MI)) {  in ignoreMemIntrinsic()  local
DataFlowSanitizer.cpp
  2938  auto *MTI = cast<MemTransferInst>(  in visitMemTransferInst()  local
/openbsd/gnu/llvm/llvm/lib/Target/Mips/

MipsFastISel.cpp
  1650  const auto *MTI = cast<MemTransferInst>(II);  in fastLowerIntrinsicCall()  local
/openbsd/gnu/llvm/llvm/lib/Target/ARM/

ARMFastISel.cpp
  2522  const MemTransferInst &MTI = cast<MemTransferInst>(I);  in SelectIntrinsicCall()  local
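The FastISel hits (Mips and ARM above, AArch64 below) all begin their lowering by reading the same operands off the intrinsic call before deciding between an inline expansion and a libcall. A sketch of just that extraction; the struct is invented here for illustration:

    #include "llvm/IR/IntrinsicInst.h"

    using namespace llvm;

    // Sketch: the operands a FastISel implementation inspects when
    // lowering a memcpy/memmove. TransferOperands is hypothetical.
    struct TransferOperands {
      Value *Dst;
      Value *Src;
      Value *Len;
      bool IsVolatile;
    };

    static TransferOperands getTransferOperands(const MemTransferInst &MTI) {
      return {MTI.getRawDest(), MTI.getRawSource(), MTI.getLength(),
              MTI.isVolatile()};
    }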
/openbsd/gnu/llvm/llvm/lib/Transforms/IPO/

GlobalOpt.cpp
   228  } else if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(U)) {  in CleanupPointerRootUsers()  local
Attributor.cpp
   825  if (const MemTransferInst *MTI = dyn_cast<MemTransferInst>(&I))  in isPotentiallyAffectedByBarrier()  local
/openbsd/gnu/llvm/llvm/lib/IR/

AutoUpgrade.cpp
  4391  if (auto *MTI = dyn_cast<MemTransferInst>(MemCI))  in UpgradeIntrinsicCall()  local
/openbsd/gnu/llvm/llvm/lib/Target/AArch64/

AArch64FastISel.cpp
  3490  const auto *MTI = cast<MemTransferInst>(II);  in fastLowerIntrinsicCall()  local