1 //===-- Value.cpp - Implement the Value class -----------------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file implements the Value, ValueHandle, and User classes.
10 //
11 //===----------------------------------------------------------------------===//
12 
13 #include "llvm/IR/Value.h"
14 #include "LLVMContextImpl.h"
15 #include "llvm/ADT/DenseMap.h"
16 #include "llvm/ADT/SmallString.h"
17 #include "llvm/IR/Constant.h"
18 #include "llvm/IR/Constants.h"
19 #include "llvm/IR/DataLayout.h"
20 #include "llvm/IR/DebugInfo.h"
21 #include "llvm/IR/DerivedTypes.h"
22 #include "llvm/IR/DerivedUser.h"
23 #include "llvm/IR/GetElementPtrTypeIterator.h"
24 #include "llvm/IR/InstrTypes.h"
25 #include "llvm/IR/Instructions.h"
26 #include "llvm/IR/IntrinsicInst.h"
27 #include "llvm/IR/Module.h"
28 #include "llvm/IR/Operator.h"
29 #include "llvm/IR/TypedPointerType.h"
30 #include "llvm/IR/ValueHandle.h"
31 #include "llvm/IR/ValueSymbolTable.h"
32 #include "llvm/Support/CommandLine.h"
33 #include "llvm/Support/ErrorHandling.h"
34 #include "llvm/Support/raw_ostream.h"
35 #include <algorithm>
36 
37 using namespace llvm;
38 
39 static cl::opt<unsigned> UseDerefAtPointSemantics(
40     "use-dereferenceable-at-point-semantics", cl::Hidden, cl::init(false),
41     cl::desc("Deref attributes and metadata infer facts at definition only"));
42 
43 //===----------------------------------------------------------------------===//
44 //                                Value Class
45 //===----------------------------------------------------------------------===//
46 static inline Type *checkType(Type *Ty) {
47   assert(Ty && "Value defined with a null type: Error!");
48   assert(!isa<TypedPointerType>(Ty->getScalarType()) &&
49          "Cannot have values with typed pointer types");
50   return Ty;
51 }
52 
53 Value::Value(Type *ty, unsigned scid)
54     : SubclassID(scid), HasValueHandle(0), SubclassOptionalData(0),
55       SubclassData(0), NumUserOperands(0), IsUsedByMD(false), HasName(false),
56       HasMetadata(false), VTy(checkType(ty)), UseList(nullptr) {
57   static_assert(ConstantFirstVal == 0, "!(SubclassID < ConstantFirstVal)");
58   // FIXME: Why isn't this in the subclass gunk??
59   // Note, we cannot call isa<CallInst> before the CallInst has been
60   // constructed.
61   unsigned OpCode = 0;
62   if (SubclassID >= InstructionVal)
63     OpCode = SubclassID - InstructionVal;
64   if (OpCode == Instruction::Call || OpCode == Instruction::Invoke ||
65       OpCode == Instruction::CallBr)
66     assert((VTy->isFirstClassType() || VTy->isVoidTy() || VTy->isStructTy()) &&
67            "invalid CallBase type!");
68   else if (SubclassID != BasicBlockVal &&
69            (/*SubclassID < ConstantFirstVal ||*/ SubclassID > ConstantLastVal))
70     assert((VTy->isFirstClassType() || VTy->isVoidTy()) &&
71            "Cannot create non-first-class values except for constants!");
72   static_assert(sizeof(Value) == 2 * sizeof(void *) + 2 * sizeof(unsigned),
73                 "Value too big");
74 }
75 
76 Value::~Value() {
77   // Notify all ValueHandles (if present) that this value is going away.
78   if (HasValueHandle)
79     ValueHandleBase::ValueIsDeleted(this);
80   if (isUsedByMetadata())
81     ValueAsMetadata::handleDeletion(this);
82 
83   // Remove associated metadata from context.
84   if (HasMetadata)
85     clearMetadata();
86 
#ifndef NDEBUG      // Only in +Asserts mode...
88   // Check to make sure that there are no uses of this value that are still
89   // around when the value is destroyed.  If there are, then we have a dangling
90   // reference and something is wrong.  This code is here to print out where
91   // the value is still being referenced.
92   //
93   // Note that use_empty() cannot be called here, as it eventually downcasts
94   // 'this' to GlobalValue (derived class of Value), but GlobalValue has already
95   // been destructed, so accessing it is UB.
96   //
97   if (!materialized_use_empty()) {
98     dbgs() << "While deleting: " << *VTy << " %" << getName() << "\n";
99     for (auto *U : users())
100       dbgs() << "Use still stuck around after Def is destroyed:" << *U << "\n";
101   }
102 #endif
103   assert(materialized_use_empty() && "Uses remain when a value is destroyed!");
104 
105   // If this value is named, destroy the name.  This should not be in a symtab
106   // at this point.
107   destroyValueName();
108 }
109 
110 void Value::deleteValue() {
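  // Value's destructor is non-virtual (and protected), so deletion must
  // dispatch manually on the value ID to invoke the most-derived class's
  // destructor; the .def files below enumerate all value kinds.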
111   switch (getValueID()) {
112 #define HANDLE_VALUE(Name)                                                     \
113   case Value::Name##Val:                                                       \
114     delete static_cast<Name *>(this);                                          \
115     break;
116 #define HANDLE_MEMORY_VALUE(Name)                                              \
117   case Value::Name##Val:                                                       \
118     static_cast<DerivedUser *>(this)->DeleteValue(                             \
119         static_cast<DerivedUser *>(this));                                     \
120     break;
121 #define HANDLE_CONSTANT(Name)                                                  \
122   case Value::Name##Val:                                                       \
123     llvm_unreachable("constants should be destroyed with destroyConstant");    \
124     break;
125 #define HANDLE_INSTRUCTION(Name)  /* nothing */
126 #include "llvm/IR/Value.def"
127 
128 #define HANDLE_INST(N, OPC, CLASS)                                             \
129   case Value::InstructionVal + Instruction::OPC:                               \
130     delete static_cast<CLASS *>(this);                                         \
131     break;
132 #define HANDLE_USER_INST(N, OPC, CLASS)
133 #include "llvm/IR/Instruction.def"
134 
135   default:
136     llvm_unreachable("attempting to delete unknown value kind");
137   }
138 }
139 
140 void Value::destroyValueName() {
141   ValueName *Name = getValueName();
142   if (Name) {
143     MallocAllocator Allocator;
144     Name->Destroy(Allocator);
145   }
146   setValueName(nullptr);
147 }
148 
149 bool Value::hasNUses(unsigned N) const {
150   return hasNItems(use_begin(), use_end(), N);
151 }
152 
153 bool Value::hasNUsesOrMore(unsigned N) const {
154   return hasNItemsOrMore(use_begin(), use_end(), N);
155 }
156 
157 bool Value::hasOneUser() const {
158   if (use_empty())
159     return false;
160   if (hasOneUse())
161     return true;
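  // Multiple uses: they all belong to a single user iff consecutive users on
  // the use list are pairwise equal.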
162   return std::equal(++user_begin(), user_end(), user_begin());
163 }
164 
165 static bool isUnDroppableUser(const User *U) { return !U->isDroppable(); }
166 
167 Use *Value::getSingleUndroppableUse() {
168   Use *Result = nullptr;
169   for (Use &U : uses()) {
170     if (!U.getUser()->isDroppable()) {
171       if (Result)
172         return nullptr;
173       Result = &U;
174     }
175   }
176   return Result;
177 }
178 
179 User *Value::getUniqueUndroppableUser() {
180   User *Result = nullptr;
181   for (auto *U : users()) {
182     if (!U->isDroppable()) {
183       if (Result && Result != U)
184         return nullptr;
185       Result = U;
186     }
187   }
188   return Result;
189 }
190 
191 bool Value::hasNUndroppableUses(unsigned int N) const {
192   return hasNItems(user_begin(), user_end(), N, isUnDroppableUser);
193 }
194 
195 bool Value::hasNUndroppableUsesOrMore(unsigned int N) const {
196   return hasNItemsOrMore(user_begin(), user_end(), N, isUnDroppableUser);
197 }
198 
199 void Value::dropDroppableUses(
200     llvm::function_ref<bool(const Use *)> ShouldDrop) {
201   SmallVector<Use *, 8> ToBeEdited;
202   for (Use &U : uses())
203     if (U.getUser()->isDroppable() && ShouldDrop(&U))
204       ToBeEdited.push_back(&U);
205   for (Use *U : ToBeEdited)
206     dropDroppableUse(*U);
207 }
208 
209 void Value::dropDroppableUsesIn(User &Usr) {
210   assert(Usr.isDroppable() && "Expected a droppable user!");
211   for (Use &UsrOp : Usr.operands()) {
212     if (UsrOp.get() == this)
213       dropDroppableUse(UsrOp);
214   }
215 }
216 
217 void Value::dropDroppableUse(Use &U) {
218   U.removeFromList();
219   if (auto *Assume = dyn_cast<AssumeInst>(U.getUser())) {
220     unsigned OpNo = U.getOperandNo();
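    // Operand 0 is the assumed condition: dropping that use means replacing it
    // with a trivially true condition.  Any other operand belongs to an
    // operand bundle; replace it with undef and retag its bundle as "ignore"
    // so it no longer conveys any information.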
221     if (OpNo == 0)
222       U.set(ConstantInt::getTrue(Assume->getContext()));
223     else {
224       U.set(UndefValue::get(U.get()->getType()));
225       CallInst::BundleOpInfo &BOI = Assume->getBundleOpInfoForOperand(OpNo);
226       BOI.Tag = Assume->getContext().pImpl->getOrInsertBundleTag("ignore");
227     }
228     return;
229   }
230 
  llvm_unreachable("unknown droppable use");
232 }
233 
234 bool Value::isUsedInBasicBlock(const BasicBlock *BB) const {
235   // This can be computed either by scanning the instructions in BB, or by
236   // scanning the use list of this Value. Both lists can be very long, but
237   // usually one is quite short.
238   //
239   // Scan both lists simultaneously until one is exhausted. This limits the
240   // search to the shorter list.
241   BasicBlock::const_iterator BI = BB->begin(), BE = BB->end();
242   const_user_iterator UI = user_begin(), UE = user_end();
243   for (; BI != BE && UI != UE; ++BI, ++UI) {
244     // Scan basic block: Check if this Value is used by the instruction at BI.
245     if (is_contained(BI->operands(), this))
246       return true;
247     // Scan use list: Check if the use at UI is in BB.
248     const auto *User = dyn_cast<Instruction>(*UI);
249     if (User && User->getParent() == BB)
250       return true;
251   }
252   return false;
253 }
254 
255 unsigned Value::getNumUses() const {
256   return (unsigned)std::distance(use_begin(), use_end());
257 }
258 
259 static bool getSymTab(Value *V, ValueSymbolTable *&ST) {
260   ST = nullptr;
261   if (Instruction *I = dyn_cast<Instruction>(V)) {
262     if (BasicBlock *P = I->getParent())
263       if (Function *PP = P->getParent())
264         ST = PP->getValueSymbolTable();
265   } else if (BasicBlock *BB = dyn_cast<BasicBlock>(V)) {
266     if (Function *P = BB->getParent())
267       ST = P->getValueSymbolTable();
268   } else if (GlobalValue *GV = dyn_cast<GlobalValue>(V)) {
269     if (Module *P = GV->getParent())
270       ST = &P->getValueSymbolTable();
271   } else if (Argument *A = dyn_cast<Argument>(V)) {
272     if (Function *P = A->getParent())
273       ST = P->getValueSymbolTable();
274   } else {
275     assert(isa<Constant>(V) && "Unknown value type!");
    return true;  // No name is settable for this value.
277   }
278   return false;
279 }
280 
281 ValueName *Value::getValueName() const {
282   if (!HasName) return nullptr;
283 
284   LLVMContext &Ctx = getContext();
285   auto I = Ctx.pImpl->ValueNames.find(this);
286   assert(I != Ctx.pImpl->ValueNames.end() &&
287          "No name entry found!");
288 
289   return I->second;
290 }
291 
292 void Value::setValueName(ValueName *VN) {
293   LLVMContext &Ctx = getContext();
294 
295   assert(HasName == Ctx.pImpl->ValueNames.count(this) &&
296          "HasName bit out of sync!");
297 
298   if (!VN) {
299     if (HasName)
300       Ctx.pImpl->ValueNames.erase(this);
301     HasName = false;
302     return;
303   }
304 
305   HasName = true;
306   Ctx.pImpl->ValueNames[this] = VN;
307 }
308 
309 StringRef Value::getName() const {
310   // Make sure the empty string is still a C string. For historical reasons,
311   // some clients want to call .data() on the result and expect it to be null
312   // terminated.
313   if (!hasName())
314     return StringRef("", 0);
315   return getValueName()->getKey();
316 }
317 
318 void Value::setNameImpl(const Twine &NewName) {
319   bool NeedNewName =
320       !getContext().shouldDiscardValueNames() || isa<GlobalValue>(this);
321 
  // Fast path: the LLVMContext may be configured to discard non-GlobalValue
  // names; if so and this value has no existing name to remove, there is
  // nothing to do.
324   if (!NeedNewName && !hasName())
325     return;
326 
327   // Fast path for common IRBuilder case of setName("") when there is no name.
328   if (NewName.isTriviallyEmpty() && !hasName())
329     return;
330 
331   SmallString<256> NameData;
332   StringRef NameRef = NeedNewName ? NewName.toStringRef(NameData) : "";
333   assert(!NameRef.contains(0) && "Null bytes are not allowed in names");
334 
335   // Name isn't changing?
336   if (getName() == NameRef)
337     return;
338 
339   assert(!getType()->isVoidTy() && "Cannot assign a name to void values!");
340 
341   // Get the symbol table to update for this object.
342   ValueSymbolTable *ST;
343   if (getSymTab(this, ST))
344     return;  // Cannot set a name on this value (e.g. constant).
345 
346   if (!ST) { // No symbol table to update?  Just do the change.
    // NOTE: Could optimize for the case where the name is shrinking, to avoid
    // deallocating and then reallocating.
349     destroyValueName();
350 
351     if (!NameRef.empty()) {
352       // Create the new name.
353       assert(NeedNewName);
354       MallocAllocator Allocator;
355       setValueName(ValueName::create(NameRef, Allocator));
356       getValueName()->setValue(this);
357     }
358     return;
359   }
360 
  // NOTE: Could optimize for the case where the name is shrinking, to avoid
  // deallocating and then reallocating.
363   if (hasName()) {
364     // Remove old name.
365     ST->removeValueName(getValueName());
366     destroyValueName();
367 
368     if (NameRef.empty())
369       return;
370   }
371 
372   // Name is changing to something new.
373   assert(NeedNewName);
374   setValueName(ST->createValueName(NameRef, this));
375 }
376 
377 void Value::setName(const Twine &NewName) {
378   setNameImpl(NewName);
379   if (Function *F = dyn_cast<Function>(this))
380     F->updateAfterNameChange();
381 }
382 
383 void Value::takeName(Value *V) {
384   assert(V != this && "Illegal call to this->takeName(this)!");
385   ValueSymbolTable *ST = nullptr;
386   // If this value has a name, drop it.
387   if (hasName()) {
388     // Get the symtab this is in.
389     if (getSymTab(this, ST)) {
390       // We can't set a name on this value, but we need to clear V's name if
391       // it has one.
392       if (V->hasName()) V->setName("");
393       return;  // Cannot set a name on this value (e.g. constant).
394     }
395 
396     // Remove old name.
397     if (ST)
398       ST->removeValueName(getValueName());
399     destroyValueName();
400   }
401 
402   // Now we know that this has no name.
403 
404   // If V has no name either, we're done.
405   if (!V->hasName()) return;
406 
  // Get this value's symbol table if we didn't look it up before.
408   if (!ST) {
409     if (getSymTab(this, ST)) {
410       // Clear V's name.
411       V->setName("");
412       return;  // Cannot set a name on this value (e.g. constant).
413     }
414   }
415 
  // Get V's symbol table; this should always succeed because V has a name.
417   ValueSymbolTable *VST;
418   bool Failure = getSymTab(V, VST);
419   assert(!Failure && "V has a name, so it should have a ST!"); (void)Failure;
420 
421   // If these values are both in the same symtab, we can do this very fast.
422   // This works even if both values have no symtab yet.
423   if (ST == VST) {
424     // Take the name!
425     setValueName(V->getValueName());
426     V->setValueName(nullptr);
427     getValueName()->setValue(this);
428     return;
429   }
430 
431   // Otherwise, things are slightly more complex.  Remove V's name from VST and
432   // then reinsert it into ST.
433 
434   if (VST)
435     VST->removeValueName(V->getValueName());
436   setValueName(V->getValueName());
437   V->setValueName(nullptr);
438   getValueName()->setValue(this);
439 
440   if (ST)
441     ST->reinsertValue(this);
442 }
443 
444 #ifndef NDEBUG
445 std::string Value::getNameOrAsOperand() const {
446   if (!getName().empty())
447     return std::string(getName());
448 
449   std::string BBName;
450   raw_string_ostream OS(BBName);
451   printAsOperand(OS, false);
452   return OS.str();
453 }
454 #endif
455 
456 void Value::assertModuleIsMaterializedImpl() const {
457 #ifndef NDEBUG
458   const GlobalValue *GV = dyn_cast<GlobalValue>(this);
459   if (!GV)
460     return;
461   const Module *M = GV->getParent();
462   if (!M)
463     return;
464   assert(M->isMaterialized());
465 #endif
466 }
467 
468 #ifndef NDEBUG
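/// Return true if the constant expression Expr transitively contains C among
/// its operands.  The Cache set guards against re-walking constant expressions
/// that are reachable along multiple paths.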
469 static bool contains(SmallPtrSetImpl<ConstantExpr *> &Cache, ConstantExpr *Expr,
470                      Constant *C) {
471   if (!Cache.insert(Expr).second)
472     return false;
473 
474   for (auto &O : Expr->operands()) {
475     if (O == C)
476       return true;
477     auto *CE = dyn_cast<ConstantExpr>(O);
478     if (!CE)
479       continue;
480     if (contains(Cache, CE, C))
481       return true;
482   }
483   return false;
484 }
485 
486 static bool contains(Value *Expr, Value *V) {
487   if (Expr == V)
488     return true;
489 
490   auto *C = dyn_cast<Constant>(V);
491   if (!C)
492     return false;
493 
494   auto *CE = dyn_cast<ConstantExpr>(Expr);
495   if (!CE)
496     return false;
497 
498   SmallPtrSet<ConstantExpr *, 4> Cache;
499   return contains(Cache, CE, C);
500 }
501 #endif // NDEBUG
502 
503 void Value::doRAUW(Value *New, ReplaceMetadataUses ReplaceMetaUses) {
504   assert(New && "Value::replaceAllUsesWith(<null>) is invalid!");
505   assert(!contains(New, this) &&
506          "this->replaceAllUsesWith(expr(this)) is NOT valid!");
507   assert(New->getType() == getType() &&
508          "replaceAllUses of value with new value of different type!");
509 
510   // Notify all ValueHandles (if present) that this value is going away.
511   if (HasValueHandle)
512     ValueHandleBase::ValueIsRAUWd(this, New);
513   if (ReplaceMetaUses == ReplaceMetadataUses::Yes && isUsedByMetadata())
514     ValueAsMetadata::handleRAUW(this, New);
515 
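  // Repeatedly peel the head use off the list.  Each iteration detaches that
  // use from this value, either by rewriting it to point at New or by
  // re-uniquing the constant that owns it.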
516   while (!materialized_use_empty()) {
517     Use &U = *UseList;
    // Constants must be handled specially: they are uniqued, so we cannot call
    // replaceUsesOfWith on them.
520     if (auto *C = dyn_cast<Constant>(U.getUser())) {
521       if (!isa<GlobalValue>(C)) {
522         C->handleOperandChange(this, New);
523         continue;
524       }
525     }
526 
527     U.set(New);
528   }
529 
530   if (BasicBlock *BB = dyn_cast<BasicBlock>(this))
531     BB->replaceSuccessorsPhiUsesWith(cast<BasicBlock>(New));
532 }
533 
534 void Value::replaceAllUsesWith(Value *New) {
535   doRAUW(New, ReplaceMetadataUses::Yes);
536 }
537 
538 void Value::replaceNonMetadataUsesWith(Value *New) {
539   doRAUW(New, ReplaceMetadataUses::No);
540 }
541 
542 void Value::replaceUsesWithIf(Value *New,
543                               llvm::function_ref<bool(Use &U)> ShouldReplace) {
544   assert(New && "Value::replaceUsesWithIf(<null>) is invalid!");
545   assert(New->getType() == getType() &&
546          "replaceUses of value with new value of different type!");
547 
548   SmallVector<TrackingVH<Constant>, 8> Consts;
549   SmallPtrSet<Constant *, 8> Visited;
550 
551   for (Use &U : llvm::make_early_inc_range(uses())) {
552     if (!ShouldReplace(U))
553       continue;
    // Constants must be handled specially: they are uniqued, so we cannot call
    // replaceUsesOfWith on them.
556     if (auto *C = dyn_cast<Constant>(U.getUser())) {
557       if (!isa<GlobalValue>(C)) {
558         if (Visited.insert(C).second)
559           Consts.push_back(TrackingVH<Constant>(C));
560         continue;
561       }
562     }
563     U.set(New);
564   }
565 
566   while (!Consts.empty()) {
567     // FIXME: handleOperandChange() updates all the uses in a given Constant,
568     //        not just the one passed to ShouldReplace
569     Consts.pop_back_val()->handleOperandChange(this, New);
570   }
571 }
572 
573 /// Replace llvm.dbg.* uses of MetadataAsValue(ValueAsMetadata(V)) outside BB
574 /// with New.
575 static void replaceDbgUsesOutsideBlock(Value *V, Value *New, BasicBlock *BB) {
576   SmallVector<DbgVariableIntrinsic *> DbgUsers;
577   SmallVector<DPValue *> DPUsers;
578   findDbgUsers(DbgUsers, V, &DPUsers);
579   for (auto *DVI : DbgUsers) {
580     if (DVI->getParent() != BB)
581       DVI->replaceVariableLocationOp(V, New);
582   }
583   for (auto *DPV : DPUsers) {
584     DPMarker *Marker = DPV->getMarker();
585     if (Marker->getParent() != BB)
586       DPV->replaceVariableLocationOp(V, New);
587   }
588 }
589 
590 // Like replaceAllUsesWith except it does not handle constants or basic blocks.
591 // This routine leaves uses within BB.
592 void Value::replaceUsesOutsideBlock(Value *New, BasicBlock *BB) {
593   assert(New && "Value::replaceUsesOutsideBlock(<null>, BB) is invalid!");
594   assert(!contains(New, this) &&
595          "this->replaceUsesOutsideBlock(expr(this), BB) is NOT valid!");
596   assert(New->getType() == getType() &&
597          "replaceUses of value with new value of different type!");
598   assert(BB && "Basic block that may contain a use of 'New' must be defined\n");
599 
600   replaceDbgUsesOutsideBlock(this, New, BB);
601   replaceUsesWithIf(New, [BB](Use &U) {
602     auto *I = dyn_cast<Instruction>(U.getUser());
    // Don't replace the use if its user is an instruction inside BB.
604     return !I || I->getParent() != BB;
605   });
606 }
607 
608 namespace {
609 // Various metrics for how much to strip off of pointers.
610 enum PointerStripKind {
611   PSK_ZeroIndices,
612   PSK_ZeroIndicesAndAliases,
613   PSK_ZeroIndicesSameRepresentation,
614   PSK_ForAliasAnalysis,
615   PSK_InBoundsConstantIndices,
616   PSK_InBounds
617 };
618 
619 template <PointerStripKind StripKind> static void NoopCallback(const Value *) {}
620 
621 template <PointerStripKind StripKind>
622 static const Value *stripPointerCastsAndOffsets(
623     const Value *V,
624     function_ref<void(const Value *)> Func = NoopCallback<StripKind>) {
625   if (!V->getType()->isPointerTy())
626     return V;
627 
628   // Even though we don't look through PHI nodes, we could be called on an
629   // instruction in an unreachable block, which may be on a cycle.
630   SmallPtrSet<const Value *, 4> Visited;
631 
632   Visited.insert(V);
633   do {
634     Func(V);
635     if (auto *GEP = dyn_cast<GEPOperator>(V)) {
636       switch (StripKind) {
637       case PSK_ZeroIndices:
638       case PSK_ZeroIndicesAndAliases:
639       case PSK_ZeroIndicesSameRepresentation:
640       case PSK_ForAliasAnalysis:
641         if (!GEP->hasAllZeroIndices())
642           return V;
643         break;
644       case PSK_InBoundsConstantIndices:
645         if (!GEP->hasAllConstantIndices())
646           return V;
647         [[fallthrough]];
648       case PSK_InBounds:
649         if (!GEP->isInBounds())
650           return V;
651         break;
652       }
653       V = GEP->getPointerOperand();
654     } else if (Operator::getOpcode(V) == Instruction::BitCast) {
655       V = cast<Operator>(V)->getOperand(0);
656       if (!V->getType()->isPointerTy())
657         return V;
658     } else if (StripKind != PSK_ZeroIndicesSameRepresentation &&
659                Operator::getOpcode(V) == Instruction::AddrSpaceCast) {
660       // TODO: If we know an address space cast will not change the
661       //       representation we could look through it here as well.
662       V = cast<Operator>(V)->getOperand(0);
663     } else if (StripKind == PSK_ZeroIndicesAndAliases && isa<GlobalAlias>(V)) {
664       V = cast<GlobalAlias>(V)->getAliasee();
665     } else if (StripKind == PSK_ForAliasAnalysis && isa<PHINode>(V) &&
666                cast<PHINode>(V)->getNumIncomingValues() == 1) {
667       V = cast<PHINode>(V)->getIncomingValue(0);
668     } else {
669       if (const auto *Call = dyn_cast<CallBase>(V)) {
670         if (const Value *RV = Call->getReturnedArgOperand()) {
671           V = RV;
672           continue;
673         }
        // The result of launder.invariant.group must alias its argument, but
        // it cannot be marked with the returned attribute, so it needs to be
        // special cased here.
677         if (StripKind == PSK_ForAliasAnalysis &&
678             (Call->getIntrinsicID() == Intrinsic::launder_invariant_group ||
679              Call->getIntrinsicID() == Intrinsic::strip_invariant_group)) {
680           V = Call->getArgOperand(0);
681           continue;
682         }
683       }
684       return V;
685     }
686     assert(V->getType()->isPointerTy() && "Unexpected operand type!");
687   } while (Visited.insert(V).second);
688 
689   return V;
690 }
691 } // end anonymous namespace
692 
693 const Value *Value::stripPointerCasts() const {
694   return stripPointerCastsAndOffsets<PSK_ZeroIndices>(this);
695 }
696 
697 const Value *Value::stripPointerCastsAndAliases() const {
698   return stripPointerCastsAndOffsets<PSK_ZeroIndicesAndAliases>(this);
699 }
700 
701 const Value *Value::stripPointerCastsSameRepresentation() const {
702   return stripPointerCastsAndOffsets<PSK_ZeroIndicesSameRepresentation>(this);
703 }
704 
705 const Value *Value::stripInBoundsConstantOffsets() const {
706   return stripPointerCastsAndOffsets<PSK_InBoundsConstantIndices>(this);
707 }
708 
709 const Value *Value::stripPointerCastsForAliasAnalysis() const {
710   return stripPointerCastsAndOffsets<PSK_ForAliasAnalysis>(this);
711 }
712 
713 const Value *Value::stripAndAccumulateConstantOffsets(
714     const DataLayout &DL, APInt &Offset, bool AllowNonInbounds,
715     bool AllowInvariantGroup,
716     function_ref<bool(Value &, APInt &)> ExternalAnalysis) const {
717   if (!getType()->isPtrOrPtrVectorTy())
718     return this;
719 
720   unsigned BitWidth = Offset.getBitWidth();
721   assert(BitWidth == DL.getIndexTypeSizeInBits(getType()) &&
722          "The offset bit width does not match the DL specification.");
723 
724   // Even though we don't look through PHI nodes, we could be called on an
725   // instruction in an unreachable block, which may be on a cycle.
726   SmallPtrSet<const Value *, 4> Visited;
727   Visited.insert(this);
728   const Value *V = this;
729   do {
730     if (auto *GEP = dyn_cast<GEPOperator>(V)) {
731       // If in-bounds was requested, we do not strip non-in-bounds GEPs.
732       if (!AllowNonInbounds && !GEP->isInBounds())
733         return V;
734 
735       // If one of the values we have visited is an addrspacecast, then
736       // the pointer type of this GEP may be different from the type
737       // of the Ptr parameter which was passed to this function.  This
738       // means when we construct GEPOffset, we need to use the size
739       // of GEP's pointer type rather than the size of the original
740       // pointer type.
741       APInt GEPOffset(DL.getIndexTypeSizeInBits(V->getType()), 0);
742       if (!GEP->accumulateConstantOffset(DL, GEPOffset, ExternalAnalysis))
743         return V;
744 
745       // Stop traversal if the pointer offset wouldn't fit in the bit-width
746       // provided by the Offset argument. This can happen due to AddrSpaceCast
747       // stripping.
748       if (GEPOffset.getSignificantBits() > BitWidth)
749         return V;
750 
      // An external analysis may return an offset higher/lower than the value
      // it represents, so detect signed overflow/underflow of the accumulation.
753       APInt GEPOffsetST = GEPOffset.sextOrTrunc(BitWidth);
754       if (!ExternalAnalysis) {
755         Offset += GEPOffsetST;
756       } else {
757         bool Overflow = false;
758         APInt OldOffset = Offset;
759         Offset = Offset.sadd_ov(GEPOffsetST, Overflow);
760         if (Overflow) {
761           Offset = OldOffset;
762           return V;
763         }
764       }
765       V = GEP->getPointerOperand();
766     } else if (Operator::getOpcode(V) == Instruction::BitCast ||
767                Operator::getOpcode(V) == Instruction::AddrSpaceCast) {
768       V = cast<Operator>(V)->getOperand(0);
769     } else if (auto *GA = dyn_cast<GlobalAlias>(V)) {
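      // Only look through aliases that cannot be interposed; an interposable
      // alias may be replaced by a different definition at link time.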
770       if (!GA->isInterposable())
771         V = GA->getAliasee();
772     } else if (const auto *Call = dyn_cast<CallBase>(V)) {
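        // A call that returns one of its pointer arguments (the 'returned'
        // attribute), or a launder/strip of an invariant group, produces a
        // pointer equal to that argument, so keep stripping from the argument
        // with no additional offset.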
773         if (const Value *RV = Call->getReturnedArgOperand())
774           V = RV;
775         if (AllowInvariantGroup && Call->isLaunderOrStripInvariantGroup())
776           V = Call->getArgOperand(0);
777     }
778     assert(V->getType()->isPtrOrPtrVectorTy() && "Unexpected operand type!");
779   } while (Visited.insert(V).second);
780 
781   return V;
782 }
783 
784 const Value *
785 Value::stripInBoundsOffsets(function_ref<void(const Value *)> Func) const {
786   return stripPointerCastsAndOffsets<PSK_InBounds>(this, Func);
787 }
788 
789 bool Value::canBeFreed() const {
790   assert(getType()->isPointerTy());
791 
792   // Cases that can simply never be deallocated
793   // *) Constants aren't allocated per se, thus not deallocated either.
794   if (isa<Constant>(this))
795     return false;
796 
797   // Handle byval/byref/sret/inalloca/preallocated arguments.  The storage
798   // lifetime is guaranteed to be longer than the callee's lifetime.
799   if (auto *A = dyn_cast<Argument>(this)) {
800     if (A->hasPointeeInMemoryValueAttr())
801       return false;
    // A pointer to an object in a function which neither frees, nor can
    // arrange for another thread to free on its behalf, cannot be freed within
    // the scope of the function.  Note that this logic is restricted to memory
    // allocations in existence before the call; a nofree function *is* allowed
    // to free memory it allocated.
807     const Function *F = A->getParent();
808     if (F->doesNotFreeMemory() && F->hasNoSync())
809       return false;
810   }
811 
812   const Function *F = nullptr;
813   if (auto *I = dyn_cast<Instruction>(this))
814     F = I->getFunction();
815   if (auto *A = dyn_cast<Argument>(this))
816     F = A->getParent();
817 
818   if (!F)
819     return true;
820 
821   // With garbage collection, deallocation typically occurs solely at or after
822   // safepoints.  If we're compiling for a collector which uses the
823   // gc.statepoint infrastructure, safepoints aren't explicitly present
824   // in the IR until after lowering from abstract to physical machine model.
  // A collector could choose to mix explicit deallocation and GC'd objects,
  // which is why we need an explicit opt-in on a per-collector basis.
827   if (!F->hasGC())
828     return true;
829 
830   const auto &GCName = F->getGC();
831   if (GCName == "statepoint-example") {
832     auto *PT = cast<PointerType>(this->getType());
833     if (PT->getAddressSpace() != 1)
834       // For the sake of this example GC, we arbitrarily pick addrspace(1) as
835       // our GC managed heap.  This must match the same check in
      // RewriteStatepointsForGC (and probably should be better factored).
837       return true;
838 
839     // It is cheaper to scan for a declaration than to scan for a use in this
    // function.  Note that gc.statepoint is a type-overloaded intrinsic, so
    // the usual trick of requesting a declaration of the intrinsic from the
    // module doesn't work.
843     for (auto &Fn : *F->getParent())
844       if (Fn.getIntrinsicID() == Intrinsic::experimental_gc_statepoint)
845         return true;
846     return false;
847   }
848   return true;
849 }
850 
851 uint64_t Value::getPointerDereferenceableBytes(const DataLayout &DL,
852                                                bool &CanBeNull,
853                                                bool &CanBeFreed) const {
854   assert(getType()->isPointerTy() && "must be pointer");
855 
856   uint64_t DerefBytes = 0;
857   CanBeNull = false;
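  // Without at-point semantics, dereferenceability facts are treated as
  // holding for the value's whole lifetime, so report that the memory cannot
  // be freed; with at-point semantics, report whether the object could
  // actually be freed after the point of definition.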
858   CanBeFreed = UseDerefAtPointSemantics && canBeFreed();
859   if (const Argument *A = dyn_cast<Argument>(this)) {
860     DerefBytes = A->getDereferenceableBytes();
861     if (DerefBytes == 0) {
862       // Handle byval/byref/inalloca/preallocated arguments
863       if (Type *ArgMemTy = A->getPointeeInMemoryValueType()) {
864         if (ArgMemTy->isSized()) {
865           // FIXME: Why isn't this the type alloc size?
866           DerefBytes = DL.getTypeStoreSize(ArgMemTy).getKnownMinValue();
867         }
868       }
869     }
870 
871     if (DerefBytes == 0) {
872       DerefBytes = A->getDereferenceableOrNullBytes();
873       CanBeNull = true;
874     }
875   } else if (const auto *Call = dyn_cast<CallBase>(this)) {
876     DerefBytes = Call->getRetDereferenceableBytes();
877     if (DerefBytes == 0) {
878       DerefBytes = Call->getRetDereferenceableOrNullBytes();
879       CanBeNull = true;
880     }
881   } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) {
882     if (MDNode *MD = LI->getMetadata(LLVMContext::MD_dereferenceable)) {
883       ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
884       DerefBytes = CI->getLimitedValue();
885     }
886     if (DerefBytes == 0) {
887       if (MDNode *MD =
888               LI->getMetadata(LLVMContext::MD_dereferenceable_or_null)) {
889         ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
890         DerefBytes = CI->getLimitedValue();
891       }
892       CanBeNull = true;
893     }
894   } else if (auto *IP = dyn_cast<IntToPtrInst>(this)) {
895     if (MDNode *MD = IP->getMetadata(LLVMContext::MD_dereferenceable)) {
896       ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
897       DerefBytes = CI->getLimitedValue();
898     }
899     if (DerefBytes == 0) {
900       if (MDNode *MD =
901               IP->getMetadata(LLVMContext::MD_dereferenceable_or_null)) {
902         ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
903         DerefBytes = CI->getLimitedValue();
904       }
905       CanBeNull = true;
906     }
907   } else if (auto *AI = dyn_cast<AllocaInst>(this)) {
908     if (!AI->isArrayAllocation()) {
909       DerefBytes =
910           DL.getTypeStoreSize(AI->getAllocatedType()).getKnownMinValue();
911       CanBeNull = false;
912       CanBeFreed = false;
913     }
914   } else if (auto *GV = dyn_cast<GlobalVariable>(this)) {
915     if (GV->getValueType()->isSized() && !GV->hasExternalWeakLinkage()) {
916       // TODO: Don't outright reject hasExternalWeakLinkage but set the
917       // CanBeNull flag.
918       DerefBytes = DL.getTypeStoreSize(GV->getValueType()).getFixedValue();
919       CanBeNull = false;
920       CanBeFreed = false;
921     }
922   }
923   return DerefBytes;
924 }
925 
926 Align Value::getPointerAlignment(const DataLayout &DL) const {
927   assert(getType()->isPointerTy() && "must be pointer");
928   if (auto *GO = dyn_cast<GlobalObject>(this)) {
929     if (isa<Function>(GO)) {
930       Align FunctionPtrAlign = DL.getFunctionPtrAlign().valueOrOne();
931       switch (DL.getFunctionPtrAlignType()) {
932       case DataLayout::FunctionPtrAlignType::Independent:
933         return FunctionPtrAlign;
934       case DataLayout::FunctionPtrAlignType::MultipleOfFunctionAlign:
935         return std::max(FunctionPtrAlign, GO->getAlign().valueOrOne());
936       }
937       llvm_unreachable("Unhandled FunctionPtrAlignType");
938     }
939     const MaybeAlign Alignment(GO->getAlign());
940     if (!Alignment) {
941       if (auto *GVar = dyn_cast<GlobalVariable>(GO)) {
942         Type *ObjectType = GVar->getValueType();
943         if (ObjectType->isSized()) {
944           // If the object is defined in the current Module, we'll be giving
945           // it the preferred alignment. Otherwise, we have to assume that it
946           // may only have the minimum ABI alignment.
947           if (GVar->isStrongDefinitionForLinker())
948             return DL.getPreferredAlign(GVar);
949           else
950             return DL.getABITypeAlign(ObjectType);
951         }
952       }
953     }
954     return Alignment.valueOrOne();
955   } else if (const Argument *A = dyn_cast<Argument>(this)) {
956     const MaybeAlign Alignment = A->getParamAlign();
957     if (!Alignment && A->hasStructRetAttr()) {
958       // An sret parameter has at least the ABI alignment of the return type.
959       Type *EltTy = A->getParamStructRetType();
960       if (EltTy->isSized())
961         return DL.getABITypeAlign(EltTy);
962     }
963     return Alignment.valueOrOne();
964   } else if (const AllocaInst *AI = dyn_cast<AllocaInst>(this)) {
965     return AI->getAlign();
966   } else if (const auto *Call = dyn_cast<CallBase>(this)) {
967     MaybeAlign Alignment = Call->getRetAlign();
968     if (!Alignment && Call->getCalledFunction())
969       Alignment = Call->getCalledFunction()->getAttributes().getRetAlignment();
970     return Alignment.valueOrOne();
971   } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) {
972     if (MDNode *MD = LI->getMetadata(LLVMContext::MD_align)) {
973       ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
974       return Align(CI->getLimitedValue());
975     }
976   } else if (auto *CstPtr = dyn_cast<Constant>(this)) {
977     // Strip pointer casts to avoid creating unnecessary ptrtoint expression
978     // if the only "reduction" is combining a bitcast + ptrtoint.
979     CstPtr = CstPtr->stripPointerCasts();
980     if (auto *CstInt = dyn_cast_or_null<ConstantInt>(ConstantExpr::getPtrToInt(
981             const_cast<Constant *>(CstPtr), DL.getIntPtrType(getType()),
982             /*OnlyIfReduced=*/true))) {
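      // The number of trailing zero bits in the pointer's integer value is the
      // log2 of the largest power-of-two alignment it is guaranteed to have.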
983       size_t TrailingZeros = CstInt->getValue().countr_zero();
984       // While the actual alignment may be large, elsewhere we have
      // an arbitrary upper alignment limit, so let's clamp to it.
986       return Align(TrailingZeros < Value::MaxAlignmentExponent
987                        ? uint64_t(1) << TrailingZeros
988                        : Value::MaximumAlignment);
989     }
990   }
991   return Align(1);
992 }
993 
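/// Compute the constant byte offset implied by GEP's operands starting at
/// operand index Idx, or std::nullopt if any of those indices is non-constant
/// or strides by a scalable type.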
994 static std::optional<int64_t>
995 getOffsetFromIndex(const GEPOperator *GEP, unsigned Idx, const DataLayout &DL) {
  // Skip over the indices that come before Idx.
997   gep_type_iterator GTI = gep_type_begin(GEP);
998   for (unsigned i = 1; i != Idx; ++i, ++GTI)
999     /*skip along*/;
1000 
1001   // Compute the offset implied by the rest of the indices.
1002   int64_t Offset = 0;
1003   for (unsigned i = Idx, e = GEP->getNumOperands(); i != e; ++i, ++GTI) {
1004     ConstantInt *OpC = dyn_cast<ConstantInt>(GEP->getOperand(i));
1005     if (!OpC)
1006       return std::nullopt;
1007     if (OpC->isZero())
1008       continue; // No offset.
1009 
1010     // Handle struct indices, which add their field offset to the pointer.
1011     if (StructType *STy = GTI.getStructTypeOrNull()) {
1012       Offset += DL.getStructLayout(STy)->getElementOffset(OpC->getZExtValue());
1013       continue;
1014     }
1015 
1016     // Otherwise, we have a sequential type like an array or fixed-length
1017     // vector. Multiply the index by the ElementSize.
1018     TypeSize Size = GTI.getSequentialElementStride(DL);
1019     if (Size.isScalable())
1020       return std::nullopt;
1021     Offset += Size.getFixedValue() * OpC->getSExtValue();
1022   }
1023 
1024   return Offset;
1025 }
1026 
1027 std::optional<int64_t> Value::getPointerOffsetFrom(const Value *Other,
1028                                                    const DataLayout &DL) const {
1029   const Value *Ptr1 = Other;
1030   const Value *Ptr2 = this;
1031   APInt Offset1(DL.getIndexTypeSizeInBits(Ptr1->getType()), 0);
1032   APInt Offset2(DL.getIndexTypeSizeInBits(Ptr2->getType()), 0);
1033   Ptr1 = Ptr1->stripAndAccumulateConstantOffsets(DL, Offset1, true);
1034   Ptr2 = Ptr2->stripAndAccumulateConstantOffsets(DL, Offset2, true);
1035 
1036   // Handle the trivial case first.
1037   if (Ptr1 == Ptr2)
1038     return Offset2.getSExtValue() - Offset1.getSExtValue();
1039 
1040   const GEPOperator *GEP1 = dyn_cast<GEPOperator>(Ptr1);
1041   const GEPOperator *GEP2 = dyn_cast<GEPOperator>(Ptr2);
1042 
  // Right now we only handle the case where Ptr1 and Ptr2 are both GEPs with
  // an identical base.  After that base, they may share some number of common
  // (and potentially variable) indices, followed by constant indices whose
  // difference determines the offset between the two pointers.  We handle no
  // other case.
1048   if (!GEP1 || !GEP2 || GEP1->getOperand(0) != GEP2->getOperand(0) ||
1049       GEP1->getSourceElementType() != GEP2->getSourceElementType())
1050     return std::nullopt;
1051 
1052   // Skip any common indices and track the GEP types.
1053   unsigned Idx = 1;
1054   for (; Idx != GEP1->getNumOperands() && Idx != GEP2->getNumOperands(); ++Idx)
1055     if (GEP1->getOperand(Idx) != GEP2->getOperand(Idx))
1056       break;
1057 
1058   auto IOffset1 = getOffsetFromIndex(GEP1, Idx, DL);
1059   auto IOffset2 = getOffsetFromIndex(GEP2, Idx, DL);
1060   if (!IOffset1 || !IOffset2)
1061     return std::nullopt;
1062   return *IOffset2 - *IOffset1 + Offset2.getSExtValue() -
1063          Offset1.getSExtValue();
1064 }
1065 
1066 const Value *Value::DoPHITranslation(const BasicBlock *CurBB,
1067                                      const BasicBlock *PredBB) const {
1068   auto *PN = dyn_cast<PHINode>(this);
1069   if (PN && PN->getParent() == CurBB)
1070     return PN->getIncomingValueForBlock(PredBB);
1071   return this;
1072 }
1073 
1074 LLVMContext &Value::getContext() const { return VTy->getContext(); }
1075 
1076 void Value::reverseUseList() {
1077   if (!UseList || !UseList->Next)
1078     // No need to reverse 0 or 1 uses.
1079     return;
1080 
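  // Standard in-place singly linked list reversal, additionally fixing up each
  // node's Prev back-pointer as the links are flipped.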
1081   Use *Head = UseList;
1082   Use *Current = UseList->Next;
1083   Head->Next = nullptr;
1084   while (Current) {
1085     Use *Next = Current->Next;
1086     Current->Next = Head;
1087     Head->Prev = &Current->Next;
1088     Head = Current;
1089     Current = Next;
1090   }
1091   UseList = Head;
1092   Head->Prev = &UseList;
1093 }
1094 
1095 bool Value::isSwiftError() const {
1096   auto *Arg = dyn_cast<Argument>(this);
1097   if (Arg)
1098     return Arg->hasSwiftErrorAttr();
1099   auto *Alloca = dyn_cast<AllocaInst>(this);
1100   if (!Alloca)
1101     return false;
1102   return Alloca->isSwiftError();
1103 }
1104 
1105 //===----------------------------------------------------------------------===//
1106 //                             ValueHandleBase Class
1107 //===----------------------------------------------------------------------===//
1108 
1109 void ValueHandleBase::AddToExistingUseList(ValueHandleBase **List) {
1110   assert(List && "Handle list is null?");
1111 
1112   // Splice ourselves into the list.
1113   Next = *List;
1114   *List = this;
1115   setPrevPtr(List);
1116   if (Next) {
1117     Next->setPrevPtr(&Next);
1118     assert(getValPtr() == Next->getValPtr() && "Added to wrong list?");
1119   }
1120 }
1121 
1122 void ValueHandleBase::AddToExistingUseListAfter(ValueHandleBase *List) {
1123   assert(List && "Must insert after existing node");
1124 
1125   Next = List->Next;
1126   setPrevPtr(&List->Next);
1127   List->Next = this;
1128   if (Next)
1129     Next->setPrevPtr(&Next);
1130 }
1131 
1132 void ValueHandleBase::AddToUseList() {
1133   assert(getValPtr() && "Null pointer doesn't have a use list!");
1134 
1135   LLVMContextImpl *pImpl = getValPtr()->getContext().pImpl;
1136 
1137   if (getValPtr()->HasValueHandle) {
1138     // If this value already has a ValueHandle, then it must be in the
1139     // ValueHandles map already.
1140     ValueHandleBase *&Entry = pImpl->ValueHandles[getValPtr()];
1141     assert(Entry && "Value doesn't have any handles?");
1142     AddToExistingUseList(&Entry);
1143     return;
1144   }
1145 
1146   // Ok, it doesn't have any handles yet, so we must insert it into the
1147   // DenseMap.  However, doing this insertion could cause the DenseMap to
  // reallocate itself, which would invalidate all of the PrevPtr pointers that
1149   // point into the old table.  Handle this by checking for reallocation and
1150   // updating the stale pointers only if needed.
1151   DenseMap<Value*, ValueHandleBase*> &Handles = pImpl->ValueHandles;
1152   const void *OldBucketPtr = Handles.getPointerIntoBucketsArray();
1153 
1154   ValueHandleBase *&Entry = Handles[getValPtr()];
1155   assert(!Entry && "Value really did already have handles?");
1156   AddToExistingUseList(&Entry);
1157   getValPtr()->HasValueHandle = true;
1158 
1159   // If reallocation didn't happen or if this was the first insertion, don't
1160   // walk the table.
1161   if (Handles.isPointerIntoBucketsArray(OldBucketPtr) ||
1162       Handles.size() == 1) {
1163     return;
1164   }
1165 
1166   // Okay, reallocation did happen.  Fix the Prev Pointers.
  for (auto &I : Handles) {
    assert(I.second && I.first == I.second->getValPtr() &&
           "List invariant broken!");
    I.second->setPrevPtr(&I.second);
  }
1173 }
1174 
1175 void ValueHandleBase::RemoveFromUseList() {
1176   assert(getValPtr() && getValPtr()->HasValueHandle &&
1177          "Pointer doesn't have a use list!");
1178 
1179   // Unlink this from its use list.
1180   ValueHandleBase **PrevPtr = getPrevPtr();
1181   assert(*PrevPtr == this && "List invariant broken");
1182 
1183   *PrevPtr = Next;
1184   if (Next) {
1185     assert(Next->getPrevPtr() == &Next && "List invariant broken");
1186     Next->setPrevPtr(PrevPtr);
1187     return;
1188   }
1189 
1190   // If the Next pointer was null, then it is possible that this was the last
1191   // ValueHandle watching VP.  If so, delete its entry from the ValueHandles
1192   // map.
1193   LLVMContextImpl *pImpl = getValPtr()->getContext().pImpl;
1194   DenseMap<Value*, ValueHandleBase*> &Handles = pImpl->ValueHandles;
1195   if (Handles.isPointerIntoBucketsArray(PrevPtr)) {
1196     Handles.erase(getValPtr());
1197     getValPtr()->HasValueHandle = false;
1198   }
1199 }
1200 
1201 void ValueHandleBase::ValueIsDeleted(Value *V) {
1202   assert(V->HasValueHandle && "Should only be called if ValueHandles present");
1203 
1204   // Get the linked list base, which is guaranteed to exist since the
1205   // HasValueHandle flag is set.
1206   LLVMContextImpl *pImpl = V->getContext().pImpl;
1207   ValueHandleBase *Entry = pImpl->ValueHandles[V];
1208   assert(Entry && "Value bit set but no entries exist");
1209 
1210   // We use a local ValueHandleBase as an iterator so that ValueHandles can add
1211   // and remove themselves from the list without breaking our iteration.  This
1212   // is not really an AssertingVH; we just have to give ValueHandleBase a kind.
  // Note that we deliberately do not support the case where dropping a value
1214   // handle results in a new value handle being permanently added to the list
1215   // (as might occur in theory for CallbackVH's): the new value handle will not
1216   // be processed and the checking code will mete out righteous punishment if
1217   // the handle is still present once we have finished processing all the other
1218   // value handles (it is fine to momentarily add then remove a value handle).
1219   for (ValueHandleBase Iterator(Assert, *Entry); Entry; Entry = Iterator.Next) {
1220     Iterator.RemoveFromUseList();
1221     Iterator.AddToExistingUseListAfter(Entry);
1222     assert(Entry->Next == &Iterator && "Loop invariant broken.");
1223 
1224     switch (Entry->getKind()) {
1225     case Assert:
1226       break;
1227     case Weak:
1228     case WeakTracking:
1229       // WeakTracking and Weak just go to null, which unlinks them
1230       // from the list.
1231       Entry->operator=(nullptr);
1232       break;
1233     case Callback:
1234       // Forward to the subclass's implementation.
1235       static_cast<CallbackVH*>(Entry)->deleted();
1236       break;
1237     }
1238   }
1239 
1240   // All callbacks, weak references, and assertingVHs should be dropped by now.
1241   if (V->HasValueHandle) {
1242 #ifndef NDEBUG      // Only in +Asserts mode...
1243     dbgs() << "While deleting: " << *V->getType() << " %" << V->getName()
1244            << "\n";
1245     if (pImpl->ValueHandles[V]->getKind() == Assert)
1246       llvm_unreachable("An asserting value handle still pointed to this"
1247                        " value!");
1248 
1249 #endif
1250     llvm_unreachable("All references to V were not removed?");
1251   }
1252 }
1253 
1254 void ValueHandleBase::ValueIsRAUWd(Value *Old, Value *New) {
1255   assert(Old->HasValueHandle &&"Should only be called if ValueHandles present");
1256   assert(Old != New && "Changing value into itself!");
1257   assert(Old->getType() == New->getType() &&
1258          "replaceAllUses of value with new value of different type!");
1259 
1260   // Get the linked list base, which is guaranteed to exist since the
1261   // HasValueHandle flag is set.
1262   LLVMContextImpl *pImpl = Old->getContext().pImpl;
1263   ValueHandleBase *Entry = pImpl->ValueHandles[Old];
1264 
1265   assert(Entry && "Value bit set but no entries exist");
1266 
1267   // We use a local ValueHandleBase as an iterator so that
1268   // ValueHandles can add and remove themselves from the list without
1269   // breaking our iteration.  This is not really an AssertingVH; we
1270   // just have to give ValueHandleBase some kind.
1271   for (ValueHandleBase Iterator(Assert, *Entry); Entry; Entry = Iterator.Next) {
1272     Iterator.RemoveFromUseList();
1273     Iterator.AddToExistingUseListAfter(Entry);
1274     assert(Entry->Next == &Iterator && "Loop invariant broken.");
1275 
1276     switch (Entry->getKind()) {
1277     case Assert:
1278     case Weak:
1279       // Asserting and Weak handles do not follow RAUW implicitly.
1280       break;
1281     case WeakTracking:
      // WeakTracking goes to the new value, which will unlink it from Old's
      // list.
1283       Entry->operator=(New);
1284       break;
1285     case Callback:
1286       // Forward to the subclass's implementation.
1287       static_cast<CallbackVH*>(Entry)->allUsesReplacedWith(New);
1288       break;
1289     }
1290   }
1291 
1292 #ifndef NDEBUG
1293   // If any new weak value handles were added while processing the
1294   // list, then complain about it now.
1295   if (Old->HasValueHandle)
1296     for (Entry = pImpl->ValueHandles[Old]; Entry; Entry = Entry->Next)
1297       switch (Entry->getKind()) {
1298       case WeakTracking:
1299         dbgs() << "After RAUW from " << *Old->getType() << " %"
1300                << Old->getName() << " to " << *New->getType() << " %"
1301                << New->getName() << "\n";
1302         llvm_unreachable(
1303             "A weak tracking value handle still pointed to the old value!\n");
1304       default:
1305         break;
1306       }
1307 #endif
1308 }
1309 
1310 // Pin the vtable to this file.
1311 void CallbackVH::anchor() {}
1312