1 //===-- Value.cpp - Implement the Value class -----------------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file implements the Value, ValueHandle, and User classes.
10 //
11 //===----------------------------------------------------------------------===//
12
13 #include "llvm/IR/Value.h"
14 #include "LLVMContextImpl.h"
15 #include "llvm/ADT/DenseMap.h"
16 #include "llvm/ADT/SetVector.h"
17 #include "llvm/ADT/SmallString.h"
18 #include "llvm/IR/Constant.h"
19 #include "llvm/IR/Constants.h"
20 #include "llvm/IR/DataLayout.h"
21 #include "llvm/IR/DebugInfo.h"
22 #include "llvm/IR/DerivedTypes.h"
23 #include "llvm/IR/DerivedUser.h"
24 #include "llvm/IR/GetElementPtrTypeIterator.h"
25 #include "llvm/IR/InstrTypes.h"
26 #include "llvm/IR/Instructions.h"
27 #include "llvm/IR/IntrinsicInst.h"
28 #include "llvm/IR/Module.h"
29 #include "llvm/IR/Operator.h"
30 #include "llvm/IR/ValueHandle.h"
31 #include "llvm/IR/ValueSymbolTable.h"
32 #include "llvm/Support/CommandLine.h"
33 #include "llvm/Support/Debug.h"
34 #include "llvm/Support/ErrorHandling.h"
35 #include "llvm/Support/ManagedStatic.h"
36 #include "llvm/Support/raw_ostream.h"
37 #include <algorithm>
38
39 using namespace llvm;
40
41 static cl::opt<unsigned> UseDerefAtPointSemantics(
42 "use-dereferenceable-at-point-semantics", cl::Hidden, cl::init(false),
43 cl::desc("Deref attributes and metadata infer facts at definition only"));
44
45
46 static cl::opt<unsigned> NonGlobalValueMaxNameSize(
47 "non-global-value-max-name-size", cl::Hidden, cl::init(1024),
48 cl::desc("Maximum size for the name of non-global values."));
49
50 //===----------------------------------------------------------------------===//
51 // Value Class
52 //===----------------------------------------------------------------------===//
checkType(Type * Ty)53 static inline Type *checkType(Type *Ty) {
54 assert(Ty && "Value defined with a null type: Error!");
55 return Ty;
56 }
57
/// Construct a Value of the given type and subclass ID. The use list starts
/// empty and all bookkeeping bits (name, metadata, value-handle flags) start
/// cleared.
Value::Value(Type *ty, unsigned scid)
    : VTy(checkType(ty)), UseList(nullptr), SubclassID(scid), HasValueHandle(0),
      SubclassOptionalData(0), SubclassData(0), NumUserOperands(0),
      IsUsedByMD(false), HasName(false), HasMetadata(false) {
  // ConstantFirstVal == 0 lets the range check below use only the upper bound.
  static_assert(ConstantFirstVal == 0, "!(SubclassID < ConstantFirstVal)");
  // FIXME: Why isn't this in the subclass gunk??
  // Note, we cannot call isa<CallInst> before the CallInst has been
  // constructed.
  if (SubclassID == Instruction::Call || SubclassID == Instruction::Invoke ||
      SubclassID == Instruction::CallBr)
    assert((VTy->isFirstClassType() || VTy->isVoidTy() || VTy->isStructTy()) &&
           "invalid CallInst type!");
  else if (SubclassID != BasicBlockVal &&
           (/*SubclassID < ConstantFirstVal ||*/ SubclassID > ConstantLastVal))
    assert((VTy->isFirstClassType() || VTy->isVoidTy()) &&
           "Cannot create non-first-class values except for constants!");
  // Keep Value at its expected footprint; many of these objects are live at
  // once, so accidental growth is caught at compile time.
  static_assert(sizeof(Value) == 2 * sizeof(void *) + 2 * sizeof(unsigned),
                "Value too big");
}
77
/// Destroy the value: notify value handles and metadata that reference it,
/// drop attached metadata, verify no uses remain, and free the name.
Value::~Value() {
  // Notify all ValueHandles (if present) that this value is going away.
  if (HasValueHandle)
    ValueHandleBase::ValueIsDeleted(this);
  if (isUsedByMetadata())
    ValueAsMetadata::handleDeletion(this);

  // Remove associated metadata from context.
  if (HasMetadata)
    clearMetadata();

#ifndef NDEBUG // Only in -g mode...
  // Check to make sure that there are no uses of this value that are still
  // around when the value is destroyed. If there are, then we have a dangling
  // reference and something is wrong. This code is here to print out where
  // the value is still being referenced.
  //
  // Note that use_empty() cannot be called here, as it eventually downcasts
  // 'this' to GlobalValue (derived class of Value), but GlobalValue has already
  // been destructed, so accessing it is UB.
  //
  if (!materialized_use_empty()) {
    dbgs() << "While deleting: " << *VTy << " %" << getName() << "\n";
    for (auto *U : users())
      dbgs() << "Use still stuck around after Def is destroyed:" << *U << "\n";
  }
#endif
  assert(materialized_use_empty() && "Uses remain when a value is destroyed!");

  // If this value is named, destroy the name. This should not be in a symtab
  // at this point.
  destroyValueName();
}
111
/// Delete this value through the correct concrete type. Value has no virtual
/// destructor, so this switch (generated from Value.def / Instruction.def)
/// casts to the dynamic type identified by getValueID() before deleting.
void Value::deleteValue() {
  switch (getValueID()) {
// Ordinary values: cast to the concrete class and delete.
#define HANDLE_VALUE(Name)                                                     \
  case Value::Name##Val:                                                       \
    delete static_cast<Name *>(this);                                          \
    break;
// MemorySSA-style values use a DerivedUser with an out-of-line deleter.
#define HANDLE_MEMORY_VALUE(Name)                                              \
  case Value::Name##Val:                                                       \
    static_cast<DerivedUser *>(this)->DeleteValue(                             \
        static_cast<DerivedUser *>(this));                                     \
    break;
// Constants are uniqued and must go through destroyConstant() instead.
#define HANDLE_CONSTANT(Name)                                                  \
  case Value::Name##Val:                                                       \
    llvm_unreachable("constants should be destroyed with destroyConstant");    \
    break;
#define HANDLE_INSTRUCTION(Name) /* nothing */
#include "llvm/IR/Value.def"

// Instructions are enumerated separately via Instruction.def.
#define HANDLE_INST(N, OPC, CLASS)                                             \
  case Value::InstructionVal + Instruction::OPC:                               \
    delete static_cast<CLASS *>(this);                                         \
    break;
#define HANDLE_USER_INST(N, OPC, CLASS)
#include "llvm/IR/Instruction.def"

  default:
    llvm_unreachable("attempting to delete unknown value kind");
  }
}
141
destroyValueName()142 void Value::destroyValueName() {
143 ValueName *Name = getValueName();
144 if (Name) {
145 MallocAllocator Allocator;
146 Name->Destroy(Allocator);
147 }
148 setValueName(nullptr);
149 }
150
/// Return true if this value has exactly N uses; stops walking the use list
/// as soon as the answer is known.
bool Value::hasNUses(unsigned N) const {
  return hasNItems(use_begin(), use_end(), N);
}
154
/// Return true if this value has at least N uses; stops walking the use list
/// as soon as the answer is known.
bool Value::hasNUsesOrMore(unsigned N) const {
  return hasNItemsOrMore(use_begin(), use_end(), N);
}
158
hasOneUser() const159 bool Value::hasOneUser() const {
160 if (use_empty())
161 return false;
162 if (hasOneUse())
163 return true;
164 return std::equal(++user_begin(), user_end(), user_begin());
165 }
166
isUnDroppableUser(const User * U)167 static bool isUnDroppableUser(const User *U) { return !U->isDroppable(); }
168
getSingleUndroppableUse()169 Use *Value::getSingleUndroppableUse() {
170 Use *Result = nullptr;
171 for (Use &U : uses()) {
172 if (!U.getUser()->isDroppable()) {
173 if (Result)
174 return nullptr;
175 Result = &U;
176 }
177 }
178 return Result;
179 }
180
/// Return true if exactly N users of this value are non-droppable.
bool Value::hasNUndroppableUses(unsigned int N) const {
  return hasNItems(user_begin(), user_end(), N, isUnDroppableUser);
}
184
/// Return true if at least N users of this value are non-droppable.
bool Value::hasNUndroppableUsesOrMore(unsigned int N) const {
  return hasNItemsOrMore(user_begin(), user_end(), N, isUnDroppableUser);
}
188
dropDroppableUses(llvm::function_ref<bool (const Use *)> ShouldDrop)189 void Value::dropDroppableUses(
190 llvm::function_ref<bool(const Use *)> ShouldDrop) {
191 SmallVector<Use *, 8> ToBeEdited;
192 for (Use &U : uses())
193 if (U.getUser()->isDroppable() && ShouldDrop(&U))
194 ToBeEdited.push_back(&U);
195 for (Use *U : ToBeEdited)
196 dropDroppableUse(*U);
197 }
198
dropDroppableUsesIn(User & Usr)199 void Value::dropDroppableUsesIn(User &Usr) {
200 assert(Usr.isDroppable() && "Expected a droppable user!");
201 for (Use &UsrOp : Usr.operands()) {
202 if (UsrOp.get() == this)
203 dropDroppableUse(UsrOp);
204 }
205 }
206
dropDroppableUse(Use & U)207 void Value::dropDroppableUse(Use &U) {
208 U.removeFromList();
209 if (auto *Assume = dyn_cast<AssumeInst>(U.getUser())) {
210 unsigned OpNo = U.getOperandNo();
211 if (OpNo == 0)
212 U.set(ConstantInt::getTrue(Assume->getContext()));
213 else {
214 U.set(UndefValue::get(U.get()->getType()));
215 CallInst::BundleOpInfo &BOI = Assume->getBundleOpInfoForOperand(OpNo);
216 BOI.Tag = Assume->getContext().pImpl->getOrInsertBundleTag("ignore");
217 }
218 return;
219 }
220
221 llvm_unreachable("unkown droppable use");
222 }
223
/// Return true if this value is used by any instruction in BB.
bool Value::isUsedInBasicBlock(const BasicBlock *BB) const {
  // This can be computed either by scanning the instructions in BB, or by
  // scanning the use list of this Value. Both lists can be very long, but
  // usually one is quite short.
  //
  // Scan both lists simultaneously until one is exhausted. This limits the
  // search to the shorter list.
  BasicBlock::const_iterator BI = BB->begin(), BE = BB->end();
  const_user_iterator UI = user_begin(), UE = user_end();
  for (; BI != BE && UI != UE; ++BI, ++UI) {
    // Scan basic block: Check if this Value is used by the instruction at BI.
    if (is_contained(BI->operands(), this))
      return true;
    // Scan use list: Check if the use at UI is in BB.
    // (Non-instruction users, e.g. constants, can never be "in" BB.)
    const auto *User = dyn_cast<Instruction>(*UI);
    if (User && User->getParent() == BB)
      return true;
  }
  return false;
}
244
/// Count all uses of this value. Note this walks the full use list (O(n));
/// prefer hasNUses/hasNUsesOrMore when only a threshold matters.
unsigned Value::getNumUses() const {
  return (unsigned)std::distance(use_begin(), use_end());
}
248
/// Locate the ValueSymbolTable that would hold V's name: the enclosing
/// function's table for instructions, basic blocks, and arguments, or the
/// module's table for globals. ST is left null when V is not attached to a
/// parent yet. Returns true when V can never carry a symbol-table name
/// (i.e. it is a non-global constant).
static bool getSymTab(Value *V, ValueSymbolTable *&ST) {
  ST = nullptr;
  if (Instruction *I = dyn_cast<Instruction>(V)) {
    if (BasicBlock *P = I->getParent())
      if (Function *PP = P->getParent())
        ST = PP->getValueSymbolTable();
  } else if (BasicBlock *BB = dyn_cast<BasicBlock>(V)) {
    if (Function *P = BB->getParent())
      ST = P->getValueSymbolTable();
  } else if (GlobalValue *GV = dyn_cast<GlobalValue>(V)) {
    if (Module *P = GV->getParent())
      ST = &P->getValueSymbolTable();
  } else if (Argument *A = dyn_cast<Argument>(V)) {
    if (Function *P = A->getParent())
      ST = P->getValueSymbolTable();
  } else {
    assert(isa<Constant>(V) && "Unknown value type!");
    return true; // no name is settable for this.
  }
  return false;
}
270
getValueName() const271 ValueName *Value::getValueName() const {
272 if (!HasName) return nullptr;
273
274 LLVMContext &Ctx = getContext();
275 auto I = Ctx.pImpl->ValueNames.find(this);
276 assert(I != Ctx.pImpl->ValueNames.end() &&
277 "No name entry found!");
278
279 return I->second;
280 }
281
setValueName(ValueName * VN)282 void Value::setValueName(ValueName *VN) {
283 LLVMContext &Ctx = getContext();
284
285 assert(HasName == Ctx.pImpl->ValueNames.count(this) &&
286 "HasName bit out of sync!");
287
288 if (!VN) {
289 if (HasName)
290 Ctx.pImpl->ValueNames.erase(this);
291 HasName = false;
292 return;
293 }
294
295 HasName = true;
296 Ctx.pImpl->ValueNames[this] = VN;
297 }
298
getName() const299 StringRef Value::getName() const {
300 // Make sure the empty string is still a C string. For historical reasons,
301 // some clients want to call .data() on the result and expect it to be null
302 // terminated.
303 if (!hasName())
304 return StringRef("", 0);
305 return getValueName()->getKey();
306 }
307
/// Core implementation of setName(): rename this value, keeping the owning
/// symbol table (if any) consistent. Non-GlobalValue names may be discarded
/// entirely (context option) or truncated to NonGlobalValueMaxNameSize.
void Value::setNameImpl(const Twine &NewName) {
  // Fast-path: LLVMContext can be set to strip out non-GlobalValue names
  if (getContext().shouldDiscardValueNames() && !isa<GlobalValue>(this))
    return;

  // Fast path for common IRBuilder case of setName("") when there is no name.
  if (NewName.isTriviallyEmpty() && !hasName())
    return;

  SmallString<256> NameData;
  StringRef NameRef = NewName.toStringRef(NameData);
  assert(NameRef.find_first_of(0) == StringRef::npos &&
         "Null bytes are not allowed in names");

  // Name isn't changing?
  if (getName() == NameRef)
    return;

  // Cap the size of non-GlobalValue names.
  if (NameRef.size() > NonGlobalValueMaxNameSize && !isa<GlobalValue>(this))
    NameRef =
        NameRef.substr(0, std::max(1u, (unsigned)NonGlobalValueMaxNameSize));

  assert(!getType()->isVoidTy() && "Cannot assign a name to void values!");

  // Get the symbol table to update for this object.
  ValueSymbolTable *ST;
  if (getSymTab(this, ST))
    return; // Cannot set a name on this value (e.g. constant).

  if (!ST) { // No symbol table to update?  Just do the change.
    if (NameRef.empty()) {
      // Free the name for this value.
      destroyValueName();
      return;
    }

    // NOTE: Could optimize for the case the name is shrinking to not deallocate
    // then reallocated.
    destroyValueName();

    // Create the new name.
    MallocAllocator Allocator;
    setValueName(ValueName::Create(NameRef, Allocator));
    getValueName()->setValue(this);
    return;
  }

  // NOTE: Could optimize for the case the name is shrinking to not deallocate
  // then reallocated.
  if (hasName()) {
    // Remove old name.
    ST->removeValueName(getValueName());
    destroyValueName();

    if (NameRef.empty())
      return;
  }

  // Name is changing to something new. The symbol table may uniquify it.
  setValueName(ST->createValueName(NameRef, this));
}
370
setName(const Twine & NewName)371 void Value::setName(const Twine &NewName) {
372 setNameImpl(NewName);
373 if (Function *F = dyn_cast<Function>(this))
374 F->recalculateIntrinsicID();
375 }
376
/// Transfer V's name to this value, leaving V unnamed. Handles all
/// combinations of this/V being in the same, different, or no symbol table.
void Value::takeName(Value *V) {
  ValueSymbolTable *ST = nullptr;
  // If this value has a name, drop it.
  if (hasName()) {
    // Get the symtab this is in.
    if (getSymTab(this, ST)) {
      // We can't set a name on this value, but we need to clear V's name if
      // it has one.
      if (V->hasName()) V->setName("");
      return; // Cannot set a name on this value (e.g. constant).
    }

    // Remove old name.
    if (ST)
      ST->removeValueName(getValueName());
    destroyValueName();
  }

  // Now we know that this has no name.

  // If V has no name either, we're done.
  if (!V->hasName()) return;

  // Get this's symtab if we didn't before.
  if (!ST) {
    if (getSymTab(this, ST)) {
      // Clear V's name.
      V->setName("");
      return; // Cannot set a name on this value (e.g. constant).
    }
  }

  // Get V's ST, this should always succeed, because V has a name.
  ValueSymbolTable *VST;
  bool Failure = getSymTab(V, VST);
  assert(!Failure && "V has a name, so it should have a ST!"); (void)Failure;

  // If these values are both in the same symtab, we can do this very fast.
  // This works even if both values have no symtab yet.
  if (ST == VST) {
    // Take the name!
    setValueName(V->getValueName());
    V->setValueName(nullptr);
    getValueName()->setValue(this);
    return;
  }

  // Otherwise, things are slightly more complex. Remove V's name from VST and
  // then reinsert it into ST (which may uniquify it against existing names).

  if (VST)
    VST->removeValueName(V->getValueName());
  setValueName(V->getValueName());
  V->setValueName(nullptr);
  getValueName()->setValue(this);

  if (ST)
    ST->reinsertValue(this);
}
436
437 #ifndef NDEBUG
getNameOrAsOperand() const438 std::string Value::getNameOrAsOperand() const {
439 if (!getName().empty())
440 return std::string(getName());
441
442 std::string BBName;
443 raw_string_ostream OS(BBName);
444 printAsOperand(OS, false);
445 return OS.str();
446 }
447 #endif
448
assertModuleIsMaterializedImpl() const449 void Value::assertModuleIsMaterializedImpl() const {
450 #ifndef NDEBUG
451 const GlobalValue *GV = dyn_cast<GlobalValue>(this);
452 if (!GV)
453 return;
454 const Module *M = GV->getParent();
455 if (!M)
456 return;
457 assert(M->isMaterialized());
458 #endif
459 }
460
461 #ifndef NDEBUG
contains(SmallPtrSetImpl<ConstantExpr * > & Cache,ConstantExpr * Expr,Constant * C)462 static bool contains(SmallPtrSetImpl<ConstantExpr *> &Cache, ConstantExpr *Expr,
463 Constant *C) {
464 if (!Cache.insert(Expr).second)
465 return false;
466
467 for (auto &O : Expr->operands()) {
468 if (O == C)
469 return true;
470 auto *CE = dyn_cast<ConstantExpr>(O);
471 if (!CE)
472 continue;
473 if (contains(Cache, CE, C))
474 return true;
475 }
476 return false;
477 }
478
contains(Value * Expr,Value * V)479 static bool contains(Value *Expr, Value *V) {
480 if (Expr == V)
481 return true;
482
483 auto *C = dyn_cast<Constant>(V);
484 if (!C)
485 return false;
486
487 auto *CE = dyn_cast<ConstantExpr>(Expr);
488 if (!CE)
489 return false;
490
491 SmallPtrSet<ConstantExpr *, 4> Cache;
492 return contains(Cache, CE, C);
493 }
494 #endif // NDEBUG
495
/// Replace-all-uses-with implementation shared by replaceAllUsesWith and
/// replaceNonMetadataUsesWith; ReplaceMetaUses controls whether metadata
/// references are rewritten too.
void Value::doRAUW(Value *New, ReplaceMetadataUses ReplaceMetaUses) {
  assert(New && "Value::replaceAllUsesWith(<null>) is invalid!");
  assert(!contains(New, this) &&
         "this->replaceAllUsesWith(expr(this)) is NOT valid!");
  assert(New->getType() == getType() &&
         "replaceAllUses of value with new value of different type!");

  // Notify all ValueHandles (if present) that this value is going away.
  if (HasValueHandle)
    ValueHandleBase::ValueIsRAUWd(this, New);
  if (ReplaceMetaUses == ReplaceMetadataUses::Yes && isUsedByMetadata())
    ValueAsMetadata::handleRAUW(this, New);

  // Each iteration unlinks the head use from this value's use list, so the
  // loop terminates once every use has been moved over to New.
  while (!materialized_use_empty()) {
    Use &U = *UseList;
    // Must handle Constants specially, we cannot call replaceUsesOfWith on a
    // constant because they are uniqued.
    if (auto *C = dyn_cast<Constant>(U.getUser())) {
      if (!isa<GlobalValue>(C)) {
        C->handleOperandChange(this, New);
        continue;
      }
    }

    U.set(New);
  }

  // A basic block's address uses live in its successors' PHI nodes.
  if (BasicBlock *BB = dyn_cast<BasicBlock>(this))
    BB->replaceSuccessorsPhiUsesWith(cast<BasicBlock>(New));
}
526
/// Replace every use of this value (including metadata uses) with New.
void Value::replaceAllUsesWith(Value *New) {
  doRAUW(New, ReplaceMetadataUses::Yes);
}
530
/// Replace every IR use of this value with New, leaving metadata uses alone.
void Value::replaceNonMetadataUsesWith(Value *New) {
  doRAUW(New, ReplaceMetadataUses::No);
}
534
535 /// Replace llvm.dbg.* uses of MetadataAsValue(ValueAsMetadata(V)) outside BB
536 /// with New.
replaceDbgUsesOutsideBlock(Value * V,Value * New,BasicBlock * BB)537 static void replaceDbgUsesOutsideBlock(Value *V, Value *New, BasicBlock *BB) {
538 SmallVector<DbgVariableIntrinsic *> DbgUsers;
539 findDbgUsers(DbgUsers, V);
540 for (auto *DVI : DbgUsers) {
541 if (DVI->getParent() != BB)
542 DVI->replaceVariableLocationOp(V, New);
543 }
544 }
545
546 // Like replaceAllUsesWith except it does not handle constants or basic blocks.
547 // This routine leaves uses within BB.
replaceUsesOutsideBlock(Value * New,BasicBlock * BB)548 void Value::replaceUsesOutsideBlock(Value *New, BasicBlock *BB) {
549 assert(New && "Value::replaceUsesOutsideBlock(<null>, BB) is invalid!");
550 assert(!contains(New, this) &&
551 "this->replaceUsesOutsideBlock(expr(this), BB) is NOT valid!");
552 assert(New->getType() == getType() &&
553 "replaceUses of value with new value of different type!");
554 assert(BB && "Basic block that may contain a use of 'New' must be defined\n");
555
556 replaceDbgUsesOutsideBlock(this, New, BB);
557 replaceUsesWithIf(New, [BB](Use &U) {
558 auto *I = dyn_cast<Instruction>(U.getUser());
559 // Don't replace if it's an instruction in the BB basic block.
560 return !I || I->getParent() != BB;
561 });
562 }
563
namespace {
// Various metrics for how much to strip off of pointers.
enum PointerStripKind {
  PSK_ZeroIndices,                   // Strip zero-index GEPs and bitcasts.
  PSK_ZeroIndicesAndAliases,         // ... plus global aliases.
  PSK_ZeroIndicesSameRepresentation, // ... but not addrspacecasts.
  PSK_ForAliasAnalysis,              // ... plus single-entry PHIs and
                                     //     invariant-group intrinsics.
  PSK_InBoundsConstantIndices,       // Strip in-bounds constant-index GEPs.
  PSK_InBounds                       // Strip any in-bounds GEP.
};

// Default no-op visitor for stripPointerCastsAndOffsets.
template <PointerStripKind StripKind> static void NoopCallback(const Value *) {}

/// Walk through casts/GEPs/aliases (as permitted by StripKind) to the
/// underlying pointer value, invoking Func on each value visited along the
/// way. Returns the first value that cannot be stripped further.
template <PointerStripKind StripKind>
static const Value *stripPointerCastsAndOffsets(
    const Value *V,
    function_ref<void(const Value *)> Func = NoopCallback<StripKind>) {
  if (!V->getType()->isPointerTy())
    return V;

  // Even though we don't look through PHI nodes, we could be called on an
  // instruction in an unreachable block, which may be on a cycle.
  SmallPtrSet<const Value *, 4> Visited;

  Visited.insert(V);
  do {
    Func(V);
    if (auto *GEP = dyn_cast<GEPOperator>(V)) {
      // Which GEPs may be looked through depends on the strip kind.
      switch (StripKind) {
      case PSK_ZeroIndices:
      case PSK_ZeroIndicesAndAliases:
      case PSK_ZeroIndicesSameRepresentation:
      case PSK_ForAliasAnalysis:
        if (!GEP->hasAllZeroIndices())
          return V;
        break;
      case PSK_InBoundsConstantIndices:
        if (!GEP->hasAllConstantIndices())
          return V;
        LLVM_FALLTHROUGH;
      case PSK_InBounds:
        if (!GEP->isInBounds())
          return V;
        break;
      }
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast) {
      V = cast<Operator>(V)->getOperand(0);
      if (!V->getType()->isPointerTy())
        return V;
    } else if (StripKind != PSK_ZeroIndicesSameRepresentation &&
               Operator::getOpcode(V) == Instruction::AddrSpaceCast) {
      // TODO: If we know an address space cast will not change the
      // representation we could look through it here as well.
      V = cast<Operator>(V)->getOperand(0);
    } else if (StripKind == PSK_ZeroIndicesAndAliases && isa<GlobalAlias>(V)) {
      V = cast<GlobalAlias>(V)->getAliasee();
    } else if (StripKind == PSK_ForAliasAnalysis && isa<PHINode>(V) &&
               cast<PHINode>(V)->getNumIncomingValues() == 1) {
      V = cast<PHINode>(V)->getIncomingValue(0);
    } else {
      if (const auto *Call = dyn_cast<CallBase>(V)) {
        if (const Value *RV = Call->getReturnedArgOperand()) {
          V = RV;
          continue;
        }
        // The result of launder.invariant.group must alias it's argument,
        // but it can't be marked with returned attribute, that's why it needs
        // special case.
        if (StripKind == PSK_ForAliasAnalysis &&
            (Call->getIntrinsicID() == Intrinsic::launder_invariant_group ||
             Call->getIntrinsicID() == Intrinsic::strip_invariant_group)) {
          V = Call->getArgOperand(0);
          continue;
        }
      }
      return V;
    }
    assert(V->getType()->isPointerTy() && "Unexpected operand type!");
  } while (Visited.insert(V).second); // Stop if we revisit a value (cycle).

  return V;
}
} // end anonymous namespace
648
/// Strip bitcasts, addrspacecasts, and all-zero-index GEPs.
const Value *Value::stripPointerCasts() const {
  return stripPointerCastsAndOffsets<PSK_ZeroIndices>(this);
}
652
/// Like stripPointerCasts(), but also looks through global aliases.
const Value *Value::stripPointerCastsAndAliases() const {
  return stripPointerCastsAndOffsets<PSK_ZeroIndicesAndAliases>(this);
}
656
/// Like stripPointerCasts(), but does not look through addrspacecasts, which
/// may change the pointer's representation.
const Value *Value::stripPointerCastsSameRepresentation() const {
  return stripPointerCastsAndOffsets<PSK_ZeroIndicesSameRepresentation>(this);
}
660
/// Strip casts and in-bounds GEPs whose indices are all constant.
const Value *Value::stripInBoundsConstantOffsets() const {
  return stripPointerCastsAndOffsets<PSK_InBoundsConstantIndices>(this);
}
664
/// Aggressive variant for alias analysis: also looks through single-entry
/// PHIs and invariant-group intrinsics.
const Value *Value::stripPointerCastsForAliasAnalysis() const {
  return stripPointerCastsAndOffsets<PSK_ForAliasAnalysis>(this);
}
668
/// Strip casts/GEPs/aliases from this pointer, accumulating the total
/// constant byte offset of the stripped GEPs into Offset. Offset's bit width
/// must match DL's index size for this pointer type; traversal stops before
/// any step that would lose offset information (non-constant GEP, offset
/// overflow, interposable alias, ...).
const Value *Value::stripAndAccumulateConstantOffsets(
    const DataLayout &DL, APInt &Offset, bool AllowNonInbounds,
    function_ref<bool(Value &, APInt &)> ExternalAnalysis) const {
  if (!getType()->isPtrOrPtrVectorTy())
    return this;

  unsigned BitWidth = Offset.getBitWidth();
  assert(BitWidth == DL.getIndexTypeSizeInBits(getType()) &&
         "The offset bit width does not match the DL specification.");

  // Even though we don't look through PHI nodes, we could be called on an
  // instruction in an unreachable block, which may be on a cycle.
  SmallPtrSet<const Value *, 4> Visited;
  Visited.insert(this);
  const Value *V = this;
  do {
    if (auto *GEP = dyn_cast<GEPOperator>(V)) {
      // If in-bounds was requested, we do not strip non-in-bounds GEPs.
      if (!AllowNonInbounds && !GEP->isInBounds())
        return V;

      // If one of the values we have visited is an addrspacecast, then
      // the pointer type of this GEP may be different from the type
      // of the Ptr parameter which was passed to this function. This
      // means when we construct GEPOffset, we need to use the size
      // of GEP's pointer type rather than the size of the original
      // pointer type.
      APInt GEPOffset(DL.getIndexTypeSizeInBits(V->getType()), 0);
      if (!GEP->accumulateConstantOffset(DL, GEPOffset, ExternalAnalysis))
        return V;

      // Stop traversal if the pointer offset wouldn't fit in the bit-width
      // provided by the Offset argument. This can happen due to AddrSpaceCast
      // stripping.
      if (GEPOffset.getMinSignedBits() > BitWidth)
        return V;

      // External Analysis can return a result higher/lower than the value
      // represents. We need to detect overflow/underflow.
      APInt GEPOffsetST = GEPOffset.sextOrTrunc(BitWidth);
      if (!ExternalAnalysis) {
        Offset += GEPOffsetST;
      } else {
        bool Overflow = false;
        APInt OldOffset = Offset;
        Offset = Offset.sadd_ov(GEPOffsetST, Overflow);
        if (Overflow) {
          // Leave Offset untouched and stop at the overflowing GEP.
          Offset = OldOffset;
          return V;
        }
      }
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast ||
               Operator::getOpcode(V) == Instruction::AddrSpaceCast) {
      V = cast<Operator>(V)->getOperand(0);
    } else if (auto *GA = dyn_cast<GlobalAlias>(V)) {
      // Interposable aliases may be replaced at link time; don't look through.
      if (!GA->isInterposable())
        V = GA->getAliasee();
    } else if (const auto *Call = dyn_cast<CallBase>(V)) {
      if (const Value *RV = Call->getReturnedArgOperand())
        V = RV;
    }
    assert(V->getType()->isPtrOrPtrVectorTy() && "Unexpected operand type!");
  } while (Visited.insert(V).second); // Stop on a revisit (cycle) or no-op.

  return V;
}
736
/// Strip casts and in-bounds GEPs, invoking Func on each value visited.
const Value *
Value::stripInBoundsOffsets(function_ref<void(const Value *)> Func) const {
  return stripPointerCastsAndOffsets<PSK_InBounds>(this, Func);
}
741
/// Return true if the memory this pointer points to may be freed while the
/// pointer is live. Conservatively answers true unless a definite reason to
/// the contrary is found.
bool Value::canBeFreed() const {
  assert(getType()->isPointerTy());

  // Cases that can simply never be deallocated
  // *) Constants aren't allocated per se, thus not deallocated either.
  if (isa<Constant>(this))
    return false;

  // Handle byval/byref/sret/inalloca/preallocated arguments. The storage
  // lifetime is guaranteed to be longer than the callee's lifetime.
  if (auto *A = dyn_cast<Argument>(this)) {
    if (A->hasPointeeInMemoryValueAttr())
      return false;
    // A pointer to an object in a function which neither frees, nor can arrange
    // for another thread to free on its behalf, can not be freed in the scope
    // of the function. Note that this logic is restricted to memory
    // allocations in existence before the call; a nofree function *is* allowed
    // to free memory it allocated.
    const Function *F = A->getParent();
    if (F->doesNotFreeMemory() && F->hasNoSync())
      return false;
  }

  const Function *F = nullptr;
  if (auto *I = dyn_cast<Instruction>(this))
    F = I->getFunction();
  if (auto *A = dyn_cast<Argument>(this))
    F = A->getParent();

  // Without a containing function, assume the worst.
  if (!F)
    return true;

  // With garbage collection, deallocation typically occurs solely at or after
  // safepoints. If we're compiling for a collector which uses the
  // gc.statepoint infrastructure, safepoints aren't explicitly present
  // in the IR until after lowering from abstract to physical machine model.
  // The collector could choose to mix explicit deallocation and gc'd objects
  // which is why we need the explicit opt in on a per collector basis.
  if (!F->hasGC())
    return true;

  const auto &GCName = F->getGC();
  if (GCName == "statepoint-example") {
    auto *PT = cast<PointerType>(this->getType());
    if (PT->getAddressSpace() != 1)
      // For the sake of this example GC, we arbitrarily pick addrspace(1) as
      // our GC managed heap. This must match the same check in
      // RewriteStatepointsForGC (and probably needs better factored.)
      return true;

    // It is cheaper to scan for a declaration than to scan for a use in this
    // function. Note that gc.statepoint is a type overloaded function so the
    // usual trick of requesting declaration of the intrinsic from the module
    // doesn't work.
    for (auto &Fn : *F->getParent())
      if (Fn.getIntrinsicID() == Intrinsic::experimental_gc_statepoint)
        return true;
    return false;
  }
  return true;
}
803
/// Return the number of bytes known to be dereferenceable through this
/// pointer (0 if unknown). CanBeNull is set when the guarantee only holds
/// for non-null pointers (dereferenceable_or_null); CanBeFreed is set when
/// the backing memory could be freed before a use (only under
/// use-dereferenceable-at-point-semantics).
uint64_t Value::getPointerDereferenceableBytes(const DataLayout &DL,
                                               bool &CanBeNull,
                                               bool &CanBeFreed) const {
  assert(getType()->isPointerTy() && "must be pointer");

  uint64_t DerefBytes = 0;
  CanBeNull = false;
  CanBeFreed = UseDerefAtPointSemantics && canBeFreed();
  if (const Argument *A = dyn_cast<Argument>(this)) {
    DerefBytes = A->getDereferenceableBytes();
    if (DerefBytes == 0) {
      // Handle byval/byref/inalloca/preallocated arguments
      if (Type *ArgMemTy = A->getPointeeInMemoryValueType()) {
        if (ArgMemTy->isSized()) {
          // FIXME: Why isn't this the type alloc size?
          DerefBytes = DL.getTypeStoreSize(ArgMemTy).getKnownMinSize();
        }
      }
    }

    // Fall back to dereferenceable_or_null, which only holds for non-null.
    if (DerefBytes == 0) {
      DerefBytes = A->getDereferenceableOrNullBytes();
      CanBeNull = true;
    }
  } else if (const auto *Call = dyn_cast<CallBase>(this)) {
    DerefBytes = Call->getDereferenceableBytes(AttributeList::ReturnIndex);
    if (DerefBytes == 0) {
      DerefBytes =
          Call->getDereferenceableOrNullBytes(AttributeList::ReturnIndex);
      CanBeNull = true;
    }
  } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) {
    // Loads carry the guarantee via metadata rather than attributes.
    if (MDNode *MD = LI->getMetadata(LLVMContext::MD_dereferenceable)) {
      ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
      DerefBytes = CI->getLimitedValue();
    }
    if (DerefBytes == 0) {
      if (MDNode *MD =
              LI->getMetadata(LLVMContext::MD_dereferenceable_or_null)) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
        DerefBytes = CI->getLimitedValue();
      }
      CanBeNull = true;
    }
  } else if (auto *IP = dyn_cast<IntToPtrInst>(this)) {
    if (MDNode *MD = IP->getMetadata(LLVMContext::MD_dereferenceable)) {
      ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
      DerefBytes = CI->getLimitedValue();
    }
    if (DerefBytes == 0) {
      if (MDNode *MD =
              IP->getMetadata(LLVMContext::MD_dereferenceable_or_null)) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
        DerefBytes = CI->getLimitedValue();
      }
      CanBeNull = true;
    }
  } else if (auto *AI = dyn_cast<AllocaInst>(this)) {
    // A non-array alloca is exactly its allocated type, never null, and its
    // storage lives for the frame.
    if (!AI->isArrayAllocation()) {
      DerefBytes =
          DL.getTypeStoreSize(AI->getAllocatedType()).getKnownMinSize();
      CanBeNull = false;
      CanBeFreed = false;
    }
  } else if (auto *GV = dyn_cast<GlobalVariable>(this)) {
    if (GV->getValueType()->isSized() && !GV->hasExternalWeakLinkage()) {
      // TODO: Don't outright reject hasExternalWeakLinkage but set the
      // CanBeNull flag.
      DerefBytes = DL.getTypeStoreSize(GV->getValueType()).getFixedSize();
      CanBeNull = false;
    }
  }
  return DerefBytes;
}
878
getPointerAlignment(const DataLayout & DL) const879 Align Value::getPointerAlignment(const DataLayout &DL) const {
880 assert(getType()->isPointerTy() && "must be pointer");
881 if (auto *GO = dyn_cast<GlobalObject>(this)) {
882 if (isa<Function>(GO)) {
883 Align FunctionPtrAlign = DL.getFunctionPtrAlign().valueOrOne();
884 switch (DL.getFunctionPtrAlignType()) {
885 case DataLayout::FunctionPtrAlignType::Independent:
886 return FunctionPtrAlign;
887 case DataLayout::FunctionPtrAlignType::MultipleOfFunctionAlign:
888 return std::max(FunctionPtrAlign, GO->getAlign().valueOrOne());
889 }
890 llvm_unreachable("Unhandled FunctionPtrAlignType");
891 }
892 const MaybeAlign Alignment(GO->getAlignment());
893 if (!Alignment) {
894 if (auto *GVar = dyn_cast<GlobalVariable>(GO)) {
895 Type *ObjectType = GVar->getValueType();
896 if (ObjectType->isSized()) {
897 // If the object is defined in the current Module, we'll be giving
898 // it the preferred alignment. Otherwise, we have to assume that it
899 // may only have the minimum ABI alignment.
900 if (GVar->isStrongDefinitionForLinker())
901 return DL.getPreferredAlign(GVar);
902 else
903 return DL.getABITypeAlign(ObjectType);
904 }
905 }
906 }
907 return Alignment.valueOrOne();
908 } else if (const Argument *A = dyn_cast<Argument>(this)) {
909 const MaybeAlign Alignment = A->getParamAlign();
910 if (!Alignment && A->hasStructRetAttr()) {
911 // An sret parameter has at least the ABI alignment of the return type.
912 Type *EltTy = A->getParamStructRetType();
913 if (EltTy->isSized())
914 return DL.getABITypeAlign(EltTy);
915 }
916 return Alignment.valueOrOne();
917 } else if (const AllocaInst *AI = dyn_cast<AllocaInst>(this)) {
918 return AI->getAlign();
919 } else if (const auto *Call = dyn_cast<CallBase>(this)) {
920 MaybeAlign Alignment = Call->getRetAlign();
921 if (!Alignment && Call->getCalledFunction())
922 Alignment = Call->getCalledFunction()->getAttributes().getRetAlignment();
923 return Alignment.valueOrOne();
924 } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) {
925 if (MDNode *MD = LI->getMetadata(LLVMContext::MD_align)) {
926 ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
927 return Align(CI->getLimitedValue());
928 }
929 } else if (auto *CstPtr = dyn_cast<Constant>(this)) {
930 if (auto *CstInt = dyn_cast_or_null<ConstantInt>(ConstantExpr::getPtrToInt(
931 const_cast<Constant *>(CstPtr), DL.getIntPtrType(getType()),
932 /*OnlyIfReduced=*/true))) {
933 size_t TrailingZeros = CstInt->getValue().countTrailingZeros();
934 // While the actual alignment may be large, elsewhere we have
935 // an arbitrary upper alignmet limit, so let's clamp to it.
936 return Align(TrailingZeros < Value::MaxAlignmentExponent
937 ? uint64_t(1) << TrailingZeros
938 : Value::MaximumAlignment);
939 }
940 }
941 return Align(1);
942 }
943
DoPHITranslation(const BasicBlock * CurBB,const BasicBlock * PredBB) const944 const Value *Value::DoPHITranslation(const BasicBlock *CurBB,
945 const BasicBlock *PredBB) const {
946 auto *PN = dyn_cast<PHINode>(this);
947 if (PN && PN->getParent() == CurBB)
948 return PN->getIncomingValueForBlock(PredBB);
949 return this;
950 }
951
getContext() const952 LLVMContext &Value::getContext() const { return VTy->getContext(); }
953
reverseUseList()954 void Value::reverseUseList() {
955 if (!UseList || !UseList->Next)
956 // No need to reverse 0 or 1 uses.
957 return;
958
959 Use *Head = UseList;
960 Use *Current = UseList->Next;
961 Head->Next = nullptr;
962 while (Current) {
963 Use *Next = Current->Next;
964 Current->Next = Head;
965 Head->Prev = &Current->Next;
966 Head = Current;
967 Current = Next;
968 }
969 UseList = Head;
970 Head->Prev = &UseList;
971 }
972
isSwiftError() const973 bool Value::isSwiftError() const {
974 auto *Arg = dyn_cast<Argument>(this);
975 if (Arg)
976 return Arg->hasSwiftErrorAttr();
977 auto *Alloca = dyn_cast<AllocaInst>(this);
978 if (!Alloca)
979 return false;
980 return Alloca->isSwiftError();
981 }
982
isTransitiveUsedByMetadataOnly() const983 bool Value::isTransitiveUsedByMetadataOnly() const {
984 if (use_empty())
985 return false;
986 llvm::SmallVector<const User *, 32> WorkList;
987 llvm::SmallPtrSet<const User *, 32> Visited;
988 WorkList.insert(WorkList.begin(), user_begin(), user_end());
989 while (!WorkList.empty()) {
990 const User *U = WorkList.back();
991 WorkList.pop_back();
992 Visited.insert(U);
993 // If it is transitively used by a global value or a non-constant value,
994 // it's obviously not only used by metadata.
995 if (!isa<Constant>(U) || isa<GlobalValue>(U))
996 return false;
997 for (const User *UU : U->users())
998 if (!Visited.count(UU))
999 WorkList.push_back(UU);
1000 }
1001 return true;
1002 }
1003
1004 //===----------------------------------------------------------------------===//
1005 // ValueHandleBase Class
1006 //===----------------------------------------------------------------------===//
1007
AddToExistingUseList(ValueHandleBase ** List)1008 void ValueHandleBase::AddToExistingUseList(ValueHandleBase **List) {
1009 assert(List && "Handle list is null?");
1010
1011 // Splice ourselves into the list.
1012 Next = *List;
1013 *List = this;
1014 setPrevPtr(List);
1015 if (Next) {
1016 Next->setPrevPtr(&Next);
1017 assert(getValPtr() == Next->getValPtr() && "Added to wrong list?");
1018 }
1019 }
1020
AddToExistingUseListAfter(ValueHandleBase * List)1021 void ValueHandleBase::AddToExistingUseListAfter(ValueHandleBase *List) {
1022 assert(List && "Must insert after existing node");
1023
1024 Next = List->Next;
1025 setPrevPtr(&List->Next);
1026 List->Next = this;
1027 if (Next)
1028 Next->setPrevPtr(&Next);
1029 }
1030
/// Add this handle to the use list of the value it currently points at.
///
/// If the value already has handles, this splices into the existing list.
/// Otherwise it creates the map entry, sets HasValueHandle on the value,
/// and repairs any Prev pointers invalidated by a DenseMap reallocation.
void ValueHandleBase::AddToUseList() {
  assert(getValPtr() && "Null pointer doesn't have a use list!");

  LLVMContextImpl *pImpl = getValPtr()->getContext().pImpl;

  if (getValPtr()->HasValueHandle) {
    // If this value already has a ValueHandle, then it must be in the
    // ValueHandles map already.
    ValueHandleBase *&Entry = pImpl->ValueHandles[getValPtr()];
    assert(Entry && "Value doesn't have any handles?");
    AddToExistingUseList(&Entry);
    return;
  }

  // Ok, it doesn't have any handles yet, so we must insert it into the
  // DenseMap. However, doing this insertion could cause the DenseMap to
  // reallocate itself, which would invalidate all of the PrevP pointers that
  // point into the old table. Handle this by checking for reallocation and
  // updating the stale pointers only if needed.
  DenseMap<Value*, ValueHandleBase*> &Handles = pImpl->ValueHandles;
  // Remember where the bucket array was so we can detect reallocation below.
  const void *OldBucketPtr = Handles.getPointerIntoBucketsArray();

  ValueHandleBase *&Entry = Handles[getValPtr()];
  assert(!Entry && "Value really did already have handles?");
  AddToExistingUseList(&Entry);
  getValPtr()->HasValueHandle = true;

  // If reallocation didn't happen or if this was the first insertion, don't
  // walk the table.
  if (Handles.isPointerIntoBucketsArray(OldBucketPtr) ||
      Handles.size() == 1) {
    return;
  }

  // Okay, reallocation did happen. Fix the Prev Pointers.
  // Every list head lives inside a bucket, so each head's Prev must be
  // re-pointed at its (moved) map slot.
  for (DenseMap<Value*, ValueHandleBase*>::iterator I = Handles.begin(),
       E = Handles.end(); I != E; ++I) {
    assert(I->second && I->first == I->second->getValPtr() &&
           "List invariant broken!");
    I->second->setPrevPtr(&I->second);
  }
}
1073
/// Remove this handle from its value's handle list, erasing the value's
/// ValueHandles map entry (and clearing HasValueHandle) if this was the
/// last handle watching it.
void ValueHandleBase::RemoveFromUseList() {
  assert(getValPtr() && getValPtr()->HasValueHandle &&
         "Pointer doesn't have a use list!");

  // Unlink this from its use list.
  ValueHandleBase **PrevPtr = getPrevPtr();
  assert(*PrevPtr == this && "List invariant broken");

  *PrevPtr = Next;
  if (Next) {
    assert(Next->getPrevPtr() == &Next && "List invariant broken");
    Next->setPrevPtr(PrevPtr);
    return;
  }

  // If the Next pointer was null, then it is possible that this was the last
  // ValueHandle watching VP. If so, delete its entry from the ValueHandles
  // map.
  LLVMContextImpl *pImpl = getValPtr()->getContext().pImpl;
  DenseMap<Value*, ValueHandleBase*> &Handles = pImpl->ValueHandles;
  // The list head's Prev points into the map's bucket array; if PrevPtr is
  // in the buckets, we were the head (and, with Next null, the sole entry).
  if (Handles.isPointerIntoBucketsArray(PrevPtr)) {
    Handles.erase(getValPtr());
    getValPtr()->HasValueHandle = false;
  }
}
1099
/// Notify all handles watching V that it is being deleted: Weak and
/// WeakTracking handles are nulled out, CallbackVHs get deleted(), and
/// any surviving AssertingVH triggers a fatal error.
void ValueHandleBase::ValueIsDeleted(Value *V) {
  assert(V->HasValueHandle && "Should only be called if ValueHandles present");

  // Get the linked list base, which is guaranteed to exist since the
  // HasValueHandle flag is set.
  LLVMContextImpl *pImpl = V->getContext().pImpl;
  ValueHandleBase *Entry = pImpl->ValueHandles[V];
  assert(Entry && "Value bit set but no entries exist");

  // We use a local ValueHandleBase as an iterator so that ValueHandles can add
  // and remove themselves from the list without breaking our iteration. This
  // is not really an AssertingVH; we just have to give ValueHandleBase a kind.
  // Note that we deliberately do not the support the case when dropping a value
  // handle results in a new value handle being permanently added to the list
  // (as might occur in theory for CallbackVH's): the new value handle will not
  // be processed and the checking code will mete out righteous punishment if
  // the handle is still present once we have finished processing all the other
  // value handles (it is fine to momentarily add then remove a value handle).
  for (ValueHandleBase Iterator(Assert, *Entry); Entry; Entry = Iterator.Next) {
    // Move the sentinel Iterator to sit immediately after Entry; advancing
    // via Iterator.Next is then safe even if Entry unlinks itself below.
    Iterator.RemoveFromUseList();
    Iterator.AddToExistingUseListAfter(Entry);
    assert(Entry->Next == &Iterator && "Loop invariant broken.");

    switch (Entry->getKind()) {
    case Assert:
      break;
    case Weak:
    case WeakTracking:
      // WeakTracking and Weak just go to null, which unlinks them
      // from the list.
      Entry->operator=(nullptr);
      break;
    case Callback:
      // Forward to the subclass's implementation.
      static_cast<CallbackVH*>(Entry)->deleted();
      break;
    }
  }

  // All callbacks, weak references, and assertingVHs should be dropped by now.
  if (V->HasValueHandle) {
#ifndef NDEBUG      // Only in +Asserts mode...
    dbgs() << "While deleting: " << *V->getType() << " %" << V->getName()
           << "\n";
    if (pImpl->ValueHandles[V]->getKind() == Assert)
      llvm_unreachable("An asserting value handle still pointed to this"
                       " value!");

#endif
    llvm_unreachable("All references to V were not removed?");
  }
}
1152
/// Notify all handles watching Old that its uses are being replaced with
/// New: WeakTracking handles are retargeted to New, CallbackVHs get
/// allUsesReplacedWith(New); Assert and Weak handles stay on Old.
void ValueHandleBase::ValueIsRAUWd(Value *Old, Value *New) {
  assert(Old->HasValueHandle &&"Should only be called if ValueHandles present");
  assert(Old != New && "Changing value into itself!");
  assert(Old->getType() == New->getType() &&
         "replaceAllUses of value with new value of different type!");

  // Get the linked list base, which is guaranteed to exist since the
  // HasValueHandle flag is set.
  LLVMContextImpl *pImpl = Old->getContext().pImpl;
  ValueHandleBase *Entry = pImpl->ValueHandles[Old];

  assert(Entry && "Value bit set but no entries exist");

  // We use a local ValueHandleBase as an iterator so that
  // ValueHandles can add and remove themselves from the list without
  // breaking our iteration. This is not really an AssertingVH; we
  // just have to give ValueHandleBase some kind.
  for (ValueHandleBase Iterator(Assert, *Entry); Entry; Entry = Iterator.Next) {
    // Park the sentinel Iterator right after Entry so advancing through
    // Iterator.Next stays valid even if Entry unlinks itself below.
    Iterator.RemoveFromUseList();
    Iterator.AddToExistingUseListAfter(Entry);
    assert(Entry->Next == &Iterator && "Loop invariant broken.");

    switch (Entry->getKind()) {
    case Assert:
    case Weak:
      // Asserting and Weak handles do not follow RAUW implicitly.
      break;
    case WeakTracking:
      // Weak goes to the new value, which will unlink it from Old's list.
      Entry->operator=(New);
      break;
    case Callback:
      // Forward to the subclass's implementation.
      static_cast<CallbackVH*>(Entry)->allUsesReplacedWith(New);
      break;
    }
  }

#ifndef NDEBUG
  // If any new weak value handles were added while processing the
  // list, then complain about it now.
  if (Old->HasValueHandle)
    for (Entry = pImpl->ValueHandles[Old]; Entry; Entry = Entry->Next)
      switch (Entry->getKind()) {
      case WeakTracking:
        dbgs() << "After RAUW from " << *Old->getType() << " %"
               << Old->getName() << " to " << *New->getType() << " %"
               << New->getName() << "\n";
        llvm_unreachable(
            "A weak tracking value handle still pointed to the old value!\n");
      default:
        break;
      }
#endif
}
1208
1209 // Pin the vtable to this file.
anchor()1210 void CallbackVH::anchor() {}
1211