//===- ValueMapper.cpp - Interface shared by lib/Transforms/Utils ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the MapValue function, which is shared by various parts of
// the lib/Transforms/Utils library.
//
//===----------------------------------------------------------------------===//
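//
// Illustrative usage sketch (not part of this file's implementation): a
// typical client seeds a ValueToValueMapTy and remaps cloned instructions
// through the ValueMapper interface declared in ValueMapper.h, roughly:
//
//   ValueToValueMapTy VM;
//   VM[OldArg] = NewArg;                       // seed known mappings
//   ValueMapper Mapper(VM, RF_IgnoreMissingLocals);
//   for (BasicBlock &BB : *NewF)
//     for (Instruction &I : BB)
//       Mapper.remapInstruction(I);            // rewrite operands via VM
//
//===----------------------------------------------------------------------===//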

#include "llvm/Transforms/Utils/ValueMapper.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalIFunc.h"
#include "llvm/IR/GlobalObject.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Debug.h"
#include <cassert>
#include <limits>
#include <memory>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "value-mapper"

// Out-of-line methods to anchor the vtables for these classes.
void ValueMapTypeRemapper::anchor() {}
void ValueMaterializer::anchor() {}

namespace {

/// A basic block used in a BlockAddress whose function body is not yet
/// materialized.
struct DelayedBasicBlock {
  BasicBlock *OldBB;
  std::unique_ptr<BasicBlock> TempBB;

  DelayedBasicBlock(const BlockAddress &Old)
      : OldBB(Old.getBasicBlock()),
        TempBB(BasicBlock::Create(Old.getContext())) {}
};

struct WorklistEntry {
  enum EntryKind {
    MapGlobalInit,
    MapAppendingVar,
    MapAliasOrIFunc,
    RemapFunction
  };
  struct GVInitTy {
    GlobalVariable *GV;
    Constant *Init;
  };
  struct AppendingGVTy {
    GlobalVariable *GV;
    Constant *InitPrefix;
  };
  struct AliasOrIFuncTy {
    GlobalValue *GV;
    Constant *Target;
  };

  unsigned Kind : 2;
  unsigned MCID : 29;
  unsigned AppendingGVIsOldCtorDtor : 1;
  unsigned AppendingGVNumNewMembers;
  union {
    GVInitTy GVInit;
    AppendingGVTy AppendingGV;
    AliasOrIFuncTy AliasOrIFunc;
    Function *RemapF;
  } Data;
};

struct MappingContext {
  ValueToValueMapTy *VM;
  ValueMaterializer *Materializer = nullptr;

  /// Construct a MappingContext with a value map and materializer.
  explicit MappingContext(ValueToValueMapTy &VM,
                          ValueMaterializer *Materializer = nullptr)
      : VM(&VM), Materializer(Materializer) {}
};

class Mapper {
  friend class MDNodeMapper;

#ifndef NDEBUG
  DenseSet<GlobalValue *> AlreadyScheduled;
#endif

  RemapFlags Flags;
  ValueMapTypeRemapper *TypeMapper;
  unsigned CurrentMCID = 0;
  SmallVector<MappingContext, 2> MCs;
  SmallVector<WorklistEntry, 4> Worklist;
  SmallVector<DelayedBasicBlock, 1> DelayedBBs;
  SmallVector<Constant *, 16> AppendingInits;

public:
  Mapper(ValueToValueMapTy &VM, RemapFlags Flags,
         ValueMapTypeRemapper *TypeMapper, ValueMaterializer *Materializer)
      : Flags(Flags), TypeMapper(TypeMapper),
        MCs(1, MappingContext(VM, Materializer)) {}

  /// ValueMapper should explicitly call \a flush() before destruction.
  ~Mapper() { assert(!hasWorkToDo() && "Expected to be flushed"); }

  bool hasWorkToDo() const { return !Worklist.empty(); }

  unsigned
  registerAlternateMappingContext(ValueToValueMapTy &VM,
                                  ValueMaterializer *Materializer = nullptr) {
    MCs.push_back(MappingContext(VM, Materializer));
    return MCs.size() - 1;
  }

  void addFlags(RemapFlags Flags);

  void remapGlobalObjectMetadata(GlobalObject &GO);

  Value *mapValue(const Value *V);
  void remapInstruction(Instruction *I);
  void remapFunction(Function &F);

  Constant *mapConstant(const Constant *C) {
    return cast_or_null<Constant>(mapValue(C));
  }

  /// Map metadata.
  ///
  /// Find the mapping for MD. Guarantees that the return will be resolved
  /// (not an MDNode, or MDNode::isResolved() returns true).
  Metadata *mapMetadata(const Metadata *MD);

  void scheduleMapGlobalInitializer(GlobalVariable &GV, Constant &Init,
                                    unsigned MCID);
  void scheduleMapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                                    bool IsOldCtorDtor,
                                    ArrayRef<Constant *> NewMembers,
                                    unsigned MCID);
  void scheduleMapAliasOrIFunc(GlobalValue &GV, Constant &Target,
                               unsigned MCID);
  void scheduleRemapFunction(Function &F, unsigned MCID);

  void flush();

private:
  void mapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                            bool IsOldCtorDtor,
                            ArrayRef<Constant *> NewMembers);

  ValueToValueMapTy &getVM() { return *MCs[CurrentMCID].VM; }
  ValueMaterializer *getMaterializer() { return MCs[CurrentMCID].Materializer; }

  Value *mapBlockAddress(const BlockAddress &BA);

  /// Map metadata that doesn't require visiting operands.
  std::optional<Metadata *> mapSimpleMetadata(const Metadata *MD);

  Metadata *mapToMetadata(const Metadata *Key, Metadata *Val);
  Metadata *mapToSelf(const Metadata *MD);
};

class MDNodeMapper {
  Mapper &M;

  /// Data about a node in \a UniquedGraph.
  struct Data {
    bool HasChanged = false;
    unsigned ID = std::numeric_limits<unsigned>::max();
    TempMDNode Placeholder;
  };

  /// A graph of uniqued nodes.
  struct UniquedGraph {
    SmallDenseMap<const Metadata *, Data, 32> Info; // Node properties.
    SmallVector<MDNode *, 16> POT;                  // Post-order traversal.

    /// Propagate changed operands through the post-order traversal.
    ///
    /// Iteratively update \a Data::HasChanged for each node based on \a
    /// Data::HasChanged of its operands, until fixed point.
    void propagateChanges();

    /// Get a forward reference to a node to use as an operand.
    Metadata &getFwdReference(MDNode &Op);
  };

  /// Worklist of distinct nodes whose operands need to be remapped.
  SmallVector<MDNode *, 16> DistinctWorklist;

  // Storage for a UniquedGraph.
  SmallDenseMap<const Metadata *, Data, 32> InfoStorage;
  SmallVector<MDNode *, 16> POTStorage;

public:
  MDNodeMapper(Mapper &M) : M(M) {}

  /// Map a metadata node (and its transitive operands).
  ///
  /// Map all the (unmapped) nodes in the subgraph under \c N. The iterative
  /// algorithm handles distinct nodes and uniqued node subgraphs using
  /// different strategies.
  ///
  /// Distinct nodes are immediately mapped and added to \a DistinctWorklist
  /// using \a mapDistinctNode(). Their mapping can always be computed
  /// immediately without visiting operands, even if their operands change.
  ///
  /// The mapping for uniqued nodes depends on whether their operands change.
  /// \a mapTopLevelUniquedNode() traverses the transitive uniqued subgraph of
  /// a node to calculate uniqued node mappings in bulk. Distinct leaves are
  /// added to \a DistinctWorklist with \a mapDistinctNode().
  ///
  /// After mapping \c N itself, this function remaps the operands of the
  /// distinct nodes in \a DistinctWorklist until the entire subgraph under \c
  /// N has been mapped.
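  ///
  /// For illustration (hypothetical IR, not drawn from this file): given
  ///   !0 = distinct !{!1}
  ///   !1 = !{ptr @g}
  /// mapping !0 clones it (unless RF_ReuseAndMutateDistinctMDs is set) and
  /// queues it on \a DistinctWorklist; the uniqued operand !1 is then mapped
  /// via \a mapTopLevelUniquedNode() and needs to be re-uniqued only if @g
  /// maps to a different global.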
  Metadata *map(const MDNode &N);

private:
  /// Map a top-level uniqued node and the uniqued subgraph underneath it.
  ///
  /// This builds up a post-order traversal of the (unmapped) uniqued subgraph
  /// underneath \c FirstN and calculates the nodes' mapping. Each node uses
  /// the identity mapping (\a Mapper::mapToSelf()) as long as all of its
  /// operands use the identity mapping.
  ///
  /// The algorithm works as follows:
  ///
  /// 1. \a createPOT(): traverse the uniqued subgraph under \c FirstN and
  ///    save the post-order traversal in the given \a UniquedGraph, tracking
  ///    whether any node's operands change.
  ///
  /// 2. \a UniquedGraph::propagateChanges(): propagate changed operands
  ///    through the \a UniquedGraph until fixed point, following the rule
  ///    that if a node changes, any node that references it must also change.
  ///
  /// 3. \a mapNodesInPOT(): map the uniqued nodes, creating new uniqued nodes
  ///    (referencing new operands) where necessary.
  Metadata *mapTopLevelUniquedNode(const MDNode &FirstN);

  /// Try to map the operand of an \a MDNode.
  ///
  /// If \c Op is already mapped, return the mapping. If it's not an \a
  /// MDNode, compute and return the mapping. If it's a distinct \a MDNode,
  /// return the result of \a mapDistinctNode().
  ///
  /// \return std::nullopt if \c Op is an unmapped uniqued \a MDNode.
  /// \post getMappedOp(Op) only returns std::nullopt if this returns
  /// std::nullopt.
  std::optional<Metadata *> tryToMapOperand(const Metadata *Op);

  /// Map a distinct node.
  ///
  /// Return the mapping for the distinct node \c N, saving the result in \a
  /// DistinctWorklist for later remapping.
  ///
  /// \pre \c N is not yet mapped.
  /// \pre \c N.isDistinct().
  MDNode *mapDistinctNode(const MDNode &N);

  /// Get a previously mapped node.
  std::optional<Metadata *> getMappedOp(const Metadata *Op) const;

  /// Create a post-order traversal of an unmapped uniqued node subgraph.
  ///
  /// This traverses the metadata graph deeply enough to map \c FirstN. It
  /// uses \a tryToMapOperand() (via \a Mapper::mapSimpleMetadata()), so any
  /// metadata that has already been mapped will not be part of the POT.
  ///
  /// Each node that has a changed operand from outside the graph (e.g., a
  /// distinct node, an already-mapped uniqued node, or \a ConstantAsMetadata)
  /// is marked with \a Data::HasChanged.
  ///
  /// \return \c true if any nodes in \c G have \a Data::HasChanged.
  /// \post \c G.POT is a post-order traversal ending with \c FirstN.
  /// \post \a Data::HasChanged in \c G.Info indicates whether any node needs
  /// to change because of operands outside the graph.
  bool createPOT(UniquedGraph &G, const MDNode &FirstN);

  /// Visit the operands of a uniqued node in the POT.
  ///
  /// Visit the operands in the range from \c I to \c E, returning the first
  /// uniqued node we find that isn't yet in \c G. \c I is always advanced to
  /// where to continue the loop through the operands.
  ///
  /// This sets \c HasChanged if any of the visited operands change.
  MDNode *visitOperands(UniquedGraph &G, MDNode::op_iterator &I,
                        MDNode::op_iterator E, bool &HasChanged);

  /// Map all the nodes in the given uniqued graph.
  ///
  /// This visits all the nodes in \c G in post-order, using the identity
  /// mapping or creating a new node depending on \a Data::HasChanged.
  ///
  /// \pre \a getMappedOp() returns std::nullopt for nodes in \c G, but not for
  /// any of their operands outside of \c G.
  /// \pre \a Data::HasChanged is true for a node in \c G iff any of its
  /// operands have changed.
  /// \post \a getMappedOp() returns the mapped node for every node in \c G.
  void mapNodesInPOT(UniquedGraph &G);

  /// Remap a node's operands using the given functor.
  ///
  /// Iterate through the operands of \c N and update them in place using \c
  /// mapOperand.
  ///
  /// \pre N.isDistinct() or N.isTemporary().
  template <class OperandMapper>
  void remapOperands(MDNode &N, OperandMapper mapOperand);
};

} // end anonymous namespace

Value *Mapper::mapValue(const Value *V) {
  ValueToValueMapTy::iterator I = getVM().find(V);

  // If the value already exists in the map, use it.
  if (I != getVM().end()) {
    assert(I->second && "Unexpected null mapping");
    return I->second;
  }

  // If we have a materializer and it can materialize a value, use that.
  if (auto *Materializer = getMaterializer()) {
    if (Value *NewV = Materializer->materialize(const_cast<Value *>(V))) {
      getVM()[V] = NewV;
      return NewV;
    }
  }

  // Global values do not need to be seeded into the VM if they
  // are using the identity mapping.
  if (isa<GlobalValue>(V)) {
    if (Flags & RF_NullMapMissingGlobalValues)
      return nullptr;
    return getVM()[V] = const_cast<Value *>(V);
  }

  if (const InlineAsm *IA = dyn_cast<InlineAsm>(V)) {
    // Inline asm may need *type* remapping.
    FunctionType *NewTy = IA->getFunctionType();
    if (TypeMapper) {
      NewTy = cast<FunctionType>(TypeMapper->remapType(NewTy));

      if (NewTy != IA->getFunctionType())
        V = InlineAsm::get(NewTy, IA->getAsmString(), IA->getConstraintString(),
                           IA->hasSideEffects(), IA->isAlignStack(),
                           IA->getDialect(), IA->canThrow());
    }

    return getVM()[V] = const_cast<Value *>(V);
  }

  if (const auto *MDV = dyn_cast<MetadataAsValue>(V)) {
    const Metadata *MD = MDV->getMetadata();

    if (auto *LAM = dyn_cast<LocalAsMetadata>(MD)) {
      // Look through to grab the local value.
      if (Value *LV = mapValue(LAM->getValue())) {
        if (V == LAM->getValue())
          return const_cast<Value *>(V);
        return MetadataAsValue::get(V->getContext(), ValueAsMetadata::get(LV));
      }

      // FIXME: always return nullptr once Verifier::verifyDominatesUse()
      // ensures metadata operands only reference defined SSA values.
      return (Flags & RF_IgnoreMissingLocals)
                 ? nullptr
                 : MetadataAsValue::get(
                       V->getContext(),
                       MDTuple::get(V->getContext(), std::nullopt));
    }
    if (auto *AL = dyn_cast<DIArgList>(MD)) {
      SmallVector<ValueAsMetadata *, 4> MappedArgs;
      for (auto *VAM : AL->getArgs()) {
        // Map both Local and Constant VAMs here; they will both ultimately
        // be mapped via mapValue. The exceptions are constants when we have no
        // module level changes and locals when they have no existing mapped
        // value and RF_IgnoreMissingLocals is set; these have identity
        // mappings.
        if ((Flags & RF_NoModuleLevelChanges) && isa<ConstantAsMetadata>(VAM)) {
          MappedArgs.push_back(VAM);
        } else if (Value *LV = mapValue(VAM->getValue())) {
          MappedArgs.push_back(
              LV == VAM->getValue() ? VAM : ValueAsMetadata::get(LV));
        } else if ((Flags & RF_IgnoreMissingLocals) && isa<LocalAsMetadata>(VAM)) {
          MappedArgs.push_back(VAM);
        } else {
          // If we cannot map the value, set the argument as undef.
          MappedArgs.push_back(ValueAsMetadata::get(
              UndefValue::get(VAM->getValue()->getType())));
        }
      }
      return MetadataAsValue::get(V->getContext(),
                                  DIArgList::get(V->getContext(), MappedArgs));
    }

    // If this is a module-level metadata and we know that nothing at the module
    // level is changing, then use an identity mapping.
    if (Flags & RF_NoModuleLevelChanges)
      return getVM()[V] = const_cast<Value *>(V);

    // Map the metadata and turn it into a value.
    auto *MappedMD = mapMetadata(MD);
    if (MD == MappedMD)
      return getVM()[V] = const_cast<Value *>(V);
    return getVM()[V] = MetadataAsValue::get(V->getContext(), MappedMD);
  }

  // Okay, this either must be a constant (which may or may not be mappable) or
  // is something that is not in the mapping table.
  Constant *C = const_cast<Constant*>(dyn_cast<Constant>(V));
  if (!C)
    return nullptr;

  if (BlockAddress *BA = dyn_cast<BlockAddress>(C))
    return mapBlockAddress(*BA);

  if (const auto *E = dyn_cast<DSOLocalEquivalent>(C)) {
    auto *Val = mapValue(E->getGlobalValue());
    GlobalValue *GV = dyn_cast<GlobalValue>(Val);
    if (GV)
      return getVM()[E] = DSOLocalEquivalent::get(GV);

    auto *Func = cast<Function>(Val->stripPointerCastsAndAliases());
    Type *NewTy = E->getType();
    if (TypeMapper)
      NewTy = TypeMapper->remapType(NewTy);
    return getVM()[E] = llvm::ConstantExpr::getBitCast(
               DSOLocalEquivalent::get(Func), NewTy);
  }

  if (const auto *NC = dyn_cast<NoCFIValue>(C)) {
    auto *Val = mapValue(NC->getGlobalValue());
    GlobalValue *GV = cast<GlobalValue>(Val);
    return getVM()[NC] = NoCFIValue::get(GV);
  }

  auto mapValueOrNull = [this](Value *V) {
    auto Mapped = mapValue(V);
    assert((Mapped || (Flags & RF_NullMapMissingGlobalValues)) &&
           "Unexpected null mapping for constant operand without "
           "NullMapMissingGlobalValues flag");
    return Mapped;
  };

  // Otherwise, we have some other constant to remap. Start by checking to see
  // if all operands have an identity remapping.
  unsigned OpNo = 0, NumOperands = C->getNumOperands();
  Value *Mapped = nullptr;
  for (; OpNo != NumOperands; ++OpNo) {
    Value *Op = C->getOperand(OpNo);
    Mapped = mapValueOrNull(Op);
    if (!Mapped)
      return nullptr;
    if (Mapped != Op)
      break;
  }

  // See if the type mapper wants to remap the type as well.
  Type *NewTy = C->getType();
  if (TypeMapper)
    NewTy = TypeMapper->remapType(NewTy);

  // If the result type and all operands match up, then just insert an identity
  // mapping.
  if (OpNo == NumOperands && NewTy == C->getType())
    return getVM()[V] = C;

  // Okay, we need to create a new constant. We've already processed some or
  // all of the operands, set them all up now.
  SmallVector<Constant*, 8> Ops;
  Ops.reserve(NumOperands);
  for (unsigned j = 0; j != OpNo; ++j)
    Ops.push_back(cast<Constant>(C->getOperand(j)));

  // If one of the operands mismatches, push it and the other mapped operands.
  if (OpNo != NumOperands) {
    Ops.push_back(cast<Constant>(Mapped));

    // Map the rest of the operands that aren't processed yet.
    for (++OpNo; OpNo != NumOperands; ++OpNo) {
      Mapped = mapValueOrNull(C->getOperand(OpNo));
      if (!Mapped)
        return nullptr;
      Ops.push_back(cast<Constant>(Mapped));
    }
  }
  Type *NewSrcTy = nullptr;
  if (TypeMapper)
    if (auto *GEPO = dyn_cast<GEPOperator>(C))
      NewSrcTy = TypeMapper->remapType(GEPO->getSourceElementType());

  if (ConstantExpr *CE = dyn_cast<ConstantExpr>(C))
    return getVM()[V] = CE->getWithOperands(Ops, NewTy, false, NewSrcTy);
  if (isa<ConstantArray>(C))
    return getVM()[V] = ConstantArray::get(cast<ArrayType>(NewTy), Ops);
  if (isa<ConstantStruct>(C))
    return getVM()[V] = ConstantStruct::get(cast<StructType>(NewTy), Ops);
  if (isa<ConstantVector>(C))
    return getVM()[V] = ConstantVector::get(Ops);
  // If this is a no-operand constant, it must be because the type was remapped.
  if (isa<UndefValue>(C))
    return getVM()[V] = UndefValue::get(NewTy);
  if (isa<ConstantAggregateZero>(C))
    return getVM()[V] = ConstantAggregateZero::get(NewTy);
  assert(isa<ConstantPointerNull>(C));
  return getVM()[V] = ConstantPointerNull::get(cast<PointerType>(NewTy));
}

Value *Mapper::mapBlockAddress(const BlockAddress &BA) {
  Function *F = cast<Function>(mapValue(BA.getFunction()));

  // F may not have materialized its initializer. In that case, create a
  // dummy basic block for now, and replace it once we've materialized all
  // the initializers.
  BasicBlock *BB;
  if (F->empty()) {
    DelayedBBs.push_back(DelayedBasicBlock(BA));
    BB = DelayedBBs.back().TempBB.get();
  } else {
    BB = cast_or_null<BasicBlock>(mapValue(BA.getBasicBlock()));
  }

  return getVM()[&BA] = BlockAddress::get(F, BB ? BB : BA.getBasicBlock());
}

Metadata *Mapper::mapToMetadata(const Metadata *Key, Metadata *Val) {
  getVM().MD()[Key].reset(Val);
  return Val;
}

Metadata *Mapper::mapToSelf(const Metadata *MD) {
  return mapToMetadata(MD, const_cast<Metadata *>(MD));
}

std::optional<Metadata *> MDNodeMapper::tryToMapOperand(const Metadata *Op) {
  if (!Op)
    return nullptr;

  if (std::optional<Metadata *> MappedOp = M.mapSimpleMetadata(Op)) {
#ifndef NDEBUG
    if (auto *CMD = dyn_cast<ConstantAsMetadata>(Op))
      assert((!*MappedOp || M.getVM().count(CMD->getValue()) ||
              M.getVM().getMappedMD(Op)) &&
             "Expected Value to be memoized");
    else
      assert((isa<MDString>(Op) || M.getVM().getMappedMD(Op)) &&
             "Expected result to be memoized");
#endif
    return *MappedOp;
  }

  const MDNode &N = *cast<MDNode>(Op);
  if (N.isDistinct())
    return mapDistinctNode(N);
  return std::nullopt;
}

MDNode *MDNodeMapper::mapDistinctNode(const MDNode &N) {
  assert(N.isDistinct() && "Expected a distinct node");
  assert(!M.getVM().getMappedMD(&N) && "Expected an unmapped node");
  Metadata *NewM = nullptr;

  if (M.Flags & RF_ReuseAndMutateDistinctMDs) {
    NewM = M.mapToSelf(&N);
  } else {
    NewM = MDNode::replaceWithDistinct(N.clone());
    LLVM_DEBUG(dbgs() << "\nMap " << N << "\n"
                      << "To " << *NewM << "\n\n");
    M.mapToMetadata(&N, NewM);
  }
  DistinctWorklist.push_back(cast<MDNode>(NewM));

  return DistinctWorklist.back();
}

static ConstantAsMetadata *wrapConstantAsMetadata(const ConstantAsMetadata &CMD,
                                                  Value *MappedV) {
  if (CMD.getValue() == MappedV)
    return const_cast<ConstantAsMetadata *>(&CMD);
  return MappedV ? ConstantAsMetadata::getConstant(MappedV) : nullptr;
}

std::optional<Metadata *> MDNodeMapper::getMappedOp(const Metadata *Op) const {
  if (!Op)
    return nullptr;

  if (std::optional<Metadata *> MappedOp = M.getVM().getMappedMD(Op))
    return *MappedOp;

  if (isa<MDString>(Op))
    return const_cast<Metadata *>(Op);

  if (auto *CMD = dyn_cast<ConstantAsMetadata>(Op))
    return wrapConstantAsMetadata(*CMD, M.getVM().lookup(CMD->getValue()));

  return std::nullopt;
}

Metadata &MDNodeMapper::UniquedGraph::getFwdReference(MDNode &Op) {
  auto Where = Info.find(&Op);
  assert(Where != Info.end() && "Expected a valid reference");

  auto &OpD = Where->second;
  if (!OpD.HasChanged)
    return Op;

  // Lazily construct a temporary node.
  if (!OpD.Placeholder)
    OpD.Placeholder = Op.clone();

  return *OpD.Placeholder;
}

template <class OperandMapper>
void MDNodeMapper::remapOperands(MDNode &N, OperandMapper mapOperand) {
  assert(!N.isUniqued() && "Expected distinct or temporary nodes");
  for (unsigned I = 0, E = N.getNumOperands(); I != E; ++I) {
    Metadata *Old = N.getOperand(I);
    Metadata *New = mapOperand(Old);
    if (Old != New)
      LLVM_DEBUG(dbgs() << "Replacing Op " << Old << " with " << New << " in "
                        << N << "\n");

    if (Old != New)
      N.replaceOperandWith(I, New);
  }
}

namespace {

/// An entry in the worklist for the post-order traversal.
struct POTWorklistEntry {
  MDNode *N;              ///< Current node.
  MDNode::op_iterator Op; ///< Current operand of \c N.

  /// Keep a flag of whether operands have changed in the worklist to avoid
  /// hitting the map in \a UniquedGraph.
  bool HasChanged = false;

  POTWorklistEntry(MDNode &N) : N(&N), Op(N.op_begin()) {}
};

} // end anonymous namespace

bool MDNodeMapper::createPOT(UniquedGraph &G, const MDNode &FirstN) {
  assert(G.Info.empty() && "Expected a fresh traversal");
  assert(FirstN.isUniqued() && "Expected uniqued node in POT");

  // Construct a post-order traversal of the uniqued subgraph under FirstN.
  bool AnyChanges = false;
  SmallVector<POTWorklistEntry, 16> Worklist;
  Worklist.push_back(POTWorklistEntry(const_cast<MDNode &>(FirstN)));
  (void)G.Info[&FirstN];
  while (!Worklist.empty()) {
    // Start or continue the traversal through this node's operands.
    auto &WE = Worklist.back();
    if (MDNode *N = visitOperands(G, WE.Op, WE.N->op_end(), WE.HasChanged)) {
      // Push a new node to traverse first.
      Worklist.push_back(POTWorklistEntry(*N));
      continue;
    }

    // Push the node onto the POT.
    assert(WE.N->isUniqued() && "Expected only uniqued nodes");
    assert(WE.Op == WE.N->op_end() && "Expected to visit all operands");
    auto &D = G.Info[WE.N];
    AnyChanges |= D.HasChanged = WE.HasChanged;
    D.ID = G.POT.size();
    G.POT.push_back(WE.N);

    // Pop the node off the worklist.
    Worklist.pop_back();
  }
  return AnyChanges;
}

MDNode *MDNodeMapper::visitOperands(UniquedGraph &G, MDNode::op_iterator &I,
                                    MDNode::op_iterator E, bool &HasChanged) {
  while (I != E) {
    Metadata *Op = *I++; // Increment even on early return.
    if (std::optional<Metadata *> MappedOp = tryToMapOperand(Op)) {
      // Check if the operand changes.
      HasChanged |= Op != *MappedOp;
      continue;
    }

    // A uniqued metadata node.
    MDNode &OpN = *cast<MDNode>(Op);
    assert(OpN.isUniqued() &&
           "Only uniqued operands cannot be mapped immediately");
    if (G.Info.insert(std::make_pair(&OpN, Data())).second)
      return &OpN; // This is a new one. Return it.
  }
  return nullptr;
}

void MDNodeMapper::UniquedGraph::propagateChanges() {
  bool AnyChanges;
  do {
    AnyChanges = false;
    for (MDNode *N : POT) {
      auto &D = Info[N];
      if (D.HasChanged)
        continue;

      if (llvm::none_of(N->operands(), [&](const Metadata *Op) {
            auto Where = Info.find(Op);
            return Where != Info.end() && Where->second.HasChanged;
          }))
        continue;

      AnyChanges = D.HasChanged = true;
    }
  } while (AnyChanges);
}

void MDNodeMapper::mapNodesInPOT(UniquedGraph &G) {
  // Construct uniqued nodes, building forward references as necessary.
  SmallVector<MDNode *, 16> CyclicNodes;
  for (auto *N : G.POT) {
    auto &D = G.Info[N];
    if (!D.HasChanged) {
      // The node hasn't changed.
      M.mapToSelf(N);
      continue;
    }

    // Remember whether this node had a placeholder.
    bool HadPlaceholder(D.Placeholder);

    // Clone the uniqued node and remap the operands.
    TempMDNode ClonedN = D.Placeholder ? std::move(D.Placeholder) : N->clone();
    remapOperands(*ClonedN, [this, &D, &G](Metadata *Old) {
      if (std::optional<Metadata *> MappedOp = getMappedOp(Old))
        return *MappedOp;
      (void)D;
      assert(G.Info[Old].ID > D.ID && "Expected a forward reference");
      return &G.getFwdReference(*cast<MDNode>(Old));
    });

    auto *NewN = MDNode::replaceWithUniqued(std::move(ClonedN));
    if (N && NewN && N != NewN) {
      LLVM_DEBUG(dbgs() << "\nMap " << *N << "\n"
                        << "To " << *NewN << "\n\n");
    }

    M.mapToMetadata(N, NewN);

    // Nodes that were referenced out of order in the POT are involved in a
    // uniquing cycle.
    if (HadPlaceholder)
      CyclicNodes.push_back(NewN);
  }

  // Resolve cycles.
  for (auto *N : CyclicNodes)
    if (!N->isResolved())
      N->resolveCycles();
}

Metadata *MDNodeMapper::map(const MDNode &N) {
  assert(DistinctWorklist.empty() && "MDNodeMapper::map is not recursive");
  assert(!(M.Flags & RF_NoModuleLevelChanges) &&
         "MDNodeMapper::map assumes module-level changes");

  // Require resolved nodes whenever metadata might be remapped.
  assert(N.isResolved() && "Unexpected unresolved node");

  Metadata *MappedN =
      N.isUniqued() ? mapTopLevelUniquedNode(N) : mapDistinctNode(N);
  while (!DistinctWorklist.empty())
    remapOperands(*DistinctWorklist.pop_back_val(), [this](Metadata *Old) {
      if (std::optional<Metadata *> MappedOp = tryToMapOperand(Old))
        return *MappedOp;
      return mapTopLevelUniquedNode(*cast<MDNode>(Old));
    });
  return MappedN;
}

Metadata *MDNodeMapper::mapTopLevelUniquedNode(const MDNode &FirstN) {
  assert(FirstN.isUniqued() && "Expected uniqued node");

  // Create a post-order traversal of uniqued nodes under FirstN.
  UniquedGraph G;
  if (!createPOT(G, FirstN)) {
    // Return early if no nodes have changed.
    for (const MDNode *N : G.POT)
      M.mapToSelf(N);
    return &const_cast<MDNode &>(FirstN);
  }

  // Update graph with all nodes that have changed.
  G.propagateChanges();

  // Map all the nodes in the graph.
  mapNodesInPOT(G);

  // Return the original node, remapped.
  return *getMappedOp(&FirstN);
}

std::optional<Metadata *> Mapper::mapSimpleMetadata(const Metadata *MD) {
  // If the value already exists in the map, use it.
  if (std::optional<Metadata *> NewMD = getVM().getMappedMD(MD))
    return *NewMD;

  if (isa<MDString>(MD))
    return const_cast<Metadata *>(MD);

  // This is a module-level metadata. If nothing at the module level is
  // changing, use an identity mapping.
  if ((Flags & RF_NoModuleLevelChanges))
    return const_cast<Metadata *>(MD);

  if (auto *CMD = dyn_cast<ConstantAsMetadata>(MD)) {
    // Don't memoize ConstantAsMetadata. Instead of lasting until the
    // LLVMContext is destroyed, they can be deleted when the GlobalValue they
    // reference is destructed. These aren't super common, so the extra
    // indirection isn't that expensive.
    return wrapConstantAsMetadata(*CMD, mapValue(CMD->getValue()));
  }

  assert(isa<MDNode>(MD) && "Expected a metadata node");

  return std::nullopt;
}

Metadata *Mapper::mapMetadata(const Metadata *MD) {
  assert(MD && "Expected valid metadata");
  assert(!isa<LocalAsMetadata>(MD) && "Unexpected local metadata");

  if (std::optional<Metadata *> NewMD = mapSimpleMetadata(MD))
    return *NewMD;

  return MDNodeMapper(*this).map(*cast<MDNode>(MD));
}

void Mapper::flush() {
  // Flush out the worklist of global values.
  while (!Worklist.empty()) {
    WorklistEntry E = Worklist.pop_back_val();
    CurrentMCID = E.MCID;
    switch (E.Kind) {
    case WorklistEntry::MapGlobalInit:
      E.Data.GVInit.GV->setInitializer(mapConstant(E.Data.GVInit.Init));
      remapGlobalObjectMetadata(*E.Data.GVInit.GV);
      break;
    case WorklistEntry::MapAppendingVar: {
      unsigned PrefixSize = AppendingInits.size() - E.AppendingGVNumNewMembers;
      // The mapAppendingVariable call can change AppendingInits if the
      // initializer for the variable depends on another appending global;
      // because of that, the inits need to be extracted and updated before
      // the call.
      SmallVector<Constant *, 8> NewInits(
          drop_begin(AppendingInits, PrefixSize));
      AppendingInits.resize(PrefixSize);
      mapAppendingVariable(*E.Data.AppendingGV.GV,
                           E.Data.AppendingGV.InitPrefix,
                           E.AppendingGVIsOldCtorDtor, ArrayRef(NewInits));
      break;
    }
    case WorklistEntry::MapAliasOrIFunc: {
      GlobalValue *GV = E.Data.AliasOrIFunc.GV;
      Constant *Target = mapConstant(E.Data.AliasOrIFunc.Target);
      if (auto *GA = dyn_cast<GlobalAlias>(GV))
        GA->setAliasee(Target);
      else if (auto *GI = dyn_cast<GlobalIFunc>(GV))
        GI->setResolver(Target);
      else
        llvm_unreachable("Not alias or ifunc");
      break;
    }
    case WorklistEntry::RemapFunction:
      remapFunction(*E.Data.RemapF);
      break;
    }
  }
  CurrentMCID = 0;

  // Finish logic for block addresses now that all global values have been
  // handled.
  while (!DelayedBBs.empty()) {
    DelayedBasicBlock DBB = DelayedBBs.pop_back_val();
    BasicBlock *BB = cast_or_null<BasicBlock>(mapValue(DBB.OldBB));
    DBB.TempBB->replaceAllUsesWith(BB ? BB : DBB.OldBB);
  }
}

void Mapper::remapInstruction(Instruction *I) {
  // Remap operands.
  for (Use &Op : I->operands()) {
    Value *V = mapValue(Op);
    // If we aren't ignoring missing entries, assert that something happened.
    if (V)
      Op = V;
    else
      assert((Flags & RF_IgnoreMissingLocals) &&
             "Referenced value not in value map!");
  }

  // Remap phi nodes' incoming blocks.
  if (PHINode *PN = dyn_cast<PHINode>(I)) {
    for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
      Value *V = mapValue(PN->getIncomingBlock(i));
      // If we aren't ignoring missing entries, assert that something happened.
      if (V)
        PN->setIncomingBlock(i, cast<BasicBlock>(V));
      else
        assert((Flags & RF_IgnoreMissingLocals) &&
               "Referenced block not in value map!");
    }
  }

  // Remap attached metadata.
  SmallVector<std::pair<unsigned, MDNode *>, 4> MDs;
  I->getAllMetadata(MDs);
  for (const auto &MI : MDs) {
    MDNode *Old = MI.second;
    MDNode *New = cast_or_null<MDNode>(mapMetadata(Old));
    if (New != Old)
      I->setMetadata(MI.first, New);
  }

  if (!TypeMapper)
    return;

  // If the instruction's type is being remapped, do so now.
  if (auto *CB = dyn_cast<CallBase>(I)) {
    SmallVector<Type *, 3> Tys;
    FunctionType *FTy = CB->getFunctionType();
    Tys.reserve(FTy->getNumParams());
    for (Type *Ty : FTy->params())
      Tys.push_back(TypeMapper->remapType(Ty));
    CB->mutateFunctionType(FunctionType::get(
        TypeMapper->remapType(I->getType()), Tys, FTy->isVarArg()));

    LLVMContext &C = CB->getContext();
    AttributeList Attrs = CB->getAttributes();
    for (unsigned i = 0; i < Attrs.getNumAttrSets(); ++i) {
      for (int AttrIdx = Attribute::FirstTypeAttr;
           AttrIdx <= Attribute::LastTypeAttr; AttrIdx++) {
        Attribute::AttrKind TypedAttr = (Attribute::AttrKind)AttrIdx;
        if (Type *Ty =
                Attrs.getAttributeAtIndex(i, TypedAttr).getValueAsType()) {
          Attrs = Attrs.replaceAttributeTypeAtIndex(C, i, TypedAttr,
                                                    TypeMapper->remapType(Ty));
          break;
        }
      }
    }
    CB->setAttributes(Attrs);
    return;
  }
  if (auto *AI = dyn_cast<AllocaInst>(I))
    AI->setAllocatedType(TypeMapper->remapType(AI->getAllocatedType()));
  if (auto *GEP = dyn_cast<GetElementPtrInst>(I)) {
    GEP->setSourceElementType(
        TypeMapper->remapType(GEP->getSourceElementType()));
    GEP->setResultElementType(
        TypeMapper->remapType(GEP->getResultElementType()));
  }
  I->mutateType(TypeMapper->remapType(I->getType()));
}

void Mapper::remapGlobalObjectMetadata(GlobalObject &GO) {
  SmallVector<std::pair<unsigned, MDNode *>, 8> MDs;
  GO.getAllMetadata(MDs);
  GO.clearMetadata();
  for (const auto &I : MDs)
    GO.addMetadata(I.first, *cast<MDNode>(mapMetadata(I.second)));
}

void Mapper::remapFunction(Function &F) {
  // Remap the operands.
  for (Use &Op : F.operands())
    if (Op)
      Op = mapValue(Op);

  // Remap the metadata attachments.
  remapGlobalObjectMetadata(F);

  // Remap the argument types.
  if (TypeMapper)
    for (Argument &A : F.args())
      A.mutateType(TypeMapper->remapType(A.getType()));

  // Remap the instructions.
  for (BasicBlock &BB : F)
    for (Instruction &I : BB)
      remapInstruction(&I);
}

void Mapper::mapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                                  bool IsOldCtorDtor,
                                  ArrayRef<Constant *> NewMembers) {
  SmallVector<Constant *, 16> Elements;
  if (InitPrefix) {
    unsigned NumElements =
        cast<ArrayType>(InitPrefix->getType())->getNumElements();
    for (unsigned I = 0; I != NumElements; ++I)
      Elements.push_back(InitPrefix->getAggregateElement(I));
  }

  PointerType *VoidPtrTy;
  Type *EltTy;
  if (IsOldCtorDtor) {
    // FIXME: This upgrade is done during linking to support the C API. See
    // also IRLinker::linkAppendingVarProto() in IRMover.cpp.
    VoidPtrTy = Type::getInt8Ty(GV.getContext())->getPointerTo();
    auto &ST = *cast<StructType>(NewMembers.front()->getType());
    Type *Tys[3] = {ST.getElementType(0), ST.getElementType(1), VoidPtrTy};
    EltTy = StructType::get(GV.getContext(), Tys, false);
  }

  for (auto *V : NewMembers) {
    Constant *NewV;
    if (IsOldCtorDtor) {
      auto *S = cast<ConstantStruct>(V);
      auto *E1 = cast<Constant>(mapValue(S->getOperand(0)));
      auto *E2 = cast<Constant>(mapValue(S->getOperand(1)));
      Constant *Null = Constant::getNullValue(VoidPtrTy);
      NewV = ConstantStruct::get(cast<StructType>(EltTy), E1, E2, Null);
    } else {
      NewV = cast_or_null<Constant>(mapValue(V));
    }
    Elements.push_back(NewV);
  }

  GV.setInitializer(
      ConstantArray::get(cast<ArrayType>(GV.getValueType()), Elements));
}

void Mapper::scheduleMapGlobalInitializer(GlobalVariable &GV, Constant &Init,
                                          unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapGlobalInit;
  WE.MCID = MCID;
  WE.Data.GVInit.GV = &GV;
  WE.Data.GVInit.Init = &Init;
  Worklist.push_back(WE);
}

void Mapper::scheduleMapAppendingVariable(GlobalVariable &GV,
                                          Constant *InitPrefix,
                                          bool IsOldCtorDtor,
                                          ArrayRef<Constant *> NewMembers,
                                          unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapAppendingVar;
  WE.MCID = MCID;
  WE.Data.AppendingGV.GV = &GV;
  WE.Data.AppendingGV.InitPrefix = InitPrefix;
  WE.AppendingGVIsOldCtorDtor = IsOldCtorDtor;
  WE.AppendingGVNumNewMembers = NewMembers.size();
  Worklist.push_back(WE);
  AppendingInits.append(NewMembers.begin(), NewMembers.end());
}

void Mapper::scheduleMapAliasOrIFunc(GlobalValue &GV, Constant &Target,
                                     unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert((isa<GlobalAlias>(GV) || isa<GlobalIFunc>(GV)) &&
         "Should be alias or ifunc");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapAliasOrIFunc;
  WE.MCID = MCID;
  WE.Data.AliasOrIFunc.GV = &GV;
  WE.Data.AliasOrIFunc.Target = &Target;
  Worklist.push_back(WE);
}

void Mapper::scheduleRemapFunction(Function &F, unsigned MCID) {
  assert(AlreadyScheduled.insert(&F).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::RemapFunction;
  WE.MCID = MCID;
  WE.Data.RemapF = &F;
  Worklist.push_back(WE);
}

void Mapper::addFlags(RemapFlags Flags) {
  assert(!hasWorkToDo() && "Expected to have flushed the worklist");
  this->Flags = this->Flags | Flags;
}

static Mapper *getAsMapper(void *pImpl) {
  return reinterpret_cast<Mapper *>(pImpl);
}

namespace {

class FlushingMapper {
  Mapper &M;

public:
  explicit FlushingMapper(void *pImpl) : M(*getAsMapper(pImpl)) {
    assert(!M.hasWorkToDo() && "Expected to be flushed");
  }

  ~FlushingMapper() { M.flush(); }

  Mapper *operator->() const { return &M; }
};

} // end anonymous namespace

ValueMapper::ValueMapper(ValueToValueMapTy &VM, RemapFlags Flags,
                         ValueMapTypeRemapper *TypeMapper,
                         ValueMaterializer *Materializer)
    : pImpl(new Mapper(VM, Flags, TypeMapper, Materializer)) {}

ValueMapper::~ValueMapper() { delete getAsMapper(pImpl); }

unsigned
ValueMapper::registerAlternateMappingContext(ValueToValueMapTy &VM,
                                             ValueMaterializer *Materializer) {
  return getAsMapper(pImpl)->registerAlternateMappingContext(VM, Materializer);
}

void ValueMapper::addFlags(RemapFlags Flags) {
  FlushingMapper(pImpl)->addFlags(Flags);
}

Value *ValueMapper::mapValue(const Value &V) {
  return FlushingMapper(pImpl)->mapValue(&V);
}

Constant *ValueMapper::mapConstant(const Constant &C) {
  return cast_or_null<Constant>(mapValue(C));
}

Metadata *ValueMapper::mapMetadata(const Metadata &MD) {
  return FlushingMapper(pImpl)->mapMetadata(&MD);
}

MDNode *ValueMapper::mapMDNode(const MDNode &N) {
  return cast_or_null<MDNode>(mapMetadata(N));
}

void ValueMapper::remapInstruction(Instruction &I) {
  FlushingMapper(pImpl)->remapInstruction(&I);
}

void ValueMapper::remapFunction(Function &F) {
  FlushingMapper(pImpl)->remapFunction(F);
}

void ValueMapper::scheduleMapGlobalInitializer(GlobalVariable &GV,
                                               Constant &Init,
                                               unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapGlobalInitializer(GV, Init, MCID);
}
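
// Illustrative sketch of the deferred-work pattern above (hypothetical client
// code, in the style of IRMover): scheduled entries sit on the worklist until
// the next flushing call, which processes them through FlushingMapper.
//
//   ValueMapper Mapper(Map, RF_None);
//   Mapper.scheduleMapGlobalInitializer(NewGV, *OldGV.getInitializer(), 0);
//   ...
//   Mapper.mapValue(SomeValue); // flushes the scheduled worklist on return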

void ValueMapper::scheduleMapAppendingVariable(GlobalVariable &GV,
                                               Constant *InitPrefix,
                                               bool IsOldCtorDtor,
                                               ArrayRef<Constant *> NewMembers,
                                               unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapAppendingVariable(
      GV, InitPrefix, IsOldCtorDtor, NewMembers, MCID);
}

void ValueMapper::scheduleMapGlobalAlias(GlobalAlias &GA, Constant &Aliasee,
                                         unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapAliasOrIFunc(GA, Aliasee, MCID);
}

void ValueMapper::scheduleMapGlobalIFunc(GlobalIFunc &GI, Constant &Resolver,
                                         unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapAliasOrIFunc(GI, Resolver, MCID);
}

void ValueMapper::scheduleRemapFunction(Function &F, unsigned MCID) {
  getAsMapper(pImpl)->scheduleRemapFunction(F, MCID);
}