/**
 * Copyright (c) Glow Contributors. See CONTRIBUTORS file.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "glow/Backend/BackendUtils.h"
#include "glow/IR/IRUtils.h"
#include "glow/IR/Instrs.h"
#include "glow/Support/Debug.h"

#include "llvm/Support/CommandLine.h"

#include <glog/logging.h>

#define DEBUG_TYPE "backend-utils"

using namespace glow;

using llvm::cast;
using llvm::dyn_cast;
using llvm::isa;

static llvm::cl::OptionCategory BackendUtilsCat("Glow Backend Utils Options");

static llvm::cl::opt<bool> reuseActivationsMemory(
    "reuse-activation-memory-allocations",
    llvm::cl::desc("Should activation memory allocations be reused"),
    llvm::cl::init(true), llvm::cl::cat(BackendUtilsCat));

namespace {
/// Allocate space for the activations of \p instrs using \p allocator and
/// store the resultant symbols in \p symbolTable.
void allocateActivations(const glow::IRFunction::InstListTy &instrs,
                         MemoryAllocator &allocator,
                         glow::runtime::SymbolTableTy &symbolTable) {
  for (const auto &I : instrs) {
    if (auto *A = dyn_cast<AllocActivationInst>(&I)) {
      auto numBytes = I.getSizeInBytes();
      size_t addr = allocator.allocate(numBytes, A);
      assert(!symbolTable.count(std::string(A->getName())) &&
             "Allocation already made!");
      runtime::RuntimeSymbolInfo symbol;
      symbol.offset = addr;
      symbol.size = numBytes;
      symbol.type = *A->getType();
      symbol.input = false;
      symbol.output = false;
      symbol.symbolCategory = glow::runtime::SymbolCategory::Activation;
      symbolTable.emplace(std::string(A->getName()), symbol);
      DEBUG_GLOW(LOG(INFO) << strFormat(
                     "Assigned address to activation %s: %zx (%zd bytes)\n",
                     A->getName().data(), symbol.offset, symbol.size));
      continue;
    }

    if (auto *TV = dyn_cast<TensorViewInst>(&I)) {
      // Calculate the tensorview's offset into its underlying buffer, using
      // the source (origin) of the tensorview.
      assert(!symbolTable.count(std::string(TV->getName())) &&
             "Allocation already made!");
      auto *tvSource = getOrigin(TV);
      assert(symbolTable.count(std::string(tvSource->getName())) &&
             "Source allocation not found!");
      runtime::RuntimeSymbolInfo symbol;
      size_t originAddr = symbolTable[std::string(tvSource->getName())].offset;
      size_t offset = calculateTensorViewOffset(TV);

      symbol.offset = originAddr + offset;
      symbol.size = TV->getSizeInBytes();
      symbol.type = *TV->getType();
      symbol.input = false;
      symbol.output = false;
      auto parentCategory =
          symbolTable.find(tvSource->getName())->second.symbolCategory;
      if (parentCategory == glow::runtime::SymbolCategory::Placeholder) {
        symbol.symbolCategory =
            glow::runtime::SymbolCategory::PlaceholderTensorView;
      } else {
        symbol.symbolCategory =
            glow::runtime::SymbolCategory::ConstantTensorView;
      }
      symbolTable.emplace(std::string(TV->getName()), symbol);
      DEBUG_GLOW(LOG(INFO) << strFormat(
                     "Assigned address to tensor view %s: %zx (%zd bytes)\n",
                     TV->getName().data(), symbol.offset, symbol.size));
      continue;
    }

    if (auto *D = dyn_cast<DeallocActivationInst>(&I)) {
      auto *A = D->getAlloc();
      assert(symbolTable.count(std::string(A->getName())) &&
             "Invalid deallocation!");
      if (reuseActivationsMemory) {
        allocator.deallocate(A);
      }
      continue;
    }
  }
}

/// Allocate space for the Constants in \p constants using \p allocator and
/// store the resultant symbols in \p symbolTable.
void allocateConstants(const glow::ConstList &constants,
                       MemoryAllocator &allocator,
                       glow::runtime::SymbolTableTy &symbolTable) {
  for (auto const *V : constants) {
    auto size = V->getType()->getSizeInBytes();
    auto offset = allocator.allocate(size, V);
    runtime::RuntimeSymbolInfo symbol;
    symbol.offset = offset;
    symbol.size = size;
    symbol.type = *V->getType();
    symbol.input = false;
    symbol.output = false;
    symbol.symbolCategory = glow::runtime::SymbolCategory::Constant;
    symbolTable.emplace(V->getName(), symbol);
  }
}

/// Allocate space for the Placeholders in \p placeholders using \p allocator
/// and store the resultant symbols in \p symbolTable.
void allocatePlaceholders(const ContiguousPlaceholders &placeholders,
                          MemoryAllocator &allocator,
                          glow::runtime::SymbolTableTy &symbolTable) {
  for (const auto &p : placeholders) {
    auto &V = p.addr;
    auto size = V->getType()->getSizeInBytes();
    auto offset = allocator.allocate(size, V);
    runtime::RuntimeSymbolInfo symbol;
    symbol.offset = offset;
    symbol.size = size;
    symbol.type = *V->getType();
    symbol.output = p.isOutput;
    symbol.input = p.isInput;
    symbol.symbolCategory = glow::runtime::SymbolCategory::Placeholder;
    symbolTable.emplace(V->getName(), symbol);
  }
}
} // namespace

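/// Move-construct a RuntimeBundle by taking over the contents of \p rhs.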
glow::runtime::RuntimeBundle::RuntimeBundle(
    glow::runtime::RuntimeBundle &&rhs) {
  *this = std::move(rhs);
}

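/// Move-assign from \p rhs: swap the contents of the two bundles and mark
/// \p rhs as invalid.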
glow::runtime::RuntimeBundle &glow::runtime::RuntimeBundle::
operator=(glow::runtime::RuntimeBundle &&rhs) {
  if (this == &rhs) {
    // Do nothing if rhs is the same object as this.
    return *this;
  }

  std::swap(symbolTable_, rhs.symbolTable_);
  std::swap(constants_, rhs.constants_);
  std::swap(constantWeightVarsMemSize_, rhs.constantWeightVarsMemSize_);
  std::swap(mutableWeightVarsMemSize_, rhs.mutableWeightVarsMemSize_);
  std::swap(activationsMemSize_, rhs.activationsMemSize_);
  std::swap(isValid_, rhs.isValid_);
  // rhs is not valid now that all of its contents have been stolen.
  rhs.isValid_ = false;
  return *this;
}

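/// Collect the constants of the module that owns \p F into the bundle's
/// single block of constant memory.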
void glow::runtime::RuntimeBundle::collectConstants(const IRFunction *F) {
  DCHECK(isValid_);
  collectConstants(F->getGraph()->getParent());
}

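/// Free the bundle's block of constant memory, if one was allocated.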
void glow::runtime::RuntimeBundle::freeConstants() {
  DCHECK(isValid_);

  if (constants_) {
    glow::alignedFree(constants_);
    constants_ = nullptr;
  }
}
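
/// Copy the payload of every constant of \p M that has an entry in the symbol
/// table into one contiguous, aligned block of memory.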
void glow::runtime::RuntimeBundle::collectConstants(const Module *M) {
  DCHECK(isValid_);

  // At compile time condense constants to a single block of memory.
  // This allows the graph to go away after compile time.
  // If there are no constants, leave constants_ as nullptr.
  if (constantWeightVarsMemSize_ == 0) {
    constants_ = nullptr;
    return;
  }

  assert(constants_ == nullptr && "constants already allocated");
  constants_ =
      (uint8_t *)alignedAlloc(constantWeightVarsMemSize_, TensorAlignment);

  for (const auto &symbol : symbolTable_) {
    llvm::StringRef name = symbol.first;
    const RuntimeSymbolInfo &info = symbol.second;

    Constant *c = M->getConstantByName(name);
    if (!c) {
      continue;
    }
    auto *payload = c->getPayload().getUnsafePtr();
    assert(info.size == c->getPayload().getSizeInBytes() &&
           "Mismatched constant size");

    // Copy weight to offset.
    memcpy(constants_ + info.offset, payload, info.size);
  }
}

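/// \returns the offset assigned to the symbol for \p v. Asserts if no such
/// symbol exists in the symbol table.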
size_t glow::runtime::RuntimeBundle::getValueOffset(const Named *v) const {
  DCHECK(isValid_);
  auto it = symbolTable_.find(std::string(v->getName()));
  assert(it != symbolTable_.end() && "Symbol not found.");
  return it->second.offset;
}

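/// \returns the RuntimeSymbolInfo recorded for \p v. Asserts if no such
/// symbol exists in the symbol table.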
const runtime::RuntimeSymbolInfo &
runtime::RuntimeBundle::getSymbolInfo(const Named *v) const {
  DCHECK(isValid_);
  auto it = symbolTable_.find(std::string(v->getName()));
  assert(it != symbolTable_.end() && "Symbol not found.");
  return it->second;
}

namespace glow {

/// \returns true if \p PH is an output placeholder in the function \p F,
/// i.e. if some user writes to it (uses it as an Out or InOut operand).
bool isOutput(const Placeholder *PH, const IRFunction &F) {
  auto *weight = F.getWeightForNode(PH);
  assert(weight && "Weight for a node was not found");
  for (const auto &use : ValueUses(weight)) {
    Instruction *user = use.get();
    // Ignore deallocs.
    if (isa<DeallocActivationInst>(user)) {
      continue;
    }
    OperandKind kind = use.getOperand().second;
    if (kind == OperandKind::Out || kind == OperandKind::InOut) {
      return true;
    }
  }
  return false;
}

/// \returns true if \p PH is an input placeholder in the function \p F.
bool isInput(const Placeholder *PH, const IRFunction &F) {
  // Check that the PH is always used as an @in parameter by the current
  // function.
  auto *weight = F.getWeightForNode(PH);
  assert(weight && "Weight for a node was not found");
  for (const auto &use : ValueUses(weight)) {
    Instruction *user = use.get();
    // Ignore deallocs.
    if (isa<DeallocActivationInst>(user)) {
      continue;
    }
    OperandKind kind = use.getOperand().second;
    if (kind == OperandKind::In || kind == OperandKind::InOut) {
      return true;
    }
  }
  return false;
}

/// \returns true if \p PH is an output Placeholder for any function in \p
/// funcs.
bool isOutput(const Placeholder *PH,
              const std::vector<const Function *> &funcs) {
  for (const auto &f : funcs) {
    if (isOutput(PH, *f)) {
      return true;
    }
  }

  return false;
}

/// \returns true if \p PH is an input Placeholder for any function in \p funcs.
bool isInput(const Placeholder *PH,
             const std::vector<const Function *> &funcs) {
  for (const auto &f : funcs) {
    if (isInput(PH, *f)) {
      return true;
    }
  }

  return false;
}

/// \returns true if \p N does not have a fused activation.
bool checkNoFusionForNode(const Node &N) {
#define DEF_NODE(CLASS, NAME)                                                  \
  case Kinded::Kind::CLASS##Kind: {                                            \
    const CLASS *CI = llvm::cast<CLASS>(&N);                                   \
    return checkNoFusion(*CI);                                                 \
    break;                                                                     \
  }
  switch (N.getKind()) {
#include "glow/AutoGenNodes.def"
  default:
    llvm_unreachable("Invalid node.");
  }
  return true;
}

/// \returns true if \p I does not have a fused activation.
bool checkNoFusionForInstr(const Instruction &I) {
#define DEF_VALUE(CLASS, NAME)
#define DEF_INSTR(CLASS, NAME)                                                 \
  case Kinded::Kind::CLASS##Kind: {                                            \
    const CLASS *CI = llvm::cast<CLASS>(&I);                                   \
    return checkNoFusion(*CI);                                                 \
    break;                                                                     \
  }
#define DEF_BACKEND_SPECIFIC_INSTR(CLASS, NAME)                                \
  case Kinded::Kind::CLASS##Kind: {                                            \
    const CLASS *CI = llvm::cast<CLASS>(&I);                                   \
    return checkNoFusion(*CI);                                                 \
    break;                                                                     \
  }
  switch (I.getKind()) {
#include "glow/AutoGenInstr.def"
  default:
    llvm_unreachable("Invalid instruction.");
  }
  return true;
}

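/// Sort the placeholders in \p holders into contiguous groups with respect to
/// \p F: pure inputs first, then input/outputs, then pure outputs, then
/// placeholders that are neither. \returns the ordered list together with
/// each placeholder's input/output flags.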
template <typename FUN, typename ARR>
ContiguousPlaceholders getContiguousPlaceHolder(const ARR &holders,
                                                const FUN &F) {
  // Pure input placeholders.
  std::vector<const Placeholder *> inputPlaceholders;
  // Pure output placeholders.
  std::vector<const Placeholder *> outputPlaceholders;
  // Input&output placeholders.
  std::vector<const Placeholder *> inputOutputPlaceholders;
  // Neither input nor output placeholders.
  std::vector<const Placeholder *> emptyPlaceholders;
  // Return value.
  ContiguousPlaceholders ret;

  for (auto &v : holders) {
    if (isInput(v, F)) {
      if (!isOutput(v, F)) {
        inputPlaceholders.push_back(v);
      } else {
        inputOutputPlaceholders.push_back(v);
      }
    } else {
      if (isOutput(v, F)) {
        outputPlaceholders.push_back(v);
      } else {
        emptyPlaceholders.push_back(v);
      }
    }
  }

  for (auto &v : inputPlaceholders) {
    PlaceholderInputOutputInfo holder;
    holder.addr = v;
    holder.isInput = true;
    holder.isOutput = false;
    ret.push_back(holder);
  }

  for (auto &v : inputOutputPlaceholders) {
    PlaceholderInputOutputInfo holder;
    holder.addr = v;
    holder.isInput = true;
    holder.isOutput = true;
    ret.push_back(holder);
  }

  for (auto &v : outputPlaceholders) {
    PlaceholderInputOutputInfo holder;
    holder.addr = v;
    holder.isInput = false;
    holder.isOutput = true;
    ret.push_back(holder);
  }

  for (auto &v : emptyPlaceholders) {
    PlaceholderInputOutputInfo holder;
    holder.addr = v;
    holder.isInput = false;
    holder.isOutput = false;
    ret.push_back(holder);
  }

  return ret;
}

/// \returns true if \p dst is capable of handling a partial tensor as input
/// from \p src.
static bool allowsPartialInput(const Node *src, const Node *dst) {
  // If \p src is used as the indices or weights of a sparse lookup, it is
  // safe to access a partial tensor.
  if (auto *SLS =
          llvm::dyn_cast<FusedRowwiseQuantizedSparseLengthsWeightedSumNode>(
              dst)) {
    return src == SLS->getIndices() || src == SLS->getWeights();
  } else if (auto *SLS =
                 llvm::dyn_cast<FusedRowwiseQuantizedSparseLengthsSumNode>(
                     dst)) {
    return src == SLS->getIndices();
  } else if (auto *SLS = llvm::dyn_cast<SparseLengthsWeightedSumNode>(dst)) {
    return src == SLS->getIndices() || src == SLS->getWeights();
  } else if (auto *SLS = llvm::dyn_cast<SparseLengthsSumNode>(dst)) {
    return src == SLS->getIndices();
  } else if (auto *EBB = llvm::dyn_cast<EmbeddingBagNode>(dst)) {
    return src == EBB->getIndices() || src == EBB->getWeights();
  } else if (auto *EBB =
                 llvm::dyn_cast<EmbeddingBagByteRowwiseOffsetsNode>(dst)) {
    return src == EBB->getIndices() || src == EBB->getWeights();
  }
  return false;
}

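/// \returns true if every use of \p V inside \p F can handle a partial tensor
/// as input.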
bool allowsPartialInput(const Placeholder *V, const Function *F) {
  for (auto const &U : V->getUsers()) {
    if (U.getUser()->getParent() != F) {
      continue;
    }
    if (!allowsPartialInput(*U.get(), U.getUser())) {
      return false;
    }
  }
  return true;
}

/// \returns true if \p dst requires last-element padding for \p src.
/// It is assumed that \p src cannot be a partial input.
static bool requiresPadding(const Node *src, const Node *dst) {
  if (auto *EBB = llvm::dyn_cast<EmbeddingBagNode>(dst)) {
    return src == EBB->getOffsets();
  } else if (auto *EBB =
                 llvm::dyn_cast<EmbeddingBagByteRowwiseOffsetsNode>(dst)) {
    return src == EBB->getOffsets();
  }
  return false;
}

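/// \returns true if every use of \p V inside \p F requires last-element
/// padding.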
bool requiresPadding(const Placeholder *V, const Function *F) {
  // TODO: this function is largely duplicated with allowsPartialInput()
  // we should consider merging the two
  for (auto const &U : V->getUsers()) {
    if (U.getUser()->getParent() != F) {
      continue;
    }
    if (!requiresPadding(*U.get(), U.getUser())) {
      return false;
    }
  }
  return true;
}

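/// \returns true if \p V has at least one user in \p F.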
bool usedInFunction(const Placeholder *V, const Function *F) {
  for (auto const &U : V->getUsers()) {
    if (U.getUser()->getParent() == F) {
      return true;
    }
  }
  return false;
}

} // namespace glow

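/// Build a RuntimeBundle for \p F together with the IR functions \p funcs:
/// constants and placeholders come from the module of \p F, activations from
/// the instructions of \p funcs, and everything is allocated from a single
/// allocator so the three regions are contiguous.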
runtime::RuntimeBundle
runtime::RuntimeBundle::create(const Function &F,
                               const std::vector<const IRFunction *> &funcs) {
  std::map<std::string, runtime::RuntimeSymbolInfo> symbolTable;
  MemoryAllocator allocator("allocator", 0);
  uint64_t constantsMaxMem = 0, placeholdersMaxMem = 0, activationsMaxMem = 0;

  // Allocate constants.
  allocateConstants(F.getParent()->getConstants(), allocator, symbolTable);
  constantsMaxMem = allocator.getMaxMemoryUsage();

  // Allocate placeholders. Placeholders should be allocated in the order
  // Input|InputOutput|Output.
  std::vector<const Function *> graphs;
  graphs.reserve(funcs.size());
  for (const auto &f : funcs) {
    graphs.emplace_back(f->getGraph());
  }

  auto contiguousPlaceholders =
      getContiguousPlaceHolder(F.getParent()->getPlaceholders(), graphs);
  allocatePlaceholders(contiguousPlaceholders, allocator, symbolTable);
  placeholdersMaxMem = allocator.getMaxMemoryUsage() - constantsMaxMem;

  // Allocate activations.
  for (const auto &f : funcs) {
    allocateActivations(f->getInstrs(), allocator, symbolTable);
  }

  activationsMaxMem =
      allocator.getMaxMemoryUsage() - constantsMaxMem - placeholdersMaxMem;

  return runtime::RuntimeBundle(symbolTable, constantsMaxMem,
                                placeholdersMaxMem, activationsMaxMem);
}

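/// Build a RuntimeBundle for \p F without lowering it to IR. Only constants
/// and placeholders are allocated; the activations size is reported as zero.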
runtime::RuntimeBundle runtime::RuntimeBundle::create(const Function &F) {
  std::map<std::string, runtime::RuntimeSymbolInfo> symbolTable;

  MemoryAllocator constants("constants", 0);
  MemoryAllocator placeholders("placeholders", 0);

  // Allocate constants.
  allocateConstants(F.findConstants(), constants, symbolTable);

  // Allocate placeholders.
  // Placeholders should be allocated in the order Input|InputOutput|Output.
  auto contiguousPlaceholders =
      getContiguousPlaceHolder(F.findPlaceholders(), F);

  // Compute the offsets for Placeholders.
  allocatePlaceholders(contiguousPlaceholders, placeholders, symbolTable);

  return runtime::RuntimeBundle(symbolTable, constants.getMaxMemoryUsage(),
                                placeholders.getMaxMemoryUsage(),
                                /*activationsMaxSize*/ 0);
}

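/// Build a RuntimeBundle for the IR function \p F, allocating constants,
/// placeholders and activations from \p constantAllocator,
/// \p placeholderAllocator and \p activationsAllocator respectively. The
/// allocators may all refer to the same underlying allocator, in which case
/// the three regions are laid out contiguously.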
runtime::RuntimeBundle
runtime::RuntimeBundle::create(const IRFunction &F,
                               MemoryAllocator &constantAllocator,
                               MemoryAllocator &placeholderAllocator,
                               MemoryAllocator &activationsAllocator) {

  // If all allocators refer to the same underlying allocator, Constants,
  // Placeholders and activations will be allocated contiguously. The maximum
  // memory usage reported by the allocator for each kind of storage will
  // include the memory usage of all previously allocated types of storage and
  // needs to be adjusted accordingly.
  bool contiguous = (&constantAllocator == &placeholderAllocator &&
                     &constantAllocator == &activationsAllocator);
  // Handle Constants, Placeholders, and Activations, in that order.
  // Symbol table mapping symbol name to offset for runtime.
  std::map<std::string, runtime::RuntimeSymbolInfo> symbolTable;
  // Compute the offsets for Constants.
  for (auto &v : F.findConstants()) {
    assert(isa<WeightVar>(F.getWeightForNode(v)) && "Expected WeightVar");
    auto *w = cast<WeightVar>(F.getWeightForNode(v));
    auto numBytes = w->getSizeInBytes();
    size_t addr = constantAllocator.allocate(numBytes, v);
    runtime::RuntimeSymbolInfo symbol;
    symbol.size = numBytes;
    symbol.offset = addr;
    symbol.type = *w->getType();
    symbol.input = false;
    symbol.output = false;
    symbol.symbolCategory = SymbolCategory::Constant;
    symbolTable.emplace(std::string(v->getName()), symbol);
    DEBUG_GLOW(LOG(INFO) << strFormat(
                   "Assigned address to constant %s: %zx (%zd bytes)\n",
                   v->getName().data(), symbol.offset, symbol.size));
  }
  auto constantMaxSize = constantAllocator.getMaxMemoryUsage();

  // Placeholders should be allocated in the order Input|InputOutput|Output.
  auto contiguousPlaceholders =
      getContiguousPlaceHolder(F.findPlaceholders(), F);

  // Compute the offsets for Placeholders.
  for (auto it = contiguousPlaceholders.begin();
       it != contiguousPlaceholders.end(); it++) {
    auto &v = it->addr;
    assert(isa<WeightVar>(F.getWeightForNode(v)) && "Expected WeightVar");
    auto *w = cast<WeightVar>(F.getWeightForNode(v));
    auto numBytes = w->getSizeInBytes();
    size_t addr = placeholderAllocator.allocate(numBytes, w);
    runtime::RuntimeSymbolInfo symbol;
    symbol.offset = addr;
    symbol.size = numBytes;
    symbol.type = *w->getType();
    symbol.output = it->isOutput;
    symbol.input = it->isInput;
    symbol.symbolCategory = SymbolCategory::Placeholder;
    symbolTable.emplace(std::string(v->getName()), symbol);
    DEBUG_GLOW(LOG(INFO) << strFormat(
                   "Assigned address to mutable weight %s: %zx (%zd bytes)\n",
                   w->getName().data(), symbol.offset, symbol.size));
  }
  auto placeholderMaxSize = placeholderAllocator.getMaxMemoryUsage();
  if (contiguous) {
    placeholderMaxSize -= constantMaxSize;
  }

  // Compute the offsets for Activations.
  allocateActivations(F.getInstrs(), activationsAllocator, symbolTable);

  auto activationsMaxSize = activationsAllocator.getMaxMemoryUsage();
  if (contiguous) {
    activationsMaxSize -= constantMaxSize + placeholderMaxSize;
    DCHECK_EQ(constantAllocator.getMaxMemoryUsage(),
              constantMaxSize + placeholderMaxSize + activationsMaxSize);
  }

  return runtime::RuntimeBundle(symbolTable, constantMaxSize,
                                placeholderMaxSize, activationsMaxSize);
}

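/// Build a RuntimeBundle for the IR function \p F where constants,
/// placeholders and activations are all allocated contiguously from
/// \p allocator.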
runtime::RuntimeBundle
runtime::RuntimeBundle::create(const IRFunction &F,
                               MemoryAllocator &allocator) {
  return create(F, allocator, allocator, allocator);
}