//===--- CGBlocks.cpp - Emit LLVM Code for blocks ---------------*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This contains code to emit blocks.
10 //
11 //===----------------------------------------------------------------------===//
12 
13 #include "CGBlocks.h"
14 #include "CGCXXABI.h"
15 #include "CGDebugInfo.h"
16 #include "CGObjCRuntime.h"
17 #include "CGOpenCLRuntime.h"
18 #include "CodeGenFunction.h"
19 #include "CodeGenModule.h"
20 #include "ConstantEmitter.h"
21 #include "TargetInfo.h"
22 #include "clang/AST/Attr.h"
23 #include "clang/AST/DeclObjC.h"
24 #include "clang/CodeGen/ConstantInitBuilder.h"
25 #include "llvm/ADT/SmallSet.h"
26 #include "llvm/IR/DataLayout.h"
27 #include "llvm/IR/Module.h"
28 #include "llvm/Support/ScopedPrinter.h"
29 #include <algorithm>
30 #include <cstdio>
31 
32 using namespace clang;
33 using namespace CodeGen;
34 
35 CGBlockInfo::CGBlockInfo(const BlockDecl *block, StringRef name)
36     : Name(name), CXXThisIndex(0), CanBeGlobal(false), NeedsCopyDispose(false),
37       NoEscape(false), HasCXXObject(false), UsesStret(false),
38       HasCapturedVariableLayout(false), CapturesNonExternalType(false),
39       LocalAddress(Address::invalid()), StructureType(nullptr), Block(block) {
40 
41   // Skip asm prefix, if any.  'name' is usually taken directly from
42   // the mangled name of the enclosing function.
  if (!Name.empty() && Name[0] == '\01')
    Name = Name.substr(1);
45 }
46 
47 // Anchor the vtable to this translation unit.
48 BlockByrefHelpers::~BlockByrefHelpers() {}
49 
50 /// Build the given block as a global block.
51 static llvm::Constant *buildGlobalBlock(CodeGenModule &CGM,
52                                         const CGBlockInfo &blockInfo,
53                                         llvm::Constant *blockFn);
54 
55 /// Build the helper function to copy a block.
56 static llvm::Constant *buildCopyHelper(CodeGenModule &CGM,
57                                        const CGBlockInfo &blockInfo) {
58   return CodeGenFunction(CGM).GenerateCopyHelperFunction(blockInfo);
59 }
60 
61 /// Build the helper function to dispose of a block.
62 static llvm::Constant *buildDisposeHelper(CodeGenModule &CGM,
63                                           const CGBlockInfo &blockInfo) {
64   return CodeGenFunction(CGM).GenerateDestroyHelperFunction(blockInfo);
65 }
66 
67 namespace {
68 
/// Represents a captured entity that requires extra operations in order to be
/// copied or destroyed correctly.
71 struct BlockCaptureManagedEntity {
72   BlockCaptureEntityKind CopyKind, DisposeKind;
73   BlockFieldFlags CopyFlags, DisposeFlags;
74   const BlockDecl::Capture *CI;
75   const CGBlockInfo::Capture *Capture;
76 
77   BlockCaptureManagedEntity(BlockCaptureEntityKind CopyType,
78                             BlockCaptureEntityKind DisposeType,
79                             BlockFieldFlags CopyFlags,
80                             BlockFieldFlags DisposeFlags,
81                             const BlockDecl::Capture &CI,
82                             const CGBlockInfo::Capture &Capture)
83       : CopyKind(CopyType), DisposeKind(DisposeType), CopyFlags(CopyFlags),
84         DisposeFlags(DisposeFlags), CI(&CI), Capture(&Capture) {}
85 
86   bool operator<(const BlockCaptureManagedEntity &Other) const {
87     return Capture->getOffset() < Other.Capture->getOffset();
88   }
89 };
90 
91 enum class CaptureStrKind {
92   // String for the copy helper.
93   CopyHelper,
94   // String for the dispose helper.
95   DisposeHelper,
96   // Merge the strings for the copy helper and dispose helper.
97   Merged
98 };
99 
100 } // end anonymous namespace
101 
102 static std::string getBlockCaptureStr(const CGBlockInfo::Capture &Cap,
103                                       CaptureStrKind StrKind,
104                                       CharUnits BlockAlignment,
105                                       CodeGenModule &CGM);
106 
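// The returned name encodes the descriptor's contents so that equivalent
// descriptors can be reused rather than duplicated (see the lookup in
// buildBlockDescriptor). Illustratively, the shape is:
//   __block_descriptor_<size>_[e][a]<align>_(<offset><captureStr>)*_
//       e<len>_<@encode, with '@' replaced by '\1'>l<layout>
// where the bracketed middle section is present only when the block needs
// copy/dispose helpers. The exact capture and layout strings come from
// getBlockCaptureStr and getRCBlockLayoutStr.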
107 static std::string getBlockDescriptorName(const CGBlockInfo &BlockInfo,
108                                           CodeGenModule &CGM) {
109   std::string Name = "__block_descriptor_";
110   Name += llvm::to_string(BlockInfo.BlockSize.getQuantity()) + "_";
111 
112   if (BlockInfo.NeedsCopyDispose) {
113     if (CGM.getLangOpts().Exceptions)
114       Name += "e";
115     if (CGM.getCodeGenOpts().ObjCAutoRefCountExceptions)
116       Name += "a";
117     Name += llvm::to_string(BlockInfo.BlockAlign.getQuantity()) + "_";
118 
119     for (auto &Cap : BlockInfo.SortedCaptures) {
120       if (Cap.isConstantOrTrivial())
121         continue;
122 
123       Name += llvm::to_string(Cap.getOffset().getQuantity());
124 
125       if (Cap.CopyKind == Cap.DisposeKind) {
126         // If CopyKind and DisposeKind are the same, merge the capture
127         // information.
128         assert(Cap.CopyKind != BlockCaptureEntityKind::None &&
129                "shouldn't see BlockCaptureManagedEntity that is None");
130         Name += getBlockCaptureStr(Cap, CaptureStrKind::Merged,
131                                    BlockInfo.BlockAlign, CGM);
132       } else {
133         // If CopyKind and DisposeKind are not the same, which can happen when
134         // either Kind is None or the captured object is a __strong block,
135         // concatenate the copy and dispose strings.
136         Name += getBlockCaptureStr(Cap, CaptureStrKind::CopyHelper,
137                                    BlockInfo.BlockAlign, CGM);
138         Name += getBlockCaptureStr(Cap, CaptureStrKind::DisposeHelper,
139                                    BlockInfo.BlockAlign, CGM);
140       }
141     }
142     Name += "_";
143   }
144 
145   std::string TypeAtEncoding =
146       CGM.getContext().getObjCEncodingForBlock(BlockInfo.getBlockExpr());
  // Replace occurrences of '@' with '\1'. '@' is reserved on ELF platforms as
  // a separator between symbol name and symbol version.
149   std::replace(TypeAtEncoding.begin(), TypeAtEncoding.end(), '@', '\1');
150   Name += "e" + llvm::to_string(TypeAtEncoding.size()) + "_" + TypeAtEncoding;
151   Name += "l" + CGM.getObjCRuntime().getRCBlockLayoutStr(CGM, BlockInfo);
152   return Name;
153 }
154 
/// buildBlockDescriptor - Build the block descriptor meta-data for a block.
/// buildBlockDescriptor is accessed from the 5th field of the Block_literal
/// meta-data and contains static information about the block literal.
/// Its definition will have 4 (or optionally 6) words.
159 /// \code
160 /// struct Block_descriptor {
161 ///   unsigned long reserved;
162 ///   unsigned long size;  // size of Block_literal metadata in bytes.
163 ///   void *copy_func_helper_decl;  // optional copy helper.
164 ///   void *destroy_func_decl; // optional destructor helper.
165 ///   void *block_method_encoding_address; // @encode for block literal signature.
166 ///   void *block_layout_info; // encoding of captured block variables.
167 /// };
168 /// \endcode
169 static llvm::Constant *buildBlockDescriptor(CodeGenModule &CGM,
170                                             const CGBlockInfo &blockInfo) {
171   ASTContext &C = CGM.getContext();
172 
173   llvm::IntegerType *ulong =
174     cast<llvm::IntegerType>(CGM.getTypes().ConvertType(C.UnsignedLongTy));
175   llvm::PointerType *i8p = nullptr;
176   if (CGM.getLangOpts().OpenCL)
177     i8p =
178       llvm::Type::getInt8PtrTy(
179            CGM.getLLVMContext(), C.getTargetAddressSpace(LangAS::opencl_constant));
180   else
181     i8p = CGM.VoidPtrTy;
182 
183   std::string descName;
184 
185   // If an equivalent block descriptor global variable exists, return it.
186   if (C.getLangOpts().ObjC &&
187       CGM.getLangOpts().getGC() == LangOptions::NonGC) {
188     descName = getBlockDescriptorName(blockInfo, CGM);
189     if (llvm::GlobalValue *desc = CGM.getModule().getNamedValue(descName))
190       return llvm::ConstantExpr::getBitCast(desc,
191                                             CGM.getBlockDescriptorType());
192   }
193 
194   // If there isn't an equivalent block descriptor global variable, create a new
195   // one.
196   ConstantInitBuilder builder(CGM);
197   auto elements = builder.beginStruct();
198 
199   // reserved
200   elements.addInt(ulong, 0);
201 
202   // Size
203   // FIXME: What is the right way to say this doesn't fit?  We should give
204   // a user diagnostic in that case.  Better fix would be to change the
205   // API to size_t.
206   elements.addInt(ulong, blockInfo.BlockSize.getQuantity());
207 
208   // Optional copy/dispose helpers.
209   bool hasInternalHelper = false;
210   if (blockInfo.NeedsCopyDispose) {
211     // copy_func_helper_decl
212     llvm::Constant *copyHelper = buildCopyHelper(CGM, blockInfo);
213     elements.add(copyHelper);
214 
215     // destroy_func_decl
216     llvm::Constant *disposeHelper = buildDisposeHelper(CGM, blockInfo);
217     elements.add(disposeHelper);
218 
219     if (cast<llvm::Function>(copyHelper->stripPointerCasts())
220             ->hasInternalLinkage() ||
221         cast<llvm::Function>(disposeHelper->stripPointerCasts())
222             ->hasInternalLinkage())
223       hasInternalHelper = true;
224   }
225 
226   // Signature.  Mandatory ObjC-style method descriptor @encode sequence.
227   std::string typeAtEncoding =
228     CGM.getContext().getObjCEncodingForBlock(blockInfo.getBlockExpr());
229   elements.add(llvm::ConstantExpr::getBitCast(
230     CGM.GetAddrOfConstantCString(typeAtEncoding).getPointer(), i8p));
231 
232   // GC layout.
233   if (C.getLangOpts().ObjC) {
234     if (CGM.getLangOpts().getGC() != LangOptions::NonGC)
235       elements.add(CGM.getObjCRuntime().BuildGCBlockLayout(CGM, blockInfo));
236     else
237       elements.add(CGM.getObjCRuntime().BuildRCBlockLayout(CGM, blockInfo));
238   }
239   else
240     elements.addNullPointer(i8p);
241 
242   unsigned AddrSpace = 0;
243   if (C.getLangOpts().OpenCL)
244     AddrSpace = C.getTargetAddressSpace(LangAS::opencl_constant);
245 
246   llvm::GlobalValue::LinkageTypes linkage;
247   if (descName.empty()) {
248     linkage = llvm::GlobalValue::InternalLinkage;
249     descName = "__block_descriptor_tmp";
250   } else if (hasInternalHelper) {
251     // If either the copy helper or the dispose helper has internal linkage,
252     // the block descriptor must have internal linkage too.
253     linkage = llvm::GlobalValue::InternalLinkage;
254   } else {
255     linkage = llvm::GlobalValue::LinkOnceODRLinkage;
256   }
257 
258   llvm::GlobalVariable *global =
259       elements.finishAndCreateGlobal(descName, CGM.getPointerAlign(),
260                                      /*constant*/ true, linkage, AddrSpace);
261 
262   if (linkage == llvm::GlobalValue::LinkOnceODRLinkage) {
263     if (CGM.supportsCOMDAT())
264       global->setComdat(CGM.getModule().getOrInsertComdat(descName));
265     global->setVisibility(llvm::GlobalValue::HiddenVisibility);
266     global->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
267   }
268 
269   return llvm::ConstantExpr::getBitCast(global, CGM.getBlockDescriptorType());
270 }
271 
272 /*
273   Purely notional variadic template describing the layout of a block.
274 
275   template <class _ResultType, class... _ParamTypes, class... _CaptureTypes>
276   struct Block_literal {
277     /// Initialized to one of:
278     ///   extern void *_NSConcreteStackBlock[];
279     ///   extern void *_NSConcreteGlobalBlock[];
280     ///
281     /// In theory, we could start one off malloc'ed by setting
282     /// BLOCK_NEEDS_FREE, giving it a refcount of 1, and using
283     /// this isa:
284     ///   extern void *_NSConcreteMallocBlock[];
285     struct objc_class *isa;
286 
287     /// These are the flags (with corresponding bit number) that the
288     /// compiler is actually supposed to know about.
289     ///  23. BLOCK_IS_NOESCAPE - indicates that the block is non-escaping
290     ///  25. BLOCK_HAS_COPY_DISPOSE - indicates that the block
291     ///   descriptor provides copy and dispose helper functions
292     ///  26. BLOCK_HAS_CXX_OBJ - indicates that there's a captured
293     ///   object with a nontrivial destructor or copy constructor
294     ///  28. BLOCK_IS_GLOBAL - indicates that the block is allocated
295     ///   as global memory
296     ///  29. BLOCK_USE_STRET - indicates that the block function
297     ///   uses stret, which objc_msgSend needs to know about
298     ///  30. BLOCK_HAS_SIGNATURE - indicates that the block has an
299     ///   @encoded signature string
300     /// And we're not supposed to manipulate these:
301     ///  24. BLOCK_NEEDS_FREE - indicates that the block has been moved
302     ///   to malloc'ed memory
303     ///  27. BLOCK_IS_GC - indicates that the block has been moved to
    ///   GC-allocated memory
305     /// Additionally, the bottom 16 bits are a reference count which
306     /// should be zero on the stack.
307     int flags;
308 
309     /// Reserved;  should be zero-initialized.
310     int reserved;
311 
312     /// Function pointer generated from block literal.
313     _ResultType (*invoke)(Block_literal *, _ParamTypes...);
314 
315     /// Block description metadata generated from block literal.
316     struct Block_descriptor *block_descriptor;
317 
318     /// Captured values follow.
    _CaptureTypes captures...;
320   };
321  */
322 
323 namespace {
324   /// A chunk of data that we actually have to capture in the block.
325   struct BlockLayoutChunk {
326     CharUnits Alignment;
327     CharUnits Size;
328     const BlockDecl::Capture *Capture; // null for 'this'
329     llvm::Type *Type;
330     QualType FieldType;
331     BlockCaptureEntityKind CopyKind, DisposeKind;
332     BlockFieldFlags CopyFlags, DisposeFlags;
333 
334     BlockLayoutChunk(CharUnits align, CharUnits size,
335                      const BlockDecl::Capture *capture, llvm::Type *type,
336                      QualType fieldType, BlockCaptureEntityKind CopyKind,
337                      BlockFieldFlags CopyFlags,
338                      BlockCaptureEntityKind DisposeKind,
339                      BlockFieldFlags DisposeFlags)
340         : Alignment(align), Size(size), Capture(capture), Type(type),
341           FieldType(fieldType), CopyKind(CopyKind), DisposeKind(DisposeKind),
342           CopyFlags(CopyFlags), DisposeFlags(DisposeFlags) {}
343 
344     /// Tell the block info that this chunk has the given field index.
345     void setIndex(CGBlockInfo &info, unsigned index, CharUnits offset) {
346       if (!Capture) {
347         info.CXXThisIndex = index;
348         info.CXXThisOffset = offset;
349       } else {
350         info.SortedCaptures.push_back(CGBlockInfo::Capture::makeIndex(
351             index, offset, FieldType, CopyKind, CopyFlags, DisposeKind,
352             DisposeFlags, Capture));
353       }
354     }
355 
356     bool isTrivial() const {
357       return CopyKind == BlockCaptureEntityKind::None &&
358              DisposeKind == BlockCaptureEntityKind::None;
359     }
360   };
361 
362   /// Order by 1) all __strong together 2) next, all block together 3) next,
363   /// all byref together 4) next, all __weak together. Preserve descending
364   /// alignment in all situations.
365   bool operator<(const BlockLayoutChunk &left, const BlockLayoutChunk &right) {
366     if (left.Alignment != right.Alignment)
367       return left.Alignment > right.Alignment;
368 
369     auto getPrefOrder = [](const BlockLayoutChunk &chunk) {
370       switch (chunk.CopyKind) {
371       case BlockCaptureEntityKind::ARCStrong:
372         return 0;
373       case BlockCaptureEntityKind::BlockObject:
374         switch (chunk.CopyFlags.getBitMask()) {
375         case BLOCK_FIELD_IS_OBJECT:
376           return 0;
377         case BLOCK_FIELD_IS_BLOCK:
378           return 1;
379         case BLOCK_FIELD_IS_BYREF:
380           return 2;
381         default:
382           break;
383         }
384         break;
385       case BlockCaptureEntityKind::ARCWeak:
386         return 3;
387       default:
388         break;
389       }
390       return 4;
391     };
392 
393     return getPrefOrder(left) < getPrefOrder(right);
394   }
395 } // end anonymous namespace
396 
397 static std::pair<BlockCaptureEntityKind, BlockFieldFlags>
398 computeCopyInfoForBlockCapture(const BlockDecl::Capture &CI, QualType T,
399                                const LangOptions &LangOpts);
400 
401 static std::pair<BlockCaptureEntityKind, BlockFieldFlags>
402 computeDestroyInfoForBlockCapture(const BlockDecl::Capture &CI, QualType T,
403                                   const LangOptions &LangOpts);
404 
405 static void addBlockLayout(CharUnits align, CharUnits size,
406                            const BlockDecl::Capture *capture, llvm::Type *type,
407                            QualType fieldType,
408                            SmallVectorImpl<BlockLayoutChunk> &Layout,
409                            CGBlockInfo &Info, CodeGenModule &CGM) {
410   if (!capture) {
411     // 'this' capture.
412     Layout.push_back(BlockLayoutChunk(
413         align, size, capture, type, fieldType, BlockCaptureEntityKind::None,
414         BlockFieldFlags(), BlockCaptureEntityKind::None, BlockFieldFlags()));
415     return;
416   }
417 
418   const LangOptions &LangOpts = CGM.getLangOpts();
419   BlockCaptureEntityKind CopyKind, DisposeKind;
420   BlockFieldFlags CopyFlags, DisposeFlags;
421 
422   std::tie(CopyKind, CopyFlags) =
423       computeCopyInfoForBlockCapture(*capture, fieldType, LangOpts);
424   std::tie(DisposeKind, DisposeFlags) =
425       computeDestroyInfoForBlockCapture(*capture, fieldType, LangOpts);
426   Layout.push_back(BlockLayoutChunk(align, size, capture, type, fieldType,
427                                     CopyKind, CopyFlags, DisposeKind,
428                                     DisposeFlags));
429 
430   if (Info.NoEscape)
431     return;
432 
433   if (!Layout.back().isTrivial())
434     Info.NeedsCopyDispose = true;
435 }
436 
437 /// Determines if the given type is safe for constant capture in C++.
438 static bool isSafeForCXXConstantCapture(QualType type) {
439   const RecordType *recordType =
440     type->getBaseElementTypeUnsafe()->getAs<RecordType>();
441 
442   // Only records can be unsafe.
443   if (!recordType) return true;
444 
445   const auto *record = cast<CXXRecordDecl>(recordType->getDecl());
446 
447   // Maintain semantics for classes with non-trivial dtors or copy ctors.
448   if (!record->hasTrivialDestructor()) return false;
449   if (record->hasNonTrivialCopyConstructor()) return false;
450 
451   // Otherwise, we just have to make sure there aren't any mutable
452   // fields that might have changed since initialization.
453   return !record->hasMutableFields();
454 }
455 
456 /// It is illegal to modify a const object after initialization.
457 /// Therefore, if a const object has a constant initializer, we don't
458 /// actually need to keep storage for it in the block; we'll just
459 /// rematerialize it at the start of the block function.  This is
460 /// acceptable because we make no promises about address stability of
461 /// captured variables.
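/// For example (illustrative), given 'const int x = 42;', a block that only
/// reads 'x' can rematerialize the constant 42 inside the block function
/// rather than reserving a capture field for it.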
462 static llvm::Constant *tryCaptureAsConstant(CodeGenModule &CGM,
463                                             CodeGenFunction *CGF,
464                                             const VarDecl *var) {
465   // Return if this is a function parameter. We shouldn't try to
466   // rematerialize default arguments of function parameters.
467   if (isa<ParmVarDecl>(var))
468     return nullptr;
469 
470   QualType type = var->getType();
471 
472   // We can only do this if the variable is const.
473   if (!type.isConstQualified()) return nullptr;
474 
475   // Furthermore, in C++ we have to worry about mutable fields:
476   // C++ [dcl.type.cv]p4:
477   //   Except that any class member declared mutable can be
478   //   modified, any attempt to modify a const object during its
479   //   lifetime results in undefined behavior.
480   if (CGM.getLangOpts().CPlusPlus && !isSafeForCXXConstantCapture(type))
481     return nullptr;
482 
483   // If the variable doesn't have any initializer (shouldn't this be
484   // invalid?), it's not clear what we should do.  Maybe capture as
485   // zero?
486   const Expr *init = var->getInit();
487   if (!init) return nullptr;
488 
489   return ConstantEmitter(CGM, CGF).tryEmitAbstractForInitializer(*var);
490 }
491 
492 /// Get the low bit of a nonzero character count.  This is the
493 /// alignment of the nth byte if the 0th byte is universally aligned.
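/// For example, getLowBit(12 bytes) is 4 bytes: if byte 0 is maximally
/// aligned, byte 12 is guaranteed to be 4-byte aligned but not 8-byte aligned.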
494 static CharUnits getLowBit(CharUnits v) {
495   return CharUnits::fromQuantity(v.getQuantity() & (~v.getQuantity() + 1));
496 }
497 
498 static void initializeForBlockHeader(CodeGenModule &CGM, CGBlockInfo &info,
499                              SmallVectorImpl<llvm::Type*> &elementTypes) {
500 
501   assert(elementTypes.empty());
502   if (CGM.getLangOpts().OpenCL) {
503     // The header is basically 'struct { int; int; generic void *;
    // custom_fields; }'. Assert that the struct is packed.
505     auto GenPtrAlign = CharUnits::fromQuantity(
506         CGM.getTarget().getPointerAlign(LangAS::opencl_generic) / 8);
507     auto GenPtrSize = CharUnits::fromQuantity(
508         CGM.getTarget().getPointerWidth(LangAS::opencl_generic) / 8);
509     assert(CGM.getIntSize() <= GenPtrSize);
510     assert(CGM.getIntAlign() <= GenPtrAlign);
511     assert((2 * CGM.getIntSize()).isMultipleOf(GenPtrAlign));
512     elementTypes.push_back(CGM.IntTy); /* total size */
513     elementTypes.push_back(CGM.IntTy); /* align */
514     elementTypes.push_back(
515         CGM.getOpenCLRuntime()
516             .getGenericVoidPointerType()); /* invoke function */
517     unsigned Offset =
518         2 * CGM.getIntSize().getQuantity() + GenPtrSize.getQuantity();
519     unsigned BlockAlign = GenPtrAlign.getQuantity();
520     if (auto *Helper =
521             CGM.getTargetCodeGenInfo().getTargetOpenCLBlockHelper()) {
522       for (auto *I : Helper->getCustomFieldTypes()) /* custom fields */ {
        // TargetOpenCLBlockHelper needs to make sure the struct is packed.
524         // If necessary, add padding fields to the custom fields.
525         unsigned Align = CGM.getDataLayout().getABITypeAlign(I).value();
526         if (BlockAlign < Align)
527           BlockAlign = Align;
528         assert(Offset % Align == 0);
529         Offset += CGM.getDataLayout().getTypeAllocSize(I);
530         elementTypes.push_back(I);
531       }
532     }
533     info.BlockAlign = CharUnits::fromQuantity(BlockAlign);
534     info.BlockSize = CharUnits::fromQuantity(Offset);
535   } else {
536     // The header is basically 'struct { void *; int; int; void *; void *; }'.
537     // Assert that the struct is packed.
538     assert(CGM.getIntSize() <= CGM.getPointerSize());
539     assert(CGM.getIntAlign() <= CGM.getPointerAlign());
540     assert((2 * CGM.getIntSize()).isMultipleOf(CGM.getPointerAlign()));
541     info.BlockAlign = CGM.getPointerAlign();
542     info.BlockSize = 3 * CGM.getPointerSize() + 2 * CGM.getIntSize();
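    // e.g. 3 * 8 + 2 * 4 == 32 bytes on a typical 64-bit target.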
543     elementTypes.push_back(CGM.VoidPtrTy);
544     elementTypes.push_back(CGM.IntTy);
545     elementTypes.push_back(CGM.IntTy);
546     elementTypes.push_back(CGM.VoidPtrTy);
547     elementTypes.push_back(CGM.getBlockDescriptorType());
548   }
549 }
550 
551 static QualType getCaptureFieldType(const CodeGenFunction &CGF,
552                                     const BlockDecl::Capture &CI) {
553   const VarDecl *VD = CI.getVariable();
554 
555   // If the variable is captured by an enclosing block or lambda expression,
556   // use the type of the capture field.
557   if (CGF.BlockInfo && CI.isNested())
558     return CGF.BlockInfo->getCapture(VD).fieldType();
559   if (auto *FD = CGF.LambdaCaptureFields.lookup(VD))
560     return FD->getType();
561   // If the captured variable is a non-escaping __block variable, the field
562   // type is the reference type. If the variable is a __block variable that
563   // already has a reference type, the field type is the variable's type.
564   return VD->isNonEscapingByref() ?
565          CGF.getContext().getLValueReferenceType(VD->getType()) : VD->getType();
566 }
567 
568 /// Compute the layout of the given block.  Attempts to lay the block
569 /// out with minimal space requirements.
570 static void computeBlockInfo(CodeGenModule &CGM, CodeGenFunction *CGF,
571                              CGBlockInfo &info) {
572   ASTContext &C = CGM.getContext();
573   const BlockDecl *block = info.getBlockDecl();
574 
575   SmallVector<llvm::Type*, 8> elementTypes;
576   initializeForBlockHeader(CGM, info, elementTypes);
577   bool hasNonConstantCustomFields = false;
578   if (auto *OpenCLHelper =
579           CGM.getTargetCodeGenInfo().getTargetOpenCLBlockHelper())
580     hasNonConstantCustomFields =
581         !OpenCLHelper->areAllCustomFieldValuesConstant(info);
582   if (!block->hasCaptures() && !hasNonConstantCustomFields) {
583     info.StructureType =
584       llvm::StructType::get(CGM.getLLVMContext(), elementTypes, true);
585     info.CanBeGlobal = true;
586     return;
587   }
588   else if (C.getLangOpts().ObjC &&
589            CGM.getLangOpts().getGC() == LangOptions::NonGC)
590     info.HasCapturedVariableLayout = true;
591 
592   if (block->doesNotEscape())
593     info.NoEscape = true;
594 
595   // Collect the layout chunks.
596   SmallVector<BlockLayoutChunk, 16> layout;
597   layout.reserve(block->capturesCXXThis() +
598                  (block->capture_end() - block->capture_begin()));
599 
600   CharUnits maxFieldAlign;
601 
602   // First, 'this'.
603   if (block->capturesCXXThis()) {
604     assert(CGF && CGF->CurFuncDecl && isa<CXXMethodDecl>(CGF->CurFuncDecl) &&
605            "Can't capture 'this' outside a method");
606     QualType thisType = cast<CXXMethodDecl>(CGF->CurFuncDecl)->getThisType();
607 
608     // Theoretically, this could be in a different address space, so
609     // don't assume standard pointer size/align.
610     llvm::Type *llvmType = CGM.getTypes().ConvertType(thisType);
611     auto TInfo = CGM.getContext().getTypeInfoInChars(thisType);
612     maxFieldAlign = std::max(maxFieldAlign, TInfo.Align);
613 
614     addBlockLayout(TInfo.Align, TInfo.Width, nullptr, llvmType, thisType,
615                    layout, info, CGM);
616   }
617 
618   // Next, all the block captures.
619   for (const auto &CI : block->captures()) {
620     const VarDecl *variable = CI.getVariable();
621 
622     if (CI.isEscapingByref()) {
623       // Just use void* instead of a pointer to the byref type.
624       CharUnits align = CGM.getPointerAlign();
625       maxFieldAlign = std::max(maxFieldAlign, align);
626 
627       // Since a __block variable cannot be captured by lambdas, its type and
628       // the capture field type should always match.
629       assert(CGF && getCaptureFieldType(*CGF, CI) == variable->getType() &&
630              "capture type differs from the variable type");
631       addBlockLayout(align, CGM.getPointerSize(), &CI, CGM.VoidPtrTy,
632                      variable->getType(), layout, info, CGM);
633       continue;
634     }
635 
636     // Otherwise, build a layout chunk with the size and alignment of
637     // the declaration.
638     if (llvm::Constant *constant = tryCaptureAsConstant(CGM, CGF, variable)) {
639       info.SortedCaptures.push_back(
640           CGBlockInfo::Capture::makeConstant(constant, &CI));
641       continue;
642     }
643 
644     QualType VT = getCaptureFieldType(*CGF, CI);
645 
646     if (CGM.getLangOpts().CPlusPlus)
647       if (const CXXRecordDecl *record = VT->getAsCXXRecordDecl())
648         if (CI.hasCopyExpr() || !record->hasTrivialDestructor()) {
649           info.HasCXXObject = true;
650           if (!record->isExternallyVisible())
651             info.CapturesNonExternalType = true;
652         }
653 
654     CharUnits size = C.getTypeSizeInChars(VT);
655     CharUnits align = C.getDeclAlign(variable);
656 
657     maxFieldAlign = std::max(maxFieldAlign, align);
658 
659     llvm::Type *llvmType =
660       CGM.getTypes().ConvertTypeForMem(VT);
661 
662     addBlockLayout(align, size, &CI, llvmType, VT, layout, info, CGM);
663   }
664 
665   // If that was everything, we're done here.
666   if (layout.empty()) {
667     info.StructureType =
668       llvm::StructType::get(CGM.getLLVMContext(), elementTypes, true);
669     info.CanBeGlobal = true;
670     info.buildCaptureMap();
671     return;
672   }
673 
674   // Sort the layout by alignment.  We have to use a stable sort here
675   // to get reproducible results.  There should probably be an
676   // llvm::array_pod_stable_sort.
677   llvm::stable_sort(layout);
678 
679   // Needed for blocks layout info.
680   info.BlockHeaderForcedGapOffset = info.BlockSize;
681   info.BlockHeaderForcedGapSize = CharUnits::Zero();
682 
683   CharUnits &blockSize = info.BlockSize;
684   info.BlockAlign = std::max(maxFieldAlign, info.BlockAlign);
685 
686   // Assuming that the first byte in the header is maximally aligned,
687   // get the alignment of the first byte following the header.
688   CharUnits endAlign = getLowBit(blockSize);
689 
690   // If the end of the header isn't satisfactorily aligned for the
691   // maximum thing, look for things that are okay with the header-end
692   // alignment, and keep appending them until we get something that's
693   // aligned right.  This algorithm is only guaranteed optimal if
694   // that condition is satisfied at some point; otherwise we can get
695   // things like:
696   //   header                 // next byte has alignment 4
697   //   something_with_size_5; // next byte has alignment 1
698   //   something_with_alignment_8;
699   // which has 7 bytes of padding, as opposed to the naive solution
700   // which might have less (?).
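  // (In that example, the naive order would need only 4 bytes of padding to
  // realign to 8 before the 8-aligned field.)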
701   if (endAlign < maxFieldAlign) {
702     SmallVectorImpl<BlockLayoutChunk>::iterator
703       li = layout.begin() + 1, le = layout.end();
704 
705     // Look for something that the header end is already
706     // satisfactorily aligned for.
707     for (; li != le && endAlign < li->Alignment; ++li)
708       ;
709 
710     // If we found something that's naturally aligned for the end of
711     // the header, keep adding things...
712     if (li != le) {
713       SmallVectorImpl<BlockLayoutChunk>::iterator first = li;
714       for (; li != le; ++li) {
715         assert(endAlign >= li->Alignment);
716 
717         li->setIndex(info, elementTypes.size(), blockSize);
718         elementTypes.push_back(li->Type);
719         blockSize += li->Size;
720         endAlign = getLowBit(blockSize);
721 
722         // ...until we get to the alignment of the maximum field.
723         if (endAlign >= maxFieldAlign) {
724           ++li;
725           break;
726         }
727       }
728       // Don't re-append everything we just appended.
729       layout.erase(first, li);
730     }
731   }
732 
733   assert(endAlign == getLowBit(blockSize));
734 
735   // At this point, we just have to add padding if the end align still
736   // isn't aligned right.
737   if (endAlign < maxFieldAlign) {
738     CharUnits newBlockSize = blockSize.alignTo(maxFieldAlign);
739     CharUnits padding = newBlockSize - blockSize;
740 
741     // If we haven't yet added any fields, remember that there was an
    // initial gap; this needs to go into the block layout bitmap.
743     if (blockSize == info.BlockHeaderForcedGapOffset) {
744       info.BlockHeaderForcedGapSize = padding;
745     }
746 
747     elementTypes.push_back(llvm::ArrayType::get(CGM.Int8Ty,
748                                                 padding.getQuantity()));
749     blockSize = newBlockSize;
750     endAlign = getLowBit(blockSize); // might be > maxFieldAlign
751   }
752 
753   assert(endAlign >= maxFieldAlign);
754   assert(endAlign == getLowBit(blockSize));
755   // Slam everything else on now.  This works because they have
756   // strictly decreasing alignment and we expect that size is always a
757   // multiple of alignment.
758   for (SmallVectorImpl<BlockLayoutChunk>::iterator
759          li = layout.begin(), le = layout.end(); li != le; ++li) {
760     if (endAlign < li->Alignment) {
      // The size may not be a multiple of the alignment. This can only happen
      // with an over-aligned variable. We will be adding a padding field to
      // make the size a multiple of the alignment.
764       CharUnits padding = li->Alignment - endAlign;
765       elementTypes.push_back(llvm::ArrayType::get(CGM.Int8Ty,
766                                                   padding.getQuantity()));
767       blockSize += padding;
768       endAlign = getLowBit(blockSize);
769     }
770     assert(endAlign >= li->Alignment);
771     li->setIndex(info, elementTypes.size(), blockSize);
772     elementTypes.push_back(li->Type);
773     blockSize += li->Size;
774     endAlign = getLowBit(blockSize);
775   }
776 
777   info.buildCaptureMap();
778   info.StructureType =
779     llvm::StructType::get(CGM.getLLVMContext(), elementTypes, true);
780 }
781 
782 /// Emit a block literal expression in the current function.
783 llvm::Value *CodeGenFunction::EmitBlockLiteral(const BlockExpr *blockExpr) {
784   // If the block has no captures, we won't have a pre-computed
785   // layout for it.
786   if (!blockExpr->getBlockDecl()->hasCaptures())
787     // The block literal is emitted as a global variable, and the block invoke
788     // function has to be extracted from its initializer.
789     if (llvm::Constant *Block = CGM.getAddrOfGlobalBlockIfEmitted(blockExpr))
790       return Block;
791 
792   CGBlockInfo blockInfo(blockExpr->getBlockDecl(), CurFn->getName());
793   computeBlockInfo(CGM, this, blockInfo);
794   blockInfo.BlockExpression = blockExpr;
795   if (!blockInfo.CanBeGlobal)
796     blockInfo.LocalAddress = CreateTempAlloca(blockInfo.StructureType,
797                                               blockInfo.BlockAlign, "block");
798   return EmitBlockLiteral(blockInfo);
799 }
800 
801 llvm::Value *CodeGenFunction::EmitBlockLiteral(const CGBlockInfo &blockInfo) {
802   bool IsOpenCL = CGM.getContext().getLangOpts().OpenCL;
803   auto GenVoidPtrTy =
804       IsOpenCL ? CGM.getOpenCLRuntime().getGenericVoidPointerType() : VoidPtrTy;
805   LangAS GenVoidPtrAddr = IsOpenCL ? LangAS::opencl_generic : LangAS::Default;
806   auto GenVoidPtrSize = CharUnits::fromQuantity(
807       CGM.getTarget().getPointerWidth(GenVoidPtrAddr) / 8);
808   // Using the computed layout, generate the actual block function.
809   bool isLambdaConv = blockInfo.getBlockDecl()->isConversionFromLambda();
810   CodeGenFunction BlockCGF{CGM, true};
811   BlockCGF.SanOpts = SanOpts;
812   auto *InvokeFn = BlockCGF.GenerateBlockFunction(
813       CurGD, blockInfo, LocalDeclMap, isLambdaConv, blockInfo.CanBeGlobal);
814   auto *blockFn = llvm::ConstantExpr::getPointerCast(InvokeFn, GenVoidPtrTy);
815 
816   // If there is nothing to capture, we can emit this as a global block.
817   if (blockInfo.CanBeGlobal)
818     return CGM.getAddrOfGlobalBlockIfEmitted(blockInfo.BlockExpression);
819 
820   // Otherwise, we have to emit this as a local block.
821 
822   Address blockAddr = blockInfo.LocalAddress;
823   assert(blockAddr.isValid() && "block has no address!");
824 
825   llvm::Constant *isa;
826   llvm::Constant *descriptor;
827   BlockFlags flags;
828   if (!IsOpenCL) {
    // If the block is non-escaping, set field 'isa' to NSConcreteGlobalBlock
830     // and set the BLOCK_IS_GLOBAL bit of field 'flags'. Copying a non-escaping
831     // block just returns the original block and releasing it is a no-op.
832     llvm::Constant *blockISA = blockInfo.NoEscape
833                                    ? CGM.getNSConcreteGlobalBlock()
834                                    : CGM.getNSConcreteStackBlock();
835     isa = llvm::ConstantExpr::getBitCast(blockISA, VoidPtrTy);
836 
837     // Build the block descriptor.
838     descriptor = buildBlockDescriptor(CGM, blockInfo);
839 
840     // Compute the initial on-stack block flags.
841     flags = BLOCK_HAS_SIGNATURE;
842     if (blockInfo.HasCapturedVariableLayout)
843       flags |= BLOCK_HAS_EXTENDED_LAYOUT;
844     if (blockInfo.NeedsCopyDispose)
845       flags |= BLOCK_HAS_COPY_DISPOSE;
846     if (blockInfo.HasCXXObject)
847       flags |= BLOCK_HAS_CXX_OBJ;
848     if (blockInfo.UsesStret)
849       flags |= BLOCK_USE_STRET;
850     if (blockInfo.NoEscape)
851       flags |= BLOCK_IS_NOESCAPE | BLOCK_IS_GLOBAL;
852   }
853 
854   auto projectField = [&](unsigned index, const Twine &name) -> Address {
855     return Builder.CreateStructGEP(blockAddr, index, name);
856   };
857   auto storeField = [&](llvm::Value *value, unsigned index, const Twine &name) {
858     Builder.CreateStore(value, projectField(index, name));
859   };
860 
861   // Initialize the block header.
862   {
863     // We assume all the header fields are densely packed.
864     unsigned index = 0;
865     CharUnits offset;
866     auto addHeaderField = [&](llvm::Value *value, CharUnits size,
867                               const Twine &name) {
868       storeField(value, index, name);
869       offset += size;
870       index++;
871     };
872 
873     if (!IsOpenCL) {
874       addHeaderField(isa, getPointerSize(), "block.isa");
875       addHeaderField(llvm::ConstantInt::get(IntTy, flags.getBitMask()),
876                      getIntSize(), "block.flags");
877       addHeaderField(llvm::ConstantInt::get(IntTy, 0), getIntSize(),
878                      "block.reserved");
879     } else {
880       addHeaderField(
881           llvm::ConstantInt::get(IntTy, blockInfo.BlockSize.getQuantity()),
882           getIntSize(), "block.size");
883       addHeaderField(
884           llvm::ConstantInt::get(IntTy, blockInfo.BlockAlign.getQuantity()),
885           getIntSize(), "block.align");
886     }
887     addHeaderField(blockFn, GenVoidPtrSize, "block.invoke");
888     if (!IsOpenCL)
889       addHeaderField(descriptor, getPointerSize(), "block.descriptor");
890     else if (auto *Helper =
891                  CGM.getTargetCodeGenInfo().getTargetOpenCLBlockHelper()) {
892       for (auto I : Helper->getCustomFieldValues(*this, blockInfo)) {
893         addHeaderField(
894             I.first,
895             CharUnits::fromQuantity(
896                 CGM.getDataLayout().getTypeAllocSize(I.first->getType())),
897             I.second);
898       }
899     }
900   }
901 
902   // Finally, capture all the values into the block.
903   const BlockDecl *blockDecl = blockInfo.getBlockDecl();
904 
905   // First, 'this'.
906   if (blockDecl->capturesCXXThis()) {
907     Address addr =
908         projectField(blockInfo.CXXThisIndex, "block.captured-this.addr");
909     Builder.CreateStore(LoadCXXThis(), addr);
910   }
911 
912   // Next, captured variables.
913   for (const auto &CI : blockDecl->captures()) {
914     const VarDecl *variable = CI.getVariable();
915     const CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
916 
917     // Ignore constant captures.
918     if (capture.isConstant()) continue;
919 
920     QualType type = capture.fieldType();
921 
922     // This will be a [[type]]*, except that a byref entry will just be
923     // an i8**.
924     Address blockField = projectField(capture.getIndex(), "block.captured");
925 
926     // Compute the address of the thing we're going to move into the
927     // block literal.
928     Address src = Address::invalid();
929 
930     if (blockDecl->isConversionFromLambda()) {
931       // The lambda capture in a lambda's conversion-to-block-pointer is
932       // special; we'll simply emit it directly.
933       src = Address::invalid();
934     } else if (CI.isEscapingByref()) {
935       if (BlockInfo && CI.isNested()) {
936         // We need to use the capture from the enclosing block.
937         const CGBlockInfo::Capture &enclosingCapture =
938             BlockInfo->getCapture(variable);
939 
940         // This is a [[type]]*, except that a byref entry will just be an i8**.
941         src = Builder.CreateStructGEP(LoadBlockStruct(),
942                                       enclosingCapture.getIndex(),
943                                       "block.capture.addr");
944       } else {
945         auto I = LocalDeclMap.find(variable);
946         assert(I != LocalDeclMap.end());
947         src = I->second;
948       }
949     } else {
950       DeclRefExpr declRef(getContext(), const_cast<VarDecl *>(variable),
951                           /*RefersToEnclosingVariableOrCapture*/ CI.isNested(),
952                           type.getNonReferenceType(), VK_LValue,
953                           SourceLocation());
954       src = EmitDeclRefLValue(&declRef).getAddress(*this);
955     };
956 
957     // For byrefs, we just write the pointer to the byref struct into
958     // the block field.  There's no need to chase the forwarding
959     // pointer at this point, since we're building something that will
960     // live a shorter life than the stack byref anyway.
961     if (CI.isEscapingByref()) {
962       // Get a void* that points to the byref struct.
963       llvm::Value *byrefPointer;
964       if (CI.isNested())
965         byrefPointer = Builder.CreateLoad(src, "byref.capture");
966       else
967         byrefPointer = Builder.CreateBitCast(src.getPointer(), VoidPtrTy);
968 
969       // Write that void* into the capture field.
970       Builder.CreateStore(byrefPointer, blockField);
971 
972     // If we have a copy constructor, evaluate that into the block field.
973     } else if (const Expr *copyExpr = CI.getCopyExpr()) {
974       if (blockDecl->isConversionFromLambda()) {
975         // If we have a lambda conversion, emit the expression
976         // directly into the block instead.
977         AggValueSlot Slot =
978             AggValueSlot::forAddr(blockField, Qualifiers(),
979                                   AggValueSlot::IsDestructed,
980                                   AggValueSlot::DoesNotNeedGCBarriers,
981                                   AggValueSlot::IsNotAliased,
982                                   AggValueSlot::DoesNotOverlap);
983         EmitAggExpr(copyExpr, Slot);
984       } else {
985         EmitSynthesizedCXXCopyCtor(blockField, src, copyExpr);
986       }
987 
988     // If it's a reference variable, copy the reference into the block field.
989     } else if (type->isReferenceType()) {
990       Builder.CreateStore(src.getPointer(), blockField);
991 
992     // If type is const-qualified, copy the value into the block field.
993     } else if (type.isConstQualified() &&
994                type.getObjCLifetime() == Qualifiers::OCL_Strong &&
995                CGM.getCodeGenOpts().OptimizationLevel != 0) {
996       llvm::Value *value = Builder.CreateLoad(src, "captured");
997       Builder.CreateStore(value, blockField);
998 
999     // If this is an ARC __strong block-pointer variable, don't do a
1000     // block copy.
1001     //
1002     // TODO: this can be generalized into the normal initialization logic:
1003     // we should never need to do a block-copy when initializing a local
1004     // variable, because the local variable's lifetime should be strictly
1005     // contained within the stack block's.
1006     } else if (type.getObjCLifetime() == Qualifiers::OCL_Strong &&
1007                type->isBlockPointerType()) {
1008       // Load the block and do a simple retain.
1009       llvm::Value *value = Builder.CreateLoad(src, "block.captured_block");
1010       value = EmitARCRetainNonBlock(value);
1011 
1012       // Do a primitive store to the block field.
1013       Builder.CreateStore(value, blockField);
1014 
1015     // Otherwise, fake up a POD copy into the block field.
1016     } else {
1017       // Fake up a new variable so that EmitScalarInit doesn't think
1018       // we're referring to the variable in its own initializer.
1019       ImplicitParamDecl BlockFieldPseudoVar(getContext(), type,
1020                                             ImplicitParamDecl::Other);
1021 
1022       // We use one of these or the other depending on whether the
1023       // reference is nested.
1024       DeclRefExpr declRef(getContext(), const_cast<VarDecl *>(variable),
1025                           /*RefersToEnclosingVariableOrCapture*/ CI.isNested(),
1026                           type, VK_LValue, SourceLocation());
1027 
1028       ImplicitCastExpr l2r(ImplicitCastExpr::OnStack, type, CK_LValueToRValue,
1029                            &declRef, VK_PRValue, FPOptionsOverride());
1030       // FIXME: Pass a specific location for the expr init so that the store is
1031       // attributed to a reasonable location - otherwise it may be attributed to
1032       // locations of subexpressions in the initialization.
1033       EmitExprAsInit(&l2r, &BlockFieldPseudoVar,
1034                      MakeAddrLValue(blockField, type, AlignmentSource::Decl),
1035                      /*captured by init*/ false);
1036     }
1037 
1038     // Push a cleanup for the capture if necessary.
1039     if (!blockInfo.NoEscape && !blockInfo.NeedsCopyDispose)
1040       continue;
1041 
1042     // Ignore __block captures; there's nothing special in the on-stack block
1043     // that we need to do for them.
1044     if (CI.isByRef())
1045       continue;
1046 
1047     // Ignore objects that aren't destructed.
1048     QualType::DestructionKind dtorKind = type.isDestructedType();
1049     if (dtorKind == QualType::DK_none)
1050       continue;
1051 
1052     CodeGenFunction::Destroyer *destroyer;
1053 
1054     // Block captures count as local values and have imprecise semantics.
    // They also can't be arrays, so we don't need to worry about that.
1056     //
1057     // For const-qualified captures, emit clang.arc.use to ensure the captured
1058     // object doesn't get released while we are still depending on its validity
1059     // within the block.
1060     if (type.isConstQualified() &&
1061         type.getObjCLifetime() == Qualifiers::OCL_Strong &&
1062         CGM.getCodeGenOpts().OptimizationLevel != 0) {
1063       assert(CGM.getLangOpts().ObjCAutoRefCount &&
1064              "expected ObjC ARC to be enabled");
1065       destroyer = emitARCIntrinsicUse;
1066     } else if (dtorKind == QualType::DK_objc_strong_lifetime) {
1067       destroyer = destroyARCStrongImprecise;
1068     } else {
1069       destroyer = getDestroyer(dtorKind);
1070     }
1071 
1072     CleanupKind cleanupKind = NormalCleanup;
1073     bool useArrayEHCleanup = needsEHCleanup(dtorKind);
1074     if (useArrayEHCleanup)
1075       cleanupKind = NormalAndEHCleanup;
1076 
1077     // Extend the lifetime of the capture to the end of the scope enclosing the
1078     // block expression except when the block decl is in the list of RetExpr's
1079     // cleanup objects, in which case its lifetime ends after the full
1080     // expression.
1081     auto IsBlockDeclInRetExpr = [&]() {
1082       auto *EWC = llvm::dyn_cast_or_null<ExprWithCleanups>(RetExpr);
1083       if (EWC)
1084         for (auto &C : EWC->getObjects())
1085           if (auto *BD = C.dyn_cast<BlockDecl *>())
1086             if (BD == blockDecl)
1087               return true;
1088       return false;
1089     };
1090 
1091     if (IsBlockDeclInRetExpr())
1092       pushDestroy(cleanupKind, blockField, type, destroyer, useArrayEHCleanup);
1093     else
1094       pushLifetimeExtendedDestroy(cleanupKind, blockField, type, destroyer,
1095                                   useArrayEHCleanup);
1096   }
1097 
1098   // Cast to the converted block-pointer type, which happens (somewhat
1099   // unfortunately) to be a pointer to function type.
1100   llvm::Value *result = Builder.CreatePointerCast(
1101       blockAddr.getPointer(), ConvertType(blockInfo.getBlockExpr()->getType()));
1102 
1103   if (IsOpenCL) {
1104     CGM.getOpenCLRuntime().recordBlockInfo(blockInfo.BlockExpression, InvokeFn,
1105                                            result, blockInfo.StructureType);
1106   }
1107 
1108   return result;
1109 }
1110 
1111 
1112 llvm::Type *CodeGenModule::getBlockDescriptorType() {
1113   if (BlockDescriptorType)
1114     return BlockDescriptorType;
1115 
1116   llvm::Type *UnsignedLongTy =
1117     getTypes().ConvertType(getContext().UnsignedLongTy);
1118 
1119   // struct __block_descriptor {
1120   //   unsigned long reserved;
1121   //   unsigned long block_size;
1122   //
1123   //   // later, the following will be added
1124   //
1125   //   struct {
1126   //     void (*copyHelper)();
  //     void (*disposeHelper)();
1128   //   } helpers;                // !!! optional
1129   //
1130   //   const char *signature;   // the block signature
1131   //   const char *layout;      // reserved
1132   // };
1133   BlockDescriptorType = llvm::StructType::create(
1134       "struct.__block_descriptor", UnsignedLongTy, UnsignedLongTy);
1135 
1136   // Now form a pointer to that.
1137   unsigned AddrSpace = 0;
1138   if (getLangOpts().OpenCL)
1139     AddrSpace = getContext().getTargetAddressSpace(LangAS::opencl_constant);
1140   BlockDescriptorType = llvm::PointerType::get(BlockDescriptorType, AddrSpace);
1141   return BlockDescriptorType;
1142 }
1143 
1144 llvm::Type *CodeGenModule::getGenericBlockLiteralType() {
1145   if (GenericBlockLiteralType)
1146     return GenericBlockLiteralType;
1147 
1148   llvm::Type *BlockDescPtrTy = getBlockDescriptorType();
1149 
1150   if (getLangOpts().OpenCL) {
1151     // struct __opencl_block_literal_generic {
1152     //   int __size;
1153     //   int __align;
1154     //   __generic void *__invoke;
1155     //   /* custom fields */
1156     // };
1157     SmallVector<llvm::Type *, 8> StructFields(
1158         {IntTy, IntTy, getOpenCLRuntime().getGenericVoidPointerType()});
1159     if (auto *Helper = getTargetCodeGenInfo().getTargetOpenCLBlockHelper()) {
1160       llvm::append_range(StructFields, Helper->getCustomFieldTypes());
1161     }
1162     GenericBlockLiteralType = llvm::StructType::create(
1163         StructFields, "struct.__opencl_block_literal_generic");
1164   } else {
1165     // struct __block_literal_generic {
1166     //   void *__isa;
1167     //   int __flags;
1168     //   int __reserved;
1169     //   void (*__invoke)(void *);
1170     //   struct __block_descriptor *__descriptor;
1171     // };
1172     GenericBlockLiteralType =
1173         llvm::StructType::create("struct.__block_literal_generic", VoidPtrTy,
1174                                  IntTy, IntTy, VoidPtrTy, BlockDescPtrTy);
1175   }
1176 
1177   return GenericBlockLiteralType;
1178 }
1179 
1180 RValue CodeGenFunction::EmitBlockCallExpr(const CallExpr *E,
1181                                           ReturnValueSlot ReturnValue) {
1182   const auto *BPT = E->getCallee()->getType()->castAs<BlockPointerType>();
1183   llvm::Value *BlockPtr = EmitScalarExpr(E->getCallee());
1184   llvm::Type *GenBlockTy = CGM.getGenericBlockLiteralType();
1185   llvm::Value *Func = nullptr;
1186   QualType FnType = BPT->getPointeeType();
1187   ASTContext &Ctx = getContext();
1188   CallArgList Args;
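  // Conceptually, calling a block 'b(args...)' lowers to loading the invoke
  // function pointer out of the block literal and calling it with the literal
  // itself as an implicit first argument: 'invoke(literal, args...)'.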
1189 
1190   if (getLangOpts().OpenCL) {
    // For OpenCL, BlockPtr has already been cast to the generic block literal
    // type.
1192 
    // The first argument of a block call is the generic block literal cast to
    // a generic void pointer, i.e. i8 addrspace(4)*.
1195     llvm::Type *GenericVoidPtrTy =
1196         CGM.getOpenCLRuntime().getGenericVoidPointerType();
1197     llvm::Value *BlockDescriptor = Builder.CreatePointerCast(
1198         BlockPtr, GenericVoidPtrTy);
1199     QualType VoidPtrQualTy = Ctx.getPointerType(
1200         Ctx.getAddrSpaceQualType(Ctx.VoidTy, LangAS::opencl_generic));
1201     Args.add(RValue::get(BlockDescriptor), VoidPtrQualTy);
1202     // And the rest of the arguments.
1203     EmitCallArgs(Args, FnType->getAs<FunctionProtoType>(), E->arguments());
1204 
1205     // We *can* call the block directly unless it is a function argument.
1206     if (!isa<ParmVarDecl>(E->getCalleeDecl()))
1207       Func = CGM.getOpenCLRuntime().getInvokeFunction(E->getCallee());
1208     else {
1209       llvm::Value *FuncPtr = Builder.CreateStructGEP(GenBlockTy, BlockPtr, 2);
1210       Func = Builder.CreateAlignedLoad(GenericVoidPtrTy, FuncPtr,
1211                                        getPointerAlign());
1212     }
1213   } else {
1214     // Bitcast the block literal to a generic block literal.
1215     BlockPtr = Builder.CreatePointerCast(
1216         BlockPtr, llvm::PointerType::get(GenBlockTy, 0), "block.literal");
1217     // Get pointer to the block invoke function
1218     llvm::Value *FuncPtr = Builder.CreateStructGEP(GenBlockTy, BlockPtr, 3);
1219 
    // The first argument is the block literal cast to a void pointer.
1221     BlockPtr = Builder.CreatePointerCast(BlockPtr, VoidPtrTy);
1222     Args.add(RValue::get(BlockPtr), Ctx.VoidPtrTy);
1223     // And the rest of the arguments.
1224     EmitCallArgs(Args, FnType->getAs<FunctionProtoType>(), E->arguments());
1225 
1226     // Load the function.
1227     Func = Builder.CreateAlignedLoad(VoidPtrTy, FuncPtr, getPointerAlign());
1228   }
1229 
1230   const FunctionType *FuncTy = FnType->castAs<FunctionType>();
1231   const CGFunctionInfo &FnInfo =
1232     CGM.getTypes().arrangeBlockFunctionCall(Args, FuncTy);
1233 
1234   // Cast the function pointer to the right type.
1235   llvm::Type *BlockFTy = CGM.getTypes().GetFunctionType(FnInfo);
1236 
1237   llvm::Type *BlockFTyPtr = llvm::PointerType::getUnqual(BlockFTy);
1238   Func = Builder.CreatePointerCast(Func, BlockFTyPtr);
1239 
1240   // Prepare the callee.
1241   CGCallee Callee(CGCalleeInfo(), Func);
1242 
1243   // And call the block.
1244   return EmitCall(FnInfo, Callee, ReturnValue, Args);
1245 }
1246 
1247 Address CodeGenFunction::GetAddrOfBlockDecl(const VarDecl *variable) {
1248   assert(BlockInfo && "evaluating block ref without block information?");
1249   const CGBlockInfo::Capture &capture = BlockInfo->getCapture(variable);
1250 
1251   // Handle constant captures.
1252   if (capture.isConstant()) return LocalDeclMap.find(variable)->second;
1253 
1254   Address addr = Builder.CreateStructGEP(LoadBlockStruct(), capture.getIndex(),
1255                                          "block.capture.addr");
1256 
1257   if (variable->isEscapingByref()) {
1258     // addr should be a void** right now.  Load, then cast the result
1259     // to byref*.
1260 
1261     auto &byrefInfo = getBlockByrefInfo(variable);
1262     addr = Address(Builder.CreateLoad(addr), byrefInfo.Type,
1263                    byrefInfo.ByrefAlignment);
1264 
1265     addr = emitBlockByrefAddress(addr, byrefInfo, /*follow*/ true,
1266                                  variable->getName());
1267   }
1268 
1269   assert((!variable->isNonEscapingByref() ||
1270           capture.fieldType()->isReferenceType()) &&
1271          "the capture field of a non-escaping variable should have a "
1272          "reference type");
1273   if (capture.fieldType()->isReferenceType())
1274     addr = EmitLoadOfReference(MakeAddrLValue(addr, capture.fieldType()));
1275 
1276   return addr;
1277 }
1278 
1279 void CodeGenModule::setAddrOfGlobalBlock(const BlockExpr *BE,
1280                                          llvm::Constant *Addr) {
1281   bool Ok = EmittedGlobalBlocks.insert(std::make_pair(BE, Addr)).second;
1282   (void)Ok;
1283   assert(Ok && "Trying to replace an already-existing global block!");
1284 }
1285 
1286 llvm::Constant *
1287 CodeGenModule::GetAddrOfGlobalBlock(const BlockExpr *BE,
1288                                     StringRef Name) {
1289   if (llvm::Constant *Block = getAddrOfGlobalBlockIfEmitted(BE))
1290     return Block;
1291 
1292   CGBlockInfo blockInfo(BE->getBlockDecl(), Name);
1293   blockInfo.BlockExpression = BE;
1294 
1295   // Compute information about the layout, etc., of this block.
1296   computeBlockInfo(*this, nullptr, blockInfo);
1297 
1298   // Using that metadata, generate the actual block function.
1299   {
1300     CodeGenFunction::DeclMapTy LocalDeclMap;
1301     CodeGenFunction(*this).GenerateBlockFunction(
1302         GlobalDecl(), blockInfo, LocalDeclMap,
1303         /*IsLambdaConversionToBlock*/ false, /*BuildGlobalBlock*/ true);
1304   }
1305 
1306   return getAddrOfGlobalBlockIfEmitted(BE);
1307 }
1308 
1309 static llvm::Constant *buildGlobalBlock(CodeGenModule &CGM,
1310                                         const CGBlockInfo &blockInfo,
1311                                         llvm::Constant *blockFn) {
1312   assert(blockInfo.CanBeGlobal);
1313   // Callers should detect this case on their own: calling this function
1314   // generally requires computing layout information, which is a waste of time
1315   // if we've already emitted this block.
1316   assert(!CGM.getAddrOfGlobalBlockIfEmitted(blockInfo.BlockExpression) &&
1317          "Refusing to re-emit a global block.");
1318 
1319   // Generate the constants for the block literal initializer.
1320   ConstantInitBuilder builder(CGM);
1321   auto fields = builder.beginStruct();
1322 
1323   bool IsOpenCL = CGM.getLangOpts().OpenCL;
1324   bool IsWindows = CGM.getTarget().getTriple().isOSWindows();
1325   if (!IsOpenCL) {
1326     // isa
1327     if (IsWindows)
1328       fields.addNullPointer(CGM.Int8PtrPtrTy);
1329     else
1330       fields.add(CGM.getNSConcreteGlobalBlock());
1331 
1332     // __flags
1333     BlockFlags flags = BLOCK_IS_GLOBAL | BLOCK_HAS_SIGNATURE;
1334     if (blockInfo.UsesStret)
1335       flags |= BLOCK_USE_STRET;
1336 
1337     fields.addInt(CGM.IntTy, flags.getBitMask());
1338 
1339     // Reserved
1340     fields.addInt(CGM.IntTy, 0);
1341   } else {
1342     fields.addInt(CGM.IntTy, blockInfo.BlockSize.getQuantity());
1343     fields.addInt(CGM.IntTy, blockInfo.BlockAlign.getQuantity());
1344   }
1345 
1346   // Function
1347   fields.add(blockFn);
1348 
1349   if (!IsOpenCL) {
1350     // Descriptor
1351     fields.add(buildBlockDescriptor(CGM, blockInfo));
1352   } else if (auto *Helper =
1353                  CGM.getTargetCodeGenInfo().getTargetOpenCLBlockHelper()) {
1354     for (auto *I : Helper->getCustomFieldValues(CGM, blockInfo)) {
1355       fields.add(I);
1356     }
1357   }
1358 
1359   unsigned AddrSpace = 0;
1360   if (CGM.getContext().getLangOpts().OpenCL)
1361     AddrSpace = CGM.getContext().getTargetAddressSpace(LangAS::opencl_global);
1362 
1363   llvm::GlobalVariable *literal = fields.finishAndCreateGlobal(
1364       "__block_literal_global", blockInfo.BlockAlign,
1365       /*constant*/ !IsWindows, llvm::GlobalVariable::InternalLinkage, AddrSpace);
1366 
1367   literal->addAttribute("objc_arc_inert");
1368 
1369   // Windows does not allow globals to be initialised to point to globals in
1370   // different DLLs.  Any such variables must run code to initialise them.
1371   if (IsWindows) {
1372     auto *Init = llvm::Function::Create(llvm::FunctionType::get(CGM.VoidTy,
1373           {}), llvm::GlobalValue::InternalLinkage, ".block_isa_init",
1374         &CGM.getModule());
1375     llvm::IRBuilder<> b(llvm::BasicBlock::Create(CGM.getLLVMContext(), "entry",
1376           Init));
1377     b.CreateAlignedStore(CGM.getNSConcreteGlobalBlock(),
1378                          b.CreateStructGEP(literal->getValueType(), literal, 0),
1379                          CGM.getPointerAlign().getAsAlign());
1380     b.CreateRetVoid();
1381     // We can't use the normal LLVM global initialisation array, because we
1382     // need to specify that this runs early in library initialisation.
1383     auto *InitVar = new llvm::GlobalVariable(CGM.getModule(), Init->getType(),
1384         /*isConstant*/true, llvm::GlobalValue::InternalLinkage,
1385         Init, ".block_isa_init_ptr");
1386     InitVar->setSection(".CRT$XCLa");
1387     CGM.addUsedGlobal(InitVar);
1388   }
1389 
1390   // Return a constant of the appropriately-casted type.
1391   llvm::Type *RequiredType =
1392     CGM.getTypes().ConvertType(blockInfo.getBlockExpr()->getType());
1393   llvm::Constant *Result =
1394       llvm::ConstantExpr::getPointerCast(literal, RequiredType);
1395   CGM.setAddrOfGlobalBlock(blockInfo.BlockExpression, Result);
1396   if (CGM.getContext().getLangOpts().OpenCL)
1397     CGM.getOpenCLRuntime().recordBlockInfo(
1398         blockInfo.BlockExpression,
1399         cast<llvm::Function>(blockFn->stripPointerCasts()), Result,
1400         literal->getValueType());
1401   return Result;
1402 }
1403 
1404 void CodeGenFunction::setBlockContextParameter(const ImplicitParamDecl *D,
1405                                                unsigned argNum,
1406                                                llvm::Value *arg) {
1407   assert(BlockInfo && "not emitting prologue of block invocation function?!");
1408 
  // Allocate a stack slot, as for any local variable, to guarantee optimal
  // debug info at -O0. The mem2reg pass will eliminate it when optimizing.

1411   Address alloc = CreateMemTemp(D->getType(), D->getName() + ".addr");
1412   Builder.CreateStore(arg, alloc);
1413   if (CGDebugInfo *DI = getDebugInfo()) {
1414     if (CGM.getCodeGenOpts().hasReducedDebugInfo()) {
1415       DI->setLocation(D->getLocation());
1416       DI->EmitDeclareOfBlockLiteralArgVariable(
1417           *BlockInfo, D->getName(), argNum,
1418           cast<llvm::AllocaInst>(alloc.getPointer()), Builder);
1419     }
1420   }
1421 
1422   SourceLocation StartLoc = BlockInfo->getBlockExpr()->getBody()->getBeginLoc();
1423   ApplyDebugLocation Scope(*this, StartLoc);
1424 
1425   // Instead of messing around with LocalDeclMap, just set the value
1426   // directly as BlockPointer.
1427   BlockPointer = Builder.CreatePointerCast(
1428       arg,
1429       llvm::PointerType::get(
1430           getLLVMContext(),
1431           getContext().getLangOpts().OpenCL
1432               ? getContext().getTargetAddressSpace(LangAS::opencl_generic)
1433               : 0),
1434       "block");
1435 }
1436 
1437 Address CodeGenFunction::LoadBlockStruct() {
1438   assert(BlockInfo && "not in a block invocation function!");
1439   assert(BlockPointer && "no block pointer set!");
1440   return Address(BlockPointer, BlockInfo->StructureType, BlockInfo->BlockAlign);
1441 }
1442 
1443 llvm::Function *CodeGenFunction::GenerateBlockFunction(
1444     GlobalDecl GD, const CGBlockInfo &blockInfo, const DeclMapTy &ldm,
1445     bool IsLambdaConversionToBlock, bool BuildGlobalBlock) {
1446   const BlockDecl *blockDecl = blockInfo.getBlockDecl();
1447 
1448   CurGD = GD;
1449 
1450   CurEHLocation = blockInfo.getBlockExpr()->getEndLoc();
1451 
1452   BlockInfo = &blockInfo;
1453 
1454   // Arrange for local static and local extern declarations to appear
1455   // to be local to this function as well, in case they're directly
1456   // referenced in a block.
1457   for (DeclMapTy::const_iterator i = ldm.begin(), e = ldm.end(); i != e; ++i) {
1458     const auto *var = dyn_cast<VarDecl>(i->first);
1459     if (var && !var->hasLocalStorage())
1460       setAddrOfLocalVar(var, i->second);
1461   }
1462 
1463   // Begin building the function declaration.
1464 
1465   // Build the argument list.
1466   FunctionArgList args;
1467 
1468   // The first argument is the block pointer.  Just take it as a void*
1469   // and cast it later.
1470   QualType selfTy = getContext().VoidPtrTy;
1471 
  // For OpenCL, the passed block pointer can be a private-AS local variable or
  // a global-AS program-scope variable (covering the cases with and without
  // captures, respectively). The generic address space is therefore used so
  // that a single implementation can accommodate both.
1476   if (getLangOpts().OpenCL)
1477     selfTy = getContext().getPointerType(getContext().getAddrSpaceQualType(
1478         getContext().VoidTy, LangAS::opencl_generic));
1479 
1480   IdentifierInfo *II = &CGM.getContext().Idents.get(".block_descriptor");
1481 
1482   ImplicitParamDecl SelfDecl(getContext(), const_cast<BlockDecl *>(blockDecl),
1483                              SourceLocation(), II, selfTy,
1484                              ImplicitParamDecl::ObjCSelf);
1485   args.push_back(&SelfDecl);
1486 
1487   // Now add the rest of the parameters.
1488   args.append(blockDecl->param_begin(), blockDecl->param_end());
1489 
1490   // Create the function declaration.
1491   const FunctionProtoType *fnType = blockInfo.getBlockExpr()->getFunctionType();
1492   const CGFunctionInfo &fnInfo =
1493     CGM.getTypes().arrangeBlockFunctionDeclaration(fnType, args);
1494   if (CGM.ReturnSlotInterferesWithArgs(fnInfo))
1495     blockInfo.UsesStret = true;
1496 
1497   llvm::FunctionType *fnLLVMType = CGM.getTypes().GetFunctionType(fnInfo);
1498 
1499   StringRef name = CGM.getBlockMangledName(GD, blockDecl);
1500   llvm::Function *fn = llvm::Function::Create(
1501       fnLLVMType, llvm::GlobalValue::InternalLinkage, name, &CGM.getModule());
1502   CGM.SetInternalFunctionAttributes(blockDecl, fn, fnInfo);
1503 
1504   if (BuildGlobalBlock) {
1505     auto GenVoidPtrTy = getContext().getLangOpts().OpenCL
1506                             ? CGM.getOpenCLRuntime().getGenericVoidPointerType()
1507                             : VoidPtrTy;
1508     buildGlobalBlock(CGM, blockInfo,
1509                      llvm::ConstantExpr::getPointerCast(fn, GenVoidPtrTy));
1510   }
1511 
1512   // Begin generating the function.
1513   StartFunction(blockDecl, fnType->getReturnType(), fn, fnInfo, args,
1514                 blockDecl->getLocation(),
1515                 blockInfo.getBlockExpr()->getBody()->getBeginLoc());
1516 
1517   // Okay.  Undo some of what StartFunction did.
1518 
1519   // At -O0 we generate an explicit alloca for the BlockPointer, so the RA
1520   // won't delete the dbg.declare intrinsics for captured variables.
1521   llvm::Value *BlockPointerDbgLoc = BlockPointer;
1522   if (CGM.getCodeGenOpts().OptimizationLevel == 0) {
1523     // Allocate a stack slot for it, so we can point the debugger to it
1524     Address Alloca = CreateTempAlloca(BlockPointer->getType(),
1525                                       getPointerAlign(),
1526                                       "block.addr");
1527     // Set the DebugLocation to empty, so the store is recognized as a
1528     // frame setup instruction by llvm::DwarfDebug::beginFunction().
1529     auto NL = ApplyDebugLocation::CreateEmpty(*this);
1530     Builder.CreateStore(BlockPointer, Alloca);
1531     BlockPointerDbgLoc = Alloca.getPointer();
1532   }
1533 
1534   // If we have a C++ 'this' reference, go ahead and force it into
1535   // existence now.
1536   if (blockDecl->capturesCXXThis()) {
1537     Address addr = Builder.CreateStructGEP(
1538         LoadBlockStruct(), blockInfo.CXXThisIndex, "block.captured-this");
1539     CXXThisValue = Builder.CreateLoad(addr, "this");
1540   }
1541 
1542   // Also force all the constant captures.
1543   for (const auto &CI : blockDecl->captures()) {
1544     const VarDecl *variable = CI.getVariable();
1545     const CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
1546     if (!capture.isConstant()) continue;
1547 
1548     CharUnits align = getContext().getDeclAlign(variable);
1549     Address alloca =
1550       CreateMemTemp(variable->getType(), align, "block.captured-const");
1551 
1552     Builder.CreateStore(capture.getConstant(), alloca);
1553 
1554     setAddrOfLocalVar(variable, alloca);
1555   }
1556 
1557   // Save a spot to insert the debug information for all the DeclRefExprs.
1558   llvm::BasicBlock *entry = Builder.GetInsertBlock();
1559   llvm::BasicBlock::iterator entry_ptr = Builder.GetInsertPoint();
1560   --entry_ptr;
1561 
1562   if (IsLambdaConversionToBlock)
1563     EmitLambdaBlockInvokeBody();
1564   else {
1565     PGO.assignRegionCounters(GlobalDecl(blockDecl), fn);
1566     incrementProfileCounter(blockDecl->getBody());
1567     EmitStmt(blockDecl->getBody());
1568   }
1569 
1570   // Remember where we were...
1571   llvm::BasicBlock *resume = Builder.GetInsertBlock();
1572 
1573   // Go back to the entry.
1574   ++entry_ptr;
1575   Builder.SetInsertPoint(entry, entry_ptr);
1576 
1577   // Emit debug information for all the DeclRefExprs.
1578   // FIXME: also for 'this'
1579   if (CGDebugInfo *DI = getDebugInfo()) {
1580     for (const auto &CI : blockDecl->captures()) {
1581       const VarDecl *variable = CI.getVariable();
1582       DI->EmitLocation(Builder, variable->getLocation());
1583 
1584       if (CGM.getCodeGenOpts().hasReducedDebugInfo()) {
1585         const CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
1586         if (capture.isConstant()) {
1587           auto addr = LocalDeclMap.find(variable)->second;
1588           (void)DI->EmitDeclareOfAutoVariable(variable, addr.getPointer(),
1589                                               Builder);
1590           continue;
1591         }
1592 
1593         DI->EmitDeclareOfBlockDeclRefVariable(
1594             variable, BlockPointerDbgLoc, Builder, blockInfo,
1595             entry_ptr == entry->end() ? nullptr : &*entry_ptr);
1596       }
1597     }
1598     // Recover location if it was changed in the above loop.
1599     DI->EmitLocation(Builder,
1600                      cast<CompoundStmt>(blockDecl->getBody())->getRBracLoc());
1601   }
1602 
1603   // And resume where we left off.
1604   if (resume == nullptr)
1605     Builder.ClearInsertionPoint();
1606   else
1607     Builder.SetInsertPoint(resume);
1608 
1609   FinishFunction(cast<CompoundStmt>(blockDecl->getBody())->getRBracLoc());
1610 
1611   return fn;
1612 }
1613 
1614 static std::pair<BlockCaptureEntityKind, BlockFieldFlags>
1615 computeCopyInfoForBlockCapture(const BlockDecl::Capture &CI, QualType T,
1616                                const LangOptions &LangOpts) {
1617   if (CI.getCopyExpr()) {
1618     assert(!CI.isByRef());
1619     // don't bother computing flags
1620     return std::make_pair(BlockCaptureEntityKind::CXXRecord, BlockFieldFlags());
1621   }
1622   BlockFieldFlags Flags;
1623   if (CI.isEscapingByref()) {
1624     Flags = BLOCK_FIELD_IS_BYREF;
1625     if (T.isObjCGCWeak())
1626       Flags |= BLOCK_FIELD_IS_WEAK;
1627     return std::make_pair(BlockCaptureEntityKind::BlockObject, Flags);
1628   }
1629 
1630   Flags = BLOCK_FIELD_IS_OBJECT;
1631   bool isBlockPointer = T->isBlockPointerType();
1632   if (isBlockPointer)
1633     Flags = BLOCK_FIELD_IS_BLOCK;
1634 
1635   switch (T.isNonTrivialToPrimitiveCopy()) {
1636   case QualType::PCK_Struct:
1637     return std::make_pair(BlockCaptureEntityKind::NonTrivialCStruct,
1638                           BlockFieldFlags());
1639   case QualType::PCK_ARCWeak:
1640     // We need to register __weak direct captures with the runtime.
1641     return std::make_pair(BlockCaptureEntityKind::ARCWeak, Flags);
1642   case QualType::PCK_ARCStrong:
1643     // We need to retain the copied value for __strong direct captures.
1644     // If it's a block pointer, we have to copy the block and assign that to
1645     // the destination pointer, so we might as well use _Block_object_assign.
1646     // Otherwise we can avoid that.
1647     return std::make_pair(!isBlockPointer ? BlockCaptureEntityKind::ARCStrong
1648                                           : BlockCaptureEntityKind::BlockObject,
1649                           Flags);
1650   case QualType::PCK_Trivial:
1651   case QualType::PCK_VolatileTrivial: {
1652     if (!T->isObjCRetainableType())
1653       // For all other types, the memcpy is fine.
1654       return std::make_pair(BlockCaptureEntityKind::None, BlockFieldFlags());
1655 
1656     // Honor the inert __unsafe_unretained qualifier, which doesn't actually
1657     // make it into the type system.
1658     if (T->isObjCInertUnsafeUnretainedType())
1659       return std::make_pair(BlockCaptureEntityKind::None, BlockFieldFlags());
1660 
1661     // Special rules for ARC captures:
1662     Qualifiers QS = T.getQualifiers();
1663 
1664     // Non-ARC captures of retainable pointers are strong and
1665     // therefore require a call to _Block_object_assign.
1666     if (!QS.getObjCLifetime() && !LangOpts.ObjCAutoRefCount)
1667       return std::make_pair(BlockCaptureEntityKind::BlockObject, Flags);
1668 
1669     // Otherwise the memcpy is fine.
1670     return std::make_pair(BlockCaptureEntityKind::None, BlockFieldFlags());
1671   }
1672   }
1673   llvm_unreachable("after exhaustive PrimitiveCopyKind switch");
1674 }
1675 
1676 namespace {
1677 /// Release a __block variable.
1678 struct CallBlockRelease final : EHScopeStack::Cleanup {
1679   Address Addr;
1680   BlockFieldFlags FieldFlags;
1681   bool LoadBlockVarAddr, CanThrow;
1682 
1683   CallBlockRelease(Address Addr, BlockFieldFlags Flags, bool LoadValue,
1684                    bool CT)
1685       : Addr(Addr), FieldFlags(Flags), LoadBlockVarAddr(LoadValue),
1686         CanThrow(CT) {}
1687 
1688   void Emit(CodeGenFunction &CGF, Flags flags) override {
1689     llvm::Value *BlockVarAddr;
1690     if (LoadBlockVarAddr) {
1691       BlockVarAddr = CGF.Builder.CreateLoad(Addr);
1692       BlockVarAddr = CGF.Builder.CreateBitCast(BlockVarAddr, CGF.VoidPtrTy);
1693     } else {
1694       BlockVarAddr = Addr.getPointer();
1695     }
1696 
1697     CGF.BuildBlockRelease(BlockVarAddr, FieldFlags, CanThrow);
1698   }
1699 };
1700 } // end anonymous namespace
1701 
1702 /// Check if \p T is a C++ class that has a destructor that can throw.
1703 bool CodeGenFunction::cxxDestructorCanThrow(QualType T) {
1704   if (const auto *RD = T->getAsCXXRecordDecl())
1705     if (const CXXDestructorDecl *DD = RD->getDestructor())
1706       return DD->getType()->castAs<FunctionProtoType>()->canThrow();
1707   return false;
1708 }
1709 
// Return a string encoding the copy/dispose information for a capture.
1711 static std::string getBlockCaptureStr(const CGBlockInfo::Capture &Cap,
1712                                       CaptureStrKind StrKind,
1713                                       CharUnits BlockAlignment,
1714                                       CodeGenModule &CGM) {
1715   std::string Str;
1716   ASTContext &Ctx = CGM.getContext();
1717   const BlockDecl::Capture &CI = *Cap.Cap;
1718   QualType CaptureTy = CI.getVariable()->getType();
1719 
1720   BlockCaptureEntityKind Kind;
1721   BlockFieldFlags Flags;
1722 
1723   // CaptureStrKind::Merged should be passed only when the operations and the
1724   // flags are the same for copy and dispose.
1725   assert((StrKind != CaptureStrKind::Merged ||
1726           (Cap.CopyKind == Cap.DisposeKind &&
1727            Cap.CopyFlags == Cap.DisposeFlags)) &&
1728          "different operations and flags");
1729 
1730   if (StrKind == CaptureStrKind::DisposeHelper) {
1731     Kind = Cap.DisposeKind;
1732     Flags = Cap.DisposeFlags;
1733   } else {
1734     Kind = Cap.CopyKind;
1735     Flags = Cap.CopyFlags;
1736   }
1737 
1738   switch (Kind) {
1739   case BlockCaptureEntityKind::CXXRecord: {
1740     Str += "c";
1741     SmallString<256> TyStr;
1742     llvm::raw_svector_ostream Out(TyStr);
1743     CGM.getCXXABI().getMangleContext().mangleTypeName(CaptureTy, Out);
1744     Str += llvm::to_string(TyStr.size()) + TyStr.c_str();
1745     break;
1746   }
1747   case BlockCaptureEntityKind::ARCWeak:
1748     Str += "w";
1749     break;
1750   case BlockCaptureEntityKind::ARCStrong:
1751     Str += "s";
1752     break;
1753   case BlockCaptureEntityKind::BlockObject: {
1754     const VarDecl *Var = CI.getVariable();
1755     unsigned F = Flags.getBitMask();
1756     if (F & BLOCK_FIELD_IS_BYREF) {
1757       Str += "r";
1758       if (F & BLOCK_FIELD_IS_WEAK)
1759         Str += "w";
1760       else {
1761         // If CaptureStrKind::Merged is passed, check both the copy expression
1762         // and the destructor.
1763         if (StrKind != CaptureStrKind::DisposeHelper) {
1764           if (Ctx.getBlockVarCopyInit(Var).canThrow())
1765             Str += "c";
1766         }
1767         if (StrKind != CaptureStrKind::CopyHelper) {
1768           if (CodeGenFunction::cxxDestructorCanThrow(CaptureTy))
1769             Str += "d";
1770         }
1771       }
1772     } else {
1773       assert((F & BLOCK_FIELD_IS_OBJECT) && "unexpected flag value");
1774       if (F == BLOCK_FIELD_IS_BLOCK)
1775         Str += "b";
1776       else
1777         Str += "o";
1778     }
1779     break;
1780   }
1781   case BlockCaptureEntityKind::NonTrivialCStruct: {
1782     bool IsVolatile = CaptureTy.isVolatileQualified();
1783     CharUnits Alignment = BlockAlignment.alignmentAtOffset(Cap.getOffset());
1784 
1785     Str += "n";
1786     std::string FuncStr;
1787     if (StrKind == CaptureStrKind::DisposeHelper)
1788       FuncStr = CodeGenFunction::getNonTrivialDestructorStr(
1789           CaptureTy, Alignment, IsVolatile, Ctx);
1790     else
1791       // If CaptureStrKind::Merged is passed, use the copy constructor string.
1792       // It has all the information that the destructor string has.
1793       FuncStr = CodeGenFunction::getNonTrivialCopyConstructorStr(
1794           CaptureTy, Alignment, IsVolatile, Ctx);
1795     // The underscore is necessary here because non-trivial copy constructor
1796     // and destructor strings can start with a number.
1797     Str += llvm::to_string(FuncStr.size()) + "_" + FuncStr;
1798     break;
1799   }
1800   case BlockCaptureEntityKind::None:
1801     break;
1802   }
1803 
1804   return Str;
1805 }
1806 
1807 static std::string getCopyDestroyHelperFuncName(
1808     const SmallVectorImpl<CGBlockInfo::Capture> &Captures,
1809     CharUnits BlockAlignment, CaptureStrKind StrKind, CodeGenModule &CGM) {
1810   assert((StrKind == CaptureStrKind::CopyHelper ||
1811           StrKind == CaptureStrKind::DisposeHelper) &&
1812          "unexpected CaptureStrKind");
1813   std::string Name = StrKind == CaptureStrKind::CopyHelper
1814                          ? "__copy_helper_block_"
1815                          : "__destroy_helper_block_";
1816   if (CGM.getLangOpts().Exceptions)
1817     Name += "e";
1818   if (CGM.getCodeGenOpts().ObjCAutoRefCountExceptions)
1819     Name += "a";
1820   Name += llvm::to_string(BlockAlignment.getQuantity()) + "_";
1821 
1822   for (auto &Cap : Captures) {
1823     if (Cap.isConstantOrTrivial())
1824       continue;
1825     Name += llvm::to_string(Cap.getOffset().getQuantity());
1826     Name += getBlockCaptureStr(Cap, StrKind, BlockAlignment, CGM);
1827   }
1828 
1829   return Name;
1830 }
1831 
1832 static void pushCaptureCleanup(BlockCaptureEntityKind CaptureKind,
1833                                Address Field, QualType CaptureType,
1834                                BlockFieldFlags Flags, bool ForCopyHelper,
1835                                VarDecl *Var, CodeGenFunction &CGF) {
1836   bool EHOnly = ForCopyHelper;
1837 
1838   switch (CaptureKind) {
1839   case BlockCaptureEntityKind::CXXRecord:
1840   case BlockCaptureEntityKind::ARCWeak:
1841   case BlockCaptureEntityKind::NonTrivialCStruct:
1842   case BlockCaptureEntityKind::ARCStrong: {
1843     if (CaptureType.isDestructedType() &&
1844         (!EHOnly || CGF.needsEHCleanup(CaptureType.isDestructedType()))) {
1845       CodeGenFunction::Destroyer *Destroyer =
1846           CaptureKind == BlockCaptureEntityKind::ARCStrong
1847               ? CodeGenFunction::destroyARCStrongImprecise
1848               : CGF.getDestroyer(CaptureType.isDestructedType());
1849       CleanupKind Kind =
1850           EHOnly ? EHCleanup
1851                  : CGF.getCleanupKind(CaptureType.isDestructedType());
1852       CGF.pushDestroy(Kind, Field, CaptureType, Destroyer, Kind & EHCleanup);
1853     }
1854     break;
1855   }
1856   case BlockCaptureEntityKind::BlockObject: {
1857     if (!EHOnly || CGF.getLangOpts().Exceptions) {
1858       CleanupKind Kind = EHOnly ? EHCleanup : NormalAndEHCleanup;
1859       // Calls to _Block_object_dispose along the EH path in the copy helper
1860       // function don't throw as newly-copied __block variables always have a
1861       // reference count of 2.
1862       bool CanThrow =
1863           !ForCopyHelper && CGF.cxxDestructorCanThrow(CaptureType);
1864       CGF.enterByrefCleanup(Kind, Field, Flags, /*LoadBlockVarAddr*/ true,
1865                             CanThrow);
1866     }
1867     break;
1868   }
1869   case BlockCaptureEntityKind::None:
1870     break;
1871   }
1872 }
1873 
1874 static void setBlockHelperAttributesVisibility(bool CapturesNonExternalType,
1875                                                llvm::Function *Fn,
1876                                                const CGFunctionInfo &FI,
1877                                                CodeGenModule &CGM) {
1878   if (CapturesNonExternalType) {
1879     CGM.SetInternalFunctionAttributes(GlobalDecl(), Fn, FI);
1880   } else {
1881     Fn->setVisibility(llvm::GlobalValue::HiddenVisibility);
1882     Fn->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
1883     CGM.SetLLVMFunctionAttributes(GlobalDecl(), FI, Fn, /*IsThunk=*/false);
1884     CGM.SetLLVMFunctionAttributesForDefinition(nullptr, Fn);
1885   }
1886 }
1887 /// Generate the copy-helper function for a block closure object:
1888 ///   static void block_copy_helper(block_t *dst, block_t *src);
1889 /// The runtime will have previously initialized 'dst' by doing a
1890 /// bit-copy of 'src'.
1891 ///
1892 /// Note that this copies an entire block closure object to the heap;
1893 /// it should not be confused with a 'byref copy helper', which moves
1894 /// the contents of an individual __block variable to the heap.
1895 llvm::Constant *
1896 CodeGenFunction::GenerateCopyHelperFunction(const CGBlockInfo &blockInfo) {
1897   std::string FuncName = getCopyDestroyHelperFuncName(
1898       blockInfo.SortedCaptures, blockInfo.BlockAlign,
1899       CaptureStrKind::CopyHelper, CGM);
1900 
1901   if (llvm::GlobalValue *Func = CGM.getModule().getNamedValue(FuncName))
1902     return llvm::ConstantExpr::getBitCast(Func, VoidPtrTy);
1903 
1904   ASTContext &C = getContext();
1905 
1906   QualType ReturnTy = C.VoidTy;
1907 
1908   FunctionArgList args;
1909   ImplicitParamDecl DstDecl(C, C.VoidPtrTy, ImplicitParamDecl::Other);
1910   args.push_back(&DstDecl);
1911   ImplicitParamDecl SrcDecl(C, C.VoidPtrTy, ImplicitParamDecl::Other);
1912   args.push_back(&SrcDecl);
1913 
1914   const CGFunctionInfo &FI =
1915       CGM.getTypes().arrangeBuiltinFunctionDeclaration(ReturnTy, args);
1916 
1917   // FIXME: it would be nice if these were mergeable with things with
1918   // identical semantics.
1919   llvm::FunctionType *LTy = CGM.getTypes().GetFunctionType(FI);
1920 
1921   llvm::Function *Fn =
1922     llvm::Function::Create(LTy, llvm::GlobalValue::LinkOnceODRLinkage,
1923                            FuncName, &CGM.getModule());
1924   if (CGM.supportsCOMDAT())
1925     Fn->setComdat(CGM.getModule().getOrInsertComdat(FuncName));
1926 
1927   SmallVector<QualType, 2> ArgTys;
1928   ArgTys.push_back(C.VoidPtrTy);
1929   ArgTys.push_back(C.VoidPtrTy);
1930 
1931   setBlockHelperAttributesVisibility(blockInfo.CapturesNonExternalType, Fn, FI,
1932                                      CGM);
1933   StartFunction(GlobalDecl(), ReturnTy, Fn, FI, args);
1934   auto AL = ApplyDebugLocation::CreateArtificial(*this);
1935 
1936   Address src = GetAddrOfLocalVar(&SrcDecl);
1937   src = Address(Builder.CreateLoad(src), blockInfo.StructureType,
1938                 blockInfo.BlockAlign);
1939 
1940   Address dst = GetAddrOfLocalVar(&DstDecl);
1941   dst = Address(Builder.CreateLoad(dst), blockInfo.StructureType,
1942                 blockInfo.BlockAlign);
1943 
1944   for (auto &capture : blockInfo.SortedCaptures) {
1945     if (capture.isConstantOrTrivial())
1946       continue;
1947 
1948     const BlockDecl::Capture &CI = *capture.Cap;
1949     QualType captureType = CI.getVariable()->getType();
1950     BlockFieldFlags flags = capture.CopyFlags;
1951 
1952     unsigned index = capture.getIndex();
1953     Address srcField = Builder.CreateStructGEP(src, index);
1954     Address dstField = Builder.CreateStructGEP(dst, index);
1955 
1956     switch (capture.CopyKind) {
1957     case BlockCaptureEntityKind::CXXRecord:
1958       // If there's an explicit copy expression, we do that.
1959       assert(CI.getCopyExpr() && "copy expression for variable is missing");
1960       EmitSynthesizedCXXCopyCtor(dstField, srcField, CI.getCopyExpr());
1961       break;
1962     case BlockCaptureEntityKind::ARCWeak:
1963       EmitARCCopyWeak(dstField, srcField);
1964       break;
1965     case BlockCaptureEntityKind::NonTrivialCStruct: {
1966       // If this is a C struct that requires non-trivial copy construction,
1967       // emit a call to its copy constructor.
1968       QualType varType = CI.getVariable()->getType();
1969       callCStructCopyConstructor(MakeAddrLValue(dstField, varType),
1970                                  MakeAddrLValue(srcField, varType));
1971       break;
1972     }
1973     case BlockCaptureEntityKind::ARCStrong: {
1974       llvm::Value *srcValue = Builder.CreateLoad(srcField, "blockcopy.src");
1975       // At -O0, store null into the destination field (so that the
1976       // storeStrong doesn't over-release) and then call storeStrong.
1977       // This is a workaround to not having an initStrong call.
1978       if (CGM.getCodeGenOpts().OptimizationLevel == 0) {
1979         auto *ty = cast<llvm::PointerType>(srcValue->getType());
1980         llvm::Value *null = llvm::ConstantPointerNull::get(ty);
1981         Builder.CreateStore(null, dstField);
1982         EmitARCStoreStrongCall(dstField, srcValue, true);
1983 
1984       // With optimization enabled, take advantage of the fact that
1985       // the blocks runtime guarantees a memcpy of the block data, and
1986       // just emit a retain of the src field.
1987       } else {
1988         EmitARCRetainNonBlock(srcValue);
1989 
1990         // Unless EH cleanup is required, we don't need this anymore, so kill
1991         // it. It's not quite worth the annoyance to avoid creating it in the
1992         // first place.
1993         if (!needsEHCleanup(captureType.isDestructedType()))
1994           cast<llvm::Instruction>(dstField.getPointer())->eraseFromParent();
1995       }
1996       break;
1997     }
1998     case BlockCaptureEntityKind::BlockObject: {
1999       llvm::Value *srcValue = Builder.CreateLoad(srcField, "blockcopy.src");
2000       srcValue = Builder.CreateBitCast(srcValue, VoidPtrTy);
2001       llvm::Value *dstAddr =
2002           Builder.CreateBitCast(dstField.getPointer(), VoidPtrTy);
2003       llvm::Value *args[] = {
2004         dstAddr, srcValue, llvm::ConstantInt::get(Int32Ty, flags.getBitMask())
2005       };
2006 
2007       if (CI.isByRef() && C.getBlockVarCopyInit(CI.getVariable()).canThrow())
2008         EmitRuntimeCallOrInvoke(CGM.getBlockObjectAssign(), args);
2009       else
2010         EmitNounwindRuntimeCall(CGM.getBlockObjectAssign(), args);
2011       break;
2012     }
2013     case BlockCaptureEntityKind::None:
2014       continue;
2015     }
2016 
2017     // Ensure that we destroy the copied object if an exception is thrown later
2018     // in the helper function.
2019     pushCaptureCleanup(capture.CopyKind, dstField, captureType, flags,
2020                        /*ForCopyHelper*/ true, CI.getVariable(), *this);
2021   }
2022 
2023   FinishFunction();
2024 
2025   return llvm::ConstantExpr::getBitCast(Fn, VoidPtrTy);
2026 }
2027 
2028 static BlockFieldFlags
2029 getBlockFieldFlagsForObjCObjectPointer(const BlockDecl::Capture &CI,
2030                                        QualType T) {
2031   BlockFieldFlags Flags = BLOCK_FIELD_IS_OBJECT;
2032   if (T->isBlockPointerType())
2033     Flags = BLOCK_FIELD_IS_BLOCK;
2034   return Flags;
2035 }
2036 
2037 static std::pair<BlockCaptureEntityKind, BlockFieldFlags>
2038 computeDestroyInfoForBlockCapture(const BlockDecl::Capture &CI, QualType T,
2039                                   const LangOptions &LangOpts) {
2040   if (CI.isEscapingByref()) {
2041     BlockFieldFlags Flags = BLOCK_FIELD_IS_BYREF;
2042     if (T.isObjCGCWeak())
2043       Flags |= BLOCK_FIELD_IS_WEAK;
2044     return std::make_pair(BlockCaptureEntityKind::BlockObject, Flags);
2045   }
2046 
2047   switch (T.isDestructedType()) {
2048   case QualType::DK_cxx_destructor:
2049     return std::make_pair(BlockCaptureEntityKind::CXXRecord, BlockFieldFlags());
2050   case QualType::DK_objc_strong_lifetime:
2051     // Use objc_storeStrong for __strong direct captures; the
2052     // dynamic tools really like it when we do this.
2053     return std::make_pair(BlockCaptureEntityKind::ARCStrong,
2054                           getBlockFieldFlagsForObjCObjectPointer(CI, T));
2055   case QualType::DK_objc_weak_lifetime:
2056     // Support __weak direct captures.
2057     return std::make_pair(BlockCaptureEntityKind::ARCWeak,
2058                           getBlockFieldFlagsForObjCObjectPointer(CI, T));
2059   case QualType::DK_nontrivial_c_struct:
2060     return std::make_pair(BlockCaptureEntityKind::NonTrivialCStruct,
2061                           BlockFieldFlags());
2062   case QualType::DK_none: {
2063     // Non-ARC captures are strong, and we need to use _Block_object_dispose.
2064     // But honor the inert __unsafe_unretained qualifier, which doesn't actually
2065     // make it into the type system.
2066     if (T->isObjCRetainableType() && !T.getQualifiers().hasObjCLifetime() &&
2067         !LangOpts.ObjCAutoRefCount && !T->isObjCInertUnsafeUnretainedType())
2068       return std::make_pair(BlockCaptureEntityKind::BlockObject,
2069                             getBlockFieldFlagsForObjCObjectPointer(CI, T));
2070     // Otherwise, we have nothing to do.
2071     return std::make_pair(BlockCaptureEntityKind::None, BlockFieldFlags());
2072   }
2073   }
2074   llvm_unreachable("after exhaustive DestructionKind switch");
2075 }
2076 
2077 /// Generate the destroy-helper function for a block closure object:
2078 ///   static void block_destroy_helper(block_t *theBlock);
2079 ///
2080 /// Note that this destroys a heap-allocated block closure object;
2081 /// it should not be confused with a 'byref destroy helper', which
2082 /// destroys the heap-allocated contents of an individual __block
2083 /// variable.
2084 llvm::Constant *
2085 CodeGenFunction::GenerateDestroyHelperFunction(const CGBlockInfo &blockInfo) {
2086   std::string FuncName = getCopyDestroyHelperFuncName(
2087       blockInfo.SortedCaptures, blockInfo.BlockAlign,
2088       CaptureStrKind::DisposeHelper, CGM);
2089 
2090   if (llvm::GlobalValue *Func = CGM.getModule().getNamedValue(FuncName))
2091     return llvm::ConstantExpr::getBitCast(Func, VoidPtrTy);
2092 
2093   ASTContext &C = getContext();
2094 
2095   QualType ReturnTy = C.VoidTy;
2096 
2097   FunctionArgList args;
2098   ImplicitParamDecl SrcDecl(C, C.VoidPtrTy, ImplicitParamDecl::Other);
2099   args.push_back(&SrcDecl);
2100 
2101   const CGFunctionInfo &FI =
2102       CGM.getTypes().arrangeBuiltinFunctionDeclaration(ReturnTy, args);
2103 
  // FIXME: We'd like to make these mergeable by content, with
  // internal linkage.
2106   llvm::FunctionType *LTy = CGM.getTypes().GetFunctionType(FI);
2107 
2108   llvm::Function *Fn =
2109     llvm::Function::Create(LTy, llvm::GlobalValue::LinkOnceODRLinkage,
2110                            FuncName, &CGM.getModule());
2111   if (CGM.supportsCOMDAT())
2112     Fn->setComdat(CGM.getModule().getOrInsertComdat(FuncName));
2113 
2114   SmallVector<QualType, 1> ArgTys;
2115   ArgTys.push_back(C.VoidPtrTy);
2116 
2117   setBlockHelperAttributesVisibility(blockInfo.CapturesNonExternalType, Fn, FI,
2118                                      CGM);
2119   StartFunction(GlobalDecl(), ReturnTy, Fn, FI, args);
2120   markAsIgnoreThreadCheckingAtRuntime(Fn);
2121 
2122   auto AL = ApplyDebugLocation::CreateArtificial(*this);
2123 
2124   Address src = GetAddrOfLocalVar(&SrcDecl);
2125   src = Address(Builder.CreateLoad(src), blockInfo.StructureType,
2126                 blockInfo.BlockAlign);
2127 
2128   CodeGenFunction::RunCleanupsScope cleanups(*this);
2129 
2130   for (auto &capture : blockInfo.SortedCaptures) {
2131     if (capture.isConstantOrTrivial())
2132       continue;
2133 
2134     const BlockDecl::Capture &CI = *capture.Cap;
2135     BlockFieldFlags flags = capture.DisposeFlags;
2136 
2137     Address srcField = Builder.CreateStructGEP(src, capture.getIndex());
2138 
2139     pushCaptureCleanup(capture.DisposeKind, srcField,
2140                        CI.getVariable()->getType(), flags,
2141                        /*ForCopyHelper*/ false, CI.getVariable(), *this);
2142   }
2143 
2144   cleanups.ForceCleanup();
2145 
2146   FinishFunction();
2147 
2148   return llvm::ConstantExpr::getBitCast(Fn, VoidPtrTy);
2149 }
2150 
2151 namespace {
2152 
2153 /// Emits the copy/dispose helper functions for a __block object of id type.
2154 class ObjectByrefHelpers final : public BlockByrefHelpers {
2155   BlockFieldFlags Flags;
2156 
2157 public:
2158   ObjectByrefHelpers(CharUnits alignment, BlockFieldFlags flags)
2159     : BlockByrefHelpers(alignment), Flags(flags) {}
2160 
2161   void emitCopy(CodeGenFunction &CGF, Address destField,
2162                 Address srcField) override {
2163     destField = destField.withElementType(CGF.Int8Ty);
2164 
2165     srcField = srcField.withElementType(CGF.Int8PtrTy);
2166     llvm::Value *srcValue = CGF.Builder.CreateLoad(srcField);
2167 
2168     unsigned flags = (Flags | BLOCK_BYREF_CALLER).getBitMask();
2169 
2170     llvm::Value *flagsVal = llvm::ConstantInt::get(CGF.Int32Ty, flags);
2171     llvm::FunctionCallee fn = CGF.CGM.getBlockObjectAssign();
2172 
2173     llvm::Value *args[] = { destField.getPointer(), srcValue, flagsVal };
2174     CGF.EmitNounwindRuntimeCall(fn, args);
2175   }
2176 
2177   void emitDispose(CodeGenFunction &CGF, Address field) override {
2178     field = field.withElementType(CGF.Int8PtrTy);
2179     llvm::Value *value = CGF.Builder.CreateLoad(field);
2180 
2181     CGF.BuildBlockRelease(value, Flags | BLOCK_BYREF_CALLER, false);
2182   }
2183 
2184   void profileImpl(llvm::FoldingSetNodeID &id) const override {
2185     id.AddInteger(Flags.getBitMask());
2186   }
2187 };
2188 
2189 /// Emits the copy/dispose helpers for an ARC __block __weak variable.
2190 class ARCWeakByrefHelpers final : public BlockByrefHelpers {
2191 public:
2192   ARCWeakByrefHelpers(CharUnits alignment) : BlockByrefHelpers(alignment) {}
2193 
2194   void emitCopy(CodeGenFunction &CGF, Address destField,
2195                 Address srcField) override {
2196     CGF.EmitARCMoveWeak(destField, srcField);
2197   }
2198 
2199   void emitDispose(CodeGenFunction &CGF, Address field) override {
2200     CGF.EmitARCDestroyWeak(field);
2201   }
2202 
2203   void profileImpl(llvm::FoldingSetNodeID &id) const override {
2204     // 0 is distinguishable from all pointers and byref flags
2205     id.AddInteger(0);
2206   }
2207 };
2208 
2209 /// Emits the copy/dispose helpers for an ARC __block __strong variable
2210 /// that's not of block-pointer type.
2211 class ARCStrongByrefHelpers final : public BlockByrefHelpers {
2212 public:
2213   ARCStrongByrefHelpers(CharUnits alignment) : BlockByrefHelpers(alignment) {}
2214 
2215   void emitCopy(CodeGenFunction &CGF, Address destField,
2216                 Address srcField) override {
2217     // Do a "move" by copying the value and then zeroing out the old
2218     // variable.
2219 
2220     llvm::Value *value = CGF.Builder.CreateLoad(srcField);
2221 
2222     llvm::Value *null =
2223       llvm::ConstantPointerNull::get(cast<llvm::PointerType>(value->getType()));
2224 
2225     if (CGF.CGM.getCodeGenOpts().OptimizationLevel == 0) {
2226       CGF.Builder.CreateStore(null, destField);
2227       CGF.EmitARCStoreStrongCall(destField, value, /*ignored*/ true);
2228       CGF.EmitARCStoreStrongCall(srcField, null, /*ignored*/ true);
2229       return;
2230     }
2231     CGF.Builder.CreateStore(value, destField);
2232     CGF.Builder.CreateStore(null, srcField);
2233   }
2234 
2235   void emitDispose(CodeGenFunction &CGF, Address field) override {
2236     CGF.EmitARCDestroyStrong(field, ARCImpreciseLifetime);
2237   }
2238 
2239   void profileImpl(llvm::FoldingSetNodeID &id) const override {
2240     // 1 is distinguishable from all pointers and byref flags
2241     id.AddInteger(1);
2242   }
2243 };
2244 
2245 /// Emits the copy/dispose helpers for an ARC __block __strong
2246 /// variable that's of block-pointer type.
2247 class ARCStrongBlockByrefHelpers final : public BlockByrefHelpers {
2248 public:
2249   ARCStrongBlockByrefHelpers(CharUnits alignment)
2250     : BlockByrefHelpers(alignment) {}
2251 
2252   void emitCopy(CodeGenFunction &CGF, Address destField,
2253                 Address srcField) override {
2254     // Do the copy with objc_retainBlock; that's all that
2255     // _Block_object_assign would do anyway, and we'd have to pass the
2256     // right arguments to make sure it doesn't get no-op'ed.
2257     llvm::Value *oldValue = CGF.Builder.CreateLoad(srcField);
2258     llvm::Value *copy = CGF.EmitARCRetainBlock(oldValue, /*mandatory*/ true);
2259     CGF.Builder.CreateStore(copy, destField);
2260   }
2261 
2262   void emitDispose(CodeGenFunction &CGF, Address field) override {
2263     CGF.EmitARCDestroyStrong(field, ARCImpreciseLifetime);
2264   }
2265 
2266   void profileImpl(llvm::FoldingSetNodeID &id) const override {
2267     // 2 is distinguishable from all pointers and byref flags
2268     id.AddInteger(2);
2269   }
2270 };
2271 
2272 /// Emits the copy/dispose helpers for a __block variable with a
2273 /// nontrivial copy constructor or destructor.
2274 class CXXByrefHelpers final : public BlockByrefHelpers {
2275   QualType VarType;
2276   const Expr *CopyExpr;
2277 
2278 public:
2279   CXXByrefHelpers(CharUnits alignment, QualType type,
2280                   const Expr *copyExpr)
2281     : BlockByrefHelpers(alignment), VarType(type), CopyExpr(copyExpr) {}
2282 
2283   bool needsCopy() const override { return CopyExpr != nullptr; }
2284   void emitCopy(CodeGenFunction &CGF, Address destField,
2285                 Address srcField) override {
2286     if (!CopyExpr) return;
2287     CGF.EmitSynthesizedCXXCopyCtor(destField, srcField, CopyExpr);
2288   }
2289 
2290   void emitDispose(CodeGenFunction &CGF, Address field) override {
2291     EHScopeStack::stable_iterator cleanupDepth = CGF.EHStack.stable_begin();
2292     CGF.PushDestructorCleanup(VarType, field);
2293     CGF.PopCleanupBlocks(cleanupDepth);
2294   }
2295 
2296   void profileImpl(llvm::FoldingSetNodeID &id) const override {
2297     id.AddPointer(VarType.getCanonicalType().getAsOpaquePtr());
2298   }
2299 };
2300 
2301 /// Emits the copy/dispose helpers for a __block variable that is a non-trivial
2302 /// C struct.
2303 class NonTrivialCStructByrefHelpers final : public BlockByrefHelpers {
2304   QualType VarType;
2305 
2306 public:
2307   NonTrivialCStructByrefHelpers(CharUnits alignment, QualType type)
2308     : BlockByrefHelpers(alignment), VarType(type) {}
2309 
2310   void emitCopy(CodeGenFunction &CGF, Address destField,
2311                 Address srcField) override {
2312     CGF.callCStructMoveConstructor(CGF.MakeAddrLValue(destField, VarType),
2313                                    CGF.MakeAddrLValue(srcField, VarType));
2314   }
2315 
2316   bool needsDispose() const override {
2317     return VarType.isDestructedType();
2318   }
2319 
2320   void emitDispose(CodeGenFunction &CGF, Address field) override {
2321     EHScopeStack::stable_iterator cleanupDepth = CGF.EHStack.stable_begin();
2322     CGF.pushDestroy(VarType.isDestructedType(), field, VarType);
2323     CGF.PopCleanupBlocks(cleanupDepth);
2324   }
2325 
2326   void profileImpl(llvm::FoldingSetNodeID &id) const override {
2327     id.AddPointer(VarType.getCanonicalType().getAsOpaquePtr());
2328   }
2329 };
2330 } // end anonymous namespace
2331 
2332 static llvm::Constant *
2333 generateByrefCopyHelper(CodeGenFunction &CGF, const BlockByrefInfo &byrefInfo,
2334                         BlockByrefHelpers &generator) {
2335   ASTContext &Context = CGF.getContext();
2336 
2337   QualType ReturnTy = Context.VoidTy;
2338 
2339   FunctionArgList args;
2340   ImplicitParamDecl Dst(Context, Context.VoidPtrTy, ImplicitParamDecl::Other);
2341   args.push_back(&Dst);
2342 
2343   ImplicitParamDecl Src(Context, Context.VoidPtrTy, ImplicitParamDecl::Other);
2344   args.push_back(&Src);
2345 
2346   const CGFunctionInfo &FI =
2347       CGF.CGM.getTypes().arrangeBuiltinFunctionDeclaration(ReturnTy, args);
2348 
2349   llvm::FunctionType *LTy = CGF.CGM.getTypes().GetFunctionType(FI);
2350 
  // FIXME: We'd like to make these mergeable by content, with
  // internal linkage.
2353   llvm::Function *Fn =
2354     llvm::Function::Create(LTy, llvm::GlobalValue::InternalLinkage,
2355                            "__Block_byref_object_copy_", &CGF.CGM.getModule());
2356 
2357   SmallVector<QualType, 2> ArgTys;
2358   ArgTys.push_back(Context.VoidPtrTy);
2359   ArgTys.push_back(Context.VoidPtrTy);
2360 
2361   CGF.CGM.SetInternalFunctionAttributes(GlobalDecl(), Fn, FI);
2362 
2363   CGF.StartFunction(GlobalDecl(), ReturnTy, Fn, FI, args);
  // Create a scope with an artificial location for the body of this function.
2365   auto AL = ApplyDebugLocation::CreateArtificial(CGF);
2366 
2367   if (generator.needsCopy()) {
2368     // dst->x
2369     Address destField = CGF.GetAddrOfLocalVar(&Dst);
2370     destField = Address(CGF.Builder.CreateLoad(destField), byrefInfo.Type,
2371                         byrefInfo.ByrefAlignment);
2372     destField =
2373         CGF.emitBlockByrefAddress(destField, byrefInfo, false, "dest-object");
2374 
2375     // src->x
2376     Address srcField = CGF.GetAddrOfLocalVar(&Src);
2377     srcField = Address(CGF.Builder.CreateLoad(srcField), byrefInfo.Type,
2378                        byrefInfo.ByrefAlignment);
2379     srcField =
2380         CGF.emitBlockByrefAddress(srcField, byrefInfo, false, "src-object");
2381 
2382     generator.emitCopy(CGF, destField, srcField);
2383   }
2384 
2385   CGF.FinishFunction();
2386 
2387   return llvm::ConstantExpr::getBitCast(Fn, CGF.Int8PtrTy);
2388 }
2389 
2390 /// Build the copy helper for a __block variable.
2391 static llvm::Constant *buildByrefCopyHelper(CodeGenModule &CGM,
2392                                             const BlockByrefInfo &byrefInfo,
2393                                             BlockByrefHelpers &generator) {
2394   CodeGenFunction CGF(CGM);
2395   return generateByrefCopyHelper(CGF, byrefInfo, generator);
2396 }
2397 
2398 /// Generate code for a __block variable's dispose helper.
2399 static llvm::Constant *
2400 generateByrefDisposeHelper(CodeGenFunction &CGF,
2401                            const BlockByrefInfo &byrefInfo,
2402                            BlockByrefHelpers &generator) {
2403   ASTContext &Context = CGF.getContext();
2404   QualType R = Context.VoidTy;
2405 
2406   FunctionArgList args;
2407   ImplicitParamDecl Src(CGF.getContext(), Context.VoidPtrTy,
2408                         ImplicitParamDecl::Other);
2409   args.push_back(&Src);
2410 
2411   const CGFunctionInfo &FI =
2412     CGF.CGM.getTypes().arrangeBuiltinFunctionDeclaration(R, args);
2413 
2414   llvm::FunctionType *LTy = CGF.CGM.getTypes().GetFunctionType(FI);
2415 
  // FIXME: We'd like to make these mergeable by content, with
  // internal linkage.
2418   llvm::Function *Fn =
2419     llvm::Function::Create(LTy, llvm::GlobalValue::InternalLinkage,
2420                            "__Block_byref_object_dispose_",
2421                            &CGF.CGM.getModule());
2422 
2423   SmallVector<QualType, 1> ArgTys;
2424   ArgTys.push_back(Context.VoidPtrTy);
2425 
2426   CGF.CGM.SetInternalFunctionAttributes(GlobalDecl(), Fn, FI);
2427 
2428   CGF.StartFunction(GlobalDecl(), R, Fn, FI, args);
  // Create a scope with an artificial location for the body of this function.
2430   auto AL = ApplyDebugLocation::CreateArtificial(CGF);
2431 
2432   if (generator.needsDispose()) {
2433     Address addr = CGF.GetAddrOfLocalVar(&Src);
2434     addr = Address(CGF.Builder.CreateLoad(addr), byrefInfo.Type,
2435                    byrefInfo.ByrefAlignment);
2436     addr = CGF.emitBlockByrefAddress(addr, byrefInfo, false, "object");
2437 
2438     generator.emitDispose(CGF, addr);
2439   }
2440 
2441   CGF.FinishFunction();
2442 
2443   return llvm::ConstantExpr::getBitCast(Fn, CGF.Int8PtrTy);
2444 }
2445 
2446 /// Build the dispose helper for a __block variable.
2447 static llvm::Constant *buildByrefDisposeHelper(CodeGenModule &CGM,
2448                                                const BlockByrefInfo &byrefInfo,
2449                                                BlockByrefHelpers &generator) {
2450   CodeGenFunction CGF(CGM);
2451   return generateByrefDisposeHelper(CGF, byrefInfo, generator);
2452 }
2453 
2454 /// Lazily build the copy and dispose helpers for a __block variable
2455 /// with the given information.
2456 template <class T>
2457 static T *buildByrefHelpers(CodeGenModule &CGM, const BlockByrefInfo &byrefInfo,
2458                             T &&generator) {
2459   llvm::FoldingSetNodeID id;
2460   generator.Profile(id);
2461 
2462   void *insertPos;
2463   BlockByrefHelpers *node
2464     = CGM.ByrefHelpersCache.FindNodeOrInsertPos(id, insertPos);
2465   if (node) return static_cast<T*>(node);
2466 
2467   generator.CopyHelper = buildByrefCopyHelper(CGM, byrefInfo, generator);
2468   generator.DisposeHelper = buildByrefDisposeHelper(CGM, byrefInfo, generator);
2469 
2470   T *copy = new (CGM.getContext()) T(std::forward<T>(generator));
2471   CGM.ByrefHelpersCache.InsertNode(copy, insertPos);
2472   return copy;
2473 }
2474 
2475 /// Build the copy and dispose helpers for the given __block variable
2476 /// emission.  Places the helpers in the global cache.  Returns null
2477 /// if no helpers are required.
2478 BlockByrefHelpers *
2479 CodeGenFunction::buildByrefHelpers(llvm::StructType &byrefType,
2480                                    const AutoVarEmission &emission) {
2481   const VarDecl &var = *emission.Variable;
2482   assert(var.isEscapingByref() &&
2483          "only escaping __block variables need byref helpers");
2484 
2485   QualType type = var.getType();
2486 
2487   auto &byrefInfo = getBlockByrefInfo(&var);
2488 
2489   // The alignment we care about for the purposes of uniquing byref
2490   // helpers is the alignment of the actual byref value field.
2491   CharUnits valueAlignment =
2492     byrefInfo.ByrefAlignment.alignmentAtOffset(byrefInfo.FieldOffset);
2493 
2494   if (const CXXRecordDecl *record = type->getAsCXXRecordDecl()) {
2495     const Expr *copyExpr =
2496         CGM.getContext().getBlockVarCopyInit(&var).getCopyExpr();
2497     if (!copyExpr && record->hasTrivialDestructor()) return nullptr;
2498 
2499     return ::buildByrefHelpers(
2500         CGM, byrefInfo, CXXByrefHelpers(valueAlignment, type, copyExpr));
2501   }
2502 
  // If the type is a C struct that is non-trivial to destructively move or
  // destroy, build the copy and dispose helpers.
2505   if (type.isNonTrivialToPrimitiveDestructiveMove() == QualType::PCK_Struct ||
2506       type.isDestructedType() == QualType::DK_nontrivial_c_struct)
2507     return ::buildByrefHelpers(
2508         CGM, byrefInfo, NonTrivialCStructByrefHelpers(valueAlignment, type));
2509 
  // Otherwise, if we don't have a retainable type, there's nothing to do.
2512   if (!type->isObjCRetainableType()) return nullptr;
2513 
2514   Qualifiers qs = type.getQualifiers();
2515 
2516   // If we have lifetime, that dominates.
2517   if (Qualifiers::ObjCLifetime lifetime = qs.getObjCLifetime()) {
2518     switch (lifetime) {
2519     case Qualifiers::OCL_None: llvm_unreachable("impossible");
2520 
2521     // These are just bits as far as the runtime is concerned.
2522     case Qualifiers::OCL_ExplicitNone:
2523     case Qualifiers::OCL_Autoreleasing:
2524       return nullptr;
2525 
2526     // Tell the runtime that this is ARC __weak, called by the
2527     // byref routines.
2528     case Qualifiers::OCL_Weak:
2529       return ::buildByrefHelpers(CGM, byrefInfo,
2530                                  ARCWeakByrefHelpers(valueAlignment));
2531 
2532     // ARC __strong __block variables need to be retained.
2533     case Qualifiers::OCL_Strong:
2534       // Block pointers need to be copied, and there's no direct
2535       // transfer possible.
2536       if (type->isBlockPointerType()) {
2537         return ::buildByrefHelpers(CGM, byrefInfo,
2538                                    ARCStrongBlockByrefHelpers(valueAlignment));
2539 
2540       // Otherwise, we transfer ownership of the retain from the stack
2541       // to the heap.
2542       } else {
2543         return ::buildByrefHelpers(CGM, byrefInfo,
2544                                    ARCStrongByrefHelpers(valueAlignment));
2545       }
2546     }
2547     llvm_unreachable("fell out of lifetime switch!");
2548   }
2549 
2550   BlockFieldFlags flags;
2551   if (type->isBlockPointerType()) {
2552     flags |= BLOCK_FIELD_IS_BLOCK;
2553   } else if (CGM.getContext().isObjCNSObjectType(type) ||
2554              type->isObjCObjectPointerType()) {
2555     flags |= BLOCK_FIELD_IS_OBJECT;
2556   } else {
2557     return nullptr;
2558   }
2559 
2560   if (type.isObjCGCWeak())
2561     flags |= BLOCK_FIELD_IS_WEAK;
2562 
2563   return ::buildByrefHelpers(CGM, byrefInfo,
2564                              ObjectByrefHelpers(valueAlignment, flags));
2565 }
2566 
2567 Address CodeGenFunction::emitBlockByrefAddress(Address baseAddr,
2568                                                const VarDecl *var,
2569                                                bool followForward) {
2570   auto &info = getBlockByrefInfo(var);
2571   return emitBlockByrefAddress(baseAddr, info, followForward, var->getName());
2572 }
2573 
2574 Address CodeGenFunction::emitBlockByrefAddress(Address baseAddr,
2575                                                const BlockByrefInfo &info,
2576                                                bool followForward,
2577                                                const llvm::Twine &name) {
2578   // Chase the forwarding address if requested.
2579   if (followForward) {
2580     Address forwardingAddr = Builder.CreateStructGEP(baseAddr, 1, "forwarding");
2581     baseAddr = Address(Builder.CreateLoad(forwardingAddr), info.Type,
2582                        info.ByrefAlignment);
2583   }
2584 
2585   return Builder.CreateStructGEP(baseAddr, info.FieldIndex, name);
2586 }
2587 
/// getBlockByrefInfo - This routine lowers a __block variable declared as T x
2589 ///   into:
2590 ///
2591 ///      struct {
2592 ///        void *__isa;
2593 ///        void *__forwarding;
2594 ///        int32_t __flags;
2595 ///        int32_t __size;
2596 ///        void *__copy_helper;       // only if needed
2597 ///        void *__destroy_helper;    // only if needed
2598 ///        void *__byref_variable_layout;// only if needed
2599 ///        char padding[X];           // only if needed
2600 ///        T x;
2601 ///      } x
2602 ///
2603 const BlockByrefInfo &CodeGenFunction::getBlockByrefInfo(const VarDecl *D) {
2604   auto it = BlockByrefInfos.find(D);
2605   if (it != BlockByrefInfos.end())
2606     return it->second;
2607 
2608   llvm::StructType *byrefType =
2609     llvm::StructType::create(getLLVMContext(),
2610                              "struct.__block_byref_" + D->getNameAsString());
2611 
2612   QualType Ty = D->getType();
2613 
2614   CharUnits size;
2615   SmallVector<llvm::Type *, 8> types;
2616 
2617   // void *__isa;
2618   types.push_back(Int8PtrTy);
2619   size += getPointerSize();
2620 
2621   // void *__forwarding;
2622   types.push_back(llvm::PointerType::getUnqual(byrefType));
2623   size += getPointerSize();
2624 
2625   // int32_t __flags;
2626   types.push_back(Int32Ty);
2627   size += CharUnits::fromQuantity(4);
2628 
2629   // int32_t __size;
2630   types.push_back(Int32Ty);
2631   size += CharUnits::fromQuantity(4);
2632 
2633   // Note that this must match *exactly* the logic in buildByrefHelpers.
2634   bool hasCopyAndDispose = getContext().BlockRequiresCopying(Ty, D);
2635   if (hasCopyAndDispose) {
2636     /// void *__copy_helper;
2637     types.push_back(Int8PtrTy);
2638     size += getPointerSize();
2639 
2640     /// void *__destroy_helper;
2641     types.push_back(Int8PtrTy);
2642     size += getPointerSize();
2643   }
2644 
2645   bool HasByrefExtendedLayout = false;
2646   Qualifiers::ObjCLifetime Lifetime = Qualifiers::OCL_None;
2647   if (getContext().getByrefLifetime(Ty, Lifetime, HasByrefExtendedLayout) &&
2648       HasByrefExtendedLayout) {
    // void *__byref_variable_layout;
    types.push_back(Int8PtrTy);
    size += getPointerSize();
2652   }
2653 
2654   // T x;
2655   llvm::Type *varTy = ConvertTypeForMem(Ty);
2656 
2657   bool packed = false;
2658   CharUnits varAlign = getContext().getDeclAlign(D);
2659   CharUnits varOffset = size.alignTo(varAlign);
2660 
2661   // We may have to insert padding.
2662   if (varOffset != size) {
2663     llvm::Type *paddingTy =
2664       llvm::ArrayType::get(Int8Ty, (varOffset - size).getQuantity());
2665 
2666     types.push_back(paddingTy);
2667     size = varOffset;
2668 
2669   // Conversely, we might have to prevent LLVM from inserting padding.
2670   } else if (CGM.getDataLayout().getABITypeAlign(varTy) >
2671              uint64_t(varAlign.getQuantity())) {
2672     packed = true;
2673   }
2674   types.push_back(varTy);
2675 
2676   byrefType->setBody(types, packed);
2677 
2678   BlockByrefInfo info;
2679   info.Type = byrefType;
2680   info.FieldIndex = types.size() - 1;
2681   info.FieldOffset = varOffset;
2682   info.ByrefAlignment = std::max(varAlign, getPointerAlign());
2683 
2684   auto pair = BlockByrefInfos.insert({D, info});
2685   assert(pair.second && "info was inserted recursively?");
2686   return pair.first->second;
2687 }
2688 
2689 /// Initialize the structural components of a __block variable, i.e.
2690 /// everything but the actual object.
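///
/// For a variable that needs no copy/dispose helpers and no extended layout,
/// the stores emitted here amount to (roughly):
///
///   byref.isa        <- null (or 1 for __weak under ObjC GC)
///   byref.forwarding <- the address of the byref alloca itself
///   byref.flags      <- 0, with BLOCK_BYREF_HAS_COPY_DISPOSE and/or layout
///                       bits OR'd in when they apply
///   byref.size       <- the store size of the byref struct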
2691 void CodeGenFunction::emitByrefStructureInit(const AutoVarEmission &emission) {
2692   // Find the address of the local.
2693   Address addr = emission.Addr;
2694 
2695   // That's an alloca of the byref structure type.
2696   llvm::StructType *byrefType = cast<llvm::StructType>(addr.getElementType());
2697 
2698   unsigned nextHeaderIndex = 0;
2699   CharUnits nextHeaderOffset;
2700   auto storeHeaderField = [&](llvm::Value *value, CharUnits fieldSize,
2701                               const Twine &name) {
2702     auto fieldAddr = Builder.CreateStructGEP(addr, nextHeaderIndex, name);
2703     Builder.CreateStore(value, fieldAddr);
2704 
2705     nextHeaderIndex++;
2706     nextHeaderOffset += fieldSize;
2707   };
2708 
2709   // Build the byref helpers if necessary.  This is null if we don't need any.
2710   BlockByrefHelpers *helpers = buildByrefHelpers(*byrefType, emission);
2711 
2712   const VarDecl &D = *emission.Variable;
2713   QualType type = D.getType();
2714 
2715   bool HasByrefExtendedLayout = false;
2716   Qualifiers::ObjCLifetime ByrefLifetime = Qualifiers::OCL_None;
2717   bool ByRefHasLifetime =
2718     getContext().getByrefLifetime(type, ByrefLifetime, HasByrefExtendedLayout);
2719 
2720   llvm::Value *V;
2721 
2722   // Initialize the 'isa', which is just 0 or 1.
2723   int isa = 0;
2724   if (type.isObjCGCWeak())
2725     isa = 1;
2726   V = Builder.CreateIntToPtr(Builder.getInt32(isa), Int8PtrTy, "isa");
2727   storeHeaderField(V, getPointerSize(), "byref.isa");
2728 
2729   // Store the address of the variable into its own forwarding pointer.
2730   storeHeaderField(addr.getPointer(), getPointerSize(), "byref.forwarding");
2731 
  // Blocks ABI: the flags field is set to 0 if no helper functions are
  // needed, or to BLOCK_BYREF_HAS_COPY_DISPOSE if they are.  When the
  // variable's lifetime is known, one of the BLOCK_BYREF_LAYOUT_* values
  // below is also OR'd into the layout bits.
2735   BlockFlags flags;
2736   if (helpers) flags |= BLOCK_BYREF_HAS_COPY_DISPOSE;
  if (ByRefHasLifetime) {
    if (HasByrefExtendedLayout)
      flags |= BLOCK_BYREF_LAYOUT_EXTENDED;
    else switch (ByrefLifetime) {
    case Qualifiers::OCL_Strong:
      flags |= BLOCK_BYREF_LAYOUT_STRONG;
      break;
    case Qualifiers::OCL_Weak:
      flags |= BLOCK_BYREF_LAYOUT_WEAK;
      break;
    case Qualifiers::OCL_ExplicitNone:
      flags |= BLOCK_BYREF_LAYOUT_UNRETAINED;
      break;
    case Qualifiers::OCL_None:
      if (!type->isObjCObjectPointerType() && !type->isBlockPointerType())
        flags |= BLOCK_BYREF_LAYOUT_NON_OBJECT;
      break;
    default:
      break;
    }
2756     if (CGM.getLangOpts().ObjCGCBitmapPrint) {
      printf("\n Inline flag for BYREF variable layout (%d):",
             flags.getBitMask());
2758       if (flags & BLOCK_BYREF_HAS_COPY_DISPOSE)
2759         printf(" BLOCK_BYREF_HAS_COPY_DISPOSE");
2760       if (flags & BLOCK_BYREF_LAYOUT_MASK) {
2761         BlockFlags ThisFlag(flags.getBitMask() & BLOCK_BYREF_LAYOUT_MASK);
        if (ThisFlag == BLOCK_BYREF_LAYOUT_EXTENDED)
          printf(" BLOCK_BYREF_LAYOUT_EXTENDED");
        if (ThisFlag == BLOCK_BYREF_LAYOUT_STRONG)
2765           printf(" BLOCK_BYREF_LAYOUT_STRONG");
2766         if (ThisFlag == BLOCK_BYREF_LAYOUT_WEAK)
2767           printf(" BLOCK_BYREF_LAYOUT_WEAK");
2768         if (ThisFlag == BLOCK_BYREF_LAYOUT_UNRETAINED)
2769           printf(" BLOCK_BYREF_LAYOUT_UNRETAINED");
2770         if (ThisFlag == BLOCK_BYREF_LAYOUT_NON_OBJECT)
2771           printf(" BLOCK_BYREF_LAYOUT_NON_OBJECT");
2772       }
2773       printf("\n");
2774     }
2775   }
2776   storeHeaderField(llvm::ConstantInt::get(IntTy, flags.getBitMask()),
2777                    getIntSize(), "byref.flags");
2778 
2779   CharUnits byrefSize = CGM.GetTargetTypeStoreSize(byrefType);
2780   V = llvm::ConstantInt::get(IntTy, byrefSize.getQuantity());
2781   storeHeaderField(V, getIntSize(), "byref.size");
2782 
2783   if (helpers) {
2784     storeHeaderField(helpers->CopyHelper, getPointerSize(),
2785                      "byref.copyHelper");
2786     storeHeaderField(helpers->DisposeHelper, getPointerSize(),
2787                      "byref.disposeHelper");
2788   }
2789 
2790   if (ByRefHasLifetime && HasByrefExtendedLayout) {
2791     auto layoutInfo = CGM.getObjCRuntime().BuildByrefLayout(CGM, type);
2792     storeHeaderField(layoutInfo, getPointerSize(), "byref.layout");
2793   }
2794 }
2795 
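// BuildBlockRelease lowers a block or byref release to a call to the blocks
// runtime's _Block_object_dispose.  For a __block variable this emits roughly
// the following (flag value illustrative; see BlockFieldFlags in CGBlocks.h):
//
//   call void @_Block_object_dispose(ptr %byref, i32 8) ; BLOCK_FIELD_IS_BYREF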
2796 void CodeGenFunction::BuildBlockRelease(llvm::Value *V, BlockFieldFlags flags,
2797                                         bool CanThrow) {
2798   llvm::FunctionCallee F = CGM.getBlockObjectDispose();
2799   llvm::Value *args[] = {
2800     Builder.CreateBitCast(V, Int8PtrTy),
2801     llvm::ConstantInt::get(Int32Ty, flags.getBitMask())
2802   };
2803 
2804   if (CanThrow)
2805     EmitRuntimeCallOrInvoke(F, args);
2806   else
2807     EmitNounwindRuntimeCall(F, args);
2808 }
2809 
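// Push an EH cleanup that releases a __block variable when its enclosing
// scope exits.  The cleanup calls BuildBlockRelease above; when
// LoadBlockVarAddr is set, the byref pointer is first loaded from Addr, since
// the variable may have been moved to the heap by the time the cleanup runs.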
2810 void CodeGenFunction::enterByrefCleanup(CleanupKind Kind, Address Addr,
2811                                         BlockFieldFlags Flags,
2812                                         bool LoadBlockVarAddr, bool CanThrow) {
2813   EHStack.pushCleanup<CallBlockRelease>(Kind, Addr, Flags, LoadBlockVarAddr,
2814                                         CanThrow);
2815 }
2816 
2817 /// Adjust the declaration of something from the blocks API.
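/// On COFF targets this assigns dllimport/dllexport storage classes; under
/// -fblocks-runtime-optional it weakens plain external declarations so that a
/// missing runtime resolves to null.  E.g. (a sketch, not the exact IR):
///
///   @_NSConcreteGlobalBlock = extern_weak global ptr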
2818 static void configureBlocksRuntimeObject(CodeGenModule &CGM,
2819                                          llvm::Constant *C) {
2820   auto *GV = cast<llvm::GlobalValue>(C->stripPointerCasts());
2821 
2822   if (CGM.getTarget().getTriple().isOSBinFormatCOFF()) {
2823     IdentifierInfo &II = CGM.getContext().Idents.get(C->getName());
2824     TranslationUnitDecl *TUDecl = CGM.getContext().getTranslationUnitDecl();
2825     DeclContext *DC = TranslationUnitDecl::castToDeclContext(TUDecl);
2826 
2827     assert((isa<llvm::Function>(C->stripPointerCasts()) ||
2828             isa<llvm::GlobalVariable>(C->stripPointerCasts())) &&
2829            "expected Function or GlobalVariable");
2830 
2831     const NamedDecl *ND = nullptr;
2832     for (const auto *Result : DC->lookup(&II))
2833       if ((ND = dyn_cast<FunctionDecl>(Result)) ||
2834           (ND = dyn_cast<VarDecl>(Result)))
2835         break;
2836 
2837     // TODO: support static blocks runtime
2838     if (GV->isDeclaration() && (!ND || !ND->hasAttr<DLLExportAttr>())) {
2839       GV->setDLLStorageClass(llvm::GlobalValue::DLLImportStorageClass);
2840       GV->setLinkage(llvm::GlobalValue::ExternalLinkage);
2841     } else {
2842       GV->setDLLStorageClass(llvm::GlobalValue::DLLExportStorageClass);
2843       GV->setLinkage(llvm::GlobalValue::ExternalLinkage);
2844     }
2845   }
2846 
2847   if (CGM.getLangOpts().BlocksRuntimeOptional && GV->isDeclaration() &&
2848       GV->hasExternalLinkage())
2849     GV->setLinkage(llvm::GlobalValue::ExternalWeakLinkage);
2850 
2851   CGM.setDSOLocal(GV);
2852 }
2853 
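// The function types built below mirror the blocks runtime entry points,
// which are declared (in the BlocksRuntime headers) approximately as:
//
//   void _Block_object_dispose(const void *object, const int flags);
//   void _Block_object_assign(void *dest, const void *object, const int flags);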
2854 llvm::FunctionCallee CodeGenModule::getBlockObjectDispose() {
2855   if (BlockObjectDispose)
2856     return BlockObjectDispose;
2857 
2858   llvm::Type *args[] = { Int8PtrTy, Int32Ty };
  llvm::FunctionType *fty = llvm::FunctionType::get(VoidTy, args, false);
2861   BlockObjectDispose = CreateRuntimeFunction(fty, "_Block_object_dispose");
2862   configureBlocksRuntimeObject(
2863       *this, cast<llvm::Constant>(BlockObjectDispose.getCallee()));
2864   return BlockObjectDispose;
2865 }
2866 
2867 llvm::FunctionCallee CodeGenModule::getBlockObjectAssign() {
2868   if (BlockObjectAssign)
2869     return BlockObjectAssign;
2870 
2871   llvm::Type *args[] = { Int8PtrTy, Int8PtrTy, Int32Ty };
  llvm::FunctionType *fty = llvm::FunctionType::get(VoidTy, args, false);
2874   BlockObjectAssign = CreateRuntimeFunction(fty, "_Block_object_assign");
2875   configureBlocksRuntimeObject(
2876       *this, cast<llvm::Constant>(BlockObjectAssign.getCallee()));
2877   return BlockObjectAssign;
2878 }
2879 
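// _NSConcreteGlobalBlock and _NSConcreteStackBlock are the isa pointers the
// runtime provides for block literals; they are only referenced by address
// here, so they are declared as opaque pointer-typed globals.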
2880 llvm::Constant *CodeGenModule::getNSConcreteGlobalBlock() {
2881   if (NSConcreteGlobalBlock)
2882     return NSConcreteGlobalBlock;
2883 
2884   NSConcreteGlobalBlock = GetOrCreateLLVMGlobal(
2885       "_NSConcreteGlobalBlock", Int8PtrTy, LangAS::Default, nullptr);
2886   configureBlocksRuntimeObject(*this, NSConcreteGlobalBlock);
2887   return NSConcreteGlobalBlock;
2888 }
2889 
2890 llvm::Constant *CodeGenModule::getNSConcreteStackBlock() {
2891   if (NSConcreteStackBlock)
2892     return NSConcreteStackBlock;
2893 
2894   NSConcreteStackBlock = GetOrCreateLLVMGlobal(
2895       "_NSConcreteStackBlock", Int8PtrTy, LangAS::Default, nullptr);
2896   configureBlocksRuntimeObject(*this, NSConcreteStackBlock);
2897   return NSConcreteStackBlock;
2898 }
2899