//===-- CGCleanup.h - Classes for cleanups IR generation --------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// These classes support the generation of LLVM IR for cleanups.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CLANG_LIB_CODEGEN_CGCLEANUP_H
#define LLVM_CLANG_LIB_CODEGEN_CGCLEANUP_H

#include "EHScopeStack.h"

#include "Address.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/BasicBlock.h"

namespace llvm {
class BasicBlock;
class Value;
class ConstantInt;
} // namespace llvm

namespace clang {
class FunctionDecl;
namespace CodeGen {
class CodeGenModule;
class CodeGenFunction;

/// The MS C++ ABI needs a pointer to RTTI data plus some flags to describe the
/// type of a catch handler, so we use this wrapper.
struct CatchTypeInfo {
  llvm::Constant *RTTI;
  unsigned Flags;
};

/// A protected scope for zero-cost EH handling.
class EHScope {
  llvm::BasicBlock *CachedLandingPad;
  llvm::BasicBlock *CachedEHDispatchBlock;

  EHScopeStack::stable_iterator EnclosingEHScope;

  class CommonBitFields {
    friend class EHScope;
    unsigned Kind : 3;
  };
  enum { NumCommonBits = 3 };

protected:
  class CatchBitFields {
    friend class EHCatchScope;
    unsigned : NumCommonBits;

    unsigned NumHandlers : 32 - NumCommonBits;
  };

  class CleanupBitFields {
    friend class EHCleanupScope;
    unsigned : NumCommonBits;

    /// Whether this cleanup needs to be run along normal edges.
    unsigned IsNormalCleanup : 1;

    /// Whether this cleanup needs to be run along exception edges.
    unsigned IsEHCleanup : 1;

    /// Whether this cleanup is currently active.
    unsigned IsActive : 1;

    /// Whether this cleanup is a lifetime marker.
    unsigned IsLifetimeMarker : 1;

    /// Whether the normal cleanup should test the activation flag.
    unsigned TestFlagInNormalCleanup : 1;

    /// Whether the EH cleanup should test the activation flag.
    unsigned TestFlagInEHCleanup : 1;

    /// The amount of extra storage needed by the Cleanup.
    /// Always a multiple of the scope-stack alignment.
    unsigned CleanupSize : 12;
  };

  class FilterBitFields {
    friend class EHFilterScope;
    unsigned : NumCommonBits;

    unsigned NumFilters : 32 - NumCommonBits;
  };

  union {
    CommonBitFields CommonBits;
    CatchBitFields CatchBits;
    CleanupBitFields CleanupBits;
    FilterBitFields FilterBits;
  };

public:
  enum Kind { Cleanup, Catch, Terminate, Filter };

  EHScope(Kind kind, EHScopeStack::stable_iterator enclosingEHScope)
    : CachedLandingPad(nullptr), CachedEHDispatchBlock(nullptr),
      EnclosingEHScope(enclosingEHScope) {
    CommonBits.Kind = kind;
  }

  Kind getKind() const { return static_cast<Kind>(CommonBits.Kind); }

  llvm::BasicBlock *getCachedLandingPad() const {
    return CachedLandingPad;
  }

  void setCachedLandingPad(llvm::BasicBlock *block) {
    CachedLandingPad = block;
  }

  llvm::BasicBlock *getCachedEHDispatchBlock() const {
    return CachedEHDispatchBlock;
  }

  void setCachedEHDispatchBlock(llvm::BasicBlock *block) {
    CachedEHDispatchBlock = block;
  }

  bool hasEHBranches() const {
    if (llvm::BasicBlock *block = getCachedEHDispatchBlock())
      return !block->use_empty();
    return false;
  }

  EHScopeStack::stable_iterator getEnclosingEHScope() const {
    return EnclosingEHScope;
  }
};

/// A scope which attempts to handle some, possibly all, types of
/// exceptions.
///
/// Objective C \@finally blocks are represented using a cleanup scope
/// after the catch scope.
class EHCatchScope : public EHScope {
  // In effect, we have a flexible array member
  //   Handler Handlers[0];
  // But that's only standard in C99, not C++, so we have to do
  // annoying pointer arithmetic instead.

public:
  struct Handler {
    /// A type info value, or null (C++ null, not an LLVM null pointer)
    /// for a catch-all.
    CatchTypeInfo Type;

    /// The catch handler for this type.
    llvm::BasicBlock *Block;

    bool isCatchAll() const { return Type.RTTI == nullptr; }
  };

private:
  friend class EHScopeStack;

  Handler *getHandlers() {
    return reinterpret_cast<Handler*>(this+1);
  }

  const Handler *getHandlers() const {
    return reinterpret_cast<const Handler*>(this+1);
  }

public:
  static size_t getSizeForNumHandlers(unsigned N) {
    return sizeof(EHCatchScope) + N * sizeof(Handler);
  }

  EHCatchScope(unsigned numHandlers,
               EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Catch, enclosingEHScope) {
    CatchBits.NumHandlers = numHandlers;
    assert(CatchBits.NumHandlers == numHandlers && "NumHandlers overflow?");
  }

  unsigned getNumHandlers() const {
    return CatchBits.NumHandlers;
  }

  void setCatchAllHandler(unsigned I, llvm::BasicBlock *Block) {
    setHandler(I, CatchTypeInfo{nullptr, 0}, Block);
  }

  void setHandler(unsigned I, llvm::Constant *Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I].Type = CatchTypeInfo{Type, 0};
    getHandlers()[I].Block = Block;
  }

  void setHandler(unsigned I, CatchTypeInfo Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I].Type = Type;
    getHandlers()[I].Block = Block;
  }

  const Handler &getHandler(unsigned I) const {
    assert(I < getNumHandlers());
    return getHandlers()[I];
  }

  // Clear all handler blocks.
  // FIXME: it's better to always call clearHandlerBlocks in DTOR and have a
  // 'takeHandler' or some such function which removes ownership from the
  // EHCatchScope object if the handlers should live longer than EHCatchScope.
  void clearHandlerBlocks() {
    for (unsigned I = 0, N = getNumHandlers(); I != N; ++I)
      delete getHandler(I).Block;
  }

  typedef const Handler *iterator;
  iterator begin() const { return getHandlers(); }
  iterator end() const { return getHandlers() + getNumHandlers(); }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Catch;
  }
};
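
// A minimal usage sketch (not part of this class): this mirrors the general
// shape of EHScopeStack::pushCatch in CGCleanup.cpp, but the local names
// ('buffer', 'rtti', 'handlerBB', 'catchAllBB') are illustrative assumptions.
//
//   char *buffer = allocate(EHCatchScope::getSizeForNumHandlers(2));
//   auto *scope = new (buffer) EHCatchScope(2, getInnermostEHScope());
//   scope->setHandler(0, CatchTypeInfo{rtti, /*Flags=*/0}, handlerBB);
//   scope->setCatchAllHandler(1, catchAllBB); // null RTTI means catch-all
//
// The Handler array lives immediately after the EHCatchScope object, which is
// why the size must be computed with getSizeForNumHandlers before placement
// new.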

/// A cleanup scope which generates the cleanup blocks lazily.
class alignas(8) EHCleanupScope : public EHScope {
  /// The nearest normal cleanup scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingNormal;

  /// The nearest EH scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingEH;

  /// The dual entry/exit block along the normal edge.  This is lazily
  /// created if needed before the cleanup is popped.
  llvm::BasicBlock *NormalBlock;

  /// An optional i1 variable indicating whether this cleanup has been
  /// activated yet.
  Address ActiveFlag;

  /// Extra information required for cleanups that have resolved
  /// branches through them.  This has to be allocated on the side
  /// because everything on the cleanup stack has to be trivially
  /// movable.
  struct ExtInfo {
    /// The destinations of normal branch-afters and branch-throughs.
    llvm::SmallPtrSet<llvm::BasicBlock*, 4> Branches;

    /// Normal branch-afters.
    SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
      BranchAfters;
  };
  mutable struct ExtInfo *ExtInfo;

  /// The number of fixups required by enclosing scopes (not including
  /// this one).  If this is the top cleanup scope, all the fixups
  /// from this index onwards belong to this scope.
  unsigned FixupDepth;

  struct ExtInfo &getExtInfo() {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

  const struct ExtInfo &getExtInfo() const {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

public:
  /// Gets the size required for a lazy cleanup scope with the given
  /// cleanup-data requirements.
  static size_t getSizeForCleanupSize(size_t Size) {
    return sizeof(EHCleanupScope) + Size;
  }

  size_t getAllocatedSize() const {
    return sizeof(EHCleanupScope) + CleanupBits.CleanupSize;
  }

  EHCleanupScope(bool isNormal, bool isEH, unsigned cleanupSize,
                 unsigned fixupDepth,
                 EHScopeStack::stable_iterator enclosingNormal,
                 EHScopeStack::stable_iterator enclosingEH)
      : EHScope(EHScope::Cleanup, enclosingEH),
        EnclosingNormal(enclosingNormal), NormalBlock(nullptr),
        ActiveFlag(Address::invalid()), ExtInfo(nullptr),
        FixupDepth(fixupDepth) {
    CleanupBits.IsNormalCleanup = isNormal;
    CleanupBits.IsEHCleanup = isEH;
    CleanupBits.IsActive = true;
    CleanupBits.IsLifetimeMarker = false;
    CleanupBits.TestFlagInNormalCleanup = false;
    CleanupBits.TestFlagInEHCleanup = false;
    CleanupBits.CleanupSize = cleanupSize;

    assert(CleanupBits.CleanupSize == cleanupSize && "cleanup size overflow");
  }

  void Destroy() {
    delete ExtInfo;
  }
  // Objects of EHCleanupScope are not destructed. Use Destroy().
  ~EHCleanupScope() = delete;

  bool isNormalCleanup() const { return CleanupBits.IsNormalCleanup; }
  llvm::BasicBlock *getNormalBlock() const { return NormalBlock; }
  void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; }

  bool isEHCleanup() const { return CleanupBits.IsEHCleanup; }

  bool isActive() const { return CleanupBits.IsActive; }
  void setActive(bool A) { CleanupBits.IsActive = A; }

  bool isLifetimeMarker() const { return CleanupBits.IsLifetimeMarker; }
  void setLifetimeMarker() { CleanupBits.IsLifetimeMarker = true; }

  bool hasActiveFlag() const { return ActiveFlag.isValid(); }
  Address getActiveFlag() const {
    return ActiveFlag;
  }
  void setActiveFlag(Address Var) {
    assert(Var.getAlignment().isOne());
    ActiveFlag = Var;
  }

  void setTestFlagInNormalCleanup() {
    CleanupBits.TestFlagInNormalCleanup = true;
  }
  bool shouldTestFlagInNormalCleanup() const {
    return CleanupBits.TestFlagInNormalCleanup;
  }

  void setTestFlagInEHCleanup() {
    CleanupBits.TestFlagInEHCleanup = true;
  }
  bool shouldTestFlagInEHCleanup() const {
    return CleanupBits.TestFlagInEHCleanup;
  }

  unsigned getFixupDepth() const { return FixupDepth; }
  EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
    return EnclosingNormal;
  }

  size_t getCleanupSize() const { return CleanupBits.CleanupSize; }
  void *getCleanupBuffer() { return this + 1; }

  EHScopeStack::Cleanup *getCleanup() {
    return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer());
  }

  /// True if this cleanup scope has any branch-afters or branch-throughs.
  bool hasBranches() const { return ExtInfo && !ExtInfo->Branches.empty(); }

  /// Add a branch-after to this cleanup scope.  A branch-after is a
  /// branch from a point protected by this (normal) cleanup to a
  /// point in the normal cleanup scope immediately containing it.
  /// For example,
  ///   for (;;) { A a; break; }
  /// contains a branch-after.
  ///
  /// Branch-afters each have their own destination out of the
  /// cleanup, guaranteed distinct from anything else threaded through
  /// it.  Therefore branch-afters usually force a switch after the
  /// cleanup.
  void addBranchAfter(llvm::ConstantInt *Index,
                      llvm::BasicBlock *Block) {
    struct ExtInfo &ExtInfo = getExtInfo();
    if (ExtInfo.Branches.insert(Block).second)
      ExtInfo.BranchAfters.push_back(std::make_pair(Block, Index));
  }

  /// Return the number of unique branch-afters on this scope.
  unsigned getNumBranchAfters() const {
    return ExtInfo ? ExtInfo->BranchAfters.size() : 0;
  }

  llvm::BasicBlock *getBranchAfterBlock(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].first;
  }

  llvm::ConstantInt *getBranchAfterIndex(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].second;
  }

  /// Add a branch-through to this cleanup scope.  A branch-through is
  /// a branch from a scope protected by this (normal) cleanup to an
  /// enclosing scope other than the immediately-enclosing normal
  /// cleanup scope.
  ///
  /// In the following example, the branch through B's scope is a
  /// branch-through, while the branch through A's scope is a
  /// branch-after:
  ///   for (;;) { A a; B b; break; }
  ///
  /// All branch-throughs have a common destination out of the
  /// cleanup, one possibly shared with the fall-through.  Therefore
  /// branch-throughs usually don't force a switch after the cleanup.
  ///
  /// \return true if the branch-through was new to this scope
  bool addBranchThrough(llvm::BasicBlock *Block) {
    return getExtInfo().Branches.insert(Block).second;
  }

  /// Determines if this cleanup scope has any branch throughs.
  bool hasBranchThroughs() const {
    if (!ExtInfo) return false;
    return (ExtInfo->BranchAfters.size() != ExtInfo->Branches.size());
  }

  static bool classof(const EHScope *Scope) {
    return (Scope->getKind() == Cleanup);
  }
};
// NOTE: there's a bunch of different data classes tacked on after an
// EHCleanupScope. It is asserted (in EHScopeStack::pushCleanup*) that
// they don't require greater alignment than ScopeStackAlignment. So,
// EHCleanupScope ought to have alignment equal to that -- not more
// (would be misaligned by the stack allocator), and not less (would
// break the appended classes).
static_assert(alignof(EHCleanupScope) == EHScopeStack::ScopeStackAlignment,
              "EHCleanupScope expected alignment");
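
// A minimal sketch of how a cleanup scope and its trailing Cleanup object are
// laid out when pushed.  This follows the general shape of
// EHScopeStack::pushCleanup; 'MyCleanup', 'Size', and 'Buffer' below are
// illustrative names, not part of this header.
//
//   size_t Size = sizeof(MyCleanup);                 // the Cleanup's storage
//   char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
//   auto *Scope = new (Buffer) EHCleanupScope(/*isNormal=*/true,
//                                             /*isEH=*/true, Size,
//                                             BranchFixups.size(),
//                                             InnermostNormalCleanup,
//                                             InnermostEHScope);
//   new (Scope->getCleanupBuffer()) MyCleanup(/*args*/);
//
// The Cleanup object sits directly after the scope, which is why the NOTE and
// static_assert above insist on ScopeStackAlignment.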

/// An exceptions scope which filters exceptions thrown through it.
/// Only exceptions matching the filter types will be permitted to be
/// thrown.
///
/// This is used to implement C++ exception specifications.
class EHFilterScope : public EHScope {
  // Essentially ends in a flexible array member:
  // llvm::Value *FilterTypes[0];

  llvm::Value **getFilters() {
    return reinterpret_cast<llvm::Value**>(this+1);
  }

  llvm::Value * const *getFilters() const {
    return reinterpret_cast<llvm::Value* const *>(this+1);
  }

public:
  EHFilterScope(unsigned numFilters)
    : EHScope(Filter, EHScopeStack::stable_end()) {
    FilterBits.NumFilters = numFilters;
    assert(FilterBits.NumFilters == numFilters && "NumFilters overflow");
  }

  static size_t getSizeForNumFilters(unsigned numFilters) {
    return sizeof(EHFilterScope) + numFilters * sizeof(llvm::Value*);
  }

  unsigned getNumFilters() const { return FilterBits.NumFilters; }

  void setFilter(unsigned i, llvm::Value *filterValue) {
    assert(i < getNumFilters());
    getFilters()[i] = filterValue;
  }

  llvm::Value *getFilter(unsigned i) const {
    assert(i < getNumFilters());
    return getFilters()[i];
  }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == Filter;
  }
};
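
// A minimal sketch of filter construction (compare EHScopeStack::pushFilter;
// 'buffer', 'NumTypes', and 'TypeInfoValues' are illustrative names).  The
// filter type values are stored in the trailing array after the scope:
//
//   char *buffer = allocate(EHFilterScope::getSizeForNumFilters(NumTypes));
//   auto *filter = new (buffer) EHFilterScope(NumTypes);
//   for (unsigned i = 0; i != NumTypes; ++i)
//     filter->setFilter(i, TypeInfoValues[i]);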

/// An exceptions scope which calls std::terminate if any exception
/// reaches it.
class EHTerminateScope : public EHScope {
public:
  EHTerminateScope(EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Terminate, enclosingEHScope) {}
  static size_t getSize() { return sizeof(EHTerminateScope); }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == Terminate;
  }
};

/// A non-stable pointer into the scope stack.
class EHScopeStack::iterator {
  char *Ptr;

  friend class EHScopeStack;
  explicit iterator(char *Ptr) : Ptr(Ptr) {}

public:
  iterator() : Ptr(nullptr) {}

  EHScope *get() const {
    return reinterpret_cast<EHScope*>(Ptr);
  }

  EHScope *operator->() const { return get(); }
  EHScope &operator*() const { return *get(); }

  iterator &operator++() {
    size_t Size;
    switch (get()->getKind()) {
    case EHScope::Catch:
      Size = EHCatchScope::getSizeForNumHandlers(
          static_cast<const EHCatchScope *>(get())->getNumHandlers());
      break;

    case EHScope::Filter:
      Size = EHFilterScope::getSizeForNumFilters(
          static_cast<const EHFilterScope *>(get())->getNumFilters());
      break;

    case EHScope::Cleanup:
      Size = static_cast<const EHCleanupScope *>(get())->getAllocatedSize();
      break;

    case EHScope::Terminate:
      Size = EHTerminateScope::getSize();
      break;
    }
    Ptr += llvm::alignTo(Size, ScopeStackAlignment);
    return *this;
  }

  iterator next() {
    iterator copy = *this;
    ++copy;
    return copy;
  }

  iterator operator++(int) {
    iterator copy = *this;
    operator++();
    return copy;
  }

  bool encloses(iterator other) const { return Ptr >= other.Ptr; }
  bool strictlyEncloses(iterator other) const { return Ptr > other.Ptr; }

  bool operator==(iterator other) const { return Ptr == other.Ptr; }
  bool operator!=(iterator other) const { return Ptr != other.Ptr; }
};

inline EHScopeStack::iterator EHScopeStack::begin() const {
  return iterator(StartOfData);
}

inline EHScopeStack::iterator EHScopeStack::end() const {
  return iterator(EndOfBuffer);
}
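
// Walking the scope stack with these iterators looks roughly like the sketch
// below ('EHStack' is assumed to be a CodeGenFunction's EHScopeStack).
// operator++ advances by each scope's variable allocated size, so scopes are
// visited from the innermost outward:
//
//   for (EHScopeStack::iterator it = EHStack.begin(), end = EHStack.end();
//        it != end; ++it) {
//     if (auto *cleanup = dyn_cast<EHCleanupScope>(&*it))
//       (void) cleanup->isActive();
//   }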

inline void EHScopeStack::popCatch() {
  assert(!empty() && "popping exception stack when not empty");

  EHCatchScope &scope = cast<EHCatchScope>(*begin());
  InnermostEHScope = scope.getEnclosingEHScope();
  deallocate(EHCatchScope::getSizeForNumHandlers(scope.getNumHandlers()));
}

inline void EHScopeStack::popTerminate() {
  assert(!empty() && "popping exception stack when not empty");

  EHTerminateScope &scope = cast<EHTerminateScope>(*begin());
  InnermostEHScope = scope.getEnclosingEHScope();
  deallocate(EHTerminateScope::getSize());
}

inline EHScopeStack::iterator EHScopeStack::find(stable_iterator sp) const {
  assert(sp.isValid() && "finding invalid savepoint");
  assert(sp.Size <= stable_begin().Size && "finding savepoint after pop");
  return iterator(EndOfBuffer - sp.Size);
}

inline EHScopeStack::stable_iterator
EHScopeStack::stabilize(iterator ir) const {
  assert(StartOfData <= ir.Ptr && ir.Ptr <= EndOfBuffer);
  return stable_iterator(EndOfBuffer - ir.Ptr);
}

/// The exceptions personality for a function.
struct EHPersonality {
  const char *PersonalityFn;

  // If this is non-null, this personality requires a non-standard
  // function for rethrowing an exception after a catchall cleanup.
  // This function must have prototype void(void*).
  const char *CatchallRethrowFn;

  static const EHPersonality &get(CodeGenModule &CGM, const FunctionDecl *FD);
  static const EHPersonality &get(CodeGenFunction &CGF);

  static const EHPersonality GNU_C;
  static const EHPersonality GNU_C_SJLJ;
  static const EHPersonality GNU_C_SEH;
  static const EHPersonality GNU_ObjC;
  static const EHPersonality GNU_ObjC_SJLJ;
  static const EHPersonality GNU_ObjC_SEH;
  static const EHPersonality GNUstep_ObjC;
  static const EHPersonality GNU_ObjCXX;
  static const EHPersonality NeXT_ObjC;
  static const EHPersonality GNU_CPlusPlus;
  static const EHPersonality GNU_CPlusPlus_SJLJ;
  static const EHPersonality GNU_CPlusPlus_SEH;
  static const EHPersonality MSVC_except_handler;
  static const EHPersonality MSVC_C_specific_handler;
  static const EHPersonality MSVC_CxxFrameHandler3;
  static const EHPersonality GNU_Wasm_CPlusPlus;
  static const EHPersonality XL_CPlusPlus;

  /// Does this personality use landingpads or the family of pad instructions
  /// designed to form funclets?
  bool usesFuncletPads() const {
    return isMSVCPersonality() || isWasmPersonality();
  }

  bool isMSVCPersonality() const {
    return this == &MSVC_except_handler || this == &MSVC_C_specific_handler ||
           this == &MSVC_CxxFrameHandler3;
  }

  bool isWasmPersonality() const { return this == &GNU_Wasm_CPlusPlus; }

  bool isMSVCXXPersonality() const { return this == &MSVC_CxxFrameHandler3; }
};
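
// A typical query sequence (a sketch only; see the EH codegen sources for the
// real uses):
//
//   const EHPersonality &Personality = EHPersonality::get(CGF);
//   if (Personality.usesFuncletPads()) {
//     // MSVC and Wasm EH lower to cleanuppad/catchpad funclets rather than
//     // landingpad instructions.
//   }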
} // namespace CodeGen
} // namespace clang

#endif