//===--- CGExprAgg.cpp - Emit LLVM Code from Aggregate Expressions --------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code to emit Aggregate Expr nodes as LLVM code.
//
//===----------------------------------------------------------------------===//

#include "CGCXXABI.h"
#include "CGObjCRuntime.h"
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "ConstantEmitter.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Attr.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/StmtVisitor.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
using namespace clang;
using namespace CodeGen;

//===----------------------------------------------------------------------===//
//                        Aggregate Expression Emitter
//===----------------------------------------------------------------------===//

namespace {
class AggExprEmitter : public StmtVisitor<AggExprEmitter> {
  CodeGenFunction &CGF;
  CGBuilderTy &Builder;
  AggValueSlot Dest;
  bool IsResultUnused;

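  // Helpers for getting somewhere to evaluate into: EnsureSlot hands back a
  // usable slot (Dest itself, or a fresh temporary) without modifying Dest,
  // while EnsureDest rewrites an ignored Dest in place so that later code can
  // emit directly through Dest.getAddress().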
  AggValueSlot EnsureSlot(QualType T) {
    if (!Dest.isIgnored()) return Dest;
    return CGF.CreateAggTemp(T, "agg.tmp.ensured");
  }
  void EnsureDest(QualType T) {
    if (!Dest.isIgnored()) return;
    Dest = CGF.CreateAggTemp(T, "agg.tmp.ensured");
  }

  // Calls `Fn` with a valid return value slot, potentially creating a temporary
  // to do so. If a temporary is created, an appropriate copy into `Dest` will
  // be emitted, as will lifetime markers.
  //
  // The given function should take a ReturnValueSlot, and return an RValue that
  // points to said slot.
  void withReturnValueSlot(const Expr *E,
                           llvm::function_ref<RValue(ReturnValueSlot)> Fn);

public:
  AggExprEmitter(CodeGenFunction &cgf, AggValueSlot Dest, bool IsResultUnused)
    : CGF(cgf), Builder(CGF.Builder), Dest(Dest),
    IsResultUnused(IsResultUnused) { }

  //===--------------------------------------------------------------------===//
  //                               Utilities
  //===--------------------------------------------------------------------===//

  /// EmitAggLoadOfLValue - Given an expression with aggregate type that
  /// represents a value lvalue, this method emits the address of the lvalue,
  /// then loads the result into DestPtr.
  void EmitAggLoadOfLValue(const Expr *E);

  enum ExprValueKind {
    EVK_RValue,
    EVK_NonRValue
  };

  /// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
  /// SrcValueKind indicates whether the source comes from an RValue.
  void EmitFinalDestCopy(QualType type, const LValue &src,
                         ExprValueKind SrcValueKind = EVK_NonRValue);
  void EmitFinalDestCopy(QualType type, RValue src);
  void EmitCopy(QualType type, const AggValueSlot &dest,
                const AggValueSlot &src);

  void EmitMoveFromReturnSlot(const Expr *E, RValue Src);

  void EmitArrayInit(Address DestPtr, llvm::ArrayType *AType,
                     QualType ArrayQTy, InitListExpr *E);

  AggValueSlot::NeedsGCBarriers_t needsGC(QualType T) {
    if (CGF.getLangOpts().getGC() && TypeRequiresGCollection(T))
      return AggValueSlot::NeedsGCBarriers;
    return AggValueSlot::DoesNotNeedGCBarriers;
  }

  bool TypeRequiresGCollection(QualType T);

  //===--------------------------------------------------------------------===//
  //                            Visitor Methods
  //===--------------------------------------------------------------------===//

  void Visit(Expr *E) {
    ApplyDebugLocation DL(CGF, E);
    StmtVisitor<AggExprEmitter>::Visit(E);
  }

  void VisitStmt(Stmt *S) {
    CGF.ErrorUnsupported(S, "aggregate expression");
  }
  void VisitParenExpr(ParenExpr *PE) { Visit(PE->getSubExpr()); }
  void VisitGenericSelectionExpr(GenericSelectionExpr *GE) {
    Visit(GE->getResultExpr());
  }
  void VisitCoawaitExpr(CoawaitExpr *E) {
    CGF.EmitCoawaitExpr(*E, Dest, IsResultUnused);
  }
  void VisitCoyieldExpr(CoyieldExpr *E) {
    CGF.EmitCoyieldExpr(*E, Dest, IsResultUnused);
  }
  void VisitUnaryCoawait(UnaryOperator *E) { Visit(E->getSubExpr()); }
  void VisitUnaryExtension(UnaryOperator *E) { Visit(E->getSubExpr()); }
  void VisitSubstNonTypeTemplateParmExpr(SubstNonTypeTemplateParmExpr *E) {
    return Visit(E->getReplacement());
  }

  void VisitConstantExpr(ConstantExpr *E) {
    return Visit(E->getSubExpr());
  }

  // l-values.
  void VisitDeclRefExpr(DeclRefExpr *E) { EmitAggLoadOfLValue(E); }
  void VisitMemberExpr(MemberExpr *ME) { EmitAggLoadOfLValue(ME); }
  void VisitUnaryDeref(UnaryOperator *E) { EmitAggLoadOfLValue(E); }
  void VisitStringLiteral(StringLiteral *E) { EmitAggLoadOfLValue(E); }
  void VisitCompoundLiteralExpr(CompoundLiteralExpr *E);
  void VisitArraySubscriptExpr(ArraySubscriptExpr *E) {
    EmitAggLoadOfLValue(E);
  }
  void VisitPredefinedExpr(const PredefinedExpr *E) {
    EmitAggLoadOfLValue(E);
  }

  // Operators.
  void VisitCastExpr(CastExpr *E);
  void VisitCallExpr(const CallExpr *E);
  void VisitStmtExpr(const StmtExpr *E);
  void VisitBinaryOperator(const BinaryOperator *BO);
  void VisitPointerToDataMemberBinaryOperator(const BinaryOperator *BO);
  void VisitBinAssign(const BinaryOperator *E);
  void VisitBinComma(const BinaryOperator *E);
  void VisitBinCmp(const BinaryOperator *E);
  void VisitCXXRewrittenBinaryOperator(CXXRewrittenBinaryOperator *E) {
    Visit(E->getSemanticForm());
  }

  void VisitObjCMessageExpr(ObjCMessageExpr *E);
  void VisitObjCIvarRefExpr(ObjCIvarRefExpr *E) {
    EmitAggLoadOfLValue(E);
  }

  void VisitDesignatedInitUpdateExpr(DesignatedInitUpdateExpr *E);
  void VisitAbstractConditionalOperator(const AbstractConditionalOperator *CO);
  void VisitChooseExpr(const ChooseExpr *CE);
  void VisitInitListExpr(InitListExpr *E);
  void VisitArrayInitLoopExpr(const ArrayInitLoopExpr *E,
                              llvm::Value *outerBegin = nullptr);
  void VisitImplicitValueInitExpr(ImplicitValueInitExpr *E);
  void VisitNoInitExpr(NoInitExpr *E) { } // Do nothing.
  void VisitCXXDefaultArgExpr(CXXDefaultArgExpr *DAE) {
    CodeGenFunction::CXXDefaultArgExprScope Scope(CGF, DAE);
    Visit(DAE->getExpr());
  }
  void VisitCXXDefaultInitExpr(CXXDefaultInitExpr *DIE) {
    CodeGenFunction::CXXDefaultInitExprScope Scope(CGF, DIE);
    Visit(DIE->getExpr());
  }
  void VisitCXXBindTemporaryExpr(CXXBindTemporaryExpr *E);
  void VisitCXXConstructExpr(const CXXConstructExpr *E);
  void VisitCXXInheritedCtorInitExpr(const CXXInheritedCtorInitExpr *E);
  void VisitLambdaExpr(LambdaExpr *E);
  void VisitCXXStdInitializerListExpr(CXXStdInitializerListExpr *E);
  void VisitExprWithCleanups(ExprWithCleanups *E);
  void VisitCXXScalarValueInitExpr(CXXScalarValueInitExpr *E);
  void VisitCXXTypeidExpr(CXXTypeidExpr *E) { EmitAggLoadOfLValue(E); }
  void VisitMaterializeTemporaryExpr(MaterializeTemporaryExpr *E);
  void VisitOpaqueValueExpr(OpaqueValueExpr *E);

  void VisitPseudoObjectExpr(PseudoObjectExpr *E) {
    if (E->isGLValue()) {
      LValue LV = CGF.EmitPseudoObjectLValue(E);
      return EmitFinalDestCopy(E->getType(), LV);
    }

    CGF.EmitPseudoObjectRValue(E, EnsureSlot(E->getType()));
  }

  void VisitVAArgExpr(VAArgExpr *E);

  void EmitInitializationToLValue(Expr *E, LValue Address);
  void EmitNullInitializationToLValue(LValue Address);
  //  case Expr::ChooseExprClass:
  void VisitCXXThrowExpr(const CXXThrowExpr *E) { CGF.EmitCXXThrowExpr(E); }
  void VisitAtomicExpr(AtomicExpr *E) {
    RValue Res = CGF.EmitAtomicExpr(E);
    EmitFinalDestCopy(E->getType(), Res);
  }
};
}  // end anonymous namespace.

//===----------------------------------------------------------------------===//
//                                Utilities
//===----------------------------------------------------------------------===//

/// EmitAggLoadOfLValue - Given an expression with aggregate type that
/// represents a value lvalue, this method emits the address of the lvalue,
/// then loads the result into DestPtr.
void AggExprEmitter::EmitAggLoadOfLValue(const Expr *E) {
  LValue LV = CGF.EmitLValue(E);

  // If the type of the l-value is atomic, then do an atomic load.
  if (LV.getType()->isAtomicType() || CGF.LValueIsSuitableForInlineAtomic(LV)) {
    CGF.EmitAtomicLoad(LV, E->getExprLoc(), Dest);
    return;
  }

  EmitFinalDestCopy(E->getType(), LV);
}
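
// For illustration, in C, passing a struct by value, e.g.
//   struct S { int a[4]; } g;
//   void h(struct S);
//   void use(void) { h(g); }
// reaches this path via VisitDeclRefExpr: the address of `g` is emitted and
// its contents are copied into the argument's aggregate slot.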

/// True if the given aggregate type requires special GC API calls.
bool AggExprEmitter::TypeRequiresGCollection(QualType T) {
  // Only record types have members that might require garbage collection.
  const RecordType *RecordTy = T->getAs<RecordType>();
  if (!RecordTy) return false;

  // Don't mess with non-trivial C++ types.
  RecordDecl *Record = RecordTy->getDecl();
  if (isa<CXXRecordDecl>(Record) &&
      (cast<CXXRecordDecl>(Record)->hasNonTrivialCopyConstructor() ||
       !cast<CXXRecordDecl>(Record)->hasTrivialDestructor()))
    return false;

  // Check whether the type has an object member.
  return Record->hasObjectMember();
}
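
// This only matters under Objective-C garbage collection (-fobjc-gc):
// hasObjectMember() indicates the record contains an Objective-C object
// pointer, so copies must go through the collector-aware runtime routine
// (EmitGCMemmoveCollectable below) rather than a plain memcpy.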

void AggExprEmitter::withReturnValueSlot(
    const Expr *E, llvm::function_ref<RValue(ReturnValueSlot)> EmitCall) {
  QualType RetTy = E->getType();
  bool RequiresDestruction =
      Dest.isIgnored() &&
      RetTy.isDestructedType() == QualType::DK_nontrivial_c_struct;

  // If it makes no observable difference, save a memcpy + temporary.
  //
  // We need to always provide our own temporary if destruction is required.
  // Otherwise, EmitCall will emit its own, notice that it's "unused", and end
  // its lifetime before we have the chance to emit a proper destructor call.
  bool UseTemp = Dest.isPotentiallyAliased() || Dest.requiresGCollection() ||
                 (RequiresDestruction && !Dest.getAddress().isValid());

  Address RetAddr = Address::invalid();
  Address RetAllocaAddr = Address::invalid();

  EHScopeStack::stable_iterator LifetimeEndBlock;
  llvm::Value *LifetimeSizePtr = nullptr;
  llvm::IntrinsicInst *LifetimeStartInst = nullptr;
  if (!UseTemp) {
    RetAddr = Dest.getAddress();
  } else {
    RetAddr = CGF.CreateMemTemp(RetTy, "tmp", &RetAllocaAddr);
    uint64_t Size =
        CGF.CGM.getDataLayout().getTypeAllocSize(CGF.ConvertTypeForMem(RetTy));
    LifetimeSizePtr = CGF.EmitLifetimeStart(Size, RetAllocaAddr.getPointer());
    if (LifetimeSizePtr) {
      LifetimeStartInst =
          cast<llvm::IntrinsicInst>(std::prev(Builder.GetInsertPoint()));
      assert(LifetimeStartInst->getIntrinsicID() ==
                 llvm::Intrinsic::lifetime_start &&
             "Last insertion wasn't a lifetime.start?");

      CGF.pushFullExprCleanup<CodeGenFunction::CallLifetimeEnd>(
          NormalEHLifetimeMarker, RetAllocaAddr, LifetimeSizePtr);
      LifetimeEndBlock = CGF.EHStack.stable_begin();
    }
  }

  RValue Src =
      EmitCall(ReturnValueSlot(RetAddr, Dest.isVolatile(), IsResultUnused));

  if (RequiresDestruction)
    CGF.pushDestroy(RetTy.isDestructedType(), Src.getAggregateAddress(), RetTy);

  if (!UseTemp)
    return;

  assert(Dest.getPointer() != Src.getAggregatePointer());
  EmitFinalDestCopy(E->getType(), Src);

  if (!RequiresDestruction && LifetimeStartInst) {
    // If there's no dtor to run, the copy was the last use of our temporary.
    // Since we're not guaranteed to be in an ExprWithCleanups, clean up
    // eagerly.
    CGF.DeactivateCleanupBlock(LifetimeEndBlock, LifetimeStartInst);
    CGF.EmitLifetimeEnd(LifetimeSizePtr, RetAllocaAddr.getPointer());
  }
}
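
// A plausible scenario for the temporary path above: in an assignment such as
// `s = make();`, the destination slot is marked potentially aliased (the
// callee might read or write `s`), so UseTemp is true: the call returns into a
// fresh temporary wrapped in lifetime markers, and the result is then copied
// into `s` by EmitFinalDestCopy.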

/// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
void AggExprEmitter::EmitFinalDestCopy(QualType type, RValue src) {
  assert(src.isAggregate() && "value must be aggregate value!");
  LValue srcLV = CGF.MakeAddrLValue(src.getAggregateAddress(), type);
  EmitFinalDestCopy(type, srcLV, EVK_RValue);
}

/// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
void AggExprEmitter::EmitFinalDestCopy(QualType type, const LValue &src,
                                       ExprValueKind SrcValueKind) {
  // If Dest is ignored, then we're evaluating an aggregate expression
  // in a context that doesn't care about the result.  Note that loads
  // from volatile l-values force the existence of a non-ignored
  // destination.
  if (Dest.isIgnored())
    return;

  // Copy non-trivial C structs here.
  LValue DstLV = CGF.MakeAddrLValue(
      Dest.getAddress(), Dest.isVolatile() ? type.withVolatile() : type);

  if (SrcValueKind == EVK_RValue) {
    if (type.isNonTrivialToPrimitiveDestructiveMove() == QualType::PCK_Struct) {
      if (Dest.isPotentiallyAliased())
        CGF.callCStructMoveAssignmentOperator(DstLV, src);
      else
        CGF.callCStructMoveConstructor(DstLV, src);
      return;
    }
  } else {
    if (type.isNonTrivialToPrimitiveCopy() == QualType::PCK_Struct) {
      if (Dest.isPotentiallyAliased())
        CGF.callCStructCopyAssignmentOperator(DstLV, src);
      else
        CGF.callCStructCopyConstructor(DstLV, src);
      return;
    }
  }

  AggValueSlot srcAgg = AggValueSlot::forLValue(
      src, CGF, AggValueSlot::IsDestructed, needsGC(type),
      AggValueSlot::IsAliased, AggValueSlot::MayOverlap);
  EmitCopy(type, Dest, srcAgg);
}

/// Perform a copy from the source into the destination.
///
/// \param type - the type of the aggregate being copied; qualifiers are
///   ignored
void AggExprEmitter::EmitCopy(QualType type, const AggValueSlot &dest,
                              const AggValueSlot &src) {
  if (dest.requiresGCollection()) {
    CharUnits sz = dest.getPreferredSize(CGF.getContext(), type);
    llvm::Value *size = llvm::ConstantInt::get(CGF.SizeTy, sz.getQuantity());
    CGF.CGM.getObjCRuntime().EmitGCMemmoveCollectable(CGF,
                                                      dest.getAddress(),
                                                      src.getAddress(),
                                                      size);
    return;
  }

  // If the result of the assignment is used, copy the LHS there also.
  // It's volatile if either side is.  Use the minimum alignment of
  // the two sides.
  LValue DestLV = CGF.MakeAddrLValue(dest.getAddress(), type);
  LValue SrcLV = CGF.MakeAddrLValue(src.getAddress(), type);
  CGF.EmitAggregateCopy(DestLV, SrcLV, type, dest.mayOverlap(),
                        dest.isVolatile() || src.isVolatile());
}

/// Emit the initializer for a std::initializer_list initialized with a
/// real initializer list.
void
AggExprEmitter::VisitCXXStdInitializerListExpr(CXXStdInitializerListExpr *E) {
  // Emit an array containing the elements.  The array is externally destructed
  // if the std::initializer_list object is.
  ASTContext &Ctx = CGF.getContext();
  LValue Array = CGF.EmitLValue(E->getSubExpr());
  assert(Array.isSimple() && "initializer_list array not a simple lvalue");
  Address ArrayPtr = Array.getAddress(CGF);

  const ConstantArrayType *ArrayType =
      Ctx.getAsConstantArrayType(E->getSubExpr()->getType());
  assert(ArrayType && "std::initializer_list constructed from non-array");

  // FIXME: Perform the checks on the field types in SemaInit.
  RecordDecl *Record = E->getType()->castAs<RecordType>()->getDecl();
  RecordDecl::field_iterator Field = Record->field_begin();
  if (Field == Record->field_end()) {
    CGF.ErrorUnsupported(E, "weird std::initializer_list");
    return;
  }

  // Start pointer.
  if (!Field->getType()->isPointerType() ||
      !Ctx.hasSameType(Field->getType()->getPointeeType(),
                       ArrayType->getElementType())) {
    CGF.ErrorUnsupported(E, "weird std::initializer_list");
    return;
  }

  AggValueSlot Dest = EnsureSlot(E->getType());
  LValue DestLV = CGF.MakeAddrLValue(Dest.getAddress(), E->getType());
  LValue Start = CGF.EmitLValueForFieldInitialization(DestLV, *Field);
  llvm::Value *Zero = llvm::ConstantInt::get(CGF.PtrDiffTy, 0);
  llvm::Value *IdxStart[] = { Zero, Zero };
  llvm::Value *ArrayStart =
      Builder.CreateInBoundsGEP(ArrayPtr.getPointer(), IdxStart, "arraystart");
  CGF.EmitStoreThroughLValue(RValue::get(ArrayStart), Start);
  ++Field;

  if (Field == Record->field_end()) {
    CGF.ErrorUnsupported(E, "weird std::initializer_list");
    return;
  }

  llvm::Value *Size = Builder.getInt(ArrayType->getSize());
  LValue EndOrLength = CGF.EmitLValueForFieldInitialization(DestLV, *Field);
  if (Field->getType()->isPointerType() &&
      Ctx.hasSameType(Field->getType()->getPointeeType(),
                      ArrayType->getElementType())) {
    // End pointer.
    llvm::Value *IdxEnd[] = { Zero, Size };
    llvm::Value *ArrayEnd =
        Builder.CreateInBoundsGEP(ArrayPtr.getPointer(), IdxEnd, "arrayend");
    CGF.EmitStoreThroughLValue(RValue::get(ArrayEnd), EndOrLength);
  } else if (Ctx.hasSameType(Field->getType(), Ctx.getSizeType())) {
    // Length.
    CGF.EmitStoreThroughLValue(RValue::get(Size), EndOrLength);
  } else {
    CGF.ErrorUnsupported(E, "weird std::initializer_list");
    return;
  }
}
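
// The code above supports the two common layouts of std::initializer_list<T>:
// {const T *begin; const T *end;} and {const T *begin; size_t length;}.
// For example,
//   std::initializer_list<int> il = {1, 2, 3};
// materializes a temporary `const int[3]`, stores its start address into the
// first field, and stores either the end pointer or the length 3 into the
// second field, depending on which layout the library uses.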

/// Determine if E is a trivial array filler, that is, one that is
/// equivalent to zero-initialization.
static bool isTrivialFiller(Expr *E) {
  if (!E)
    return true;

  if (isa<ImplicitValueInitExpr>(E))
    return true;

  if (auto *ILE = dyn_cast<InitListExpr>(E)) {
    if (ILE->getNumInits())
      return false;
    return isTrivialFiller(ILE->getArrayFiller());
  }

  if (auto *Cons = dyn_cast_or_null<CXXConstructExpr>(E))
    return Cons->getConstructor()->isDefaultConstructor() &&
           Cons->getConstructor()->isTrivial();

  // FIXME: Are there other cases where we can avoid emitting an initializer?
  return false;
}
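
// For example, in
//   int a[8] = {1, 2, 3};
// the remaining five elements carry an implicit value-initialization filler,
// which isTrivialFiller treats as equivalent to zero-initialization, so no
// explicit store loop is needed when the destination is already zeroed.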

/// Emit initialization of an array from an initializer list.
void AggExprEmitter::EmitArrayInit(Address DestPtr, llvm::ArrayType *AType,
                                   QualType ArrayQTy, InitListExpr *E) {
  uint64_t NumInitElements = E->getNumInits();

  uint64_t NumArrayElements = AType->getNumElements();
  assert(NumInitElements <= NumArrayElements);

  QualType elementType =
      CGF.getContext().getAsArrayType(ArrayQTy)->getElementType();

  // DestPtr is an array*.  Construct an elementType* by drilling
  // down a level.
  llvm::Value *zero = llvm::ConstantInt::get(CGF.SizeTy, 0);
  llvm::Value *indices[] = { zero, zero };
  llvm::Value *begin =
    Builder.CreateInBoundsGEP(DestPtr.getPointer(), indices, "arrayinit.begin");

  CharUnits elementSize = CGF.getContext().getTypeSizeInChars(elementType);
  CharUnits elementAlign =
    DestPtr.getAlignment().alignmentOfArrayElement(elementSize);

  // Consider initializing the array by copying from a global. For this to be
  // more efficient than per-element initialization, the size of the elements
  // with explicit initializers should be large enough.
  if (NumInitElements * elementSize.getQuantity() > 16 &&
      elementType.isTriviallyCopyableType(CGF.getContext())) {
    CodeGen::CodeGenModule &CGM = CGF.CGM;
    ConstantEmitter Emitter(CGF);
    LangAS AS = ArrayQTy.getAddressSpace();
    if (llvm::Constant *C = Emitter.tryEmitForInitializer(E, AS, ArrayQTy)) {
      auto GV = new llvm::GlobalVariable(
          CGM.getModule(), C->getType(),
          CGM.isTypeConstant(ArrayQTy, /* ExcludeCtorDtor= */ true),
          llvm::GlobalValue::PrivateLinkage, C, "constinit",
          /* InsertBefore= */ nullptr, llvm::GlobalVariable::NotThreadLocal,
          CGM.getContext().getTargetAddressSpace(AS));
      Emitter.finalize(GV);
      CharUnits Align = CGM.getContext().getTypeAlignInChars(ArrayQTy);
      GV->setAlignment(Align.getAsAlign());
      EmitFinalDestCopy(ArrayQTy, CGF.MakeAddrLValue(GV, ArrayQTy, Align));
      return;
    }
  }

  // Exception safety requires us to destroy all the
  // already-constructed members if an initializer throws.
  // For that, we'll need an EH cleanup.
  QualType::DestructionKind dtorKind = elementType.isDestructedType();
  Address endOfInit = Address::invalid();
  EHScopeStack::stable_iterator cleanup;
  llvm::Instruction *cleanupDominator = nullptr;
  if (CGF.needsEHCleanup(dtorKind)) {
    // In principle we could tell the cleanup where we are more
    // directly, but the control flow can get so varied here that it
    // would actually be quite complex.  Therefore we go through an
    // alloca.
    endOfInit = CGF.CreateTempAlloca(begin->getType(), CGF.getPointerAlign(),
                                     "arrayinit.endOfInit");
    cleanupDominator = Builder.CreateStore(begin, endOfInit);
    CGF.pushIrregularPartialArrayCleanup(begin, endOfInit, elementType,
                                         elementAlign,
                                         CGF.getDestroyer(dtorKind));
    cleanup = CGF.EHStack.stable_begin();

  // Otherwise, remember that we didn't need a cleanup.
  } else {
    dtorKind = QualType::DK_none;
  }

  llvm::Value *one = llvm::ConstantInt::get(CGF.SizeTy, 1);

  // The 'current element to initialize'.  The invariants on this
  // variable are complicated.  Essentially, after each iteration of
  // the loop, it points to the last initialized element, except
  // that it points to the beginning of the array before any
  // elements have been initialized.
  llvm::Value *element = begin;

  // Emit the explicit initializers.
  for (uint64_t i = 0; i != NumInitElements; ++i) {
    // Advance to the next element.
    if (i > 0) {
      element = Builder.CreateInBoundsGEP(element, one, "arrayinit.element");

      // Tell the cleanup that it needs to destroy up to this
      // element.  TODO: some of these stores can be trivially
      // observed to be unnecessary.
      if (endOfInit.isValid()) Builder.CreateStore(element, endOfInit);
    }

    LValue elementLV =
      CGF.MakeAddrLValue(Address(element, elementAlign), elementType);
    EmitInitializationToLValue(E->getInit(i), elementLV);
  }

  // Check whether there's a non-trivial array-fill expression.
  Expr *filler = E->getArrayFiller();
  bool hasTrivialFiller = isTrivialFiller(filler);

  // Any remaining elements need to be zero-initialized, possibly
  // using the filler expression.  We can skip this if we're
  // emitting to zeroed memory.
  if (NumInitElements != NumArrayElements &&
      !(Dest.isZeroed() && hasTrivialFiller &&
        CGF.getTypes().isZeroInitializable(elementType))) {

    // Use an actual loop.  This is basically
    //   do { *array++ = filler; } while (array != end);

    // Advance to the start of the rest of the array.
    if (NumInitElements) {
      element = Builder.CreateInBoundsGEP(element, one, "arrayinit.start");
      if (endOfInit.isValid()) Builder.CreateStore(element, endOfInit);
    }

    // Compute the end of the array.
    llvm::Value *end = Builder.CreateInBoundsGEP(begin,
                      llvm::ConstantInt::get(CGF.SizeTy, NumArrayElements),
                                                 "arrayinit.end");

    llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
    llvm::BasicBlock *bodyBB = CGF.createBasicBlock("arrayinit.body");

    // Jump into the body.
    CGF.EmitBlock(bodyBB);
    llvm::PHINode *currentElement =
      Builder.CreatePHI(element->getType(), 2, "arrayinit.cur");
    currentElement->addIncoming(element, entryBB);

    // Emit the actual filler expression.
    {
      // C++1z [class.temporary]p5:
      //   when a default constructor is called to initialize an element of
      //   an array with no corresponding initializer [...] the destruction of
      //   every temporary created in a default argument is sequenced before
      //   the construction of the next array element, if any
      CodeGenFunction::RunCleanupsScope CleanupsScope(CGF);
      LValue elementLV =
        CGF.MakeAddrLValue(Address(currentElement, elementAlign), elementType);
      if (filler)
        EmitInitializationToLValue(filler, elementLV);
      else
        EmitNullInitializationToLValue(elementLV);
    }

    // Move on to the next element.
    llvm::Value *nextElement =
      Builder.CreateInBoundsGEP(currentElement, one, "arrayinit.next");

    // Tell the EH cleanup that we finished with the last element.
    if (endOfInit.isValid()) Builder.CreateStore(nextElement, endOfInit);

    // Leave the loop if we're done.
    llvm::Value *done = Builder.CreateICmpEQ(nextElement, end,
                                             "arrayinit.done");
    llvm::BasicBlock *endBB = CGF.createBasicBlock("arrayinit.end");
    Builder.CreateCondBr(done, endBB, bodyBB);
    currentElement->addIncoming(nextElement, Builder.GetInsertBlock());

    CGF.EmitBlock(endBB);
  }

  // Leave the partial-array cleanup if we entered one.
  if (dtorKind) CGF.DeactivateCleanupBlock(cleanup, cleanupDominator);
}
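
// For example, with a non-trivial default constructor,
//   struct S { S(); S(int); ~S(); };
//   S a[8] = { S(1), S(2) };
// the first two elements are emitted individually above and the remaining six
// are default-constructed in the "arrayinit.body" loop, with the partial-array
// EH cleanup covering whatever has been constructed so far if one of the
// constructors throws.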

//===----------------------------------------------------------------------===//
//                            Visitor Methods
//===----------------------------------------------------------------------===//

void AggExprEmitter::VisitMaterializeTemporaryExpr(MaterializeTemporaryExpr *E){
  Visit(E->getSubExpr());
}

void AggExprEmitter::VisitOpaqueValueExpr(OpaqueValueExpr *e) {
  // If this is a unique OVE, just visit its source expression.
  if (e->isUnique())
    Visit(e->getSourceExpr());
  else
    EmitFinalDestCopy(e->getType(), CGF.getOrCreateOpaqueLValueMapping(e));
}

void
AggExprEmitter::VisitCompoundLiteralExpr(CompoundLiteralExpr *E) {
  if (Dest.isPotentiallyAliased() &&
      E->getType().isPODType(CGF.getContext())) {
    // For a POD type, just emit a load of the lvalue + a copy, because our
    // compound literal might alias the destination.
    EmitAggLoadOfLValue(E);
    return;
  }

  AggValueSlot Slot = EnsureSlot(E->getType());
  CGF.EmitAggExpr(E->getInitializer(), Slot);
}
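
// For illustration, in C code such as
//   struct S { int x, y; } s;
//   void f(void) { s = (struct S){ s.y, s.x }; }
// the compound literal reads the very object it is being assigned to, so the
// POD path above evaluates the literal in its own storage first and then
// copies, instead of writing directly into `s`.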

/// Attempt to look through various unimportant expressions to find a
/// cast of the given kind.
static Expr *findPeephole(Expr *op, CastKind kind) {
  while (true) {
    op = op->IgnoreParens();
    if (CastExpr *castE = dyn_cast<CastExpr>(op)) {
      if (castE->getCastKind() == kind)
        return castE->getSubExpr();
      if (castE->getCastKind() == CK_NoOp) {
        op = castE->getSubExpr();
        continue;
      }
    }
    return nullptr;
  }
}
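
// findPeephole is used below to cancel adjacent atomic conversions: a pattern
// like (non-atomic)(atomic)x, looking through parentheses and no-op casts, can
// be emitted as just `x` when the two conversions are exact inverses.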

void AggExprEmitter::VisitCastExpr(CastExpr *E) {
  if (const auto *ECE = dyn_cast<ExplicitCastExpr>(E))
    CGF.CGM.EmitExplicitCastExprType(ECE, &CGF);
  switch (E->getCastKind()) {
  case CK_Dynamic: {
    // FIXME: Can this actually happen? We have no test coverage for it.
    assert(isa<CXXDynamicCastExpr>(E) && "CK_Dynamic without a dynamic_cast?");
    LValue LV = CGF.EmitCheckedLValue(E->getSubExpr(),
                                      CodeGenFunction::TCK_Load);
    // FIXME: Do we also need to handle property references here?
    if (LV.isSimple())
      CGF.EmitDynamicCast(LV.getAddress(CGF), cast<CXXDynamicCastExpr>(E));
    else
      CGF.CGM.ErrorUnsupported(E, "non-simple lvalue dynamic_cast");

    if (!Dest.isIgnored())
      CGF.CGM.ErrorUnsupported(E, "lvalue dynamic_cast with a destination");
    break;
  }

  case CK_ToUnion: {
    // Evaluate even if the destination is ignored.
    if (Dest.isIgnored()) {
      CGF.EmitAnyExpr(E->getSubExpr(), AggValueSlot::ignored(),
                      /*ignoreResult=*/true);
      break;
    }

    // GCC union extension
    QualType Ty = E->getSubExpr()->getType();
    Address CastPtr =
      Builder.CreateElementBitCast(Dest.getAddress(), CGF.ConvertType(Ty));
    EmitInitializationToLValue(E->getSubExpr(),
                               CGF.MakeAddrLValue(CastPtr, Ty));
    break;
  }
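
  // CK_ToUnion corresponds to GCC's cast-to-union extension, e.g.
  //   union U { int i; float f; };
  //   union U u = (union U)42;
  // The operand is emitted directly on top of the matching union member.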

  case CK_LValueToRValueBitCast: {
    if (Dest.isIgnored()) {
      CGF.EmitAnyExpr(E->getSubExpr(), AggValueSlot::ignored(),
                      /*ignoreResult=*/true);
      break;
    }

    LValue SourceLV = CGF.EmitLValue(E->getSubExpr());
    Address SourceAddress =
        Builder.CreateElementBitCast(SourceLV.getAddress(CGF), CGF.Int8Ty);
    Address DestAddress =
        Builder.CreateElementBitCast(Dest.getAddress(), CGF.Int8Ty);
    llvm::Value *SizeVal = llvm::ConstantInt::get(
        CGF.SizeTy,
        CGF.getContext().getTypeSizeInChars(E->getType()).getQuantity());
    Builder.CreateMemCpy(DestAddress, SourceAddress, SizeVal);
    break;
  }
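
  // CK_LValueToRValueBitCast is produced for __builtin_bit_cast (and hence
  // std::bit_cast), e.g. `__builtin_bit_cast(To, from)` where To and the type
  // of `from` have the same size; it is lowered here as a byte-wise memcpy of
  // the object representation.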

  case CK_DerivedToBase:
  case CK_BaseToDerived:
  case CK_UncheckedDerivedToBase: {
    llvm_unreachable("cannot perform hierarchy conversion in EmitAggExpr: "
                "should have been unpacked before we got here");
  }

  case CK_NonAtomicToAtomic:
  case CK_AtomicToNonAtomic: {
    bool isToAtomic = (E->getCastKind() == CK_NonAtomicToAtomic);

    // Determine the atomic and value types.
    QualType atomicType = E->getSubExpr()->getType();
    QualType valueType = E->getType();
    if (isToAtomic) std::swap(atomicType, valueType);

    assert(atomicType->isAtomicType());
    assert(CGF.getContext().hasSameUnqualifiedType(valueType,
                          atomicType->castAs<AtomicType>()->getValueType()));

    // Just recurse normally if we're ignoring the result or the
    // atomic type doesn't change representation.
    if (Dest.isIgnored() || !CGF.CGM.isPaddedAtomicType(atomicType)) {
      return Visit(E->getSubExpr());
    }

    CastKind peepholeTarget =
      (isToAtomic ? CK_AtomicToNonAtomic : CK_NonAtomicToAtomic);

    // These two cases are reverses of each other; try to peephole them.
    if (Expr *op = findPeephole(E->getSubExpr(), peepholeTarget)) {
      assert(CGF.getContext().hasSameUnqualifiedType(op->getType(),
                                                     E->getType()) &&
           "peephole significantly changed types?");
      return Visit(op);
    }

    // If we're converting an r-value of non-atomic type to an r-value
    // of atomic type, just emit directly into the relevant sub-object.
    if (isToAtomic) {
      AggValueSlot valueDest = Dest;
      if (!valueDest.isIgnored() && CGF.CGM.isPaddedAtomicType(atomicType)) {
        // Zero-initialize.  (Strictly speaking, we only need to initialize
        // the padding at the end, but this is simpler.)
        if (!Dest.isZeroed())
          CGF.EmitNullInitialization(Dest.getAddress(), atomicType);

        // Build a GEP to refer to the subobject.
        Address valueAddr =
            CGF.Builder.CreateStructGEP(valueDest.getAddress(), 0);
        valueDest = AggValueSlot::forAddr(valueAddr,
                                          valueDest.getQualifiers(),
                                          valueDest.isExternallyDestructed(),
                                          valueDest.requiresGCollection(),
                                          valueDest.isPotentiallyAliased(),
                                          AggValueSlot::DoesNotOverlap,
                                          AggValueSlot::IsZeroed);
      }

      CGF.EmitAggExpr(E->getSubExpr(), valueDest);
      return;
    }

    // Otherwise, we're converting an atomic type to a non-atomic type.
    // Make an atomic temporary, emit into that, and then copy the value out.
    AggValueSlot atomicSlot =
      CGF.CreateAggTemp(atomicType, "atomic-to-nonatomic.temp");
    CGF.EmitAggExpr(E->getSubExpr(), atomicSlot);

    Address valueAddr = Builder.CreateStructGEP(atomicSlot.getAddress(), 0);
    RValue rvalue = RValue::getAggregate(valueAddr, atomicSlot.isVolatile());
    return EmitFinalDestCopy(valueType, rvalue);
  }
  case CK_AddressSpaceConversion:
     return Visit(E->getSubExpr());

  case CK_LValueToRValue:
    // If we're loading from a volatile type, force the destination
    // into existence.
    if (E->getSubExpr()->getType().isVolatileQualified()) {
      EnsureDest(E->getType());
      return Visit(E->getSubExpr());
    }

    LLVM_FALLTHROUGH;

  case CK_NoOp:
  case CK_UserDefinedConversion:
  case CK_ConstructorConversion:
    assert(CGF.getContext().hasSameUnqualifiedType(E->getSubExpr()->getType(),
                                                   E->getType()) &&
           "Implicit cast types must be compatible");
    Visit(E->getSubExpr());
    break;

  case CK_LValueBitCast:
    llvm_unreachable("should not be emitting lvalue bitcast as rvalue");

  case CK_Dependent:
  case CK_BitCast:
  case CK_ArrayToPointerDecay:
  case CK_FunctionToPointerDecay:
  case CK_NullToPointer:
  case CK_NullToMemberPointer:
  case CK_BaseToDerivedMemberPointer:
  case CK_DerivedToBaseMemberPointer:
  case CK_MemberPointerToBoolean:
  case CK_ReinterpretMemberPointer:
  case CK_IntegralToPointer:
  case CK_PointerToIntegral:
  case CK_PointerToBoolean:
  case CK_ToVoid:
  case CK_VectorSplat:
  case CK_IntegralCast:
  case CK_BooleanToSignedIntegral:
  case CK_IntegralToBoolean:
  case CK_IntegralToFloating:
  case CK_FloatingToIntegral:
  case CK_FloatingToBoolean:
  case CK_FloatingCast:
  case CK_CPointerToObjCPointerCast:
  case CK_BlockPointerToObjCPointerCast:
  case CK_AnyPointerToBlockPointerCast:
  case CK_ObjCObjectLValueCast:
  case CK_FloatingRealToComplex:
  case CK_FloatingComplexToReal:
  case CK_FloatingComplexToBoolean:
  case CK_FloatingComplexCast:
  case CK_FloatingComplexToIntegralComplex:
  case CK_IntegralRealToComplex:
  case CK_IntegralComplexToReal:
  case CK_IntegralComplexToBoolean:
  case CK_IntegralComplexCast:
  case CK_IntegralComplexToFloatingComplex:
  case CK_ARCProduceObject:
  case CK_ARCConsumeObject:
  case CK_ARCReclaimReturnedObject:
  case CK_ARCExtendBlockObject:
  case CK_CopyAndAutoreleaseBlockObject:
  case CK_BuiltinFnToFnPtr:
  case CK_ZeroToOCLOpaqueType:
  case CK_IntToOCLSampler:
  case CK_FixedPointCast:
  case CK_FixedPointToBoolean:
  case CK_FixedPointToIntegral:
  case CK_IntegralToFixedPoint:
    llvm_unreachable("cast kind invalid for aggregate types");
  }
}

void AggExprEmitter::VisitCallExpr(const CallExpr *E) {
  if (E->getCallReturnType(CGF.getContext())->isReferenceType()) {
    EmitAggLoadOfLValue(E);
    return;
  }

  withReturnValueSlot(E, [&](ReturnValueSlot Slot) {
    return CGF.EmitCallExpr(E, Slot);
  });
}

void AggExprEmitter::VisitObjCMessageExpr(ObjCMessageExpr *E) {
  withReturnValueSlot(E, [&](ReturnValueSlot Slot) {
    return CGF.EmitObjCMessageExpr(E, Slot);
  });
}

void AggExprEmitter::VisitBinComma(const BinaryOperator *E) {
  CGF.EmitIgnoredExpr(E->getLHS());
  Visit(E->getRHS());
}

void AggExprEmitter::VisitStmtExpr(const StmtExpr *E) {
  CodeGenFunction::StmtExprEvaluation eval(CGF);
  CGF.EmitCompoundStmt(*E->getSubStmt(), true, Dest);
}
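
// GNU statement expressions can also yield an aggregate; for example
//   struct S { int x; };
//   struct S s = ({ struct S tmp = {1}; tmp; });
// is emitted by evaluating the compound statement with Dest used as the slot
// for the final expression `tmp`.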
914*e5dd7070Spatrick 
915*e5dd7070Spatrick enum CompareKind {
916*e5dd7070Spatrick   CK_Less,
917*e5dd7070Spatrick   CK_Greater,
918*e5dd7070Spatrick   CK_Equal,
919*e5dd7070Spatrick };
920*e5dd7070Spatrick 
921*e5dd7070Spatrick static llvm::Value *EmitCompare(CGBuilderTy &Builder, CodeGenFunction &CGF,
922*e5dd7070Spatrick                                 const BinaryOperator *E, llvm::Value *LHS,
923*e5dd7070Spatrick                                 llvm::Value *RHS, CompareKind Kind,
924*e5dd7070Spatrick                                 const char *NameSuffix = "") {
925*e5dd7070Spatrick   QualType ArgTy = E->getLHS()->getType();
926*e5dd7070Spatrick   if (const ComplexType *CT = ArgTy->getAs<ComplexType>())
927*e5dd7070Spatrick     ArgTy = CT->getElementType();
928*e5dd7070Spatrick 
929*e5dd7070Spatrick   if (const auto *MPT = ArgTy->getAs<MemberPointerType>()) {
930*e5dd7070Spatrick     assert(Kind == CK_Equal &&
931*e5dd7070Spatrick            "member pointers may only be compared for equality");
932*e5dd7070Spatrick     return CGF.CGM.getCXXABI().EmitMemberPointerComparison(
933*e5dd7070Spatrick         CGF, LHS, RHS, MPT, /*IsInequality*/ false);
934*e5dd7070Spatrick   }
935*e5dd7070Spatrick 
936*e5dd7070Spatrick   // Compute the comparison instructions for the specified comparison kind.
937*e5dd7070Spatrick   struct CmpInstInfo {
938*e5dd7070Spatrick     const char *Name;
939*e5dd7070Spatrick     llvm::CmpInst::Predicate FCmp;
940*e5dd7070Spatrick     llvm::CmpInst::Predicate SCmp;
941*e5dd7070Spatrick     llvm::CmpInst::Predicate UCmp;
942*e5dd7070Spatrick   };
943*e5dd7070Spatrick   CmpInstInfo InstInfo = [&]() -> CmpInstInfo {
944*e5dd7070Spatrick     using FI = llvm::FCmpInst;
945*e5dd7070Spatrick     using II = llvm::ICmpInst;
946*e5dd7070Spatrick     switch (Kind) {
947*e5dd7070Spatrick     case CK_Less:
948*e5dd7070Spatrick       return {"cmp.lt", FI::FCMP_OLT, II::ICMP_SLT, II::ICMP_ULT};
949*e5dd7070Spatrick     case CK_Greater:
950*e5dd7070Spatrick       return {"cmp.gt", FI::FCMP_OGT, II::ICMP_SGT, II::ICMP_UGT};
951*e5dd7070Spatrick     case CK_Equal:
952*e5dd7070Spatrick       return {"cmp.eq", FI::FCMP_OEQ, II::ICMP_EQ, II::ICMP_EQ};
953*e5dd7070Spatrick     }
954*e5dd7070Spatrick     llvm_unreachable("Unrecognised CompareKind enum");
955*e5dd7070Spatrick   }();
956*e5dd7070Spatrick 
957*e5dd7070Spatrick   if (ArgTy->hasFloatingRepresentation())
958*e5dd7070Spatrick     return Builder.CreateFCmp(InstInfo.FCmp, LHS, RHS,
959*e5dd7070Spatrick                               llvm::Twine(InstInfo.Name) + NameSuffix);
960*e5dd7070Spatrick   if (ArgTy->isIntegralOrEnumerationType() || ArgTy->isPointerType()) {
961*e5dd7070Spatrick     auto Inst =
962*e5dd7070Spatrick         ArgTy->hasSignedIntegerRepresentation() ? InstInfo.SCmp : InstInfo.UCmp;
963*e5dd7070Spatrick     return Builder.CreateICmp(Inst, LHS, RHS,
964*e5dd7070Spatrick                               llvm::Twine(InstInfo.Name) + NameSuffix);
965*e5dd7070Spatrick   }
966*e5dd7070Spatrick 
967*e5dd7070Spatrick   llvm_unreachable("unsupported aggregate binary expression should have "
968*e5dd7070Spatrick                    "already been handled");
969*e5dd7070Spatrick }
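// For illustration (restating the table above): with CK_Less, 'double'
// operands produce an 'fcmp olt' named "cmp.lt", signed integers an
// 'icmp slt', and unsigned integers or pointers an 'icmp ult'.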
970*e5dd7070Spatrick 
971*e5dd7070Spatrick void AggExprEmitter::VisitBinCmp(const BinaryOperator *E) {
972*e5dd7070Spatrick   using llvm::BasicBlock;
973*e5dd7070Spatrick   using llvm::PHINode;
974*e5dd7070Spatrick   using llvm::Value;
975*e5dd7070Spatrick   assert(CGF.getContext().hasSameType(E->getLHS()->getType(),
976*e5dd7070Spatrick                                       E->getRHS()->getType()));
977*e5dd7070Spatrick   const ComparisonCategoryInfo &CmpInfo =
978*e5dd7070Spatrick       CGF.getContext().CompCategories.getInfoForType(E->getType());
979*e5dd7070Spatrick   assert(CmpInfo.Record->isTriviallyCopyable() &&
980*e5dd7070Spatrick          "cannot copy non-trivially copyable aggregate");
981*e5dd7070Spatrick 
982*e5dd7070Spatrick   QualType ArgTy = E->getLHS()->getType();
983*e5dd7070Spatrick 
984*e5dd7070Spatrick   if (!ArgTy->isIntegralOrEnumerationType() && !ArgTy->isRealFloatingType() &&
985*e5dd7070Spatrick       !ArgTy->isNullPtrType() && !ArgTy->isPointerType() &&
986*e5dd7070Spatrick       !ArgTy->isMemberPointerType() && !ArgTy->isAnyComplexType()) {
987*e5dd7070Spatrick     return CGF.ErrorUnsupported(E, "aggregate three-way comparison");
988*e5dd7070Spatrick   }
989*e5dd7070Spatrick   bool IsComplex = ArgTy->isAnyComplexType();
990*e5dd7070Spatrick 
991*e5dd7070Spatrick   // Evaluate the operands to the expression and extract their values.
992*e5dd7070Spatrick   auto EmitOperand = [&](Expr *E) -> std::pair<Value *, Value *> {
993*e5dd7070Spatrick     RValue RV = CGF.EmitAnyExpr(E);
994*e5dd7070Spatrick     if (RV.isScalar())
995*e5dd7070Spatrick       return {RV.getScalarVal(), nullptr};
996*e5dd7070Spatrick     if (RV.isAggregate())
997*e5dd7070Spatrick       return {RV.getAggregatePointer(), nullptr};
998*e5dd7070Spatrick     assert(RV.isComplex());
999*e5dd7070Spatrick     return RV.getComplexVal();
1000*e5dd7070Spatrick   };
1001*e5dd7070Spatrick   auto LHSValues = EmitOperand(E->getLHS()),
1002*e5dd7070Spatrick        RHSValues = EmitOperand(E->getRHS());
1003*e5dd7070Spatrick 
1004*e5dd7070Spatrick   auto EmitCmp = [&](CompareKind K) {
1005*e5dd7070Spatrick     Value *Cmp = EmitCompare(Builder, CGF, E, LHSValues.first, RHSValues.first,
1006*e5dd7070Spatrick                              K, IsComplex ? ".r" : "");
1007*e5dd7070Spatrick     if (!IsComplex)
1008*e5dd7070Spatrick       return Cmp;
1009*e5dd7070Spatrick     assert(K == CompareKind::CK_Equal);
1010*e5dd7070Spatrick     Value *CmpImag = EmitCompare(Builder, CGF, E, LHSValues.second,
1011*e5dd7070Spatrick                                  RHSValues.second, K, ".i");
1012*e5dd7070Spatrick     return Builder.CreateAnd(Cmp, CmpImag, "and.eq");
1013*e5dd7070Spatrick   };
1014*e5dd7070Spatrick   auto EmitCmpRes = [&](const ComparisonCategoryInfo::ValueInfo *VInfo) {
1015*e5dd7070Spatrick     return Builder.getInt(VInfo->getIntValue());
1016*e5dd7070Spatrick   };
1017*e5dd7070Spatrick 
1018*e5dd7070Spatrick   Value *Select;
1019*e5dd7070Spatrick   if (ArgTy->isNullPtrType()) {
1020*e5dd7070Spatrick     Select = EmitCmpRes(CmpInfo.getEqualOrEquiv());
1021*e5dd7070Spatrick   } else if (!CmpInfo.isPartial()) {
1022*e5dd7070Spatrick     Value *SelectOne =
1023*e5dd7070Spatrick         Builder.CreateSelect(EmitCmp(CK_Less), EmitCmpRes(CmpInfo.getLess()),
1024*e5dd7070Spatrick                              EmitCmpRes(CmpInfo.getGreater()), "sel.lt");
1025*e5dd7070Spatrick     Select = Builder.CreateSelect(EmitCmp(CK_Equal),
1026*e5dd7070Spatrick                                   EmitCmpRes(CmpInfo.getEqualOrEquiv()),
1027*e5dd7070Spatrick                                   SelectOne, "sel.eq");
1028*e5dd7070Spatrick   } else {
1029*e5dd7070Spatrick     Value *SelectEq = Builder.CreateSelect(
1030*e5dd7070Spatrick         EmitCmp(CK_Equal), EmitCmpRes(CmpInfo.getEqualOrEquiv()),
1031*e5dd7070Spatrick         EmitCmpRes(CmpInfo.getUnordered()), "sel.eq");
1032*e5dd7070Spatrick     Value *SelectGT = Builder.CreateSelect(EmitCmp(CK_Greater),
1033*e5dd7070Spatrick                                            EmitCmpRes(CmpInfo.getGreater()),
1034*e5dd7070Spatrick                                            SelectEq, "sel.gt");
1035*e5dd7070Spatrick     Select = Builder.CreateSelect(
1036*e5dd7070Spatrick         EmitCmp(CK_Less), EmitCmpRes(CmpInfo.getLess()), SelectGT, "sel.lt");
1037*e5dd7070Spatrick   }
1038*e5dd7070Spatrick   // Create the return value in the destination slot.
1039*e5dd7070Spatrick   EnsureDest(E->getType());
1040*e5dd7070Spatrick   LValue DestLV = CGF.MakeAddrLValue(Dest.getAddress(), E->getType());
1041*e5dd7070Spatrick 
1042*e5dd7070Spatrick   // Emit the address of the first (and only) field in the comparison category
1043*e5dd7070Spatrick   // type, and initialize it from the constant integer value selected above.
1044*e5dd7070Spatrick   LValue FieldLV = CGF.EmitLValueForFieldInitialization(
1045*e5dd7070Spatrick       DestLV, *CmpInfo.Record->field_begin());
1046*e5dd7070Spatrick   CGF.EmitStoreThroughLValue(RValue::get(Select), FieldLV, /*IsInit*/ true);
1047*e5dd7070Spatrick 
1048*e5dd7070Spatrick   // All done! The result is in the Dest slot.
1049*e5dd7070Spatrick }
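// Illustrative example (not from the original source): for 'int a, b;' the
// expression 'a <=> b' has type std::strong_ordering, which is not a partial
// ordering, so the code above emits roughly
//
//   %cmp.lt = icmp slt i32 %a, %b
//   %sel.lt = select i1 %cmp.lt, iN LESS, iN GREATER
//   %cmp.eq = icmp eq  i32 %a, %b
//   %sel.eq = select i1 %cmp.eq, iN EQUAL, iN %sel.lt
//
// where LESS/EQUAL/GREATER are the integer constants taken from the standard
// library's comparison-category definition (commonly -1, 0 and 1), and the
// chosen value is stored into the single field of the result object.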
1050*e5dd7070Spatrick 
1051*e5dd7070Spatrick void AggExprEmitter::VisitBinaryOperator(const BinaryOperator *E) {
1052*e5dd7070Spatrick   if (E->getOpcode() == BO_PtrMemD || E->getOpcode() == BO_PtrMemI)
1053*e5dd7070Spatrick     VisitPointerToDataMemberBinaryOperator(E);
1054*e5dd7070Spatrick   else
1055*e5dd7070Spatrick     CGF.ErrorUnsupported(E, "aggregate binary expression");
1056*e5dd7070Spatrick }
1057*e5dd7070Spatrick 
1058*e5dd7070Spatrick void AggExprEmitter::VisitPointerToDataMemberBinaryOperator(
1059*e5dd7070Spatrick                                                     const BinaryOperator *E) {
1060*e5dd7070Spatrick   LValue LV = CGF.EmitPointerToDataMemberBinaryExpr(E);
1061*e5dd7070Spatrick   EmitFinalDestCopy(E->getType(), LV);
1062*e5dd7070Spatrick }
1063*e5dd7070Spatrick 
1064*e5dd7070Spatrick /// Is the value of the given expression possibly a reference to or
1065*e5dd7070Spatrick /// into a __block variable?
1066*e5dd7070Spatrick static bool isBlockVarRef(const Expr *E) {
1067*e5dd7070Spatrick   // Make sure we look through parens.
1068*e5dd7070Spatrick   E = E->IgnoreParens();
1069*e5dd7070Spatrick 
1070*e5dd7070Spatrick   // Check for a direct reference to a __block variable.
1071*e5dd7070Spatrick   if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(E)) {
1072*e5dd7070Spatrick     const VarDecl *var = dyn_cast<VarDecl>(DRE->getDecl());
1073*e5dd7070Spatrick     return (var && var->hasAttr<BlocksAttr>());
1074*e5dd7070Spatrick   }
1075*e5dd7070Spatrick 
1076*e5dd7070Spatrick   // More complicated stuff.
1077*e5dd7070Spatrick 
1078*e5dd7070Spatrick   // Binary operators.
1079*e5dd7070Spatrick   if (const BinaryOperator *op = dyn_cast<BinaryOperator>(E)) {
1080*e5dd7070Spatrick     // For an assignment or pointer-to-member operation, just care
1081*e5dd7070Spatrick     // about the LHS.
1082*e5dd7070Spatrick     if (op->isAssignmentOp() || op->isPtrMemOp())
1083*e5dd7070Spatrick       return isBlockVarRef(op->getLHS());
1084*e5dd7070Spatrick 
1085*e5dd7070Spatrick     // For a comma, just care about the RHS.
1086*e5dd7070Spatrick     if (op->getOpcode() == BO_Comma)
1087*e5dd7070Spatrick       return isBlockVarRef(op->getRHS());
1088*e5dd7070Spatrick 
1089*e5dd7070Spatrick     // FIXME: pointer arithmetic?
1090*e5dd7070Spatrick     return false;
1091*e5dd7070Spatrick 
1092*e5dd7070Spatrick   // Check both sides of a conditional operator.
1093*e5dd7070Spatrick   } else if (const AbstractConditionalOperator *op
1094*e5dd7070Spatrick                = dyn_cast<AbstractConditionalOperator>(E)) {
1095*e5dd7070Spatrick     return isBlockVarRef(op->getTrueExpr())
1096*e5dd7070Spatrick         || isBlockVarRef(op->getFalseExpr());
1097*e5dd7070Spatrick 
1098*e5dd7070Spatrick   // OVEs are required to support BinaryConditionalOperators.
1099*e5dd7070Spatrick   } else if (const OpaqueValueExpr *op
1100*e5dd7070Spatrick                = dyn_cast<OpaqueValueExpr>(E)) {
1101*e5dd7070Spatrick     if (const Expr *src = op->getSourceExpr())
1102*e5dd7070Spatrick       return isBlockVarRef(src);
1103*e5dd7070Spatrick 
1104*e5dd7070Spatrick   // Casts are necessary to get things like (*(int*)&var) = foo().
1105*e5dd7070Spatrick   // We don't really care about the kind of cast here, except
1106*e5dd7070Spatrick   // we don't want to look through l2r casts, because it's okay
1107*e5dd7070Spatrick   // to get the *value* in a __block variable.
1108*e5dd7070Spatrick   } else if (const CastExpr *cast = dyn_cast<CastExpr>(E)) {
1109*e5dd7070Spatrick     if (cast->getCastKind() == CK_LValueToRValue)
1110*e5dd7070Spatrick       return false;
1111*e5dd7070Spatrick     return isBlockVarRef(cast->getSubExpr());
1112*e5dd7070Spatrick 
1113*e5dd7070Spatrick   // Handle unary operators.  Again, just aggressively look through
1114*e5dd7070Spatrick   // it, ignoring the operation.
1115*e5dd7070Spatrick   } else if (const UnaryOperator *uop = dyn_cast<UnaryOperator>(E)) {
1116*e5dd7070Spatrick     return isBlockVarRef(uop->getSubExpr());
1117*e5dd7070Spatrick 
1118*e5dd7070Spatrick   // Look into the base of a field access.
1119*e5dd7070Spatrick   } else if (const MemberExpr *mem = dyn_cast<MemberExpr>(E)) {
1120*e5dd7070Spatrick     return isBlockVarRef(mem->getBase());
1121*e5dd7070Spatrick 
1122*e5dd7070Spatrick   // Look into the base of a subscript.
1123*e5dd7070Spatrick   } else if (const ArraySubscriptExpr *sub = dyn_cast<ArraySubscriptExpr>(E)) {
1124*e5dd7070Spatrick     return isBlockVarRef(sub->getBase());
1125*e5dd7070Spatrick   }
1126*e5dd7070Spatrick 
1127*e5dd7070Spatrick   return false;
1128*e5dd7070Spatrick }
1129*e5dd7070Spatrick 
1130*e5dd7070Spatrick void AggExprEmitter::VisitBinAssign(const BinaryOperator *E) {
1131*e5dd7070Spatrick   // For an assignment to work, the value on the right has
1132*e5dd7070Spatrick   // to be compatible with the value on the left.
1133*e5dd7070Spatrick   assert(CGF.getContext().hasSameUnqualifiedType(E->getLHS()->getType(),
1134*e5dd7070Spatrick                                                  E->getRHS()->getType())
1135*e5dd7070Spatrick          && "Invalid assignment");
1136*e5dd7070Spatrick 
1137*e5dd7070Spatrick   // If the LHS might be a __block variable, and the RHS can
1138*e5dd7070Spatrick   // potentially cause a block copy, we need to evaluate the RHS first
1139*e5dd7070Spatrick   // so that the assignment goes to the right place.
1140*e5dd7070Spatrick   // This is pretty semantically fragile.
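  // Illustrative example (assumed, not from the original source): given
  //
  //   __block struct Big b;
  //   ^{ use(&b); }();          // the block captures 'b' by reference
  //   b = makeBig();            // evaluating the RHS may copy the block
  //
  // copying the block moves 'b' to the heap, so an LHS address computed
  // before evaluating the RHS could point at the stale stack copy.  Emitting
  // the RHS first (below) avoids storing through that stale address.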
1141*e5dd7070Spatrick   if (isBlockVarRef(E->getLHS()) &&
1142*e5dd7070Spatrick       E->getRHS()->HasSideEffects(CGF.getContext())) {
1143*e5dd7070Spatrick     // Ensure that we have a destination, and evaluate the RHS into that.
1144*e5dd7070Spatrick     EnsureDest(E->getRHS()->getType());
1145*e5dd7070Spatrick     Visit(E->getRHS());
1146*e5dd7070Spatrick 
1147*e5dd7070Spatrick     // Now emit the LHS and copy into it.
1148*e5dd7070Spatrick     LValue LHS = CGF.EmitCheckedLValue(E->getLHS(), CodeGenFunction::TCK_Store);
1149*e5dd7070Spatrick 
1150*e5dd7070Spatrick     // That copy is an atomic copy if the LHS is atomic.
1151*e5dd7070Spatrick     if (LHS.getType()->isAtomicType() ||
1152*e5dd7070Spatrick         CGF.LValueIsSuitableForInlineAtomic(LHS)) {
1153*e5dd7070Spatrick       CGF.EmitAtomicStore(Dest.asRValue(), LHS, /*isInit*/ false);
1154*e5dd7070Spatrick       return;
1155*e5dd7070Spatrick     }
1156*e5dd7070Spatrick 
1157*e5dd7070Spatrick     EmitCopy(E->getLHS()->getType(),
1158*e5dd7070Spatrick              AggValueSlot::forLValue(LHS, CGF, AggValueSlot::IsDestructed,
1159*e5dd7070Spatrick                                      needsGC(E->getLHS()->getType()),
1160*e5dd7070Spatrick                                      AggValueSlot::IsAliased,
1161*e5dd7070Spatrick                                      AggValueSlot::MayOverlap),
1162*e5dd7070Spatrick              Dest);
1163*e5dd7070Spatrick     return;
1164*e5dd7070Spatrick   }
1165*e5dd7070Spatrick 
1166*e5dd7070Spatrick   LValue LHS = CGF.EmitLValue(E->getLHS());
1167*e5dd7070Spatrick 
1168*e5dd7070Spatrick   // If we have an atomic type, evaluate into the destination and then
1169*e5dd7070Spatrick   // do an atomic copy.
1170*e5dd7070Spatrick   if (LHS.getType()->isAtomicType() ||
1171*e5dd7070Spatrick       CGF.LValueIsSuitableForInlineAtomic(LHS)) {
1172*e5dd7070Spatrick     EnsureDest(E->getRHS()->getType());
1173*e5dd7070Spatrick     Visit(E->getRHS());
1174*e5dd7070Spatrick     CGF.EmitAtomicStore(Dest.asRValue(), LHS, /*isInit*/ false);
1175*e5dd7070Spatrick     return;
1176*e5dd7070Spatrick   }
1177*e5dd7070Spatrick 
1178*e5dd7070Spatrick   // Codegen the RHS so that it stores directly into the LHS.
1179*e5dd7070Spatrick   AggValueSlot LHSSlot = AggValueSlot::forLValue(
1180*e5dd7070Spatrick       LHS, CGF, AggValueSlot::IsDestructed, needsGC(E->getLHS()->getType()),
1181*e5dd7070Spatrick       AggValueSlot::IsAliased, AggValueSlot::MayOverlap);
1182*e5dd7070Spatrick   // A non-volatile aggregate destination might have volatile members.
1183*e5dd7070Spatrick   if (!LHSSlot.isVolatile() &&
1184*e5dd7070Spatrick       CGF.hasVolatileMember(E->getLHS()->getType()))
1185*e5dd7070Spatrick     LHSSlot.setVolatile(true);
1186*e5dd7070Spatrick 
1187*e5dd7070Spatrick   CGF.EmitAggExpr(E->getRHS(), LHSSlot);
1188*e5dd7070Spatrick 
1189*e5dd7070Spatrick   // Copy into the destination if the assignment isn't ignored.
1190*e5dd7070Spatrick   EmitFinalDestCopy(E->getType(), LHS);
1191*e5dd7070Spatrick }
1192*e5dd7070Spatrick 
1193*e5dd7070Spatrick void AggExprEmitter::
1194*e5dd7070Spatrick VisitAbstractConditionalOperator(const AbstractConditionalOperator *E) {
1195*e5dd7070Spatrick   llvm::BasicBlock *LHSBlock = CGF.createBasicBlock("cond.true");
1196*e5dd7070Spatrick   llvm::BasicBlock *RHSBlock = CGF.createBasicBlock("cond.false");
1197*e5dd7070Spatrick   llvm::BasicBlock *ContBlock = CGF.createBasicBlock("cond.end");
1198*e5dd7070Spatrick 
1199*e5dd7070Spatrick   // Bind the common expression if necessary.
1200*e5dd7070Spatrick   CodeGenFunction::OpaqueValueMapping binding(CGF, E);
1201*e5dd7070Spatrick 
1202*e5dd7070Spatrick   CodeGenFunction::ConditionalEvaluation eval(CGF);
1203*e5dd7070Spatrick   CGF.EmitBranchOnBoolExpr(E->getCond(), LHSBlock, RHSBlock,
1204*e5dd7070Spatrick                            CGF.getProfileCount(E));
1205*e5dd7070Spatrick 
1206*e5dd7070Spatrick   // Save whether the destination's lifetime is externally managed.
1207*e5dd7070Spatrick   bool isExternallyDestructed = Dest.isExternallyDestructed();
1208*e5dd7070Spatrick 
1209*e5dd7070Spatrick   eval.begin(CGF);
1210*e5dd7070Spatrick   CGF.EmitBlock(LHSBlock);
1211*e5dd7070Spatrick   CGF.incrementProfileCounter(E);
1212*e5dd7070Spatrick   Visit(E->getTrueExpr());
1213*e5dd7070Spatrick   eval.end(CGF);
1214*e5dd7070Spatrick 
1215*e5dd7070Spatrick   assert(CGF.HaveInsertPoint() && "expression evaluation ended with no IP!");
1216*e5dd7070Spatrick   CGF.Builder.CreateBr(ContBlock);
1217*e5dd7070Spatrick 
1218*e5dd7070Spatrick   // If the result of an agg expression is unused, then the emission
1219*e5dd7070Spatrick   // of the LHS might need to create a destination slot.  That's fine
1220*e5dd7070Spatrick   // with us, and we can safely emit the RHS into the same slot, but
1221*e5dd7070Spatrick   // we shouldn't claim that it's already being destructed.
1222*e5dd7070Spatrick   Dest.setExternallyDestructed(isExternallyDestructed);
1223*e5dd7070Spatrick 
1224*e5dd7070Spatrick   eval.begin(CGF);
1225*e5dd7070Spatrick   CGF.EmitBlock(RHSBlock);
1226*e5dd7070Spatrick   Visit(E->getFalseExpr());
1227*e5dd7070Spatrick   eval.end(CGF);
1228*e5dd7070Spatrick 
1229*e5dd7070Spatrick   CGF.EmitBlock(ContBlock);
1230*e5dd7070Spatrick }
1231*e5dd7070Spatrick 
1232*e5dd7070Spatrick void AggExprEmitter::VisitChooseExpr(const ChooseExpr *CE) {
1233*e5dd7070Spatrick   Visit(CE->getChosenSubExpr());
1234*e5dd7070Spatrick }
1235*e5dd7070Spatrick 
1236*e5dd7070Spatrick void AggExprEmitter::VisitVAArgExpr(VAArgExpr *VE) {
1237*e5dd7070Spatrick   Address ArgValue = Address::invalid();
1238*e5dd7070Spatrick   Address ArgPtr = CGF.EmitVAArg(VE, ArgValue);
1239*e5dd7070Spatrick 
1240*e5dd7070Spatrick   // If EmitVAArg fails, emit an error.
1241*e5dd7070Spatrick   if (!ArgPtr.isValid()) {
1242*e5dd7070Spatrick     CGF.ErrorUnsupported(VE, "aggregate va_arg expression");
1243*e5dd7070Spatrick     return;
1244*e5dd7070Spatrick   }
1245*e5dd7070Spatrick 
1246*e5dd7070Spatrick   EmitFinalDestCopy(VE->getType(), CGF.MakeAddrLValue(ArgPtr, VE->getType()));
1247*e5dd7070Spatrick }
1248*e5dd7070Spatrick 
1249*e5dd7070Spatrick void AggExprEmitter::VisitCXXBindTemporaryExpr(CXXBindTemporaryExpr *E) {
1250*e5dd7070Spatrick   // Ensure that we have a slot, but if we already do, remember
1251*e5dd7070Spatrick   // whether it was externally destructed.
1252*e5dd7070Spatrick   bool wasExternallyDestructed = Dest.isExternallyDestructed();
1253*e5dd7070Spatrick   EnsureDest(E->getType());
1254*e5dd7070Spatrick 
1255*e5dd7070Spatrick   // We're going to push a destructor if there isn't already one.
1256*e5dd7070Spatrick   Dest.setExternallyDestructed();
1257*e5dd7070Spatrick 
1258*e5dd7070Spatrick   Visit(E->getSubExpr());
1259*e5dd7070Spatrick 
1260*e5dd7070Spatrick   // Push that destructor we promised.
1261*e5dd7070Spatrick   if (!wasExternallyDestructed)
1262*e5dd7070Spatrick     CGF.EmitCXXTemporary(E->getTemporary(), E->getType(), Dest.getAddress());
1263*e5dd7070Spatrick }
1264*e5dd7070Spatrick 
1265*e5dd7070Spatrick void
1266*e5dd7070Spatrick AggExprEmitter::VisitCXXConstructExpr(const CXXConstructExpr *E) {
1267*e5dd7070Spatrick   AggValueSlot Slot = EnsureSlot(E->getType());
1268*e5dd7070Spatrick   CGF.EmitCXXConstructExpr(E, Slot);
1269*e5dd7070Spatrick }
1270*e5dd7070Spatrick 
1271*e5dd7070Spatrick void AggExprEmitter::VisitCXXInheritedCtorInitExpr(
1272*e5dd7070Spatrick     const CXXInheritedCtorInitExpr *E) {
1273*e5dd7070Spatrick   AggValueSlot Slot = EnsureSlot(E->getType());
1274*e5dd7070Spatrick   CGF.EmitInheritedCXXConstructorCall(
1275*e5dd7070Spatrick       E->getConstructor(), E->constructsVBase(), Slot.getAddress(),
1276*e5dd7070Spatrick       E->inheritedFromVBase(), E);
1277*e5dd7070Spatrick }
1278*e5dd7070Spatrick 
1279*e5dd7070Spatrick void
1280*e5dd7070Spatrick AggExprEmitter::VisitLambdaExpr(LambdaExpr *E) {
1281*e5dd7070Spatrick   AggValueSlot Slot = EnsureSlot(E->getType());
1282*e5dd7070Spatrick   LValue SlotLV = CGF.MakeAddrLValue(Slot.getAddress(), E->getType());
1283*e5dd7070Spatrick 
1284*e5dd7070Spatrick   // We'll need to enter cleanup scopes in case any of the element
1285*e5dd7070Spatrick   // initializers throws an exception.
1286*e5dd7070Spatrick   SmallVector<EHScopeStack::stable_iterator, 16> Cleanups;
1287*e5dd7070Spatrick   llvm::Instruction *CleanupDominator = nullptr;
1288*e5dd7070Spatrick 
1289*e5dd7070Spatrick   CXXRecordDecl::field_iterator CurField = E->getLambdaClass()->field_begin();
1290*e5dd7070Spatrick   for (LambdaExpr::const_capture_init_iterator i = E->capture_init_begin(),
1291*e5dd7070Spatrick                                                e = E->capture_init_end();
1292*e5dd7070Spatrick        i != e; ++i, ++CurField) {
1293*e5dd7070Spatrick     // Emit initialization
1294*e5dd7070Spatrick     LValue LV = CGF.EmitLValueForFieldInitialization(SlotLV, *CurField);
1295*e5dd7070Spatrick     if (CurField->hasCapturedVLAType()) {
1296*e5dd7070Spatrick       CGF.EmitLambdaVLACapture(CurField->getCapturedVLAType(), LV);
1297*e5dd7070Spatrick       continue;
1298*e5dd7070Spatrick     }
1299*e5dd7070Spatrick 
1300*e5dd7070Spatrick     EmitInitializationToLValue(*i, LV);
1301*e5dd7070Spatrick 
1302*e5dd7070Spatrick     // Push a destructor if necessary.
1303*e5dd7070Spatrick     if (QualType::DestructionKind DtorKind =
1304*e5dd7070Spatrick             CurField->getType().isDestructedType()) {
1305*e5dd7070Spatrick       assert(LV.isSimple());
1306*e5dd7070Spatrick       if (CGF.needsEHCleanup(DtorKind)) {
1307*e5dd7070Spatrick         if (!CleanupDominator)
1308*e5dd7070Spatrick           CleanupDominator = CGF.Builder.CreateAlignedLoad(
1309*e5dd7070Spatrick               CGF.Int8Ty,
1310*e5dd7070Spatrick               llvm::Constant::getNullValue(CGF.Int8PtrTy),
1311*e5dd7070Spatrick               CharUnits::One()); // placeholder
1312*e5dd7070Spatrick 
1313*e5dd7070Spatrick         CGF.pushDestroy(EHCleanup, LV.getAddress(CGF), CurField->getType(),
1314*e5dd7070Spatrick                         CGF.getDestroyer(DtorKind), false);
1315*e5dd7070Spatrick         Cleanups.push_back(CGF.EHStack.stable_begin());
1316*e5dd7070Spatrick       }
1317*e5dd7070Spatrick     }
1318*e5dd7070Spatrick   }
1319*e5dd7070Spatrick 
1320*e5dd7070Spatrick   // Deactivate all the partial cleanups in reverse order, which
1321*e5dd7070Spatrick   // generally means popping them.
1322*e5dd7070Spatrick   for (unsigned i = Cleanups.size(); i != 0; --i)
1323*e5dd7070Spatrick     CGF.DeactivateCleanupBlock(Cleanups[i-1], CleanupDominator);
1324*e5dd7070Spatrick 
1325*e5dd7070Spatrick   // Destroy the placeholder if we made one.
1326*e5dd7070Spatrick   if (CleanupDominator)
1327*e5dd7070Spatrick     CleanupDominator->eraseFromParent();
1328*e5dd7070Spatrick }
1329*e5dd7070Spatrick 
1330*e5dd7070Spatrick void AggExprEmitter::VisitExprWithCleanups(ExprWithCleanups *E) {
1331*e5dd7070Spatrick   CGF.enterFullExpression(E);
1332*e5dd7070Spatrick   CodeGenFunction::RunCleanupsScope cleanups(CGF);
1333*e5dd7070Spatrick   Visit(E->getSubExpr());
1334*e5dd7070Spatrick }
1335*e5dd7070Spatrick 
1336*e5dd7070Spatrick void AggExprEmitter::VisitCXXScalarValueInitExpr(CXXScalarValueInitExpr *E) {
1337*e5dd7070Spatrick   QualType T = E->getType();
1338*e5dd7070Spatrick   AggValueSlot Slot = EnsureSlot(T);
1339*e5dd7070Spatrick   EmitNullInitializationToLValue(CGF.MakeAddrLValue(Slot.getAddress(), T));
1340*e5dd7070Spatrick }
1341*e5dd7070Spatrick 
1342*e5dd7070Spatrick void AggExprEmitter::VisitImplicitValueInitExpr(ImplicitValueInitExpr *E) {
1343*e5dd7070Spatrick   QualType T = E->getType();
1344*e5dd7070Spatrick   AggValueSlot Slot = EnsureSlot(T);
1345*e5dd7070Spatrick   EmitNullInitializationToLValue(CGF.MakeAddrLValue(Slot.getAddress(), T));
1346*e5dd7070Spatrick }
1347*e5dd7070Spatrick 
1348*e5dd7070Spatrick /// isSimpleZero - If emitting this value will obviously just cause a store of
1349*e5dd7070Spatrick /// zero to memory, return true.  This can return false if uncertain, so it just
1350*e5dd7070Spatrick /// handles simple cases.
1351*e5dd7070Spatrick static bool isSimpleZero(const Expr *E, CodeGenFunction &CGF) {
1352*e5dd7070Spatrick   E = E->IgnoreParens();
1353*e5dd7070Spatrick 
1354*e5dd7070Spatrick   // 0
1355*e5dd7070Spatrick   if (const IntegerLiteral *IL = dyn_cast<IntegerLiteral>(E))
1356*e5dd7070Spatrick     return IL->getValue() == 0;
1357*e5dd7070Spatrick   // +0.0
1358*e5dd7070Spatrick   if (const FloatingLiteral *FL = dyn_cast<FloatingLiteral>(E))
1359*e5dd7070Spatrick     return FL->getValue().isPosZero();
1360*e5dd7070Spatrick   // int()
1361*e5dd7070Spatrick   if ((isa<ImplicitValueInitExpr>(E) || isa<CXXScalarValueInitExpr>(E)) &&
1362*e5dd7070Spatrick       CGF.getTypes().isZeroInitializable(E->getType()))
1363*e5dd7070Spatrick     return true;
1364*e5dd7070Spatrick   // (int*)0 - Null pointer expressions.
1365*e5dd7070Spatrick   if (const CastExpr *ICE = dyn_cast<CastExpr>(E))
1366*e5dd7070Spatrick     return ICE->getCastKind() == CK_NullToPointer &&
1367*e5dd7070Spatrick            CGF.getTypes().isPointerZeroInitializable(E->getType()) &&
1368*e5dd7070Spatrick            !E->HasSideEffects(CGF.getContext());
1369*e5dd7070Spatrick   // '\0'
1370*e5dd7070Spatrick   if (const CharacterLiteral *CL = dyn_cast<CharacterLiteral>(E))
1371*e5dd7070Spatrick     return CL->getValue() == 0;
1372*e5dd7070Spatrick 
1373*e5dd7070Spatrick   // Otherwise, hard case: conservatively return false.
1374*e5dd7070Spatrick   return false;
1375*e5dd7070Spatrick }
1376*e5dd7070Spatrick 
1377*e5dd7070Spatrick 
1378*e5dd7070Spatrick void
1379*e5dd7070Spatrick AggExprEmitter::EmitInitializationToLValue(Expr *E, LValue LV) {
1380*e5dd7070Spatrick   QualType type = LV.getType();
1381*e5dd7070Spatrick   // FIXME: Ignore result?
1382*e5dd7070Spatrick   // FIXME: Are initializers affected by volatile?
1383*e5dd7070Spatrick   if (Dest.isZeroed() && isSimpleZero(E, CGF)) {
1384*e5dd7070Spatrick     // Storing "i32 0" to a zero'd memory location is a noop.
1385*e5dd7070Spatrick     return;
1386*e5dd7070Spatrick   } else if (isa<ImplicitValueInitExpr>(E) || isa<CXXScalarValueInitExpr>(E)) {
1387*e5dd7070Spatrick     return EmitNullInitializationToLValue(LV);
1388*e5dd7070Spatrick   } else if (isa<NoInitExpr>(E)) {
1389*e5dd7070Spatrick     // Do nothing.
1390*e5dd7070Spatrick     return;
1391*e5dd7070Spatrick   } else if (type->isReferenceType()) {
1392*e5dd7070Spatrick     RValue RV = CGF.EmitReferenceBindingToExpr(E);
1393*e5dd7070Spatrick     return CGF.EmitStoreThroughLValue(RV, LV);
1394*e5dd7070Spatrick   }
1395*e5dd7070Spatrick 
1396*e5dd7070Spatrick   switch (CGF.getEvaluationKind(type)) {
1397*e5dd7070Spatrick   case TEK_Complex:
1398*e5dd7070Spatrick     CGF.EmitComplexExprIntoLValue(E, LV, /*isInit*/ true);
1399*e5dd7070Spatrick     return;
1400*e5dd7070Spatrick   case TEK_Aggregate:
1401*e5dd7070Spatrick     CGF.EmitAggExpr(
1402*e5dd7070Spatrick         E, AggValueSlot::forLValue(LV, CGF, AggValueSlot::IsDestructed,
1403*e5dd7070Spatrick                                    AggValueSlot::DoesNotNeedGCBarriers,
1404*e5dd7070Spatrick                                    AggValueSlot::IsNotAliased,
1405*e5dd7070Spatrick                                    AggValueSlot::MayOverlap, Dest.isZeroed()));
1406*e5dd7070Spatrick     return;
1407*e5dd7070Spatrick   case TEK_Scalar:
1408*e5dd7070Spatrick     if (LV.isSimple()) {
1409*e5dd7070Spatrick       CGF.EmitScalarInit(E, /*D=*/nullptr, LV, /*Captured=*/false);
1410*e5dd7070Spatrick     } else {
1411*e5dd7070Spatrick       CGF.EmitStoreThroughLValue(RValue::get(CGF.EmitScalarExpr(E)), LV);
1412*e5dd7070Spatrick     }
1413*e5dd7070Spatrick     return;
1414*e5dd7070Spatrick   }
1415*e5dd7070Spatrick   llvm_unreachable("bad evaluation kind");
1416*e5dd7070Spatrick }
1417*e5dd7070Spatrick 
1418*e5dd7070Spatrick void AggExprEmitter::EmitNullInitializationToLValue(LValue lv) {
1419*e5dd7070Spatrick   QualType type = lv.getType();
1420*e5dd7070Spatrick 
1421*e5dd7070Spatrick   // If the destination slot is already zeroed out before the aggregate is
1422*e5dd7070Spatrick   // copied into it, we don't have to emit any zeros here.
1423*e5dd7070Spatrick   if (Dest.isZeroed() && CGF.getTypes().isZeroInitializable(type))
1424*e5dd7070Spatrick     return;
1425*e5dd7070Spatrick 
1426*e5dd7070Spatrick   if (CGF.hasScalarEvaluationKind(type)) {
1427*e5dd7070Spatrick     // For non-aggregates, we can store the appropriate null constant.
1428*e5dd7070Spatrick     llvm::Value *null = CGF.CGM.EmitNullConstant(type);
1429*e5dd7070Spatrick     // Note that the following is not equivalent to
1430*e5dd7070Spatrick     // EmitStoreThroughBitfieldLValue for ARC types.
1431*e5dd7070Spatrick     if (lv.isBitField()) {
1432*e5dd7070Spatrick       CGF.EmitStoreThroughBitfieldLValue(RValue::get(null), lv);
1433*e5dd7070Spatrick     } else {
1434*e5dd7070Spatrick       assert(lv.isSimple());
1435*e5dd7070Spatrick       CGF.EmitStoreOfScalar(null, lv, /* isInitialization */ true);
1436*e5dd7070Spatrick     }
1437*e5dd7070Spatrick   } else {
1438*e5dd7070Spatrick     // There's a potential optimization opportunity in combining
1439*e5dd7070Spatrick     // memsets; that would be easy for arrays, but relatively
1440*e5dd7070Spatrick     // difficult for structures with the current code.
1441*e5dd7070Spatrick     CGF.EmitNullInitialization(lv.getAddress(CGF), lv.getType());
1442*e5dd7070Spatrick   }
1443*e5dd7070Spatrick }
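// For illustration (assumed behaviour of the helper above): null-initializing
// an 'int' field emits a scalar 'store i32 0', a bit-field goes through the
// bit-field store path, and an aggregate such as 'char buf[256]' is handled
// by EmitNullInitialization, which typically lowers to a memset of the
// field's storage.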
1444*e5dd7070Spatrick 
1445*e5dd7070Spatrick void AggExprEmitter::VisitInitListExpr(InitListExpr *E) {
1446*e5dd7070Spatrick #if 0
1447*e5dd7070Spatrick   // FIXME: Assess perf here?  Figure out what cases are worth optimizing here
1448*e5dd7070Spatrick   // (Length of globals? Chunks of zeroed-out space?).
1449*e5dd7070Spatrick   //
1450*e5dd7070Spatrick   // If we can, prefer a copy from a global; this is a lot less code for long
1451*e5dd7070Spatrick   // globals, and it's easier for the current optimizers to analyze.
1452*e5dd7070Spatrick   if (llvm::Constant* C = CGF.CGM.EmitConstantExpr(E, E->getType(), &CGF)) {
1453*e5dd7070Spatrick     llvm::GlobalVariable* GV =
1454*e5dd7070Spatrick     new llvm::GlobalVariable(CGF.CGM.getModule(), C->getType(), true,
1455*e5dd7070Spatrick                              llvm::GlobalValue::InternalLinkage, C, "");
1456*e5dd7070Spatrick     EmitFinalDestCopy(E->getType(), CGF.MakeAddrLValue(GV, E->getType()));
1457*e5dd7070Spatrick     return;
1458*e5dd7070Spatrick   }
1459*e5dd7070Spatrick #endif
1460*e5dd7070Spatrick   if (E->hadArrayRangeDesignator())
1461*e5dd7070Spatrick     CGF.ErrorUnsupported(E, "GNU array range designator extension");
1462*e5dd7070Spatrick 
1463*e5dd7070Spatrick   if (E->isTransparent())
1464*e5dd7070Spatrick     return Visit(E->getInit(0));
1465*e5dd7070Spatrick 
1466*e5dd7070Spatrick   AggValueSlot Dest = EnsureSlot(E->getType());
1467*e5dd7070Spatrick 
1468*e5dd7070Spatrick   LValue DestLV = CGF.MakeAddrLValue(Dest.getAddress(), E->getType());
1469*e5dd7070Spatrick 
1470*e5dd7070Spatrick   // Handle initialization of an array.
1471*e5dd7070Spatrick   if (E->getType()->isArrayType()) {
1472*e5dd7070Spatrick     auto AType = cast<llvm::ArrayType>(Dest.getAddress().getElementType());
1473*e5dd7070Spatrick     EmitArrayInit(Dest.getAddress(), AType, E->getType(), E);
1474*e5dd7070Spatrick     return;
1475*e5dd7070Spatrick   }
1476*e5dd7070Spatrick 
1477*e5dd7070Spatrick   assert(E->getType()->isRecordType() && "Only support structs/unions here!");
1478*e5dd7070Spatrick 
1479*e5dd7070Spatrick   // Do struct initialization; this code just sets each individual member
1480*e5dd7070Spatrick   // to the appropriate value.  This makes bitfield support automatic;
1481*e5dd7070Spatrick   // the disadvantage is that the generated code is more difficult for
1482*e5dd7070Spatrick   // the optimizer, especially with bitfields.
1483*e5dd7070Spatrick   unsigned NumInitElements = E->getNumInits();
1484*e5dd7070Spatrick   RecordDecl *record = E->getType()->castAs<RecordType>()->getDecl();
1485*e5dd7070Spatrick 
1486*e5dd7070Spatrick   // We'll need to enter cleanup scopes in case any of the element
1487*e5dd7070Spatrick   // initializers throws an exception.
1488*e5dd7070Spatrick   SmallVector<EHScopeStack::stable_iterator, 16> cleanups;
1489*e5dd7070Spatrick   llvm::Instruction *cleanupDominator = nullptr;
1490*e5dd7070Spatrick   auto addCleanup = [&](const EHScopeStack::stable_iterator &cleanup) {
1491*e5dd7070Spatrick     cleanups.push_back(cleanup);
1492*e5dd7070Spatrick     if (!cleanupDominator) // create placeholder once needed
1493*e5dd7070Spatrick       cleanupDominator = CGF.Builder.CreateAlignedLoad(
1494*e5dd7070Spatrick           CGF.Int8Ty, llvm::Constant::getNullValue(CGF.Int8PtrTy),
1495*e5dd7070Spatrick           CharUnits::One());
1496*e5dd7070Spatrick   };
1497*e5dd7070Spatrick 
1498*e5dd7070Spatrick   unsigned curInitIndex = 0;
1499*e5dd7070Spatrick 
1500*e5dd7070Spatrick   // Emit initialization of base classes.
1501*e5dd7070Spatrick   if (auto *CXXRD = dyn_cast<CXXRecordDecl>(record)) {
1502*e5dd7070Spatrick     assert(E->getNumInits() >= CXXRD->getNumBases() &&
1503*e5dd7070Spatrick            "missing initializer for base class");
1504*e5dd7070Spatrick     for (auto &Base : CXXRD->bases()) {
1505*e5dd7070Spatrick       assert(!Base.isVirtual() && "should not see vbases here");
1506*e5dd7070Spatrick       auto *BaseRD = Base.getType()->getAsCXXRecordDecl();
1507*e5dd7070Spatrick       Address V = CGF.GetAddressOfDirectBaseInCompleteClass(
1508*e5dd7070Spatrick           Dest.getAddress(), CXXRD, BaseRD,
1509*e5dd7070Spatrick           /*isBaseVirtual*/ false);
1510*e5dd7070Spatrick       AggValueSlot AggSlot = AggValueSlot::forAddr(
1511*e5dd7070Spatrick           V, Qualifiers(),
1512*e5dd7070Spatrick           AggValueSlot::IsDestructed,
1513*e5dd7070Spatrick           AggValueSlot::DoesNotNeedGCBarriers,
1514*e5dd7070Spatrick           AggValueSlot::IsNotAliased,
1515*e5dd7070Spatrick           CGF.getOverlapForBaseInit(CXXRD, BaseRD, Base.isVirtual()));
1516*e5dd7070Spatrick       CGF.EmitAggExpr(E->getInit(curInitIndex++), AggSlot);
1517*e5dd7070Spatrick 
1518*e5dd7070Spatrick       if (QualType::DestructionKind dtorKind =
1519*e5dd7070Spatrick               Base.getType().isDestructedType()) {
1520*e5dd7070Spatrick         CGF.pushDestroy(dtorKind, V, Base.getType());
1521*e5dd7070Spatrick         addCleanup(CGF.EHStack.stable_begin());
1522*e5dd7070Spatrick       }
1523*e5dd7070Spatrick     }
1524*e5dd7070Spatrick   }
1525*e5dd7070Spatrick 
1526*e5dd7070Spatrick   // Prepare a 'this' for CXXDefaultInitExprs.
1527*e5dd7070Spatrick   CodeGenFunction::FieldConstructionScope FCS(CGF, Dest.getAddress());
1528*e5dd7070Spatrick 
1529*e5dd7070Spatrick   if (record->isUnion()) {
1530*e5dd7070Spatrick     // Only initialize one field of a union. The field itself is
1531*e5dd7070Spatrick     // specified by the initializer list.
1532*e5dd7070Spatrick     if (!E->getInitializedFieldInUnion()) {
1533*e5dd7070Spatrick       // Empty union; we have nothing to do.
1534*e5dd7070Spatrick 
1535*e5dd7070Spatrick #ifndef NDEBUG
1536*e5dd7070Spatrick       // Make sure that it's really an empty union and not a failure of
1537*e5dd7070Spatrick       // semantic analysis.
1538*e5dd7070Spatrick       for (const auto *Field : record->fields())
1539*e5dd7070Spatrick         assert(Field->isUnnamedBitfield() && "Only unnamed bitfields allowed");
1540*e5dd7070Spatrick #endif
1541*e5dd7070Spatrick       return;
1542*e5dd7070Spatrick     }
1543*e5dd7070Spatrick 
1544*e5dd7070Spatrick     // FIXME: volatility
1545*e5dd7070Spatrick     FieldDecl *Field = E->getInitializedFieldInUnion();
1546*e5dd7070Spatrick 
1547*e5dd7070Spatrick     LValue FieldLoc = CGF.EmitLValueForFieldInitialization(DestLV, Field);
1548*e5dd7070Spatrick     if (NumInitElements) {
1549*e5dd7070Spatrick       // Store the initializer into the field
1550*e5dd7070Spatrick       EmitInitializationToLValue(E->getInit(0), FieldLoc);
1551*e5dd7070Spatrick     } else {
1552*e5dd7070Spatrick       // Default-initialize to null.
1553*e5dd7070Spatrick       EmitNullInitializationToLValue(FieldLoc);
1554*e5dd7070Spatrick     }
1555*e5dd7070Spatrick 
1556*e5dd7070Spatrick     return;
1557*e5dd7070Spatrick   }
1558*e5dd7070Spatrick 
1559*e5dd7070Spatrick   // Here we iterate over the fields; this makes it simpler to both
1560*e5dd7070Spatrick   // default-initialize fields and skip over unnamed fields.
1561*e5dd7070Spatrick   for (const auto *field : record->fields()) {
1562*e5dd7070Spatrick     // We're done once we hit the flexible array member.
1563*e5dd7070Spatrick     if (field->getType()->isIncompleteArrayType())
1564*e5dd7070Spatrick       break;
1565*e5dd7070Spatrick 
1566*e5dd7070Spatrick     // Always skip anonymous bitfields.
1567*e5dd7070Spatrick     if (field->isUnnamedBitfield())
1568*e5dd7070Spatrick       continue;
1569*e5dd7070Spatrick 
1570*e5dd7070Spatrick     // We're done if we reach the end of the explicit initializers, we
1571*e5dd7070Spatrick     // have a zeroed object, and the rest of the fields are
1572*e5dd7070Spatrick     // zero-initializable.
1573*e5dd7070Spatrick     if (curInitIndex == NumInitElements && Dest.isZeroed() &&
1574*e5dd7070Spatrick         CGF.getTypes().isZeroInitializable(E->getType()))
1575*e5dd7070Spatrick       break;
1576*e5dd7070Spatrick 
1577*e5dd7070Spatrick 
1578*e5dd7070Spatrick     LValue LV = CGF.EmitLValueForFieldInitialization(DestLV, field);
1579*e5dd7070Spatrick     // We never generate write-barriers for initialized fields.
1580*e5dd7070Spatrick     LV.setNonGC(true);
1581*e5dd7070Spatrick 
1582*e5dd7070Spatrick     if (curInitIndex < NumInitElements) {
1583*e5dd7070Spatrick       // Store the initializer into the field.
1584*e5dd7070Spatrick       EmitInitializationToLValue(E->getInit(curInitIndex++), LV);
1585*e5dd7070Spatrick     } else {
1586*e5dd7070Spatrick       // We're out of initializers; default-initialize to null
1587*e5dd7070Spatrick       EmitNullInitializationToLValue(LV);
1588*e5dd7070Spatrick     }
1589*e5dd7070Spatrick 
1590*e5dd7070Spatrick     // Push a destructor if necessary.
1591*e5dd7070Spatrick     // FIXME: if we have an array of structures, all explicitly
1592*e5dd7070Spatrick     // initialized, we can end up pushing a linear number of cleanups.
1593*e5dd7070Spatrick     bool pushedCleanup = false;
1594*e5dd7070Spatrick     if (QualType::DestructionKind dtorKind
1595*e5dd7070Spatrick           = field->getType().isDestructedType()) {
1596*e5dd7070Spatrick       assert(LV.isSimple());
1597*e5dd7070Spatrick       if (CGF.needsEHCleanup(dtorKind)) {
1598*e5dd7070Spatrick         CGF.pushDestroy(EHCleanup, LV.getAddress(CGF), field->getType(),
1599*e5dd7070Spatrick                         CGF.getDestroyer(dtorKind), false);
1600*e5dd7070Spatrick         addCleanup(CGF.EHStack.stable_begin());
1601*e5dd7070Spatrick         pushedCleanup = true;
1602*e5dd7070Spatrick       }
1603*e5dd7070Spatrick     }
1604*e5dd7070Spatrick 
1605*e5dd7070Spatrick     // If the GEP didn't get used because of a dead zero init or something
1606*e5dd7070Spatrick     // else, clean it up for -O0 builds and general tidiness.
1607*e5dd7070Spatrick     if (!pushedCleanup && LV.isSimple())
1608*e5dd7070Spatrick       if (llvm::GetElementPtrInst *GEP =
1609*e5dd7070Spatrick               dyn_cast<llvm::GetElementPtrInst>(LV.getPointer(CGF)))
1610*e5dd7070Spatrick         if (GEP->use_empty())
1611*e5dd7070Spatrick           GEP->eraseFromParent();
1612*e5dd7070Spatrick   }
1613*e5dd7070Spatrick 
1614*e5dd7070Spatrick   // Deactivate all the partial cleanups in reverse order, which
1615*e5dd7070Spatrick   // generally means popping them.
1616*e5dd7070Spatrick   assert((cleanupDominator || cleanups.empty()) &&
1617*e5dd7070Spatrick          "Missing cleanupDominator before deactivating cleanup blocks");
1618*e5dd7070Spatrick   for (unsigned i = cleanups.size(); i != 0; --i)
1619*e5dd7070Spatrick     CGF.DeactivateCleanupBlock(cleanups[i-1], cleanupDominator);
1620*e5dd7070Spatrick 
1621*e5dd7070Spatrick   // Destroy the placeholder if we made one.
1622*e5dd7070Spatrick   if (cleanupDominator)
1623*e5dd7070Spatrick     cleanupDominator->eraseFromParent();
1624*e5dd7070Spatrick }
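// Illustrative example (not from the original source): for
//
//   struct S { int a; float b; char buf[8]; };
//   S s = { 1 };
//
// the field loop above stores 1 into 'a' and then default-initializes 'b'
// and 'buf' to zero, unless the slot was already zeroed (e.g. by the memset
// heuristic further below), in which case the trailing zero stores are
// skipped.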
1625*e5dd7070Spatrick 
1626*e5dd7070Spatrick void AggExprEmitter::VisitArrayInitLoopExpr(const ArrayInitLoopExpr *E,
1627*e5dd7070Spatrick                                             llvm::Value *outerBegin) {
1628*e5dd7070Spatrick   // Emit the common subexpression.
1629*e5dd7070Spatrick   CodeGenFunction::OpaqueValueMapping binding(CGF, E->getCommonExpr());
1630*e5dd7070Spatrick 
1631*e5dd7070Spatrick   Address destPtr = EnsureSlot(E->getType()).getAddress();
1632*e5dd7070Spatrick   uint64_t numElements = E->getArraySize().getZExtValue();
1633*e5dd7070Spatrick 
1634*e5dd7070Spatrick   if (!numElements)
1635*e5dd7070Spatrick     return;
1636*e5dd7070Spatrick 
1637*e5dd7070Spatrick   // destPtr is an array*. Construct an elementType* by drilling down a level.
1638*e5dd7070Spatrick   llvm::Value *zero = llvm::ConstantInt::get(CGF.SizeTy, 0);
1639*e5dd7070Spatrick   llvm::Value *indices[] = {zero, zero};
1640*e5dd7070Spatrick   llvm::Value *begin = Builder.CreateInBoundsGEP(destPtr.getPointer(), indices,
1641*e5dd7070Spatrick                                                  "arrayinit.begin");
1642*e5dd7070Spatrick 
1643*e5dd7070Spatrick   // Prepare to special-case multidimensional array initialization: we avoid
1644*e5dd7070Spatrick   // emitting multiple destructor loops in that case.
1645*e5dd7070Spatrick   if (!outerBegin)
1646*e5dd7070Spatrick     outerBegin = begin;
1647*e5dd7070Spatrick   ArrayInitLoopExpr *InnerLoop = dyn_cast<ArrayInitLoopExpr>(E->getSubExpr());
1648*e5dd7070Spatrick 
1649*e5dd7070Spatrick   QualType elementType =
1650*e5dd7070Spatrick       CGF.getContext().getAsArrayType(E->getType())->getElementType();
1651*e5dd7070Spatrick   CharUnits elementSize = CGF.getContext().getTypeSizeInChars(elementType);
1652*e5dd7070Spatrick   CharUnits elementAlign =
1653*e5dd7070Spatrick       destPtr.getAlignment().alignmentOfArrayElement(elementSize);
1654*e5dd7070Spatrick 
1655*e5dd7070Spatrick   llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
1656*e5dd7070Spatrick   llvm::BasicBlock *bodyBB = CGF.createBasicBlock("arrayinit.body");
1657*e5dd7070Spatrick 
1658*e5dd7070Spatrick   // Jump into the body.
1659*e5dd7070Spatrick   CGF.EmitBlock(bodyBB);
1660*e5dd7070Spatrick   llvm::PHINode *index =
1661*e5dd7070Spatrick       Builder.CreatePHI(zero->getType(), 2, "arrayinit.index");
1662*e5dd7070Spatrick   index->addIncoming(zero, entryBB);
1663*e5dd7070Spatrick   llvm::Value *element = Builder.CreateInBoundsGEP(begin, index);
1664*e5dd7070Spatrick 
1665*e5dd7070Spatrick   // Prepare for a cleanup.
1666*e5dd7070Spatrick   QualType::DestructionKind dtorKind = elementType.isDestructedType();
1667*e5dd7070Spatrick   EHScopeStack::stable_iterator cleanup;
1668*e5dd7070Spatrick   if (CGF.needsEHCleanup(dtorKind) && !InnerLoop) {
1669*e5dd7070Spatrick     if (outerBegin->getType() != element->getType())
1670*e5dd7070Spatrick       outerBegin = Builder.CreateBitCast(outerBegin, element->getType());
1671*e5dd7070Spatrick     CGF.pushRegularPartialArrayCleanup(outerBegin, element, elementType,
1672*e5dd7070Spatrick                                        elementAlign,
1673*e5dd7070Spatrick                                        CGF.getDestroyer(dtorKind));
1674*e5dd7070Spatrick     cleanup = CGF.EHStack.stable_begin();
1675*e5dd7070Spatrick   } else {
1676*e5dd7070Spatrick     dtorKind = QualType::DK_none;
1677*e5dd7070Spatrick   }
1678*e5dd7070Spatrick 
1679*e5dd7070Spatrick   // Emit the actual filler expression.
1680*e5dd7070Spatrick   {
1681*e5dd7070Spatrick     // Temporaries created in an array initialization loop are destroyed
1682*e5dd7070Spatrick     // at the end of each iteration.
1683*e5dd7070Spatrick     CodeGenFunction::RunCleanupsScope CleanupsScope(CGF);
1684*e5dd7070Spatrick     CodeGenFunction::ArrayInitLoopExprScope Scope(CGF, index);
1685*e5dd7070Spatrick     LValue elementLV =
1686*e5dd7070Spatrick         CGF.MakeAddrLValue(Address(element, elementAlign), elementType);
1687*e5dd7070Spatrick 
1688*e5dd7070Spatrick     if (InnerLoop) {
1689*e5dd7070Spatrick       // If the subexpression is an ArrayInitLoopExpr, share its cleanup.
1690*e5dd7070Spatrick       auto elementSlot = AggValueSlot::forLValue(
1691*e5dd7070Spatrick           elementLV, CGF, AggValueSlot::IsDestructed,
1692*e5dd7070Spatrick           AggValueSlot::DoesNotNeedGCBarriers, AggValueSlot::IsNotAliased,
1693*e5dd7070Spatrick           AggValueSlot::DoesNotOverlap);
1694*e5dd7070Spatrick       AggExprEmitter(CGF, elementSlot, false)
1695*e5dd7070Spatrick           .VisitArrayInitLoopExpr(InnerLoop, outerBegin);
1696*e5dd7070Spatrick     } else
1697*e5dd7070Spatrick       EmitInitializationToLValue(E->getSubExpr(), elementLV);
1698*e5dd7070Spatrick   }
1699*e5dd7070Spatrick 
1700*e5dd7070Spatrick   // Move on to the next element.
1701*e5dd7070Spatrick   llvm::Value *nextIndex = Builder.CreateNUWAdd(
1702*e5dd7070Spatrick       index, llvm::ConstantInt::get(CGF.SizeTy, 1), "arrayinit.next");
1703*e5dd7070Spatrick   index->addIncoming(nextIndex, Builder.GetInsertBlock());
1704*e5dd7070Spatrick 
1705*e5dd7070Spatrick   // Leave the loop if we're done.
1706*e5dd7070Spatrick   llvm::Value *done = Builder.CreateICmpEQ(
1707*e5dd7070Spatrick       nextIndex, llvm::ConstantInt::get(CGF.SizeTy, numElements),
1708*e5dd7070Spatrick       "arrayinit.done");
1709*e5dd7070Spatrick   llvm::BasicBlock *endBB = CGF.createBasicBlock("arrayinit.end");
1710*e5dd7070Spatrick   Builder.CreateCondBr(done, endBB, bodyBB);
1711*e5dd7070Spatrick 
1712*e5dd7070Spatrick   CGF.EmitBlock(endBB);
1713*e5dd7070Spatrick 
1714*e5dd7070Spatrick   // Leave the partial-array cleanup if we entered one.
1715*e5dd7070Spatrick   if (dtorKind)
1716*e5dd7070Spatrick     CGF.DeactivateCleanupBlock(cleanup, index);
1717*e5dd7070Spatrick }
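// Illustrative note (assumption based on the AST documentation): an
// ArrayInitLoopExpr shows up, for example, when a lambda captures an array
// by value:
//
//   int arr[4] = {1, 2, 3, 4};
//   auto f = [arr] { return arr[0]; };
//
// Each captured element is initialized in the 'arrayinit.body' loop emitted
// above, with a partial-array EH cleanup registered when the element type
// has a nontrivial destructor, so already-constructed elements are destroyed
// if a later element initializer throws.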
1718*e5dd7070Spatrick 
1719*e5dd7070Spatrick void AggExprEmitter::VisitDesignatedInitUpdateExpr(DesignatedInitUpdateExpr *E) {
1720*e5dd7070Spatrick   AggValueSlot Dest = EnsureSlot(E->getType());
1721*e5dd7070Spatrick 
1722*e5dd7070Spatrick   LValue DestLV = CGF.MakeAddrLValue(Dest.getAddress(), E->getType());
1723*e5dd7070Spatrick   EmitInitializationToLValue(E->getBase(), DestLV);
1724*e5dd7070Spatrick   VisitInitListExpr(E->getUpdater());
1725*e5dd7070Spatrick }
1726*e5dd7070Spatrick 
1727*e5dd7070Spatrick //===----------------------------------------------------------------------===//
1728*e5dd7070Spatrick //                        Entry Points into this File
1729*e5dd7070Spatrick //===----------------------------------------------------------------------===//
1730*e5dd7070Spatrick 
1731*e5dd7070Spatrick /// GetNumNonZeroBytesInInit - Get an approximate count of the number of
1732*e5dd7070Spatrick /// non-zero bytes that will be stored when outputting the initializer for the
1733*e5dd7070Spatrick /// specified initializer expression.
1734*e5dd7070Spatrick static CharUnits GetNumNonZeroBytesInInit(const Expr *E, CodeGenFunction &CGF) {
1735*e5dd7070Spatrick   E = E->IgnoreParens();
1736*e5dd7070Spatrick 
1737*e5dd7070Spatrick   // 0 and 0.0 won't require any non-zero stores!
1738*e5dd7070Spatrick   if (isSimpleZero(E, CGF)) return CharUnits::Zero();
1739*e5dd7070Spatrick 
1740*e5dd7070Spatrick   // If this is an initlist expr, sum up the sizes of the (present)
1741*e5dd7070Spatrick   // elements.  If this is something weird, assume the whole thing is non-zero.
1742*e5dd7070Spatrick   const InitListExpr *ILE = dyn_cast<InitListExpr>(E);
1743*e5dd7070Spatrick   while (ILE && ILE->isTransparent())
1744*e5dd7070Spatrick     ILE = dyn_cast<InitListExpr>(ILE->getInit(0));
1745*e5dd7070Spatrick   if (!ILE || !CGF.getTypes().isZeroInitializable(ILE->getType()))
1746*e5dd7070Spatrick     return CGF.getContext().getTypeSizeInChars(E->getType());
1747*e5dd7070Spatrick 
1748*e5dd7070Spatrick   // InitListExprs for structs have to be handled carefully.  If there are
1749*e5dd7070Spatrick   // reference members, we need to consider the size of the reference, not the
1750*e5dd7070Spatrick   // referencee.  InitListExprs for unions and arrays can't have references.
1751*e5dd7070Spatrick   if (const RecordType *RT = E->getType()->getAs<RecordType>()) {
1752*e5dd7070Spatrick     if (!RT->isUnionType()) {
1753*e5dd7070Spatrick       RecordDecl *SD = RT->getDecl();
1754*e5dd7070Spatrick       CharUnits NumNonZeroBytes = CharUnits::Zero();
1755*e5dd7070Spatrick 
1756*e5dd7070Spatrick       unsigned ILEElement = 0;
1757*e5dd7070Spatrick       if (auto *CXXRD = dyn_cast<CXXRecordDecl>(SD))
1758*e5dd7070Spatrick         while (ILEElement != CXXRD->getNumBases())
1759*e5dd7070Spatrick           NumNonZeroBytes +=
1760*e5dd7070Spatrick               GetNumNonZeroBytesInInit(ILE->getInit(ILEElement++), CGF);
1761*e5dd7070Spatrick       for (const auto *Field : SD->fields()) {
1762*e5dd7070Spatrick         // We're done once we hit the flexible array member or run out of
1763*e5dd7070Spatrick         // InitListExpr elements.
1764*e5dd7070Spatrick         if (Field->getType()->isIncompleteArrayType() ||
1765*e5dd7070Spatrick             ILEElement == ILE->getNumInits())
1766*e5dd7070Spatrick           break;
1767*e5dd7070Spatrick         if (Field->isUnnamedBitfield())
1768*e5dd7070Spatrick           continue;
1769*e5dd7070Spatrick 
1770*e5dd7070Spatrick         const Expr *E = ILE->getInit(ILEElement++);
1771*e5dd7070Spatrick 
1772*e5dd7070Spatrick         // Reference values are always non-null and have the width of a pointer.
1773*e5dd7070Spatrick         if (Field->getType()->isReferenceType())
1774*e5dd7070Spatrick           NumNonZeroBytes += CGF.getContext().toCharUnitsFromBits(
1775*e5dd7070Spatrick               CGF.getTarget().getPointerWidth(0));
1776*e5dd7070Spatrick         else
1777*e5dd7070Spatrick           NumNonZeroBytes += GetNumNonZeroBytesInInit(E, CGF);
1778*e5dd7070Spatrick       }
1779*e5dd7070Spatrick 
1780*e5dd7070Spatrick       return NumNonZeroBytes;
1781*e5dd7070Spatrick     }
1782*e5dd7070Spatrick   }
1783*e5dd7070Spatrick 
1784*e5dd7070Spatrick 
1785*e5dd7070Spatrick   CharUnits NumNonZeroBytes = CharUnits::Zero();
1786*e5dd7070Spatrick   for (unsigned i = 0, e = ILE->getNumInits(); i != e; ++i)
1787*e5dd7070Spatrick     NumNonZeroBytes += GetNumNonZeroBytesInInit(ILE->getInit(i), CGF);
1788*e5dd7070Spatrick   return NumNonZeroBytes;
1789*e5dd7070Spatrick }
1790*e5dd7070Spatrick 
1791*e5dd7070Spatrick /// CheckAggExprForMemSetUse - If the initializer is large and has a lot of
1792*e5dd7070Spatrick /// zeros in it, emit a memset and avoid storing the individual zeros.
1793*e5dd7070Spatrick ///
1794*e5dd7070Spatrick static void CheckAggExprForMemSetUse(AggValueSlot &Slot, const Expr *E,
1795*e5dd7070Spatrick                                      CodeGenFunction &CGF) {
1796*e5dd7070Spatrick   // If the slot is already known to be zeroed, nothing to do.  Don't mess with
1797*e5dd7070Spatrick   // volatile stores.
1798*e5dd7070Spatrick   if (Slot.isZeroed() || Slot.isVolatile() || !Slot.getAddress().isValid())
1799*e5dd7070Spatrick     return;
1800*e5dd7070Spatrick 
1801*e5dd7070Spatrick   // C++ objects with a user-declared constructor don't need zero'ing.
1802*e5dd7070Spatrick   if (CGF.getLangOpts().CPlusPlus)
1803*e5dd7070Spatrick     if (const RecordType *RT = CGF.getContext()
1804*e5dd7070Spatrick                        .getBaseElementType(E->getType())->getAs<RecordType>()) {
1805*e5dd7070Spatrick       const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
1806*e5dd7070Spatrick       if (RD->hasUserDeclaredConstructor())
1807*e5dd7070Spatrick         return;
1808*e5dd7070Spatrick     }
1809*e5dd7070Spatrick 
1810*e5dd7070Spatrick   // If the type is 16 bytes or smaller, prefer individual stores over memset.
1811*e5dd7070Spatrick   CharUnits Size = Slot.getPreferredSize(CGF.getContext(), E->getType());
1812*e5dd7070Spatrick   if (Size <= CharUnits::fromQuantity(16))
1813*e5dd7070Spatrick     return;
1814*e5dd7070Spatrick 
1815*e5dd7070Spatrick   // Check to see if over 3/4 of the initializer is known to be zero.  If so,
1816*e5dd7070Spatrick   // we prefer to emit memset + individual stores for the rest.
1817*e5dd7070Spatrick   CharUnits NumNonZeroBytes = GetNumNonZeroBytesInInit(E, CGF);
1818*e5dd7070Spatrick   if (NumNonZeroBytes*4 > Size)
1819*e5dd7070Spatrick     return;
1820*e5dd7070Spatrick 
1821*e5dd7070Spatrick   // Okay, it seems like a good idea to use an initial memset, emit the call.
1822*e5dd7070Spatrick   llvm::Constant *SizeVal = CGF.Builder.getInt64(Size.getQuantity());
1823*e5dd7070Spatrick 
1824*e5dd7070Spatrick   Address Loc = Slot.getAddress();
1825*e5dd7070Spatrick   Loc = CGF.Builder.CreateElementBitCast(Loc, CGF.Int8Ty);
1826*e5dd7070Spatrick   CGF.Builder.CreateMemSet(Loc, CGF.Builder.getInt8(0), SizeVal, false);
1827*e5dd7070Spatrick 
1828*e5dd7070Spatrick   // Tell the AggExprEmitter that the slot is known zero.
1829*e5dd7070Spatrick   Slot.setZeroed();
1830*e5dd7070Spatrick }
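// Worked example (illustrative, not from the original source): for
//
//   struct S { int a; int pad[32]; };   // 132 bytes
//   S s = { 5 };
//
// the slot is 132 bytes (> 16) and GetNumNonZeroBytesInInit reports only 4
// non-zero bytes, so 4*4 <= 132 and the code above emits one memset of 132
// zero bytes followed by a single 4-byte store of 5; the remaining zero
// stores are skipped because the slot is marked as zeroed.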
1831*e5dd7070Spatrick 
1832*e5dd7070Spatrick 
1833*e5dd7070Spatrick 
1834*e5dd7070Spatrick 
1835*e5dd7070Spatrick /// EmitAggExpr - Emit the computation of the specified expression of aggregate
1836*e5dd7070Spatrick /// type.  The result is computed into the given slot.  Note that if the
1837*e5dd7070Spatrick /// slot is ignored, the value of the aggregate expression is not needed;
1838*e5dd7070Spatrick /// otherwise the slot must have a valid address.
1839*e5dd7070Spatrick void CodeGenFunction::EmitAggExpr(const Expr *E, AggValueSlot Slot) {
1840*e5dd7070Spatrick   assert(E && hasAggregateEvaluationKind(E->getType()) &&
1841*e5dd7070Spatrick          "Invalid aggregate expression to emit");
1842*e5dd7070Spatrick   assert((Slot.getAddress().isValid() || Slot.isIgnored()) &&
1843*e5dd7070Spatrick          "slot has bits but no address");
1844*e5dd7070Spatrick 
1845*e5dd7070Spatrick   // Optimize the slot if possible.
1846*e5dd7070Spatrick   CheckAggExprForMemSetUse(Slot, E, *this);
1847*e5dd7070Spatrick 
1848*e5dd7070Spatrick   AggExprEmitter(*this, Slot, Slot.isIgnored()).Visit(const_cast<Expr*>(E));
1849*e5dd7070Spatrick }
1850*e5dd7070Spatrick 
1851*e5dd7070Spatrick LValue CodeGenFunction::EmitAggExprToLValue(const Expr *E) {
1852*e5dd7070Spatrick   assert(hasAggregateEvaluationKind(E->getType()) && "Invalid argument!");
1853*e5dd7070Spatrick   Address Temp = CreateMemTemp(E->getType());
1854*e5dd7070Spatrick   LValue LV = MakeAddrLValue(Temp, E->getType());
1855*e5dd7070Spatrick   EmitAggExpr(E, AggValueSlot::forLValue(
1856*e5dd7070Spatrick                      LV, *this, AggValueSlot::IsNotDestructed,
1857*e5dd7070Spatrick                      AggValueSlot::DoesNotNeedGCBarriers,
1858*e5dd7070Spatrick                      AggValueSlot::IsNotAliased, AggValueSlot::DoesNotOverlap));
1859*e5dd7070Spatrick   return LV;
1860*e5dd7070Spatrick }
1861*e5dd7070Spatrick 
1862*e5dd7070Spatrick AggValueSlot::Overlap_t
1863*e5dd7070Spatrick CodeGenFunction::getOverlapForFieldInit(const FieldDecl *FD) {
1864*e5dd7070Spatrick   if (!FD->hasAttr<NoUniqueAddressAttr>() || !FD->getType()->isRecordType())
1865*e5dd7070Spatrick     return AggValueSlot::DoesNotOverlap;
1866*e5dd7070Spatrick 
1867*e5dd7070Spatrick   // If the field lies entirely within the enclosing class's nvsize, its tail
1868*e5dd7070Spatrick   // padding cannot overlap any already-initialized object. (The only subobjects
1869*e5dd7070Spatrick   // with greater addresses that might already be initialized are vbases.)
1870*e5dd7070Spatrick   const RecordDecl *ClassRD = FD->getParent();
1871*e5dd7070Spatrick   const ASTRecordLayout &Layout = getContext().getASTRecordLayout(ClassRD);
1872*e5dd7070Spatrick   if (Layout.getFieldOffset(FD->getFieldIndex()) +
1873*e5dd7070Spatrick           getContext().getTypeSize(FD->getType()) <=
1874*e5dd7070Spatrick       (uint64_t)getContext().toBits(Layout.getNonVirtualSize()))
1875*e5dd7070Spatrick     return AggValueSlot::DoesNotOverlap;
1876*e5dd7070Spatrick 
1877*e5dd7070Spatrick   // The tail padding may contain values we need to preserve.
1878*e5dd7070Spatrick   return AggValueSlot::MayOverlap;
1879*e5dd7070Spatrick }
1880*e5dd7070Spatrick 
1881*e5dd7070Spatrick AggValueSlot::Overlap_t CodeGenFunction::getOverlapForBaseInit(
1882*e5dd7070Spatrick     const CXXRecordDecl *RD, const CXXRecordDecl *BaseRD, bool IsVirtual) {
1883*e5dd7070Spatrick   // If the most-derived object is a field declared with [[no_unique_address]],
1884*e5dd7070Spatrick   // the tail padding of any virtual base could be reused for other subobjects
1885*e5dd7070Spatrick   // of that field's class.
1886*e5dd7070Spatrick   if (IsVirtual)
1887*e5dd7070Spatrick     return AggValueSlot::MayOverlap;
1888*e5dd7070Spatrick 
1889*e5dd7070Spatrick   // If the base class is laid out entirely within the nvsize of the derived
1890*e5dd7070Spatrick   // class, its tail padding cannot yet be initialized, so we can issue
1891*e5dd7070Spatrick   // stores at the full width of the base class.
1892*e5dd7070Spatrick   const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
1893*e5dd7070Spatrick   if (Layout.getBaseClassOffset(BaseRD) +
1894*e5dd7070Spatrick           getContext().getASTRecordLayout(BaseRD).getSize() <=
1895*e5dd7070Spatrick       Layout.getNonVirtualSize())
1896*e5dd7070Spatrick     return AggValueSlot::DoesNotOverlap;
1897*e5dd7070Spatrick 
1898*e5dd7070Spatrick   // The tail padding may contain values we need to preserve.
1899*e5dd7070Spatrick   return AggValueSlot::MayOverlap;
1900*e5dd7070Spatrick }
1901*e5dd7070Spatrick 
1902*e5dd7070Spatrick void CodeGenFunction::EmitAggregateCopy(LValue Dest, LValue Src, QualType Ty,
1903*e5dd7070Spatrick                                         AggValueSlot::Overlap_t MayOverlap,
1904*e5dd7070Spatrick                                         bool isVolatile) {
1905*e5dd7070Spatrick   assert(!Ty->isAnyComplexType() && "Shouldn't happen for complex");
1906*e5dd7070Spatrick 
1907*e5dd7070Spatrick   Address DestPtr = Dest.getAddress(*this);
1908*e5dd7070Spatrick   Address SrcPtr = Src.getAddress(*this);
1909*e5dd7070Spatrick 
1910*e5dd7070Spatrick   if (getLangOpts().CPlusPlus) {
1911*e5dd7070Spatrick     if (const RecordType *RT = Ty->getAs<RecordType>()) {
1912*e5dd7070Spatrick       CXXRecordDecl *Record = cast<CXXRecordDecl>(RT->getDecl());
1913*e5dd7070Spatrick       assert((Record->hasTrivialCopyConstructor() ||
1914*e5dd7070Spatrick               Record->hasTrivialCopyAssignment() ||
1915*e5dd7070Spatrick               Record->hasTrivialMoveConstructor() ||
1916*e5dd7070Spatrick               Record->hasTrivialMoveAssignment() ||
1917*e5dd7070Spatrick               Record->isUnion()) &&
1918*e5dd7070Spatrick              "Trying to aggregate-copy a type without a trivial copy/move "
1919*e5dd7070Spatrick              "constructor or assignment operator");
1920*e5dd7070Spatrick       // Ignore empty classes in C++.
1921*e5dd7070Spatrick       if (Record->isEmpty())
1922*e5dd7070Spatrick         return;
1923*e5dd7070Spatrick     }
1924*e5dd7070Spatrick   }
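
  // Editorial note (not part of the original source): the early return above
  // means that copying an empty class, e.g. 'struct Empty {};', emits no code
  // at all; the single byte that gives such a class a nonzero size carries no
  // data worth preserving.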
1925*e5dd7070Spatrick 
1926*e5dd7070Spatrick   // Aggregate assignment turns into llvm.memcpy.  This is almost valid per
1927*e5dd7070Spatrick   // C99 6.5.16.1p3, which states "If the value being stored in an object is
1928*e5dd7070Spatrick   // read from another object that overlaps in any way the storage of the first
1929*e5dd7070Spatrick   // object, then the overlap shall be exact and the two objects shall have
1930*e5dd7070Spatrick   // qualified or unqualified versions of a compatible type."
1931*e5dd7070Spatrick   //
1932*e5dd7070Spatrick   // memcpy is not defined if the source and destination pointers are exactly
1933*e5dd7070Spatrick   // equal, but other compilers do this optimization, and almost every memcpy
1934*e5dd7070Spatrick   // implementation handles this case safely.  If there is a libc that does not
1935*e5dd7070Spatrick   // safely handle this, we can add a target hook.
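  //
  // Editorial illustration (not part of the original source): the
  // exactly-equal case arises from self-assignment, e.g.
  //
  //   struct S { int a[4]; } s;
  //   s = s;    // lowers to a memcpy whose source and destination are both &s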
1936*e5dd7070Spatrick 
1937*e5dd7070Spatrick   // Get data size info for this aggregate. Don't copy the tail padding if this
1938*e5dd7070Spatrick   // might be a potentially-overlapping subobject, since the tail padding might
1939*e5dd7070Spatrick   // be occupied by a different object. Otherwise, copying it is fine.
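  //
  // Editorial illustration (not part of the original source; figures assume a
  // typical LP64 target): for
  //
  //   struct S { long l; char c; };    // sizeof(S) == 16, data size == 9
  //
  // a copy into a potentially-overlapping subobject copies only the 9 data
  // bytes and leaves the 7 bytes of tail padding untouched.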
1940*e5dd7070Spatrick   std::pair<CharUnits, CharUnits> TypeInfo;
1941*e5dd7070Spatrick   if (MayOverlap)
1942*e5dd7070Spatrick     TypeInfo = getContext().getTypeInfoDataSizeInChars(Ty);
1943*e5dd7070Spatrick   else
1944*e5dd7070Spatrick     TypeInfo = getContext().getTypeInfoInChars(Ty);
1945*e5dd7070Spatrick 
1946*e5dd7070Spatrick   llvm::Value *SizeVal = nullptr;
1947*e5dd7070Spatrick   if (TypeInfo.first.isZero()) {
1948*e5dd7070Spatrick     // But note that getTypeInfo returns 0 for a VLA.
1949*e5dd7070Spatrick     if (auto *VAT = dyn_cast_or_null<VariableArrayType>(
1950*e5dd7070Spatrick             getContext().getAsArrayType(Ty))) {
1951*e5dd7070Spatrick       QualType BaseEltTy;
1952*e5dd7070Spatrick       SizeVal = emitArrayLength(VAT, BaseEltTy, DestPtr);
1953*e5dd7070Spatrick       TypeInfo = getContext().getTypeInfoInChars(BaseEltTy);
1954*e5dd7070Spatrick       assert(!TypeInfo.first.isZero());
1955*e5dd7070Spatrick       SizeVal = Builder.CreateNUWMul(
1956*e5dd7070Spatrick           SizeVal,
1957*e5dd7070Spatrick           llvm::ConstantInt::get(SizeTy, TypeInfo.first.getQuantity()));
1958*e5dd7070Spatrick     }
1959*e5dd7070Spatrick   }
1960*e5dd7070Spatrick   if (!SizeVal) {
1961*e5dd7070Spatrick     SizeVal = llvm::ConstantInt::get(SizeTy, TypeInfo.first.getQuantity());
1962*e5dd7070Spatrick   }
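
  // Editorial note (not part of the original source): for a variable-length
  // array the byte count is not a compile-time constant, so it is derived
  // above as (runtime element count) * sizeof(element); in all other cases the
  // constant size from TypeInfo is used directly.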
1963*e5dd7070Spatrick 
1964*e5dd7070Spatrick   // FIXME: If we have a volatile struct, the optimizer can remove what might
1965*e5dd7070Spatrick   // appear to be `extra' memory ops:
1966*e5dd7070Spatrick   //
1967*e5dd7070Spatrick   // volatile struct { int i; } a, b;
1968*e5dd7070Spatrick   //
1969*e5dd7070Spatrick   // int main() {
1970*e5dd7070Spatrick   //   a = b;
1971*e5dd7070Spatrick   //   a = b;
1972*e5dd7070Spatrick   // }
1973*e5dd7070Spatrick   //
1974*e5dd7070Spatrick   // To keep both of those copies, we would need a different call here.  We use
1975*e5dd7070Spatrick   // isVolatile to indicate when either the source or the destination is volatile.
1976*e5dd7070Spatrick 
1977*e5dd7070Spatrick   DestPtr = Builder.CreateElementBitCast(DestPtr, Int8Ty);
1978*e5dd7070Spatrick   SrcPtr = Builder.CreateElementBitCast(SrcPtr, Int8Ty);
1979*e5dd7070Spatrick 
1980*e5dd7070Spatrick   // Don't do any of the memmove_collectable tests if GC isn't set.
1981*e5dd7070Spatrick   if (CGM.getLangOpts().getGC() == LangOptions::NonGC) {
1982*e5dd7070Spatrick     // fall through
1983*e5dd7070Spatrick   } else if (const RecordType *RecordTy = Ty->getAs<RecordType>()) {
1984*e5dd7070Spatrick     RecordDecl *Record = RecordTy->getDecl();
1985*e5dd7070Spatrick     if (Record->hasObjectMember()) {
1986*e5dd7070Spatrick       CGM.getObjCRuntime().EmitGCMemmoveCollectable(*this, DestPtr, SrcPtr,
1987*e5dd7070Spatrick                                                     SizeVal);
1988*e5dd7070Spatrick       return;
1989*e5dd7070Spatrick     }
1990*e5dd7070Spatrick   } else if (Ty->isArrayType()) {
1991*e5dd7070Spatrick     QualType BaseType = getContext().getBaseElementType(Ty);
1992*e5dd7070Spatrick     if (const RecordType *RecordTy = BaseType->getAs<RecordType>()) {
1993*e5dd7070Spatrick       if (RecordTy->getDecl()->hasObjectMember()) {
1994*e5dd7070Spatrick         CGM.getObjCRuntime().EmitGCMemmoveCollectable(*this, DestPtr, SrcPtr,
1995*e5dd7070Spatrick                                                       SizeVal);
1996*e5dd7070Spatrick         return;
1997*e5dd7070Spatrick       }
1998*e5dd7070Spatrick     }
1999*e5dd7070Spatrick   }
2000*e5dd7070Spatrick 
2001*e5dd7070Spatrick   auto Inst = Builder.CreateMemCpy(DestPtr, SrcPtr, SizeVal, isVolatile);
2002*e5dd7070Spatrick 
2003*e5dd7070Spatrick   // Determine the metadata to describe the position of any padding in this
2004*e5dd7070Spatrick   // memcpy, as well as the TBAA tags for the members of the struct, in case
2005*e5dd7070Spatrick   // the optimizer wishes to expand it into scalar memory operations.
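  //
  // Editorial sketch (not part of the original source; the exact nodes depend
  // on the target and the type): !tbaa.struct metadata is a list of
  // (offset, size, TBAA tag) triples describing the copied fields, roughly
  //
  //   call void @llvm.memcpy.p0i8.p0i8.i64(...), !tbaa.struct !5
  //   !5 = !{i64 0, i64 8, !6, i64 8, i64 4, !7}
  //
  // which lets the optimizer recover per-field aliasing information if it
  // rewrites the memcpy as scalar loads and stores.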
2006*e5dd7070Spatrick   if (llvm::MDNode *TBAAStructTag = CGM.getTBAAStructInfo(Ty))
2007*e5dd7070Spatrick     Inst->setMetadata(llvm::LLVMContext::MD_tbaa_struct, TBAAStructTag);
2008*e5dd7070Spatrick 
2009*e5dd7070Spatrick   if (CGM.getCodeGenOpts().NewStructPathTBAA) {
2010*e5dd7070Spatrick     TBAAAccessInfo TBAAInfo = CGM.mergeTBAAInfoForMemoryTransfer(
2011*e5dd7070Spatrick         Dest.getTBAAInfo(), Src.getTBAAInfo());
2012*e5dd7070Spatrick     CGM.DecorateInstructionWithTBAA(Inst, TBAAInfo);
2013*e5dd7070Spatrick   }
2014*e5dd7070Spatrick }
2015