1 //===--- Sema.cpp - AST Builder and Semantic Analysis Implementation ------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file implements the actions class which performs semantic analysis and
10 // builds an AST out of a parse stream.
11 //
12 //===----------------------------------------------------------------------===//
13
14 #include "UsedDeclVisitor.h"
15 #include "clang/AST/ASTContext.h"
16 #include "clang/AST/ASTDiagnostic.h"
17 #include "clang/AST/Decl.h"
18 #include "clang/AST/DeclCXX.h"
19 #include "clang/AST/DeclFriend.h"
20 #include "clang/AST/DeclObjC.h"
21 #include "clang/AST/Expr.h"
22 #include "clang/AST/ExprCXX.h"
23 #include "clang/AST/PrettyDeclStackTrace.h"
24 #include "clang/AST/StmtCXX.h"
25 #include "clang/Basic/DiagnosticOptions.h"
26 #include "clang/Basic/PartialDiagnostic.h"
27 #include "clang/Basic/SourceManager.h"
28 #include "clang/Basic/Stack.h"
29 #include "clang/Basic/TargetInfo.h"
30 #include "clang/Lex/HeaderSearch.h"
31 #include "clang/Lex/Preprocessor.h"
32 #include "clang/Sema/CXXFieldCollector.h"
33 #include "clang/Sema/DelayedDiagnostic.h"
34 #include "clang/Sema/ExternalSemaSource.h"
35 #include "clang/Sema/Initialization.h"
36 #include "clang/Sema/MultiplexExternalSemaSource.h"
37 #include "clang/Sema/ObjCMethodList.h"
38 #include "clang/Sema/Scope.h"
39 #include "clang/Sema/ScopeInfo.h"
40 #include "clang/Sema/SemaConsumer.h"
41 #include "clang/Sema/SemaInternal.h"
42 #include "clang/Sema/TemplateDeduction.h"
43 #include "clang/Sema/TemplateInstCallback.h"
44 #include "clang/Sema/TypoCorrection.h"
45 #include "llvm/ADT/DenseMap.h"
46 #include "llvm/ADT/SmallPtrSet.h"
47 #include "llvm/Support/TimeProfiler.h"
48
49 using namespace clang;
50 using namespace sema;
51
SourceLocation Sema::getLocForEndOfToken(SourceLocation Loc, unsigned Offset) {
53 return Lexer::getLocForEndOfToken(Loc, Offset, SourceMgr, LangOpts);
54 }
55
ModuleLoader &Sema::getModuleLoader() const { return PP.getModuleLoader(); }
57
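/// Note (illustrative): an unnamed invented parameter at index 0 yields the
/// identifier "auto:1"; a parameter named 'x' yields "x:auto".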
58 IdentifierInfo *
Sema::InventAbbreviatedTemplateParameterTypeName(IdentifierInfo *ParamName,
60 unsigned int Index) {
61 std::string InventedName;
62 llvm::raw_string_ostream OS(InventedName);
63
64 if (!ParamName)
65 OS << "auto:" << Index + 1;
66 else
67 OS << ParamName->getName() << ":auto";
68
69 OS.flush();
70 return &Context.Idents.get(OS.str());
71 }
72
PrintingPolicy Sema::getPrintingPolicy(const ASTContext &Context,
74 const Preprocessor &PP) {
75 PrintingPolicy Policy = Context.getPrintingPolicy();
76 // In diagnostics, we print _Bool as bool if the latter is defined as the
77 // former.
78 Policy.Bool = Context.getLangOpts().Bool;
79 if (!Policy.Bool) {
80 if (const MacroInfo *BoolMacro = PP.getMacroInfo(Context.getBoolName())) {
81 Policy.Bool = BoolMacro->isObjectLike() &&
82 BoolMacro->getNumTokens() == 1 &&
83 BoolMacro->getReplacementToken(0).is(tok::kw__Bool);
84 }
85 }
86
87 return Policy;
88 }
89
void Sema::ActOnTranslationUnitScope(Scope *S) {
91 TUScope = S;
92 PushDeclContext(S, Context.getTranslationUnitDecl());
93 }
94
95 namespace clang {
96 namespace sema {
97
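/// Preprocessor callback handler that lets Sema observe #include entry and
/// exit: it diagnoses #pragma pack/align state that is non-default at an
/// include or still changed on exit, and attributes time to included headers
/// when the time-trace profiler is enabled.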
98 class SemaPPCallbacks : public PPCallbacks {
99 Sema *S = nullptr;
100 llvm::SmallVector<SourceLocation, 8> IncludeStack;
101
102 public:
  void set(Sema &S) { this->S = &S; }
104
  void reset() { S = nullptr; }
106
  virtual void FileChanged(SourceLocation Loc, FileChangeReason Reason,
108 SrcMgr::CharacteristicKind FileType,
109 FileID PrevFID) override {
110 if (!S)
111 return;
112 switch (Reason) {
113 case EnterFile: {
114 SourceManager &SM = S->getSourceManager();
115 SourceLocation IncludeLoc = SM.getIncludeLoc(SM.getFileID(Loc));
116 if (IncludeLoc.isValid()) {
117 if (llvm::timeTraceProfilerEnabled()) {
118 const FileEntry *FE = SM.getFileEntryForID(SM.getFileID(Loc));
119 llvm::timeTraceProfilerBegin(
120 "Source", FE != nullptr ? FE->getName() : StringRef("<unknown>"));
121 }
122
123 IncludeStack.push_back(IncludeLoc);
124 S->DiagnoseNonDefaultPragmaAlignPack(
125 Sema::PragmaAlignPackDiagnoseKind::NonDefaultStateAtInclude,
126 IncludeLoc);
127 }
128 break;
129 }
130 case ExitFile:
131 if (!IncludeStack.empty()) {
132 if (llvm::timeTraceProfilerEnabled())
133 llvm::timeTraceProfilerEnd();
134
135 S->DiagnoseNonDefaultPragmaAlignPack(
136 Sema::PragmaAlignPackDiagnoseKind::ChangedStateAtExit,
137 IncludeStack.pop_back_val());
138 }
139 break;
140 default:
141 break;
142 }
143 }
144 };
145
146 } // end namespace sema
147 } // end namespace clang
148
149 const unsigned Sema::MaxAlignmentExponent;
150 const unsigned Sema::MaximumAlignment;
151
Sema::Sema(Preprocessor &pp, ASTContext &ctxt, ASTConsumer &consumer,
153 TranslationUnitKind TUKind, CodeCompleteConsumer *CodeCompleter)
154 : ExternalSource(nullptr), isMultiplexExternalSource(false),
155 CurFPFeatures(pp.getLangOpts()), LangOpts(pp.getLangOpts()), PP(pp),
156 Context(ctxt), Consumer(consumer), Diags(PP.getDiagnostics()),
157 SourceMgr(PP.getSourceManager()), CollectStats(false),
158 CodeCompleter(CodeCompleter), CurContext(nullptr),
159 OriginalLexicalContext(nullptr), MSStructPragmaOn(false),
160 MSPointerToMemberRepresentationMethod(
161 LangOpts.getMSPointerToMemberRepresentationMethod()),
162 VtorDispStack(LangOpts.getVtorDispMode()),
163 AlignPackStack(AlignPackInfo(getLangOpts().XLPragmaPack)),
164 DataSegStack(nullptr), BSSSegStack(nullptr), ConstSegStack(nullptr),
165 CodeSegStack(nullptr), FpPragmaStack(FPOptionsOverride()),
166 CurInitSeg(nullptr), VisContext(nullptr),
167 PragmaAttributeCurrentTargetDecl(nullptr),
168 IsBuildingRecoveryCallExpr(false), Cleanup{}, LateTemplateParser(nullptr),
169 LateTemplateParserCleanup(nullptr), OpaqueParser(nullptr), IdResolver(pp),
170 StdExperimentalNamespaceCache(nullptr), StdInitializerList(nullptr),
171 StdCoroutineTraitsCache(nullptr), CXXTypeInfoDecl(nullptr),
172 MSVCGuidDecl(nullptr), NSNumberDecl(nullptr), NSValueDecl(nullptr),
173 NSStringDecl(nullptr), StringWithUTF8StringMethod(nullptr),
174 ValueWithBytesObjCTypeMethod(nullptr), NSArrayDecl(nullptr),
175 ArrayWithObjectsMethod(nullptr), NSDictionaryDecl(nullptr),
176 DictionaryWithObjectsMethod(nullptr), GlobalNewDeleteDeclared(false),
177 TUKind(TUKind), NumSFINAEErrors(0),
178 FullyCheckedComparisonCategories(
179 static_cast<unsigned>(ComparisonCategoryType::Last) + 1),
180 SatisfactionCache(Context), AccessCheckingSFINAE(false),
181 InNonInstantiationSFINAEContext(false), NonInstantiationEntries(0),
182 ArgumentPackSubstitutionIndex(-1), CurrentInstantiationScope(nullptr),
183 DisableTypoCorrection(false), TyposCorrected(0), AnalysisWarnings(*this),
184 ThreadSafetyDeclCache(nullptr), VarDataSharingAttributesStack(nullptr),
185 CurScope(nullptr), Ident_super(nullptr), Ident___float128(nullptr) {
186 TUScope = nullptr;
187 isConstantEvaluatedOverride = false;
188
189 LoadedExternalKnownNamespaces = false;
190 for (unsigned I = 0; I != NSAPI::NumNSNumberLiteralMethods; ++I)
191 NSNumberLiteralMethods[I] = nullptr;
192
193 if (getLangOpts().ObjC)
194 NSAPIObj.reset(new NSAPI(Context));
195
196 if (getLangOpts().CPlusPlus)
197 FieldCollector.reset(new CXXFieldCollector());
198
199 // Tell diagnostics how to render things from the AST library.
200 Diags.SetArgToStringFn(&FormatASTNodeDiagnosticArgument, &Context);
201
202 ExprEvalContexts.emplace_back(
203 ExpressionEvaluationContext::PotentiallyEvaluated, 0, CleanupInfo{},
204 nullptr, ExpressionEvaluationContextRecord::EK_Other);
205
206 // Initialization of data sharing attributes stack for OpenMP
207 InitDataSharingAttributesStack();
208
209 std::unique_ptr<sema::SemaPPCallbacks> Callbacks =
210 std::make_unique<sema::SemaPPCallbacks>();
211 SemaPPCallbackHandler = Callbacks.get();
212 PP.addPPCallbacks(std::move(Callbacks));
213 SemaPPCallbackHandler->set(*this);
214 }
215
216 // Anchor Sema's type info to this TU.
void Sema::anchor() {}
218
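/// Declare an implicit typedef named Name for the type T at translation-unit
/// scope, unless the identifier already resolves to a declaration.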
void Sema::addImplicitTypedef(StringRef Name, QualType T) {
220 DeclarationName DN = &Context.Idents.get(Name);
221 if (IdResolver.begin(DN) == IdResolver.end())
222 PushOnScopeChains(Context.buildImplicitTypedef(T, Name), TUScope);
223 }
224
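/// Hook up Sema-aware consumers and external sources, then pre-declare the
/// builtin entities the target needs (__int128_t, Objective-C and OpenCL
/// types, SVE/PPC/RISC-V vector types, and the builtin va_list declarations)
/// at translation-unit scope.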
void Sema::Initialize() {
226 if (SemaConsumer *SC = dyn_cast<SemaConsumer>(&Consumer))
227 SC->InitializeSema(*this);
228
229 // Tell the external Sema source about this Sema object.
230 if (ExternalSemaSource *ExternalSema
231 = dyn_cast_or_null<ExternalSemaSource>(Context.getExternalSource()))
232 ExternalSema->InitializeSema(*this);
233
234 // This needs to happen after ExternalSemaSource::InitializeSema(this) or we
235 // will not be able to merge any duplicate __va_list_tag decls correctly.
236 VAListTagName = PP.getIdentifierInfo("__va_list_tag");
237
238 if (!TUScope)
239 return;
240
241 // Initialize predefined 128-bit integer types, if needed.
242 if (Context.getTargetInfo().hasInt128Type() ||
243 (Context.getAuxTargetInfo() &&
244 Context.getAuxTargetInfo()->hasInt128Type())) {
    // If either of the 128-bit integer types is unavailable to name lookup,
    // define it now.
247 DeclarationName Int128 = &Context.Idents.get("__int128_t");
248 if (IdResolver.begin(Int128) == IdResolver.end())
249 PushOnScopeChains(Context.getInt128Decl(), TUScope);
250
251 DeclarationName UInt128 = &Context.Idents.get("__uint128_t");
252 if (IdResolver.begin(UInt128) == IdResolver.end())
253 PushOnScopeChains(Context.getUInt128Decl(), TUScope);
254 }
255
256
257 // Initialize predefined Objective-C types:
258 if (getLangOpts().ObjC) {
259 // If 'SEL' does not yet refer to any declarations, make it refer to the
260 // predefined 'SEL'.
261 DeclarationName SEL = &Context.Idents.get("SEL");
262 if (IdResolver.begin(SEL) == IdResolver.end())
263 PushOnScopeChains(Context.getObjCSelDecl(), TUScope);
264
265 // If 'id' does not yet refer to any declarations, make it refer to the
266 // predefined 'id'.
267 DeclarationName Id = &Context.Idents.get("id");
268 if (IdResolver.begin(Id) == IdResolver.end())
269 PushOnScopeChains(Context.getObjCIdDecl(), TUScope);
270
271 // Create the built-in typedef for 'Class'.
272 DeclarationName Class = &Context.Idents.get("Class");
273 if (IdResolver.begin(Class) == IdResolver.end())
274 PushOnScopeChains(Context.getObjCClassDecl(), TUScope);
275
    // Create the built-in forward declaration for 'Protocol'.
277 DeclarationName Protocol = &Context.Idents.get("Protocol");
278 if (IdResolver.begin(Protocol) == IdResolver.end())
279 PushOnScopeChains(Context.getObjCProtocolDecl(), TUScope);
280 }
281
282 // Create the internal type for the *StringMakeConstantString builtins.
283 DeclarationName ConstantString = &Context.Idents.get("__NSConstantString");
284 if (IdResolver.begin(ConstantString) == IdResolver.end())
285 PushOnScopeChains(Context.getCFConstantStringDecl(), TUScope);
286
287 // Initialize Microsoft "predefined C++ types".
288 if (getLangOpts().MSVCCompat) {
289 if (getLangOpts().CPlusPlus &&
290 IdResolver.begin(&Context.Idents.get("type_info")) == IdResolver.end())
291 PushOnScopeChains(Context.buildImplicitRecord("type_info", TTK_Class),
292 TUScope);
293
294 addImplicitTypedef("size_t", Context.getSizeType());
295 }
296
297 // Initialize predefined OpenCL types and supported extensions and (optional)
298 // core features.
299 if (getLangOpts().OpenCL) {
300 getOpenCLOptions().addSupport(
301 Context.getTargetInfo().getSupportedOpenCLOpts(), getLangOpts());
302 addImplicitTypedef("sampler_t", Context.OCLSamplerTy);
303 addImplicitTypedef("event_t", Context.OCLEventTy);
304 if (getLangOpts().OpenCLCPlusPlus || getLangOpts().OpenCLVersion >= 200) {
305 addImplicitTypedef("clk_event_t", Context.OCLClkEventTy);
306 addImplicitTypedef("queue_t", Context.OCLQueueTy);
307 addImplicitTypedef("reserve_id_t", Context.OCLReserveIDTy);
308 addImplicitTypedef("atomic_int", Context.getAtomicType(Context.IntTy));
309 addImplicitTypedef("atomic_uint",
310 Context.getAtomicType(Context.UnsignedIntTy));
311 addImplicitTypedef("atomic_float",
312 Context.getAtomicType(Context.FloatTy));
      // OpenCL C v2.0, s6.13.11.6 requires that atomic_flag be implemented as
      // a 32-bit integer, and per OpenCL C v2.0, s6.1.1, int is always 32 bits
      // wide.
315 addImplicitTypedef("atomic_flag", Context.getAtomicType(Context.IntTy));
316
317
318 // OpenCL v2.0 s6.13.11.6:
319 // - The atomic_long and atomic_ulong types are supported if the
320 // cl_khr_int64_base_atomics and cl_khr_int64_extended_atomics
321 // extensions are supported.
322 // - The atomic_double type is only supported if double precision
323 // is supported and the cl_khr_int64_base_atomics and
324 // cl_khr_int64_extended_atomics extensions are supported.
325 // - If the device address space is 64-bits, the data types
326 // atomic_intptr_t, atomic_uintptr_t, atomic_size_t and
327 // atomic_ptrdiff_t are supported if the cl_khr_int64_base_atomics and
328 // cl_khr_int64_extended_atomics extensions are supported.
329
330 auto AddPointerSizeDependentTypes = [&]() {
331 auto AtomicSizeT = Context.getAtomicType(Context.getSizeType());
332 auto AtomicIntPtrT = Context.getAtomicType(Context.getIntPtrType());
333 auto AtomicUIntPtrT = Context.getAtomicType(Context.getUIntPtrType());
334 auto AtomicPtrDiffT =
335 Context.getAtomicType(Context.getPointerDiffType());
336 addImplicitTypedef("atomic_size_t", AtomicSizeT);
337 addImplicitTypedef("atomic_intptr_t", AtomicIntPtrT);
338 addImplicitTypedef("atomic_uintptr_t", AtomicUIntPtrT);
339 addImplicitTypedef("atomic_ptrdiff_t", AtomicPtrDiffT);
340 };
341
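      // The pointer-size-dependent atomic types are always available on
      // 32-bit targets; on 64-bit targets they are only added below when the
      // 64-bit atomic extensions are supported.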
342 if (Context.getTypeSize(Context.getSizeType()) == 32) {
343 AddPointerSizeDependentTypes();
344 }
345
346 std::vector<QualType> Atomic64BitTypes;
347 if (getOpenCLOptions().isSupported("cl_khr_int64_base_atomics",
348 getLangOpts()) &&
349 getOpenCLOptions().isSupported("cl_khr_int64_extended_atomics",
350 getLangOpts())) {
351 if (getOpenCLOptions().isSupported("cl_khr_fp64", getLangOpts())) {
352 auto AtomicDoubleT = Context.getAtomicType(Context.DoubleTy);
353 addImplicitTypedef("atomic_double", AtomicDoubleT);
354 Atomic64BitTypes.push_back(AtomicDoubleT);
355 }
356 auto AtomicLongT = Context.getAtomicType(Context.LongTy);
357 auto AtomicULongT = Context.getAtomicType(Context.UnsignedLongTy);
358 addImplicitTypedef("atomic_long", AtomicLongT);
359 addImplicitTypedef("atomic_ulong", AtomicULongT);
360
361
362 if (Context.getTypeSize(Context.getSizeType()) == 64) {
363 AddPointerSizeDependentTypes();
364 }
365 }
366 }
367
368
369 #define EXT_OPAQUE_TYPE(ExtType, Id, Ext) \
370 if (getOpenCLOptions().isSupported(#Ext, getLangOpts())) { \
371 addImplicitTypedef(#ExtType, Context.Id##Ty); \
372 }
373 #include "clang/Basic/OpenCLExtensionTypes.def"
374 }
375
376 if (Context.getTargetInfo().hasAArch64SVETypes()) {
377 #define SVE_TYPE(Name, Id, SingletonId) \
378 addImplicitTypedef(Name, Context.SingletonId);
379 #include "clang/Basic/AArch64SVEACLETypes.def"
380 }
381
382 if (Context.getTargetInfo().getTriple().isPPC64() &&
383 Context.getTargetInfo().hasFeature("paired-vector-memops")) {
384 if (Context.getTargetInfo().hasFeature("mma")) {
385 #define PPC_VECTOR_MMA_TYPE(Name, Id, Size) \
386 addImplicitTypedef(#Name, Context.Id##Ty);
387 #include "clang/Basic/PPCTypes.def"
388 }
389 #define PPC_VECTOR_VSX_TYPE(Name, Id, Size) \
390 addImplicitTypedef(#Name, Context.Id##Ty);
391 #include "clang/Basic/PPCTypes.def"
392 }
393
394 if (Context.getTargetInfo().hasRISCVVTypes()) {
395 #define RVV_TYPE(Name, Id, SingletonId) \
396 addImplicitTypedef(Name, Context.SingletonId);
397 #include "clang/Basic/RISCVVTypes.def"
398 }
399
400 if (Context.getTargetInfo().hasBuiltinMSVaList()) {
401 DeclarationName MSVaList = &Context.Idents.get("__builtin_ms_va_list");
402 if (IdResolver.begin(MSVaList) == IdResolver.end())
403 PushOnScopeChains(Context.getBuiltinMSVaListDecl(), TUScope);
404 }
405
406 DeclarationName BuiltinVaList = &Context.Idents.get("__builtin_va_list");
407 if (IdResolver.begin(BuiltinVaList) == IdResolver.end())
408 PushOnScopeChains(Context.getBuiltinVaListDecl(), TUScope);
409 }
410
Sema::~Sema() {
412 assert(InstantiatingSpecializations.empty() &&
413 "failed to clean up an InstantiatingTemplate?");
414
415 if (VisContext) FreeVisContext();
416
417 // Kill all the active scopes.
418 for (sema::FunctionScopeInfo *FSI : FunctionScopes)
419 delete FSI;
420
421 // Tell the SemaConsumer to forget about us; we're going out of scope.
422 if (SemaConsumer *SC = dyn_cast<SemaConsumer>(&Consumer))
423 SC->ForgetSema();
424
425 // Detach from the external Sema source.
426 if (ExternalSemaSource *ExternalSema
427 = dyn_cast_or_null<ExternalSemaSource>(Context.getExternalSource()))
428 ExternalSema->ForgetSema();
429
430 // If Sema's ExternalSource is the multiplexer - we own it.
431 if (isMultiplexExternalSource)
432 delete ExternalSource;
433
434 // Delete cached satisfactions.
435 std::vector<ConstraintSatisfaction *> Satisfactions;
  Satisfactions.reserve(SatisfactionCache.size());
437 for (auto &Node : SatisfactionCache)
438 Satisfactions.push_back(&Node);
439 for (auto *Node : Satisfactions)
440 delete Node;
441
442 threadSafety::threadSafetyCleanup(ThreadSafetyDeclCache);
443
444 // Destroys data sharing attributes stack for OpenMP
445 DestroyDataSharingAttributesStack();
446
447 // Detach from the PP callback handler which outlives Sema since it's owned
448 // by the preprocessor.
449 SemaPPCallbackHandler->reset();
450 }
451
void Sema::warnStackExhausted(SourceLocation Loc) {
453 // Only warn about this once.
454 if (!WarnedStackExhausted) {
455 Diag(Loc, diag::warn_stack_exhausted);
456 WarnedStackExhausted = true;
457 }
458 }
459
void Sema::runWithSufficientStackSpace(SourceLocation Loc,
461 llvm::function_ref<void()> Fn) {
462 clang::runWithSufficientStackSpace([&] { warnStackExhausted(Loc); }, Fn);
463 }
464
465 /// makeUnavailableInSystemHeader - There is an error in the current
466 /// context. If we're still in a system header, and we can plausibly
467 /// make the relevant declaration unavailable instead of erroring, do
468 /// so and return true.
bool Sema::makeUnavailableInSystemHeader(SourceLocation loc,
470 UnavailableAttr::ImplicitReason reason) {
471 // If we're not in a function, it's an error.
472 FunctionDecl *fn = dyn_cast<FunctionDecl>(CurContext);
473 if (!fn) return false;
474
475 // If we're in template instantiation, it's an error.
476 if (inTemplateInstantiation())
477 return false;
478
479 // If that function's not in a system header, it's an error.
480 if (!Context.getSourceManager().isInSystemHeader(loc))
481 return false;
482
483 // If the function is already unavailable, it's not an error.
484 if (fn->hasAttr<UnavailableAttr>()) return true;
485
486 fn->addAttr(UnavailableAttr::CreateImplicit(Context, "", reason, loc));
487 return true;
488 }
489
ASTMutationListener *Sema::getASTMutationListener() const {
491 return getASTConsumer().GetASTMutationListener();
492 }
493
/// Registers an external source. If an external source already exists, a
/// multiplex external source is created and the new source is appended to it.
///
/// \param[in] E A non-null external Sema source.
///
void Sema::addExternalSource(ExternalSemaSource *E) {
500 assert(E && "Cannot use with NULL ptr");
501
502 if (!ExternalSource) {
503 ExternalSource = E;
504 return;
505 }
506
507 if (isMultiplexExternalSource)
508 static_cast<MultiplexExternalSemaSource*>(ExternalSource)->addSource(*E);
509 else {
510 ExternalSource = new MultiplexExternalSemaSource(*ExternalSource, *E);
511 isMultiplexExternalSource = true;
512 }
513 }
514
515 /// Print out statistics about the semantic analysis.
void Sema::PrintStats() const {
517 llvm::errs() << "\n*** Semantic Analysis Stats:\n";
518 llvm::errs() << NumSFINAEErrors << " SFINAE diagnostics trapped.\n";
519
520 BumpAlloc.PrintStats();
521 AnalysisWarnings.PrintStats();
522 }
523
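/// Warn when a value of _Nullable (or _Nullable_result) type is implicitly
/// converted to a _Nonnull type, since the nullability information is lost.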
void Sema::diagnoseNullableToNonnullConversion(QualType DstType,
525 QualType SrcType,
526 SourceLocation Loc) {
527 Optional<NullabilityKind> ExprNullability = SrcType->getNullability(Context);
528 if (!ExprNullability || (*ExprNullability != NullabilityKind::Nullable &&
529 *ExprNullability != NullabilityKind::NullableResult))
530 return;
531
532 Optional<NullabilityKind> TypeNullability = DstType->getNullability(Context);
533 if (!TypeNullability || *TypeNullability != NullabilityKind::NonNull)
534 return;
535
536 Diag(Loc, diag::warn_nullability_lost) << SrcType << DstType;
537 }
538
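/// Suggest using 'nullptr' (with a fix-it) when a zero constant is converted
/// to a pointer type in C++11 or later, unless the zero comes from a system
/// macro other than NULL or from a synthesized rewrite of operator<=>.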
void Sema::diagnoseZeroToNullptrConversion(CastKind Kind, const Expr* E) {
540 if (Diags.isIgnored(diag::warn_zero_as_null_pointer_constant,
541 E->getBeginLoc()))
542 return;
543 // nullptr only exists from C++11 on, so don't warn on its absence earlier.
544 if (!getLangOpts().CPlusPlus11)
545 return;
546
547 if (Kind != CK_NullToPointer && Kind != CK_NullToMemberPointer)
548 return;
549 if (E->IgnoreParenImpCasts()->getType()->isNullPtrType())
550 return;
551
552 // Don't diagnose the conversion from a 0 literal to a null pointer argument
553 // in a synthesized call to operator<=>.
554 if (!CodeSynthesisContexts.empty() &&
555 CodeSynthesisContexts.back().Kind ==
556 CodeSynthesisContext::RewritingOperatorAsSpaceship)
557 return;
558
  // If it is a macro from a system header, and the macro name is not "NULL",
  // do not warn.
561 SourceLocation MaybeMacroLoc = E->getBeginLoc();
562 if (Diags.getSuppressSystemWarnings() &&
563 SourceMgr.isInSystemMacro(MaybeMacroLoc) &&
564 !findMacroSpelling(MaybeMacroLoc, "NULL"))
565 return;
566
567 Diag(E->getBeginLoc(), diag::warn_zero_as_null_pointer_constant)
568 << FixItHint::CreateReplacement(E->getSourceRange(), "nullptr");
569 }
570
571 /// ImpCastExprToType - If Expr is not of type 'Type', insert an implicit cast.
572 /// If there is already an implicit cast, merge into the existing one.
573 /// The result is of the given category.
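///
/// Illustrative use (a sketch relying on this function's default arguments):
///   ImpCastExprToType(E, Context.DoubleTy, CK_IntegralToFloating)
/// wraps an integer-typed expression E in an int-to-double implicit cast.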
ExprResult Sema::ImpCastExprToType(Expr *E, QualType Ty,
575 CastKind Kind, ExprValueKind VK,
576 const CXXCastPath *BasePath,
577 CheckedConversionKind CCK) {
578 #ifndef NDEBUG
579 if (VK == VK_RValue && !E->isRValue()) {
580 switch (Kind) {
581 default:
582 llvm_unreachable(("can't implicitly cast lvalue to rvalue with this cast "
583 "kind: " +
584 std::string(CastExpr::getCastKindName(Kind)))
585 .c_str());
586 case CK_Dependent:
587 case CK_LValueToRValue:
588 case CK_ArrayToPointerDecay:
589 case CK_FunctionToPointerDecay:
590 case CK_ToVoid:
591 case CK_NonAtomicToAtomic:
592 break;
593 }
594 }
595 assert((VK == VK_RValue || Kind == CK_Dependent || !E->isRValue()) &&
596 "can't cast rvalue to lvalue");
597 #endif
598
599 diagnoseNullableToNonnullConversion(Ty, E->getType(), E->getBeginLoc());
600 diagnoseZeroToNullptrConversion(Kind, E);
601
602 QualType ExprTy = Context.getCanonicalType(E->getType());
603 QualType TypeTy = Context.getCanonicalType(Ty);
604
605 if (ExprTy == TypeTy)
606 return E;
607
608 // C++1z [conv.array]: The temporary materialization conversion is applied.
609 // We also use this to fuel C++ DR1213, which applies to C++11 onwards.
610 if (Kind == CK_ArrayToPointerDecay && getLangOpts().CPlusPlus &&
611 E->getValueKind() == VK_RValue) {
612 // The temporary is an lvalue in C++98 and an xvalue otherwise.
613 ExprResult Materialized = CreateMaterializeTemporaryExpr(
614 E->getType(), E, !getLangOpts().CPlusPlus11);
615 if (Materialized.isInvalid())
616 return ExprError();
617 E = Materialized.get();
618 }
619
620 if (ImplicitCastExpr *ImpCast = dyn_cast<ImplicitCastExpr>(E)) {
621 if (ImpCast->getCastKind() == Kind && (!BasePath || BasePath->empty())) {
622 ImpCast->setType(Ty);
623 ImpCast->setValueKind(VK);
624 return E;
625 }
626 }
627
628 return ImplicitCastExpr::Create(Context, Ty, Kind, E, BasePath, VK,
629 CurFPFeatureOverrides());
630 }
631
632 /// ScalarTypeToBooleanCastKind - Returns the cast kind corresponding
633 /// to the conversion from scalar type ScalarTy to the Boolean type.
CastKind Sema::ScalarTypeToBooleanCastKind(QualType ScalarTy) {
635 switch (ScalarTy->getScalarTypeKind()) {
636 case Type::STK_Bool: return CK_NoOp;
637 case Type::STK_CPointer: return CK_PointerToBoolean;
638 case Type::STK_BlockPointer: return CK_PointerToBoolean;
639 case Type::STK_ObjCObjectPointer: return CK_PointerToBoolean;
640 case Type::STK_MemberPointer: return CK_MemberPointerToBoolean;
641 case Type::STK_Integral: return CK_IntegralToBoolean;
642 case Type::STK_Floating: return CK_FloatingToBoolean;
643 case Type::STK_IntegralComplex: return CK_IntegralComplexToBoolean;
644 case Type::STK_FloatingComplex: return CK_FloatingComplexToBoolean;
645 case Type::STK_FixedPoint: return CK_FixedPointToBoolean;
646 }
647 llvm_unreachable("unknown scalar type kind");
648 }
649
650 /// Used to prune the decls of Sema's UnusedFileScopedDecls vector.
static bool ShouldRemoveFromUnused(Sema *SemaRef, const DeclaratorDecl *D) {
652 if (D->getMostRecentDecl()->isUsed())
653 return true;
654
655 if (D->isExternallyVisible())
656 return true;
657
658 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
659 // If this is a function template and none of its specializations is used,
660 // we should warn.
661 if (FunctionTemplateDecl *Template = FD->getDescribedFunctionTemplate())
662 for (const auto *Spec : Template->specializations())
663 if (ShouldRemoveFromUnused(SemaRef, Spec))
664 return true;
665
666 // UnusedFileScopedDecls stores the first declaration.
667 // The declaration may have become definition so check again.
668 const FunctionDecl *DeclToCheck;
669 if (FD->hasBody(DeclToCheck))
670 return !SemaRef->ShouldWarnIfUnusedFileScopedDecl(DeclToCheck);
671
672 // Later redecls may add new information resulting in not having to warn,
673 // so check again.
674 DeclToCheck = FD->getMostRecentDecl();
675 if (DeclToCheck != FD)
676 return !SemaRef->ShouldWarnIfUnusedFileScopedDecl(DeclToCheck);
677 }
678
679 if (const VarDecl *VD = dyn_cast<VarDecl>(D)) {
680 // If a variable usable in constant expressions is referenced,
681 // don't warn if it isn't used: if the value of a variable is required
682 // for the computation of a constant expression, it doesn't make sense to
683 // warn even if the variable isn't odr-used. (isReferenced doesn't
684 // precisely reflect that, but it's a decent approximation.)
685 if (VD->isReferenced() &&
686 VD->mightBeUsableInConstantExpressions(SemaRef->Context))
687 return true;
688
689 if (VarTemplateDecl *Template = VD->getDescribedVarTemplate())
690 // If this is a variable template and none of its specializations is used,
691 // we should warn.
692 for (const auto *Spec : Template->specializations())
693 if (ShouldRemoveFromUnused(SemaRef, Spec))
694 return true;
695
696 // UnusedFileScopedDecls stores the first declaration.
697 // The declaration may have become definition so check again.
698 const VarDecl *DeclToCheck = VD->getDefinition();
699 if (DeclToCheck)
700 return !SemaRef->ShouldWarnIfUnusedFileScopedDecl(DeclToCheck);
701
702 // Later redecls may add new information resulting in not having to warn,
703 // so check again.
704 DeclToCheck = VD->getMostRecentDecl();
705 if (DeclToCheck != VD)
706 return !SemaRef->ShouldWarnIfUnusedFileScopedDecl(DeclToCheck);
707 }
708
709 return false;
710 }
711
static bool isFunctionOrVarDeclExternC(NamedDecl *ND) {
713 if (auto *FD = dyn_cast<FunctionDecl>(ND))
714 return FD->isExternC();
715 return cast<VarDecl>(ND)->isExternC();
716 }
717
718 /// Determine whether ND is an external-linkage function or variable whose
719 /// type has no linkage.
bool Sema::isExternalWithNoLinkageType(ValueDecl *VD) {
721 // Note: it's not quite enough to check whether VD has UniqueExternalLinkage,
722 // because we also want to catch the case where its type has VisibleNoLinkage,
723 // which does not affect the linkage of VD.
724 return getLangOpts().CPlusPlus && VD->hasExternalFormalLinkage() &&
725 !isExternalFormalLinkage(VD->getType()->getLinkage()) &&
726 !isFunctionOrVarDeclExternC(VD);
727 }
728
729 /// Obtains a sorted list of functions and variables that are undefined but
730 /// ODR-used.
void Sema::getUndefinedButUsed(
732 SmallVectorImpl<std::pair<NamedDecl *, SourceLocation> > &Undefined) {
733 for (const auto &UndefinedUse : UndefinedButUsed) {
734 NamedDecl *ND = UndefinedUse.first;
735
    // Ignore declarations that have become invalid.
737 if (ND->isInvalidDecl()) continue;
738
739 // __attribute__((weakref)) is basically a definition.
740 if (ND->hasAttr<WeakRefAttr>()) continue;
741
742 if (isa<CXXDeductionGuideDecl>(ND))
743 continue;
744
745 if (ND->hasAttr<DLLImportAttr>() || ND->hasAttr<DLLExportAttr>()) {
746 // An exported function will always be emitted when defined, so even if
747 // the function is inline, it doesn't have to be emitted in this TU. An
748 // imported function implies that it has been exported somewhere else.
749 continue;
750 }
751
752 if (FunctionDecl *FD = dyn_cast<FunctionDecl>(ND)) {
753 if (FD->isDefined())
754 continue;
755 if (FD->isExternallyVisible() &&
756 !isExternalWithNoLinkageType(FD) &&
757 !FD->getMostRecentDecl()->isInlined() &&
758 !FD->hasAttr<ExcludeFromExplicitInstantiationAttr>())
759 continue;
760 if (FD->getBuiltinID())
761 continue;
762 } else {
763 auto *VD = cast<VarDecl>(ND);
764 if (VD->hasDefinition() != VarDecl::DeclarationOnly)
765 continue;
766 if (VD->isExternallyVisible() &&
767 !isExternalWithNoLinkageType(VD) &&
768 !VD->getMostRecentDecl()->isInline() &&
769 !VD->hasAttr<ExcludeFromExplicitInstantiationAttr>())
770 continue;
771
772 // Skip VarDecls that lack formal definitions but which we know are in
773 // fact defined somewhere.
774 if (VD->isKnownToBeDefined())
775 continue;
776 }
777
778 Undefined.push_back(std::make_pair(ND, UndefinedUse.second));
779 }
780 }
781
782 /// checkUndefinedButUsed - Check for undefined objects with internal linkage
783 /// or that are inline.
static void checkUndefinedButUsed(Sema &S) {
785 if (S.UndefinedButUsed.empty()) return;
786
787 // Collect all the still-undefined entities with internal linkage.
788 SmallVector<std::pair<NamedDecl *, SourceLocation>, 16> Undefined;
789 S.getUndefinedButUsed(Undefined);
790 if (Undefined.empty()) return;
791
792 for (auto Undef : Undefined) {
793 ValueDecl *VD = cast<ValueDecl>(Undef.first);
794 SourceLocation UseLoc = Undef.second;
795
796 if (S.isExternalWithNoLinkageType(VD)) {
797 // C++ [basic.link]p8:
798 // A type without linkage shall not be used as the type of a variable
799 // or function with external linkage unless
800 // -- the entity has C language linkage
801 // -- the entity is not odr-used or is defined in the same TU
802 //
803 // As an extension, accept this in cases where the type is externally
804 // visible, since the function or variable actually can be defined in
805 // another translation unit in that case.
806 S.Diag(VD->getLocation(), isExternallyVisible(VD->getType()->getLinkage())
807 ? diag::ext_undefined_internal_type
808 : diag::err_undefined_internal_type)
809 << isa<VarDecl>(VD) << VD;
810 } else if (!VD->isExternallyVisible()) {
811 // FIXME: We can promote this to an error. The function or variable can't
812 // be defined anywhere else, so the program must necessarily violate the
813 // one definition rule.
814 S.Diag(VD->getLocation(), diag::warn_undefined_internal)
815 << isa<VarDecl>(VD) << VD;
816 } else if (auto *FD = dyn_cast<FunctionDecl>(VD)) {
817 (void)FD;
818 assert(FD->getMostRecentDecl()->isInlined() &&
819 "used object requires definition but isn't inline or internal?");
820 // FIXME: This is ill-formed; we should reject.
821 S.Diag(VD->getLocation(), diag::warn_undefined_inline) << VD;
822 } else {
823 assert(cast<VarDecl>(VD)->getMostRecentDecl()->isInline() &&
824 "used var requires definition but isn't inline or internal?");
825 S.Diag(VD->getLocation(), diag::err_undefined_inline_var) << VD;
826 }
827 if (UseLoc.isValid())
828 S.Diag(UseLoc, diag::note_used_here);
829 }
830
831 S.UndefinedButUsed.clear();
832 }
833
void Sema::LoadExternalWeakUndeclaredIdentifiers() {
835 if (!ExternalSource)
836 return;
837
838 SmallVector<std::pair<IdentifierInfo *, WeakInfo>, 4> WeakIDs;
839 ExternalSource->ReadWeakUndeclaredIdentifiers(WeakIDs);
840 for (auto &WeakID : WeakIDs)
841 WeakUndeclaredIdentifiers.insert(WeakID);
842 }
843
844
845 typedef llvm::DenseMap<const CXXRecordDecl*, bool> RecordCompleteMap;
846
/// Returns true if all methods and nested classes of the given
848 /// CXXRecordDecl are defined in this translation unit.
849 ///
850 /// Should only be called from ActOnEndOfTranslationUnit so that all
851 /// definitions are actually read.
static bool MethodsAndNestedClassesComplete(const CXXRecordDecl *RD,
853 RecordCompleteMap &MNCComplete) {
854 RecordCompleteMap::iterator Cache = MNCComplete.find(RD);
855 if (Cache != MNCComplete.end())
856 return Cache->second;
857 if (!RD->isCompleteDefinition())
858 return false;
859 bool Complete = true;
860 for (DeclContext::decl_iterator I = RD->decls_begin(),
861 E = RD->decls_end();
862 I != E && Complete; ++I) {
863 if (const CXXMethodDecl *M = dyn_cast<CXXMethodDecl>(*I))
864 Complete = M->isDefined() || M->isDefaulted() ||
865 (M->isPure() && !isa<CXXDestructorDecl>(M));
866 else if (const FunctionTemplateDecl *F = dyn_cast<FunctionTemplateDecl>(*I))
867 // If the template function is marked as late template parsed at this
868 // point, it has not been instantiated and therefore we have not
869 // performed semantic analysis on it yet, so we cannot know if the type
870 // can be considered complete.
871 Complete = !F->getTemplatedDecl()->isLateTemplateParsed() &&
872 F->getTemplatedDecl()->isDefined();
873 else if (const CXXRecordDecl *R = dyn_cast<CXXRecordDecl>(*I)) {
874 if (R->isInjectedClassName())
875 continue;
876 if (R->hasDefinition())
877 Complete = MethodsAndNestedClassesComplete(R->getDefinition(),
878 MNCComplete);
879 else
880 Complete = false;
881 }
882 }
883 MNCComplete[RD] = Complete;
884 return Complete;
885 }
886
/// Returns true if the given CXXRecordDecl is fully defined in this
888 /// translation unit, i.e. all methods are defined or pure virtual and all
889 /// friends, friend functions and nested classes are fully defined in this
890 /// translation unit.
891 ///
892 /// Should only be called from ActOnEndOfTranslationUnit so that all
893 /// definitions are actually read.
static bool IsRecordFullyDefined(const CXXRecordDecl *RD,
895 RecordCompleteMap &RecordsComplete,
896 RecordCompleteMap &MNCComplete) {
897 RecordCompleteMap::iterator Cache = RecordsComplete.find(RD);
898 if (Cache != RecordsComplete.end())
899 return Cache->second;
900 bool Complete = MethodsAndNestedClassesComplete(RD, MNCComplete);
901 for (CXXRecordDecl::friend_iterator I = RD->friend_begin(),
902 E = RD->friend_end();
903 I != E && Complete; ++I) {
904 // Check if friend classes and methods are complete.
905 if (TypeSourceInfo *TSI = (*I)->getFriendType()) {
906 // Friend classes are available as the TypeSourceInfo of the FriendDecl.
907 if (CXXRecordDecl *FriendD = TSI->getType()->getAsCXXRecordDecl())
908 Complete = MethodsAndNestedClassesComplete(FriendD, MNCComplete);
909 else
910 Complete = false;
911 } else {
912 // Friend functions are available through the NamedDecl of FriendDecl.
913 if (const FunctionDecl *FD =
914 dyn_cast<FunctionDecl>((*I)->getFriendDecl()))
915 Complete = FD->isDefined();
916 else
917 // This is a template friend, give up.
918 Complete = false;
919 }
920 }
921 RecordsComplete[RD] = Complete;
922 return Complete;
923 }
924
void Sema::emitAndClearUnusedLocalTypedefWarnings() {
926 if (ExternalSource)
927 ExternalSource->ReadUnusedLocalTypedefNameCandidates(
928 UnusedLocalTypedefNameCandidates);
929 for (const TypedefNameDecl *TD : UnusedLocalTypedefNameCandidates) {
930 if (TD->isReferenced())
931 continue;
932 Diag(TD->getLocation(), diag::warn_unused_local_typedef)
933 << isa<TypeAliasDecl>(TD) << TD->getDeclName();
934 }
935 UnusedLocalTypedefNameCandidates.clear();
936 }
937
938 /// This is called before the very first declaration in the translation unit
939 /// is parsed. Note that the ASTContext may have already injected some
940 /// declarations.
void Sema::ActOnStartOfTranslationUnit() {
942 if (getLangOpts().ModulesTS &&
943 (getLangOpts().getCompilingModule() == LangOptions::CMK_ModuleInterface ||
944 getLangOpts().getCompilingModule() == LangOptions::CMK_None)) {
945 // We start in an implied global module fragment.
946 SourceLocation StartOfTU =
947 SourceMgr.getLocForStartOfFile(SourceMgr.getMainFileID());
948 ActOnGlobalModuleFragmentDecl(StartOfTU);
949 ModuleScopes.back().ImplicitGlobalModuleFragment = true;
950 }
951 }
952
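/// Finish the current translation-unit fragment. Nothing needs to be done for
/// the global module fragment; otherwise late-parsed template instantiations
/// are moved onto the pending list, pending instantiations are performed,
/// deferred diagnostics are emitted, and any uncorrected delayed typos are
/// reported.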
void Sema::ActOnEndOfTranslationUnitFragment(TUFragmentKind Kind) {
954 // No explicit actions are required at the end of the global module fragment.
955 if (Kind == TUFragmentKind::Global)
956 return;
957
958 // Transfer late parsed template instantiations over to the pending template
959 // instantiation list. During normal compilation, the late template parser
960 // will be installed and instantiating these templates will succeed.
961 //
962 // If we are building a TU prefix for serialization, it is also safe to
963 // transfer these over, even though they are not parsed. The end of the TU
964 // should be outside of any eager template instantiation scope, so when this
965 // AST is deserialized, these templates will not be parsed until the end of
966 // the combined TU.
967 PendingInstantiations.insert(PendingInstantiations.end(),
968 LateParsedInstantiations.begin(),
969 LateParsedInstantiations.end());
970 LateParsedInstantiations.clear();
971
  // If DefineUsedVTables ends up marking any virtual member functions, it
  // might lead to more pending template instantiations, which we then need
  // to instantiate.
975 DefineUsedVTables();
976
977 // C++: Perform implicit template instantiations.
978 //
979 // FIXME: When we perform these implicit instantiations, we do not
980 // carefully keep track of the point of instantiation (C++ [temp.point]).
981 // This means that name lookup that occurs within the template
982 // instantiation will always happen at the end of the translation unit,
983 // so it will find some names that are not required to be found. This is
984 // valid, but we could do better by diagnosing if an instantiation uses a
985 // name that was not visible at its first point of instantiation.
986 if (ExternalSource) {
987 // Load pending instantiations from the external source.
988 SmallVector<PendingImplicitInstantiation, 4> Pending;
989 ExternalSource->ReadPendingInstantiations(Pending);
990 for (auto PII : Pending)
991 if (auto Func = dyn_cast<FunctionDecl>(PII.first))
992 Func->setInstantiationIsPending(true);
993 PendingInstantiations.insert(PendingInstantiations.begin(),
994 Pending.begin(), Pending.end());
995 }
996
997 {
998 llvm::TimeTraceScope TimeScope("PerformPendingInstantiations");
999 PerformPendingInstantiations();
1000 }
1001
1002 emitDeferredDiags();
1003
1004 assert(LateParsedInstantiations.empty() &&
1005 "end of TU template instantiation should not create more "
1006 "late-parsed templates");
1007
1008 // Report diagnostics for uncorrected delayed typos. Ideally all of them
1009 // should have been corrected by that time, but it is very hard to cover all
1010 // cases in practice.
1011 for (const auto &Typo : DelayedTypos) {
1012 // We pass an empty TypoCorrection to indicate no correction was performed.
1013 Typo.second.DiagHandler(TypoCorrection());
1014 }
1015 DelayedTypos.clear();
1016 }
1017
1018 /// ActOnEndOfTranslationUnit - This is called at the very end of the
1019 /// translation unit when EOF is reached and all but the top-level scope is
1020 /// popped.
void Sema::ActOnEndOfTranslationUnit() {
1022 assert(DelayedDiagnostics.getCurrentPool() == nullptr
1023 && "reached end of translation unit with a pool attached?");
1024
1025 // If code completion is enabled, don't perform any end-of-translation-unit
1026 // work.
1027 if (PP.isCodeCompletionEnabled())
1028 return;
1029
1030 // Complete translation units and modules define vtables and perform implicit
1031 // instantiations. PCH files do not.
1032 if (TUKind != TU_Prefix) {
1033 DiagnoseUseOfUnimplementedSelectors();
1034
1035 ActOnEndOfTranslationUnitFragment(
1036 !ModuleScopes.empty() && ModuleScopes.back().Module->Kind ==
1037 Module::PrivateModuleFragment
1038 ? TUFragmentKind::Private
1039 : TUFragmentKind::Normal);
1040
1041 if (LateTemplateParserCleanup)
1042 LateTemplateParserCleanup(OpaqueParser);
1043
1044 CheckDelayedMemberExceptionSpecs();
1045 } else {
1046 // If we are building a TU prefix for serialization, it is safe to transfer
1047 // these over, even though they are not parsed. The end of the TU should be
1048 // outside of any eager template instantiation scope, so when this AST is
1049 // deserialized, these templates will not be parsed until the end of the
1050 // combined TU.
1051 PendingInstantiations.insert(PendingInstantiations.end(),
1052 LateParsedInstantiations.begin(),
1053 LateParsedInstantiations.end());
1054 LateParsedInstantiations.clear();
1055
1056 if (LangOpts.PCHInstantiateTemplates) {
1057 llvm::TimeTraceScope TimeScope("PerformPendingInstantiations");
1058 PerformPendingInstantiations();
1059 }
1060 }
1061
1062 DiagnoseUnterminatedPragmaAlignPack();
1063 DiagnoseUnterminatedPragmaAttribute();
1064
1065 // All delayed member exception specs should be checked or we end up accepting
1066 // incompatible declarations.
1067 assert(DelayedOverridingExceptionSpecChecks.empty());
1068 assert(DelayedEquivalentExceptionSpecChecks.empty());
1069
1070 // All dllexport classes should have been processed already.
1071 assert(DelayedDllExportClasses.empty());
1072 assert(DelayedDllExportMemberFunctions.empty());
1073
1074 // Remove file scoped decls that turned out to be used.
1075 UnusedFileScopedDecls.erase(
1076 std::remove_if(UnusedFileScopedDecls.begin(nullptr, true),
1077 UnusedFileScopedDecls.end(),
1078 [this](const DeclaratorDecl *DD) {
1079 return ShouldRemoveFromUnused(this, DD);
1080 }),
1081 UnusedFileScopedDecls.end());
1082
1083 if (TUKind == TU_Prefix) {
1084 // Translation unit prefixes don't need any of the checking below.
1085 if (!PP.isIncrementalProcessingEnabled())
1086 TUScope = nullptr;
1087 return;
1088 }
1089
1090 // Check for #pragma weak identifiers that were never declared
1091 LoadExternalWeakUndeclaredIdentifiers();
1092 for (auto WeakID : WeakUndeclaredIdentifiers) {
1093 if (WeakID.second.getUsed())
1094 continue;
1095
1096 Decl *PrevDecl = LookupSingleName(TUScope, WeakID.first, SourceLocation(),
1097 LookupOrdinaryName);
1098 if (PrevDecl != nullptr &&
1099 !(isa<FunctionDecl>(PrevDecl) || isa<VarDecl>(PrevDecl)))
1100 Diag(WeakID.second.getLocation(), diag::warn_attribute_wrong_decl_type)
1101 << "'weak'" << ExpectedVariableOrFunction;
1102 else
1103 Diag(WeakID.second.getLocation(), diag::warn_weak_identifier_undeclared)
1104 << WeakID.first;
1105 }
1106
1107 if (LangOpts.CPlusPlus11 &&
1108 !Diags.isIgnored(diag::warn_delegating_ctor_cycle, SourceLocation()))
1109 CheckDelegatingCtorCycles();
1110
1111 if (!Diags.hasErrorOccurred()) {
1112 if (ExternalSource)
1113 ExternalSource->ReadUndefinedButUsed(UndefinedButUsed);
1114 checkUndefinedButUsed(*this);
1115 }
1116
1117 // A global-module-fragment is only permitted within a module unit.
1118 bool DiagnosedMissingModuleDeclaration = false;
1119 if (!ModuleScopes.empty() &&
1120 ModuleScopes.back().Module->Kind == Module::GlobalModuleFragment &&
1121 !ModuleScopes.back().ImplicitGlobalModuleFragment) {
1122 Diag(ModuleScopes.back().BeginLoc,
1123 diag::err_module_declaration_missing_after_global_module_introducer);
1124 DiagnosedMissingModuleDeclaration = true;
1125 }
1126
1127 if (TUKind == TU_Module) {
1128 // If we are building a module interface unit, we need to have seen the
1129 // module declaration by now.
1130 if (getLangOpts().getCompilingModule() ==
1131 LangOptions::CMK_ModuleInterface &&
1132 (ModuleScopes.empty() ||
1133 !ModuleScopes.back().Module->isModulePurview()) &&
1134 !DiagnosedMissingModuleDeclaration) {
1135 // FIXME: Make a better guess as to where to put the module declaration.
1136 Diag(getSourceManager().getLocForStartOfFile(
1137 getSourceManager().getMainFileID()),
1138 diag::err_module_declaration_missing);
1139 }
1140
1141 // If we are building a module, resolve all of the exported declarations
1142 // now.
1143 if (Module *CurrentModule = PP.getCurrentModule()) {
1144 ModuleMap &ModMap = PP.getHeaderSearchInfo().getModuleMap();
1145
1146 SmallVector<Module *, 2> Stack;
1147 Stack.push_back(CurrentModule);
1148 while (!Stack.empty()) {
1149 Module *Mod = Stack.pop_back_val();
1150
1151 // Resolve the exported declarations and conflicts.
1152 // FIXME: Actually complain, once we figure out how to teach the
1153 // diagnostic client to deal with complaints in the module map at this
1154 // point.
1155 ModMap.resolveExports(Mod, /*Complain=*/false);
1156 ModMap.resolveUses(Mod, /*Complain=*/false);
1157 ModMap.resolveConflicts(Mod, /*Complain=*/false);
1158
1159 // Queue the submodules, so their exports will also be resolved.
1160 Stack.append(Mod->submodule_begin(), Mod->submodule_end());
1161 }
1162 }
1163
1164 // Warnings emitted in ActOnEndOfTranslationUnit() should be emitted for
1165 // modules when they are built, not every time they are used.
1166 emitAndClearUnusedLocalTypedefWarnings();
1167 }
1168
1169 // C99 6.9.2p2:
1170 // A declaration of an identifier for an object that has file
1171 // scope without an initializer, and without a storage-class
1172 // specifier or with the storage-class specifier static,
1173 // constitutes a tentative definition. If a translation unit
1174 // contains one or more tentative definitions for an identifier,
1175 // and the translation unit contains no external definition for
1176 // that identifier, then the behavior is exactly as if the
1177 // translation unit contains a file scope declaration of that
1178 // identifier, with the composite type as of the end of the
1179 // translation unit, with an initializer equal to 0.
1180 llvm::SmallSet<VarDecl *, 32> Seen;
1181 for (TentativeDefinitionsType::iterator
1182 T = TentativeDefinitions.begin(ExternalSource),
1183 TEnd = TentativeDefinitions.end();
1184 T != TEnd; ++T) {
1185 VarDecl *VD = (*T)->getActingDefinition();
1186
1187 // If the tentative definition was completed, getActingDefinition() returns
1188 // null. If we've already seen this variable before, insert()'s second
1189 // return value is false.
1190 if (!VD || VD->isInvalidDecl() || !Seen.insert(VD).second)
1191 continue;
1192
1193 if (const IncompleteArrayType *ArrayT
1194 = Context.getAsIncompleteArrayType(VD->getType())) {
1195 // Set the length of the array to 1 (C99 6.9.2p5).
1196 Diag(VD->getLocation(), diag::warn_tentative_incomplete_array);
1197 llvm::APInt One(Context.getTypeSize(Context.getSizeType()), true);
1198 QualType T = Context.getConstantArrayType(ArrayT->getElementType(), One,
1199 nullptr, ArrayType::Normal, 0);
1200 VD->setType(T);
1201 } else if (RequireCompleteType(VD->getLocation(), VD->getType(),
1202 diag::err_tentative_def_incomplete_type))
1203 VD->setInvalidDecl();
1204
1205 // No initialization is performed for a tentative definition.
1206 CheckCompleteVariableDeclaration(VD);
1207
1208 // Notify the consumer that we've completed a tentative definition.
1209 if (!VD->isInvalidDecl())
1210 Consumer.CompleteTentativeDefinition(VD);
1211 }
1212
1213 for (auto D : ExternalDeclarations) {
1214 if (!D || D->isInvalidDecl() || D->getPreviousDecl() || !D->isUsed())
1215 continue;
1216
1217 Consumer.CompleteExternalDeclaration(D);
1218 }
1219
1220 // If there were errors, disable 'unused' warnings since they will mostly be
1221 // noise. Don't warn for a use from a module: either we should warn on all
1222 // file-scope declarations in modules or not at all, but whether the
1223 // declaration is used is immaterial.
1224 if (!Diags.hasErrorOccurred() && TUKind != TU_Module) {
1225 // Output warning for unused file scoped decls.
1226 for (UnusedFileScopedDeclsType::iterator
1227 I = UnusedFileScopedDecls.begin(ExternalSource),
1228 E = UnusedFileScopedDecls.end(); I != E; ++I) {
1229 if (ShouldRemoveFromUnused(this, *I))
1230 continue;
1231
1232 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(*I)) {
1233 const FunctionDecl *DiagD;
1234 if (!FD->hasBody(DiagD))
1235 DiagD = FD;
1236 if (DiagD->isDeleted())
1237 continue; // Deleted functions are supposed to be unused.
1238 if (DiagD->isReferenced()) {
1239 if (isa<CXXMethodDecl>(DiagD))
1240 Diag(DiagD->getLocation(), diag::warn_unneeded_member_function)
1241 << DiagD;
1242 else {
1243 if (FD->getStorageClass() == SC_Static &&
1244 !FD->isInlineSpecified() &&
1245 !SourceMgr.isInMainFile(
1246 SourceMgr.getExpansionLoc(FD->getLocation())))
1247 Diag(DiagD->getLocation(),
1248 diag::warn_unneeded_static_internal_decl)
1249 << DiagD;
1250 else
1251 Diag(DiagD->getLocation(), diag::warn_unneeded_internal_decl)
1252 << /*function*/ 0 << DiagD;
1253 }
1254 } else {
1255 if (FD->getDescribedFunctionTemplate())
1256 Diag(DiagD->getLocation(), diag::warn_unused_template)
1257 << /*function*/ 0 << DiagD;
1258 else
1259 Diag(DiagD->getLocation(), isa<CXXMethodDecl>(DiagD)
1260 ? diag::warn_unused_member_function
1261 : diag::warn_unused_function)
1262 << DiagD;
1263 }
1264 } else {
1265 const VarDecl *DiagD = cast<VarDecl>(*I)->getDefinition();
1266 if (!DiagD)
1267 DiagD = cast<VarDecl>(*I);
1268 if (DiagD->isReferenced()) {
1269 Diag(DiagD->getLocation(), diag::warn_unneeded_internal_decl)
1270 << /*variable*/ 1 << DiagD;
1271 } else if (DiagD->getType().isConstQualified()) {
1272 const SourceManager &SM = SourceMgr;
1273 if (SM.getMainFileID() != SM.getFileID(DiagD->getLocation()) ||
1274 !PP.getLangOpts().IsHeaderFile)
1275 Diag(DiagD->getLocation(), diag::warn_unused_const_variable)
1276 << DiagD;
1277 } else {
1278 if (DiagD->getDescribedVarTemplate())
1279 Diag(DiagD->getLocation(), diag::warn_unused_template)
1280 << /*variable*/ 1 << DiagD;
1281 else
1282 Diag(DiagD->getLocation(), diag::warn_unused_variable) << DiagD;
1283 }
1284 }
1285 }
1286
1287 emitAndClearUnusedLocalTypedefWarnings();
1288 }
1289
1290 if (!Diags.isIgnored(diag::warn_unused_private_field, SourceLocation())) {
1291 // FIXME: Load additional unused private field candidates from the external
1292 // source.
1293 RecordCompleteMap RecordsComplete;
1294 RecordCompleteMap MNCComplete;
1295 for (NamedDeclSetType::iterator I = UnusedPrivateFields.begin(),
1296 E = UnusedPrivateFields.end(); I != E; ++I) {
1297 const NamedDecl *D = *I;
1298 const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D->getDeclContext());
1299 if (RD && !RD->isUnion() &&
1300 IsRecordFullyDefined(RD, RecordsComplete, MNCComplete)) {
1301 Diag(D->getLocation(), diag::warn_unused_private_field)
1302 << D->getDeclName();
1303 }
1304 }
1305 }
1306
1307 if (!Diags.isIgnored(diag::warn_mismatched_delete_new, SourceLocation())) {
1308 if (ExternalSource)
1309 ExternalSource->ReadMismatchingDeleteExpressions(DeleteExprs);
1310 for (const auto &DeletedFieldInfo : DeleteExprs) {
1311 for (const auto &DeleteExprLoc : DeletedFieldInfo.second) {
1312 AnalyzeDeleteExprMismatch(DeletedFieldInfo.first, DeleteExprLoc.first,
1313 DeleteExprLoc.second);
1314 }
1315 }
1316 }
1317
1318 // Check we've noticed that we're no longer parsing the initializer for every
1319 // variable. If we miss cases, then at best we have a performance issue and
1320 // at worst a rejects-valid bug.
1321 assert(ParsingInitForAutoVars.empty() &&
1322 "Didn't unmark var as having its initializer parsed");
1323
1324 if (!PP.isIncrementalProcessingEnabled())
1325 TUScope = nullptr;
1326 }
1327
1328
1329 //===----------------------------------------------------------------------===//
1330 // Helper functions.
1331 //===----------------------------------------------------------------------===//
1332
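/// Walk out of transparent contexts (blocks, enums, captured statements,
/// requires-expression bodies) and lambda call operators to find the nearest
/// enclosing function-like DeclContext.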
DeclContext *Sema::getFunctionLevelDeclContext() {
1334 DeclContext *DC = CurContext;
1335
1336 while (true) {
1337 if (isa<BlockDecl>(DC) || isa<EnumDecl>(DC) || isa<CapturedDecl>(DC) ||
1338 isa<RequiresExprBodyDecl>(DC)) {
1339 DC = DC->getParent();
1340 } else if (isa<CXXMethodDecl>(DC) &&
1341 cast<CXXMethodDecl>(DC)->getOverloadedOperator() == OO_Call &&
1342 cast<CXXRecordDecl>(DC->getParent())->isLambda()) {
1343 DC = DC->getParent()->getParent();
1344 }
1345 else break;
1346 }
1347
1348 return DC;
1349 }
1350
1351 /// getCurFunctionDecl - If inside of a function body, this returns a pointer
1352 /// to the function decl for the function being parsed. If we're currently
1353 /// in a 'block', this returns the containing context.
FunctionDecl *Sema::getCurFunctionDecl() {
1355 DeclContext *DC = getFunctionLevelDeclContext();
1356 return dyn_cast<FunctionDecl>(DC);
1357 }
1358
ObjCMethodDecl *Sema::getCurMethodDecl() {
1360 DeclContext *DC = getFunctionLevelDeclContext();
1361 while (isa<RecordDecl>(DC))
1362 DC = DC->getParent();
1363 return dyn_cast<ObjCMethodDecl>(DC);
1364 }
1365
NamedDecl *Sema::getCurFunctionOrMethodDecl() {
1367 DeclContext *DC = getFunctionLevelDeclContext();
1368 if (isa<ObjCMethodDecl>(DC) || isa<FunctionDecl>(DC))
1369 return cast<NamedDecl>(DC);
1370 return nullptr;
1371 }
1372
LangAS Sema::getDefaultCXXMethodAddrSpace() const {
1374 if (getLangOpts().OpenCL)
1375 return LangAS::opencl_generic;
1376 return LangAS::Default;
1377 }
1378
void Sema::EmitCurrentDiagnostic(unsigned DiagID) {
1380 // FIXME: It doesn't make sense to me that DiagID is an incoming argument here
1381 // and yet we also use the current diag ID on the DiagnosticsEngine. This has
1382 // been made more painfully obvious by the refactor that introduced this
1383 // function, but it is possible that the incoming argument can be
1384 // eliminated. If it truly cannot be (for example, there is some reentrancy
1385 // issue I am not seeing yet), then there should at least be a clarifying
1386 // comment somewhere.
1387 if (Optional<TemplateDeductionInfo*> Info = isSFINAEContext()) {
1388 switch (DiagnosticIDs::getDiagnosticSFINAEResponse(
1389 Diags.getCurrentDiagID())) {
1390 case DiagnosticIDs::SFINAE_Report:
1391 // We'll report the diagnostic below.
1392 break;
1393
1394 case DiagnosticIDs::SFINAE_SubstitutionFailure:
1395 // Count this failure so that we know that template argument deduction
1396 // has failed.
1397 ++NumSFINAEErrors;
1398
1399 // Make a copy of this suppressed diagnostic and store it with the
1400 // template-deduction information.
1401 if (*Info && !(*Info)->hasSFINAEDiagnostic()) {
1402 Diagnostic DiagInfo(&Diags);
1403 (*Info)->addSFINAEDiagnostic(DiagInfo.getLocation(),
1404 PartialDiagnostic(DiagInfo, Context.getDiagAllocator()));
1405 }
1406
1407 Diags.setLastDiagnosticIgnored(true);
1408 Diags.Clear();
1409 return;
1410
1411 case DiagnosticIDs::SFINAE_AccessControl: {
1412 // Per C++ Core Issue 1170, access control is part of SFINAE.
1413 // Additionally, the AccessCheckingSFINAE flag can be used to temporarily
1414 // make access control a part of SFINAE for the purposes of checking
1415 // type traits.
1416 if (!AccessCheckingSFINAE && !getLangOpts().CPlusPlus11)
1417 break;
1418
1419 SourceLocation Loc = Diags.getCurrentDiagLoc();
1420
1421 // Suppress this diagnostic.
1422 ++NumSFINAEErrors;
1423
1424 // Make a copy of this suppressed diagnostic and store it with the
1425 // template-deduction information.
1426 if (*Info && !(*Info)->hasSFINAEDiagnostic()) {
1427 Diagnostic DiagInfo(&Diags);
1428 (*Info)->addSFINAEDiagnostic(DiagInfo.getLocation(),
1429 PartialDiagnostic(DiagInfo, Context.getDiagAllocator()));
1430 }
1431
1432 Diags.setLastDiagnosticIgnored(true);
1433 Diags.Clear();
1434
1435       // Now that the diagnostic state is clear, produce a C++98
1436       // compatibility warning.
1437 Diag(Loc, diag::warn_cxx98_compat_sfinae_access_control);
1438
1439 // The last diagnostic which Sema produced was ignored. Suppress any
1440 // notes attached to it.
1441 Diags.setLastDiagnosticIgnored(true);
1442 return;
1443 }
1444
1445 case DiagnosticIDs::SFINAE_Suppress:
1446 // Make a copy of this suppressed diagnostic and store it with the
1447       // template-deduction information.
1448 if (*Info) {
1449 Diagnostic DiagInfo(&Diags);
1450 (*Info)->addSuppressedDiagnostic(DiagInfo.getLocation(),
1451 PartialDiagnostic(DiagInfo, Context.getDiagAllocator()));
1452 }
1453
1454 // Suppress this diagnostic.
1455 Diags.setLastDiagnosticIgnored(true);
1456 Diags.Clear();
1457 return;
1458 }
1459 }
1460
1461 // Copy the diagnostic printing policy over the ASTContext printing policy.
1462 // TODO: Stop doing that. See: https://reviews.llvm.org/D45093#1090292
1463 Context.setPrintingPolicy(getPrintingPolicy());
1464
1465 // Emit the diagnostic.
1466 if (!Diags.EmitCurrentDiagnostic())
1467 return;
1468
1469 // If this is not a note, and we're in a template instantiation
1470 // that is different from the last template instantiation where
1471 // we emitted an error, print a template instantiation
1472 // backtrace.
1473 if (!DiagnosticIDs::isBuiltinNote(DiagID))
1474 PrintContextStack();
1475 }
1476
1477 Sema::SemaDiagnosticBuilder
Diag(SourceLocation Loc,const PartialDiagnostic & PD,bool DeferHint)1478 Sema::Diag(SourceLocation Loc, const PartialDiagnostic &PD, bool DeferHint) {
1479 return Diag(Loc, PD.getDiagID(), DeferHint) << PD;
1480 }
1481
hasUncompilableErrorOccurred() const1482 bool Sema::hasUncompilableErrorOccurred() const {
1483 if (getDiagnostics().hasUncompilableErrorOccurred())
1484 return true;
1485 auto *FD = dyn_cast<FunctionDecl>(CurContext);
1486 if (!FD)
1487 return false;
1488 auto Loc = DeviceDeferredDiags.find(FD);
1489 if (Loc == DeviceDeferredDiags.end())
1490 return false;
1491 for (auto PDAt : Loc->second) {
1492 if (DiagnosticIDs::isDefaultMappingAsError(PDAt.second.getDiagID()))
1493 return true;
1494 }
1495 return false;
1496 }
1497
1498 // Print notes showing how we can reach FD starting from an a priori
1499 // known-emitted function.
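//
// Editor's illustrative sketch (hedged; the declarations below are invented
// for illustration and are not part of this file): given CUDA-like code
//
//   __device__ void f() { /* contains a deferred error */ }
//   __device__ void g() { f(); }
//   __global__ void kernel() { g(); }
//
// once f() becomes known-emitted via kernel(), the error on f() is followed
// by "called by" notes at the call site in g() and then at the call site in
// kernel(), walking DeviceKnownEmittedFns up the recorded call chain.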
emitCallStackNotes(Sema & S,FunctionDecl * FD)1500 static void emitCallStackNotes(Sema &S, FunctionDecl *FD) {
1501 auto FnIt = S.DeviceKnownEmittedFns.find(FD);
1502 while (FnIt != S.DeviceKnownEmittedFns.end()) {
1503 // Respect error limit.
1504 if (S.Diags.hasFatalErrorOccurred())
1505 return;
1506 DiagnosticBuilder Builder(
1507 S.Diags.Report(FnIt->second.Loc, diag::note_called_by));
1508 Builder << FnIt->second.FD;
1509 FnIt = S.DeviceKnownEmittedFns.find(FnIt->second.FD);
1510 }
1511 }
1512
1513 namespace {
1514
1515 /// Helper class that emits deferred diagnostic messages if an entity that
1516 /// directly or indirectly uses the function containing the deferred
1517 /// diagnostics is known to be emitted.
1518 ///
1519 /// During parsing of the AST, certain diagnostic messages are recorded as
1520 /// deferred diagnostics, since it is not yet known whether the functions
1521 /// containing them will be emitted. A list of potentially emitted functions,
1522 /// and of variables whose initializers may trigger emission of functions, is
1523 /// also recorded. DeferredDiagnosticsEmitter recursively visits the functions
1524 /// used by each such function in order to emit the deferred diagnostics.
1525 ///
1526 /// During the visit, certain OpenMP directives, or the initializers of
1527 /// variables with certain OpenMP attributes, cause the subsequent visiting of
1528 /// functions to enter a state that this implementation calls the OpenMP
1529 /// device context. The state is exited when the directive or initializer is
1530 /// exited. Being in this state can change the emission state of subsequent
1531 /// uses of functions.
1532 ///
1533 /// Conceptually, the functions and variables to be visited form a use graph
1534 /// in which each parent node uses its child nodes. At any point of the visit,
1535 /// the nodes traversed from the root to the current node form a use stack.
1536 /// The emission state of the current node depends on two factors:
1537 ///    1. the emission state of the root node
1538 ///    2. whether the current node is in an OpenMP device context
1539 /// If a function is determined to be emitted, the deferred diagnostics it
1540 /// contains are emitted, together with information about the use stack.
1541 ///
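/// An editor-added sketch of the idea (hedged; the functions below are
/// invented for illustration):
/// \code
///   void leaf() { /* a deferred diagnostic was recorded here */ }
///   void helper() { leaf(); }
///   #pragma omp declare target
///   void entry() { helper(); }
///   #pragma omp end declare target
/// \endcode
/// Visiting entry() descends through helper() to leaf(); because the root is
/// emitted for the device, leaf()'s deferred diagnostic is emitted together
/// with notes describing the use stack entry() -> helper() -> leaf().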
1542 class DeferredDiagnosticsEmitter
1543 : public UsedDeclVisitor<DeferredDiagnosticsEmitter> {
1544 public:
1545 typedef UsedDeclVisitor<DeferredDiagnosticsEmitter> Inherited;
1546
1547   // The set of functions already on the current use-path.
1548 llvm::SmallPtrSet<CanonicalDeclPtr<Decl>, 4> InUsePath;
1549
1550 // The current use-path.
1551 llvm::SmallVector<CanonicalDeclPtr<FunctionDecl>, 4> UsePath;
1552
1553   // Functions whose visit has already completed. DoneMap[0] is for the case
1554   // not in OpenMP device context; DoneMap[1] is for the case in OpenMP
1555   // device context. We need two sets because diagnostic emission may differ
1556   // depending on whether we are in OpenMP device context.
1557 llvm::SmallPtrSet<CanonicalDeclPtr<Decl>, 4> DoneMap[2];
1558
1559 // Emission state of the root node of the current use graph.
1560 bool ShouldEmitRootNode;
1561
1562   // Current OpenMP device context level. It is initialized to 0; each entry
1563   // into a device context increases it by 1 and each exit decreases it by 1.
1564   // A non-zero value indicates that we are currently in a device context.
1565 unsigned InOMPDeviceContext;
1566
DeferredDiagnosticsEmitter(Sema & S)1567 DeferredDiagnosticsEmitter(Sema &S)
1568 : Inherited(S), ShouldEmitRootNode(false), InOMPDeviceContext(0) {}
1569
shouldVisitDiscardedStmt() const1570 bool shouldVisitDiscardedStmt() const { return false; }
1571
VisitOMPTargetDirective(OMPTargetDirective * Node)1572 void VisitOMPTargetDirective(OMPTargetDirective *Node) {
1573 ++InOMPDeviceContext;
1574 Inherited::VisitOMPTargetDirective(Node);
1575 --InOMPDeviceContext;
1576 }
1577
visitUsedDecl(SourceLocation Loc,Decl * D)1578 void visitUsedDecl(SourceLocation Loc, Decl *D) {
1579 if (isa<VarDecl>(D))
1580 return;
1581 if (auto *FD = dyn_cast<FunctionDecl>(D))
1582 checkFunc(Loc, FD);
1583 else
1584 Inherited::visitUsedDecl(Loc, D);
1585 }
1586
checkVar(VarDecl * VD)1587 void checkVar(VarDecl *VD) {
1588 assert(VD->isFileVarDecl() &&
1589 "Should only check file-scope variables");
1590 if (auto *Init = VD->getInit()) {
1591 auto DevTy = OMPDeclareTargetDeclAttr::getDeviceType(VD);
1592 bool IsDev = DevTy && (*DevTy == OMPDeclareTargetDeclAttr::DT_NoHost ||
1593 *DevTy == OMPDeclareTargetDeclAttr::DT_Any);
1594 if (IsDev)
1595 ++InOMPDeviceContext;
1596 this->Visit(Init);
1597 if (IsDev)
1598 --InOMPDeviceContext;
1599 }
1600 }
1601
checkFunc(SourceLocation Loc,FunctionDecl * FD)1602 void checkFunc(SourceLocation Loc, FunctionDecl *FD) {
1603 auto &Done = DoneMap[InOMPDeviceContext > 0 ? 1 : 0];
1604 FunctionDecl *Caller = UsePath.empty() ? nullptr : UsePath.back();
1605 if ((!ShouldEmitRootNode && !S.getLangOpts().OpenMP && !Caller) ||
1606 S.shouldIgnoreInHostDeviceCheck(FD) || InUsePath.count(FD))
1607 return;
1608 // Finalize analysis of OpenMP-specific constructs.
1609 if (Caller && S.LangOpts.OpenMP && UsePath.size() == 1 &&
1610 (ShouldEmitRootNode || InOMPDeviceContext))
1611 S.finalizeOpenMPDelayedAnalysis(Caller, FD, Loc);
1612 if (Caller)
1613 S.DeviceKnownEmittedFns[FD] = {Caller, Loc};
1614 // Always emit deferred diagnostics for the direct users. This does not
1615 // lead to explosion of diagnostics since each user is visited at most
1616 // twice.
1617 if (ShouldEmitRootNode || InOMPDeviceContext)
1618 emitDeferredDiags(FD, Caller);
1619 // Do not revisit a function if the function body has been completely
1620 // visited before.
1621 if (!Done.insert(FD).second)
1622 return;
1623 InUsePath.insert(FD);
1624 UsePath.push_back(FD);
1625 if (auto *S = FD->getBody()) {
1626 this->Visit(S);
1627 }
1628 UsePath.pop_back();
1629 InUsePath.erase(FD);
1630 }
1631
checkRecordedDecl(Decl * D)1632 void checkRecordedDecl(Decl *D) {
1633 if (auto *FD = dyn_cast<FunctionDecl>(D)) {
1634 ShouldEmitRootNode = S.getEmissionStatus(FD, /*Final=*/true) ==
1635 Sema::FunctionEmissionStatus::Emitted;
1636 checkFunc(SourceLocation(), FD);
1637 } else
1638 checkVar(cast<VarDecl>(D));
1639 }
1640
1641 // Emit any deferred diagnostics for FD
emitDeferredDiags(FunctionDecl * FD,bool ShowCallStack)1642 void emitDeferredDiags(FunctionDecl *FD, bool ShowCallStack) {
1643 auto It = S.DeviceDeferredDiags.find(FD);
1644 if (It == S.DeviceDeferredDiags.end())
1645 return;
1646 bool HasWarningOrError = false;
1647 bool FirstDiag = true;
1648 for (PartialDiagnosticAt &PDAt : It->second) {
1649 // Respect error limit.
1650 if (S.Diags.hasFatalErrorOccurred())
1651 return;
1652 const SourceLocation &Loc = PDAt.first;
1653 const PartialDiagnostic &PD = PDAt.second;
1654 HasWarningOrError |=
1655 S.getDiagnostics().getDiagnosticLevel(PD.getDiagID(), Loc) >=
1656 DiagnosticsEngine::Warning;
1657 {
1658 DiagnosticBuilder Builder(S.Diags.Report(Loc, PD.getDiagID()));
1659 PD.Emit(Builder);
1660 }
1661       // Emit the note on the first diagnostic, in case too many diagnostics
1662       // would otherwise prevent the note from being emitted.
1663 if (FirstDiag && HasWarningOrError && ShowCallStack) {
1664 emitCallStackNotes(S, FD);
1665 FirstDiag = false;
1666 }
1667 }
1668 }
1669 };
1670 } // namespace
1671
emitDeferredDiags()1672 void Sema::emitDeferredDiags() {
1673 if (ExternalSource)
1674 ExternalSource->ReadDeclsToCheckForDeferredDiags(
1675 DeclsToCheckForDeferredDiags);
1676
1677 if ((DeviceDeferredDiags.empty() && !LangOpts.OpenMP) ||
1678 DeclsToCheckForDeferredDiags.empty())
1679 return;
1680
1681 DeferredDiagnosticsEmitter DDE(*this);
1682 for (auto D : DeclsToCheckForDeferredDiags)
1683 DDE.checkRecordedDecl(D);
1684 }
1685
1686 // In CUDA, there are some constructs which may appear in semantically-valid
1687 // code, but trigger errors if we ever generate code for the function in which
1688 // they appear. Essentially every construct you're not allowed to use on the
1689 // device falls into this category, because you are allowed to use these
1690 // constructs in a __host__ __device__ function, but only if that function is
1691 // never codegen'ed on the device.
1692 //
1693 // To handle semantic checking for these constructs, we keep track of the set of
1694 // functions we know will be emitted, either because we could tell a priori that
1695 // they would be emitted, or because they were transitively called by a
1696 // known-emitted function.
1697 //
1698 // We also keep a partial call graph of which not-known-emitted functions call
1699 // which other not-known-emitted functions.
1700 //
1701 // When we see something which is illegal if the current function is emitted
1702 // (usually by way of CUDADiagIfDeviceCode, CUDADiagIfHostCode, or
1703 // CheckCUDACall), we first check if the current function is known-emitted. If
1704 // so, we immediately output the diagnostic.
1705 //
1706 // Otherwise, we "defer" the diagnostic. It sits in Sema::DeviceDeferredDiags
1707 // until we discover that the function is known-emitted, at which point we take
1708 // it out of this map and emit the diagnostic.
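//
// An editor-added illustration (hedged; the functions below are invented):
//
//   __host__ __device__ void hd() { throw 0; }  // 'throw' not allowed in
//                                                // device code
//   void host_fn() { hd(); }                     // OK: only needs host codegen
//   __global__ void kernel() { hd(); }           // hd() becomes known-emitted
//                                                // on the device
//
// The diagnostic about 'throw' is deferred when hd() is parsed and is emitted
// only once the call from kernel() shows that hd() must be codegen'ed for the
// device; the note chain then points back through kernel().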
1709
SemaDiagnosticBuilder(Kind K,SourceLocation Loc,unsigned DiagID,FunctionDecl * Fn,Sema & S)1710 Sema::SemaDiagnosticBuilder::SemaDiagnosticBuilder(Kind K, SourceLocation Loc,
1711 unsigned DiagID,
1712 FunctionDecl *Fn, Sema &S)
1713 : S(S), Loc(Loc), DiagID(DiagID), Fn(Fn),
1714 ShowCallStack(K == K_ImmediateWithCallStack || K == K_Deferred) {
1715 switch (K) {
1716 case K_Nop:
1717 break;
1718 case K_Immediate:
1719 case K_ImmediateWithCallStack:
1720 ImmediateDiag.emplace(
1721 ImmediateDiagBuilder(S.Diags.Report(Loc, DiagID), S, DiagID));
1722 break;
1723 case K_Deferred:
1724 assert(Fn && "Must have a function to attach the deferred diag to.");
1725 auto &Diags = S.DeviceDeferredDiags[Fn];
1726 PartialDiagId.emplace(Diags.size());
1727 Diags.emplace_back(Loc, S.PDiag(DiagID));
1728 break;
1729 }
1730 }
1731
SemaDiagnosticBuilder(SemaDiagnosticBuilder && D)1732 Sema::SemaDiagnosticBuilder::SemaDiagnosticBuilder(SemaDiagnosticBuilder &&D)
1733 : S(D.S), Loc(D.Loc), DiagID(D.DiagID), Fn(D.Fn),
1734 ShowCallStack(D.ShowCallStack), ImmediateDiag(D.ImmediateDiag),
1735 PartialDiagId(D.PartialDiagId) {
1736 // Clean the previous diagnostics.
1737 D.ShowCallStack = false;
1738 D.ImmediateDiag.reset();
1739 D.PartialDiagId.reset();
1740 }
1741
~SemaDiagnosticBuilder()1742 Sema::SemaDiagnosticBuilder::~SemaDiagnosticBuilder() {
1743 if (ImmediateDiag) {
1744 // Emit our diagnostic and, if it was a warning or error, output a callstack
1745 // if Fn isn't a priori known-emitted.
1746 bool IsWarningOrError = S.getDiagnostics().getDiagnosticLevel(
1747 DiagID, Loc) >= DiagnosticsEngine::Warning;
1748 ImmediateDiag.reset(); // Emit the immediate diag.
1749 if (IsWarningOrError && ShowCallStack)
1750 emitCallStackNotes(S, Fn);
1751 } else {
1752 assert((!PartialDiagId || ShowCallStack) &&
1753 "Must always show call stack for deferred diags.");
1754 }
1755 }
1756
1757 Sema::SemaDiagnosticBuilder
targetDiag(SourceLocation Loc,unsigned DiagID,FunctionDecl * FD)1758 Sema::targetDiag(SourceLocation Loc, unsigned DiagID, FunctionDecl *FD) {
1759 FD = FD ? FD : getCurFunctionDecl();
1760 if (LangOpts.OpenMP)
1761 return LangOpts.OpenMPIsDevice ? diagIfOpenMPDeviceCode(Loc, DiagID, FD)
1762 : diagIfOpenMPHostCode(Loc, DiagID, FD);
1763 if (getLangOpts().CUDA)
1764 return getLangOpts().CUDAIsDevice ? CUDADiagIfDeviceCode(Loc, DiagID)
1765 : CUDADiagIfHostCode(Loc, DiagID);
1766
1767 if (getLangOpts().SYCLIsDevice)
1768 return SYCLDiagIfDeviceCode(Loc, DiagID);
1769
1770 return SemaDiagnosticBuilder(SemaDiagnosticBuilder::K_Immediate, Loc, DiagID,
1771 FD, *this);
1772 }
1773
Diag(SourceLocation Loc,unsigned DiagID,bool DeferHint)1774 Sema::SemaDiagnosticBuilder Sema::Diag(SourceLocation Loc, unsigned DiagID,
1775 bool DeferHint) {
1776 bool IsError = Diags.getDiagnosticIDs()->isDefaultMappingAsError(DiagID);
1777 bool ShouldDefer = getLangOpts().CUDA && LangOpts.GPUDeferDiag &&
1778 DiagnosticIDs::isDeferrable(DiagID) &&
1779 (DeferHint || !IsError);
1780 auto SetIsLastErrorImmediate = [&](bool Flag) {
1781 if (IsError)
1782 IsLastErrorImmediate = Flag;
1783 };
1784 if (!ShouldDefer) {
1785 SetIsLastErrorImmediate(true);
1786 return SemaDiagnosticBuilder(SemaDiagnosticBuilder::K_Immediate, Loc,
1787 DiagID, getCurFunctionDecl(), *this);
1788 }
1789
1790 SemaDiagnosticBuilder DB = getLangOpts().CUDAIsDevice
1791 ? CUDADiagIfDeviceCode(Loc, DiagID)
1792 : CUDADiagIfHostCode(Loc, DiagID);
1793 SetIsLastErrorImmediate(DB.isImmediate());
1794 return DB;
1795 }
1796
checkDeviceDecl(ValueDecl * D,SourceLocation Loc)1797 void Sema::checkDeviceDecl(ValueDecl *D, SourceLocation Loc) {
1798 if (isUnevaluatedContext())
1799 return;
1800
1801 Decl *C = cast<Decl>(getCurLexicalContext());
1802
1803 // Memcpy operations for structs containing a member with unsupported type
1804 // are ok, though.
1805 if (const auto *MD = dyn_cast<CXXMethodDecl>(C)) {
1806 if ((MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()) &&
1807 MD->isTrivial())
1808 return;
1809
1810 if (const auto *Ctor = dyn_cast<CXXConstructorDecl>(MD))
1811 if (Ctor->isCopyOrMoveConstructor() && Ctor->isTrivial())
1812 return;
1813 }
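  // Editor-added illustration (hedged; 'Wrap' is invented): the early return
  // above allows, e.g.,
  //
  //   struct Wrap { __float128 q; };                 // member type may be
  //                                                  // unsupported on target
  //   void copy(Wrap &a, const Wrap &b) { a = b; }   // trivial copy: OK
  //
  // while a direct use of such a value in device code is still diagnosed
  // below.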
1814
1815 // Try to associate errors with the lexical context, if that is a function, or
1816 // the value declaration otherwise.
1817 FunctionDecl *FD =
1818 isa<FunctionDecl>(C) ? cast<FunctionDecl>(C) : dyn_cast<FunctionDecl>(D);
1819 auto CheckType = [&](QualType Ty) {
1820 if (Ty->isDependentType())
1821 return;
1822
1823 if (Ty->isExtIntType()) {
1824 if (!Context.getTargetInfo().hasExtIntType()) {
1825 targetDiag(Loc, diag::err_device_unsupported_type, FD)
1826 << D << false /*show bit size*/ << 0 /*bitsize*/
1827 << Ty << Context.getTargetInfo().getTriple().str();
1828 }
1829 return;
1830 }
1831
1832 if ((Ty->isFloat16Type() && !Context.getTargetInfo().hasFloat16Type()) ||
1833 ((Ty->isFloat128Type() ||
1834 (Ty->isRealFloatingType() && Context.getTypeSize(Ty) == 128)) &&
1835 !Context.getTargetInfo().hasFloat128Type()) ||
1836 (Ty->isIntegerType() && Context.getTypeSize(Ty) == 128 &&
1837 !Context.getTargetInfo().hasInt128Type())) {
1838 if (targetDiag(Loc, diag::err_device_unsupported_type, FD)
1839 << D << true /*show bit size*/
1840 << static_cast<unsigned>(Context.getTypeSize(Ty)) << Ty
1841 << Context.getTargetInfo().getTriple().str())
1842 D->setInvalidDecl();
1843 targetDiag(D->getLocation(), diag::note_defined_here, FD) << D;
1844 }
1845 };
1846
1847 QualType Ty = D->getType();
1848 CheckType(Ty);
1849
1850 if (const auto *FPTy = dyn_cast<FunctionProtoType>(Ty)) {
1851 for (const auto &ParamTy : FPTy->param_types())
1852 CheckType(ParamTy);
1853 CheckType(FPTy->getReturnType());
1854 }
1855 if (const auto *FNPTy = dyn_cast<FunctionNoProtoType>(Ty))
1856 CheckType(FNPTy->getReturnType());
1857 }
1858
1859 /// Looks through the macro-expansion chain for the given
1860 /// location, looking for a macro expansion with the given name.
1861 /// If one is found, returns true and sets the location to that
1862 /// expansion loc.
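///
/// A hedged, editor-added illustration (the macro below is invented):
/// \code
///   #define CHECK(x) do_check(x)
///   CHECK(p);   // a diagnostic location may point inside this expansion
/// \endcode
/// For such a location, findMacroSpelling(loc, "CHECK") jumps to the
/// expansion location and returns true if the spelling there is "CHECK".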
findMacroSpelling(SourceLocation & locref,StringRef name)1863 bool Sema::findMacroSpelling(SourceLocation &locref, StringRef name) {
1864 SourceLocation loc = locref;
1865 if (!loc.isMacroID()) return false;
1866
1867 // There's no good way right now to look at the intermediate
1868 // expansions, so just jump to the expansion location.
1869 loc = getSourceManager().getExpansionLoc(loc);
1870
1871 // If that's written with the name, stop here.
1872 SmallString<16> buffer;
1873 if (getPreprocessor().getSpelling(loc, buffer) == name) {
1874 locref = loc;
1875 return true;
1876 }
1877 return false;
1878 }
1879
1880 /// Determines the active Scope associated with the given declaration
1881 /// context.
1882 ///
1883 /// This routine maps a declaration context to the active Scope object that
1884 /// represents that declaration context in the parser. It is typically used
1885 /// from "scope-less" code (e.g., template instantiation, lazy creation of
1886 /// declarations) that injects a name for name-lookup purposes and, therefore,
1887 /// must update the Scope.
1888 ///
1889 /// \returns The scope corresponding to the given declaration context, or NULL
1890 /// if no such scope is open.
getScopeForContext(DeclContext * Ctx)1891 Scope *Sema::getScopeForContext(DeclContext *Ctx) {
1892
1893 if (!Ctx)
1894 return nullptr;
1895
1896 Ctx = Ctx->getPrimaryContext();
1897 for (Scope *S = getCurScope(); S; S = S->getParent()) {
1898 // Ignore scopes that cannot have declarations. This is important for
1899 // out-of-line definitions of static class members.
1900 if (S->getFlags() & (Scope::DeclScope | Scope::TemplateParamScope))
1901 if (DeclContext *Entity = S->getEntity())
1902 if (Ctx == Entity->getPrimaryContext())
1903 return S;
1904 }
1905
1906 return nullptr;
1907 }
1908
1909 /// Enter a new function scope
PushFunctionScope()1910 void Sema::PushFunctionScope() {
1911 if (FunctionScopes.empty() && CachedFunctionScope) {
1912 // Use CachedFunctionScope to avoid allocating memory when possible.
1913 CachedFunctionScope->Clear();
1914 FunctionScopes.push_back(CachedFunctionScope.release());
1915 } else {
1916 FunctionScopes.push_back(new FunctionScopeInfo(getDiagnostics()));
1917 }
1918 if (LangOpts.OpenMP)
1919 pushOpenMPFunctionRegion();
1920 }
1921
PushBlockScope(Scope * BlockScope,BlockDecl * Block)1922 void Sema::PushBlockScope(Scope *BlockScope, BlockDecl *Block) {
1923 FunctionScopes.push_back(new BlockScopeInfo(getDiagnostics(),
1924 BlockScope, Block));
1925 }
1926
PushLambdaScope()1927 LambdaScopeInfo *Sema::PushLambdaScope() {
1928 LambdaScopeInfo *const LSI = new LambdaScopeInfo(getDiagnostics());
1929 FunctionScopes.push_back(LSI);
1930 return LSI;
1931 }
1932
RecordParsingTemplateParameterDepth(unsigned Depth)1933 void Sema::RecordParsingTemplateParameterDepth(unsigned Depth) {
1934 if (LambdaScopeInfo *const LSI = getCurLambda()) {
1935 LSI->AutoTemplateParameterDepth = Depth;
1936 return;
1937 }
1938 llvm_unreachable(
1939 "Remove assertion if intentionally called in a non-lambda context.");
1940 }
1941
1942 // Check that the type of the VarDecl has an accessible copy constructor and
1943 // resolve its destructor's exception specification.
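//
// Editor-added sketch (hedged; the snippet below is invented) of the case this
// handles: a __block variable of class type captured by an escaping block,
//
//   void f() {
//     __block SomeClass v;
//     auto blk = ^{ use(v); };   // escaping capture: copying the block
//     keepForLater(blk);         // requires copying 'v' to the heap
//   }
//
// so 'v' needs an accessible copy (or move) constructor, and IRGen needs the
// destructor's exception specification when emitting the block helpers.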
checkEscapingByref(VarDecl * VD,Sema & S)1944 static void checkEscapingByref(VarDecl *VD, Sema &S) {
1945 QualType T = VD->getType();
1946 EnterExpressionEvaluationContext scope(
1947 S, Sema::ExpressionEvaluationContext::PotentiallyEvaluated);
1948 SourceLocation Loc = VD->getLocation();
1949 Expr *VarRef =
1950 new (S.Context) DeclRefExpr(S.Context, VD, false, T, VK_LValue, Loc);
1951 ExprResult Result = S.PerformMoveOrCopyInitialization(
1952 InitializedEntity::InitializeBlock(Loc, T, false), VD, VD->getType(),
1953 VarRef, /*AllowNRVO=*/true);
1954 if (!Result.isInvalid()) {
1955 Result = S.MaybeCreateExprWithCleanups(Result);
1956 Expr *Init = Result.getAs<Expr>();
1957 S.Context.setBlockVarCopyInit(VD, Init, S.canThrow(Init));
1958 }
1959
1960 // The destructor's exception specification is needed when IRGen generates
1961 // block copy/destroy functions. Resolve it here.
1962 if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
1963 if (CXXDestructorDecl *DD = RD->getDestructor()) {
1964 auto *FPT = DD->getType()->getAs<FunctionProtoType>();
1965 S.ResolveExceptionSpec(Loc, FPT);
1966 }
1967 }
1968
markEscapingByrefs(const FunctionScopeInfo & FSI,Sema & S)1969 static void markEscapingByrefs(const FunctionScopeInfo &FSI, Sema &S) {
1970 // Set the EscapingByref flag of __block variables captured by
1971 // escaping blocks.
1972 for (const BlockDecl *BD : FSI.Blocks) {
1973 for (const BlockDecl::Capture &BC : BD->captures()) {
1974 VarDecl *VD = BC.getVariable();
1975 if (VD->hasAttr<BlocksAttr>()) {
1976 // Nothing to do if this is a __block variable captured by a
1977 // non-escaping block.
1978 if (BD->doesNotEscape())
1979 continue;
1980 VD->setEscapingByref();
1981 }
1982 // Check whether the captured variable is or contains an object of
1983 // non-trivial C union type.
1984 QualType CapType = BC.getVariable()->getType();
1985 if (CapType.hasNonTrivialToPrimitiveDestructCUnion() ||
1986 CapType.hasNonTrivialToPrimitiveCopyCUnion())
1987 S.checkNonTrivialCUnion(BC.getVariable()->getType(),
1988 BD->getCaretLocation(),
1989 Sema::NTCUC_BlockCapture,
1990 Sema::NTCUK_Destruct|Sema::NTCUK_Copy);
1991 }
1992 }
1993
1994 for (VarDecl *VD : FSI.ByrefBlockVars) {
1995 // __block variables might require us to capture a copy-initializer.
1996 if (!VD->isEscapingByref())
1997 continue;
1998 // It's currently invalid to ever have a __block variable with an
1999 // array type; should we diagnose that here?
2000 // Regardless, we don't want to ignore array nesting when
2001 // constructing this copy.
2002 if (VD->getType()->isStructureOrClassType())
2003 checkEscapingByref(VD, S);
2004 }
2005 }
2006
2007 /// Pop a function (or block or lambda or captured region) scope from the stack.
2008 ///
2009 /// \param WP The warning policy to use for CFG-based warnings, or null if such
2010 /// warnings should not be produced.
2011 /// \param D The declaration corresponding to this function scope, if producing
2012 /// CFG-based warnings.
2013 /// \param BlockType The type of the block expression, if D is a BlockDecl.
2014 Sema::PoppedFunctionScopePtr
PopFunctionScopeInfo(const AnalysisBasedWarnings::Policy * WP,const Decl * D,QualType BlockType)2015 Sema::PopFunctionScopeInfo(const AnalysisBasedWarnings::Policy *WP,
2016 const Decl *D, QualType BlockType) {
2017 assert(!FunctionScopes.empty() && "mismatched push/pop!");
2018
2019 markEscapingByrefs(*FunctionScopes.back(), *this);
2020
2021 PoppedFunctionScopePtr Scope(FunctionScopes.pop_back_val(),
2022 PoppedFunctionScopeDeleter(this));
2023
2024 if (LangOpts.OpenMP)
2025 popOpenMPFunctionRegion(Scope.get());
2026
2027 // Issue any analysis-based warnings.
2028 if (WP && D)
2029 AnalysisWarnings.IssueWarnings(*WP, Scope.get(), D, BlockType);
2030 else
2031 for (const auto &PUD : Scope->PossiblyUnreachableDiags)
2032 Diag(PUD.Loc, PUD.PD);
2033
2034 return Scope;
2035 }
2036
2037 void Sema::PoppedFunctionScopeDeleter::
operator ()(sema::FunctionScopeInfo * Scope) const2038 operator()(sema::FunctionScopeInfo *Scope) const {
2039 // Stash the function scope for later reuse if it's for a normal function.
2040 if (Scope->isPlainFunction() && !Self->CachedFunctionScope)
2041 Self->CachedFunctionScope.reset(Scope);
2042 else
2043 delete Scope;
2044 }
2045
PushCompoundScope(bool IsStmtExpr)2046 void Sema::PushCompoundScope(bool IsStmtExpr) {
2047 getCurFunction()->CompoundScopes.push_back(CompoundScopeInfo(IsStmtExpr));
2048 }
2049
PopCompoundScope()2050 void Sema::PopCompoundScope() {
2051 FunctionScopeInfo *CurFunction = getCurFunction();
2052 assert(!CurFunction->CompoundScopes.empty() && "mismatched push/pop");
2053
2054 CurFunction->CompoundScopes.pop_back();
2055 }
2056
2057 /// Determine whether any errors occurred within this function/method/
2058 /// block.
hasAnyUnrecoverableErrorsInThisFunction() const2059 bool Sema::hasAnyUnrecoverableErrorsInThisFunction() const {
2060 return getCurFunction()->hasUnrecoverableErrorOccurred();
2061 }
2062
setFunctionHasBranchIntoScope()2063 void Sema::setFunctionHasBranchIntoScope() {
2064 if (!FunctionScopes.empty())
2065 FunctionScopes.back()->setHasBranchIntoScope();
2066 }
2067
setFunctionHasBranchProtectedScope()2068 void Sema::setFunctionHasBranchProtectedScope() {
2069 if (!FunctionScopes.empty())
2070 FunctionScopes.back()->setHasBranchProtectedScope();
2071 }
2072
setFunctionHasIndirectGoto()2073 void Sema::setFunctionHasIndirectGoto() {
2074 if (!FunctionScopes.empty())
2075 FunctionScopes.back()->setHasIndirectGoto();
2076 }
2077
setFunctionHasMustTail()2078 void Sema::setFunctionHasMustTail() {
2079 if (!FunctionScopes.empty())
2080 FunctionScopes.back()->setHasMustTail();
2081 }
2082
getCurBlock()2083 BlockScopeInfo *Sema::getCurBlock() {
2084 if (FunctionScopes.empty())
2085 return nullptr;
2086
2087 auto CurBSI = dyn_cast<BlockScopeInfo>(FunctionScopes.back());
2088 if (CurBSI && CurBSI->TheDecl &&
2089 !CurBSI->TheDecl->Encloses(CurContext)) {
2090 // We have switched contexts due to template instantiation.
2091 assert(!CodeSynthesisContexts.empty());
2092 return nullptr;
2093 }
2094
2095 return CurBSI;
2096 }
2097
getEnclosingFunction() const2098 FunctionScopeInfo *Sema::getEnclosingFunction() const {
2099 if (FunctionScopes.empty())
2100 return nullptr;
2101
2102 for (int e = FunctionScopes.size() - 1; e >= 0; --e) {
2103 if (isa<sema::BlockScopeInfo>(FunctionScopes[e]))
2104 continue;
2105 return FunctionScopes[e];
2106 }
2107 return nullptr;
2108 }
2109
getEnclosingLambda() const2110 LambdaScopeInfo *Sema::getEnclosingLambda() const {
2111 for (auto *Scope : llvm::reverse(FunctionScopes)) {
2112 if (auto *LSI = dyn_cast<sema::LambdaScopeInfo>(Scope)) {
2113 if (LSI->Lambda && !LSI->Lambda->Encloses(CurContext)) {
2114 // We have switched contexts due to template instantiation.
2115 // FIXME: We should swap out the FunctionScopes during code synthesis
2116 // so that we don't need to check for this.
2117 assert(!CodeSynthesisContexts.empty());
2118 return nullptr;
2119 }
2120 return LSI;
2121 }
2122 }
2123 return nullptr;
2124 }
2125
getCurLambda(bool IgnoreNonLambdaCapturingScope)2126 LambdaScopeInfo *Sema::getCurLambda(bool IgnoreNonLambdaCapturingScope) {
2127 if (FunctionScopes.empty())
2128 return nullptr;
2129
2130 auto I = FunctionScopes.rbegin();
2131 if (IgnoreNonLambdaCapturingScope) {
2132 auto E = FunctionScopes.rend();
2133 while (I != E && isa<CapturingScopeInfo>(*I) && !isa<LambdaScopeInfo>(*I))
2134 ++I;
2135 if (I == E)
2136 return nullptr;
2137 }
2138 auto *CurLSI = dyn_cast<LambdaScopeInfo>(*I);
2139 if (CurLSI && CurLSI->Lambda &&
2140 !CurLSI->Lambda->Encloses(CurContext)) {
2141 // We have switched contexts due to template instantiation.
2142 assert(!CodeSynthesisContexts.empty());
2143 return nullptr;
2144 }
2145
2146 return CurLSI;
2147 }
2148
2149 // We have a generic lambda if we parsed auto parameters, or we have
2150 // an associated template parameter list.
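// Editor-added illustration (hedged): both forms below count as generic
// lambdas here, the first via invented template parameters for its 'auto'
// parameter and the second via an explicit template parameter list (C++20):
//
//   auto g1 = [](auto x) { return x; };
//   auto g2 = []<typename T>(T x) { return x; };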
getCurGenericLambda()2151 LambdaScopeInfo *Sema::getCurGenericLambda() {
2152 if (LambdaScopeInfo *LSI = getCurLambda()) {
2153 return (LSI->TemplateParams.size() ||
2154 LSI->GLTemplateParameterList) ? LSI : nullptr;
2155 }
2156 return nullptr;
2157 }
2158
2159
ActOnComment(SourceRange Comment)2160 void Sema::ActOnComment(SourceRange Comment) {
2161 if (!LangOpts.RetainCommentsFromSystemHeaders &&
2162 SourceMgr.isInSystemHeader(Comment.getBegin()))
2163 return;
2164 RawComment RC(SourceMgr, Comment, LangOpts.CommentOpts, false);
2165 if (RC.isAlmostTrailingComment()) {
2166 SourceRange MagicMarkerRange(Comment.getBegin(),
2167 Comment.getBegin().getLocWithOffset(3));
2168 StringRef MagicMarkerText;
2169 switch (RC.getKind()) {
2170 case RawComment::RCK_OrdinaryBCPL:
2171 MagicMarkerText = "///<";
2172 break;
2173 case RawComment::RCK_OrdinaryC:
2174 MagicMarkerText = "/**<";
2175 break;
2176 default:
2177 llvm_unreachable("if this is an almost Doxygen comment, "
2178 "it should be ordinary");
2179 }
2180 Diag(Comment.getBegin(), diag::warn_not_a_doxygen_trailing_member_comment) <<
2181 FixItHint::CreateReplacement(MagicMarkerRange, MagicMarkerText);
2182 }
2183 Context.addComment(RC);
2184 }
2185
2186 // Pin this vtable to this file.
~ExternalSemaSource()2187 ExternalSemaSource::~ExternalSemaSource() {}
2188 char ExternalSemaSource::ID;
2189
ReadMethodPool(Selector Sel)2190 void ExternalSemaSource::ReadMethodPool(Selector Sel) { }
updateOutOfDateSelector(Selector Sel)2191 void ExternalSemaSource::updateOutOfDateSelector(Selector Sel) { }
2192
ReadKnownNamespaces(SmallVectorImpl<NamespaceDecl * > & Namespaces)2193 void ExternalSemaSource::ReadKnownNamespaces(
2194 SmallVectorImpl<NamespaceDecl *> &Namespaces) {
2195 }
2196
ReadUndefinedButUsed(llvm::MapVector<NamedDecl *,SourceLocation> & Undefined)2197 void ExternalSemaSource::ReadUndefinedButUsed(
2198 llvm::MapVector<NamedDecl *, SourceLocation> &Undefined) {}
2199
ReadMismatchingDeleteExpressions(llvm::MapVector<FieldDecl *,llvm::SmallVector<std::pair<SourceLocation,bool>,4>> &)2200 void ExternalSemaSource::ReadMismatchingDeleteExpressions(llvm::MapVector<
2201 FieldDecl *, llvm::SmallVector<std::pair<SourceLocation, bool>, 4>> &) {}
2202
2203 /// Figure out if an expression could be turned into a call.
2204 ///
2205 /// Use this when trying to recover from an error where the programmer may have
2206 /// written just the name of a function instead of actually calling it.
2207 ///
2208 /// \param E - The expression to examine.
2209 /// \param ZeroArgCallReturnTy - If the expression can be turned into a call
2210 /// with no arguments, this parameter is set to the type returned by such a
2211 /// call; otherwise, it is set to an empty QualType.
2212 /// \param OverloadSet - If the expression is an overloaded function
2213 /// name, this parameter is populated with the decls of the various overloads.
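///
/// A hedged, editor-added example of the recovery this enables:
/// \code
///   int answer();
///   int x = answer;   // likely meant: answer()
/// \endcode
/// Here tryExprAsCall reports that the expression could be called with zero
/// arguments and that such a call would yield 'int', so the caller can attach
/// a fix-it suggesting the missing "()".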
tryExprAsCall(Expr & E,QualType & ZeroArgCallReturnTy,UnresolvedSetImpl & OverloadSet)2214 bool Sema::tryExprAsCall(Expr &E, QualType &ZeroArgCallReturnTy,
2215 UnresolvedSetImpl &OverloadSet) {
2216 ZeroArgCallReturnTy = QualType();
2217 OverloadSet.clear();
2218
2219 const OverloadExpr *Overloads = nullptr;
2220 bool IsMemExpr = false;
2221 if (E.getType() == Context.OverloadTy) {
2222 OverloadExpr::FindResult FR = OverloadExpr::find(const_cast<Expr*>(&E));
2223
2224 // Ignore overloads that are pointer-to-member constants.
2225 if (FR.HasFormOfMemberPointer)
2226 return false;
2227
2228 Overloads = FR.Expression;
2229 } else if (E.getType() == Context.BoundMemberTy) {
2230 Overloads = dyn_cast<UnresolvedMemberExpr>(E.IgnoreParens());
2231 IsMemExpr = true;
2232 }
2233
2234 bool Ambiguous = false;
2235 bool IsMV = false;
2236
2237 if (Overloads) {
2238 for (OverloadExpr::decls_iterator it = Overloads->decls_begin(),
2239 DeclsEnd = Overloads->decls_end(); it != DeclsEnd; ++it) {
2240 OverloadSet.addDecl(*it);
2241
2242 // Check whether the function is a non-template, non-member which takes no
2243 // arguments.
2244 if (IsMemExpr)
2245 continue;
2246 if (const FunctionDecl *OverloadDecl
2247 = dyn_cast<FunctionDecl>((*it)->getUnderlyingDecl())) {
2248 if (OverloadDecl->getMinRequiredArguments() == 0) {
2249 if (!ZeroArgCallReturnTy.isNull() && !Ambiguous &&
2250 (!IsMV || !(OverloadDecl->isCPUDispatchMultiVersion() ||
2251 OverloadDecl->isCPUSpecificMultiVersion()))) {
2252 ZeroArgCallReturnTy = QualType();
2253 Ambiguous = true;
2254 } else {
2255 ZeroArgCallReturnTy = OverloadDecl->getReturnType();
2256 IsMV = OverloadDecl->isCPUDispatchMultiVersion() ||
2257 OverloadDecl->isCPUSpecificMultiVersion();
2258 }
2259 }
2260 }
2261 }
2262
2263 // If it's not a member, use better machinery to try to resolve the call
2264 if (!IsMemExpr)
2265 return !ZeroArgCallReturnTy.isNull();
2266 }
2267
2268 // Attempt to call the member with no arguments - this will correctly handle
2269 // member templates with defaults/deduction of template arguments, overloads
2270 // with default arguments, etc.
2271 if (IsMemExpr && !E.isTypeDependent()) {
2272 Sema::TentativeAnalysisScope Trap(*this);
2273 ExprResult R = BuildCallToMemberFunction(nullptr, &E, SourceLocation(),
2274 None, SourceLocation());
2275 if (R.isUsable()) {
2276 ZeroArgCallReturnTy = R.get()->getType();
2277 return true;
2278 }
2279 return false;
2280 }
2281
2282 if (const DeclRefExpr *DeclRef = dyn_cast<DeclRefExpr>(E.IgnoreParens())) {
2283 if (const FunctionDecl *Fun = dyn_cast<FunctionDecl>(DeclRef->getDecl())) {
2284 if (Fun->getMinRequiredArguments() == 0)
2285 ZeroArgCallReturnTy = Fun->getReturnType();
2286 return true;
2287 }
2288 }
2289
2290 // We don't have an expression that's convenient to get a FunctionDecl from,
2291 // but we can at least check if the type is "function of 0 arguments".
2292 QualType ExprTy = E.getType();
2293 const FunctionType *FunTy = nullptr;
2294 QualType PointeeTy = ExprTy->getPointeeType();
2295 if (!PointeeTy.isNull())
2296 FunTy = PointeeTy->getAs<FunctionType>();
2297 if (!FunTy)
2298 FunTy = ExprTy->getAs<FunctionType>();
2299
2300 if (const FunctionProtoType *FPT =
2301 dyn_cast_or_null<FunctionProtoType>(FunTy)) {
2302 if (FPT->getNumParams() == 0)
2303 ZeroArgCallReturnTy = FunTy->getReturnType();
2304 return true;
2305 }
2306 return false;
2307 }
2308
2309 /// Give notes for a set of overloads.
2310 ///
2311 /// A companion to tryExprAsCall. In cases when the name that the programmer
2312 /// wrote was an overloaded function, we may be able to make some guesses about
2313 /// plausible overloads based on their return types; such guesses can be handed
2314 /// off to this method to be emitted as notes.
2315 ///
2316 /// \param Overloads - The overloads to note.
2317 /// \param FinalNoteLoc - If we've suppressed printing some overloads due to
2318 /// -fshow-overloads=best, this is the location to attach to the note about too
2319 /// many candidates. Typically this will be the location of the original
2320 /// ill-formed expression.
noteOverloads(Sema & S,const UnresolvedSetImpl & Overloads,const SourceLocation FinalNoteLoc)2321 static void noteOverloads(Sema &S, const UnresolvedSetImpl &Overloads,
2322 const SourceLocation FinalNoteLoc) {
2323 unsigned ShownOverloads = 0;
2324 unsigned SuppressedOverloads = 0;
2325 for (UnresolvedSetImpl::iterator It = Overloads.begin(),
2326 DeclsEnd = Overloads.end(); It != DeclsEnd; ++It) {
2327 if (ShownOverloads >= S.Diags.getNumOverloadCandidatesToShow()) {
2328 ++SuppressedOverloads;
2329 continue;
2330 }
2331
2332 NamedDecl *Fn = (*It)->getUnderlyingDecl();
2333 // Don't print overloads for non-default multiversioned functions.
2334 if (const auto *FD = Fn->getAsFunction()) {
2335 if (FD->isMultiVersion() && FD->hasAttr<TargetAttr>() &&
2336 !FD->getAttr<TargetAttr>()->isDefaultVersion())
2337 continue;
2338 }
2339 S.Diag(Fn->getLocation(), diag::note_possible_target_of_call);
2340 ++ShownOverloads;
2341 }
2342
2343 S.Diags.overloadCandidatesShown(ShownOverloads);
2344
2345 if (SuppressedOverloads)
2346 S.Diag(FinalNoteLoc, diag::note_ovl_too_many_candidates)
2347 << SuppressedOverloads;
2348 }
2349
notePlausibleOverloads(Sema & S,SourceLocation Loc,const UnresolvedSetImpl & Overloads,bool (* IsPlausibleResult)(QualType))2350 static void notePlausibleOverloads(Sema &S, SourceLocation Loc,
2351 const UnresolvedSetImpl &Overloads,
2352 bool (*IsPlausibleResult)(QualType)) {
2353 if (!IsPlausibleResult)
2354 return noteOverloads(S, Overloads, Loc);
2355
2356 UnresolvedSet<2> PlausibleOverloads;
2357 for (OverloadExpr::decls_iterator It = Overloads.begin(),
2358 DeclsEnd = Overloads.end(); It != DeclsEnd; ++It) {
2359 const FunctionDecl *OverloadDecl = cast<FunctionDecl>(*It);
2360 QualType OverloadResultTy = OverloadDecl->getReturnType();
2361 if (IsPlausibleResult(OverloadResultTy))
2362 PlausibleOverloads.addDecl(It.getDecl());
2363 }
2364 noteOverloads(S, PlausibleOverloads, Loc);
2365 }
2366
2367 /// Determine whether the given expression can be called by just
2368 /// putting parentheses after it. Notably, expressions with unary
2369 /// operators can't be because the unary operator will start parsing
2370 /// outside the call.
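///
/// Editor-added illustration (hedged): for a function pointer 'fp',
/// \code
///   if (*fp) { ... }
/// \endcode
/// appending "()" would produce '*fp()', which parses as '*(fp())' rather
/// than '(*fp)()', so no parentheses fix-it is offered for such expressions.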
IsCallableWithAppend(Expr * E)2371 static bool IsCallableWithAppend(Expr *E) {
2372 E = E->IgnoreImplicit();
2373 return (!isa<CStyleCastExpr>(E) &&
2374 !isa<UnaryOperator>(E) &&
2375 !isa<BinaryOperator>(E) &&
2376 !isa<CXXOperatorCallExpr>(E));
2377 }
2378
IsCPUDispatchCPUSpecificMultiVersion(const Expr * E)2379 static bool IsCPUDispatchCPUSpecificMultiVersion(const Expr *E) {
2380 if (const auto *UO = dyn_cast<UnaryOperator>(E))
2381 E = UO->getSubExpr();
2382
2383 if (const auto *ULE = dyn_cast<UnresolvedLookupExpr>(E)) {
2384 if (ULE->getNumDecls() == 0)
2385 return false;
2386
2387 const NamedDecl *ND = *ULE->decls_begin();
2388 if (const auto *FD = dyn_cast<FunctionDecl>(ND))
2389 return FD->isCPUDispatchMultiVersion() || FD->isCPUSpecificMultiVersion();
2390 }
2391 return false;
2392 }
2393
tryToRecoverWithCall(ExprResult & E,const PartialDiagnostic & PD,bool ForceComplain,bool (* IsPlausibleResult)(QualType))2394 bool Sema::tryToRecoverWithCall(ExprResult &E, const PartialDiagnostic &PD,
2395 bool ForceComplain,
2396 bool (*IsPlausibleResult)(QualType)) {
2397 SourceLocation Loc = E.get()->getExprLoc();
2398 SourceRange Range = E.get()->getSourceRange();
2399
2400 QualType ZeroArgCallTy;
2401 UnresolvedSet<4> Overloads;
2402 if (tryExprAsCall(*E.get(), ZeroArgCallTy, Overloads) &&
2403 !ZeroArgCallTy.isNull() &&
2404 (!IsPlausibleResult || IsPlausibleResult(ZeroArgCallTy))) {
2405 // At this point, we know E is potentially callable with 0
2406 // arguments and that it returns something of a reasonable type,
2407 // so we can emit a fixit and carry on pretending that E was
2408 // actually a CallExpr.
2409 SourceLocation ParenInsertionLoc = getLocForEndOfToken(Range.getEnd());
2410 bool IsMV = IsCPUDispatchCPUSpecificMultiVersion(E.get());
2411 Diag(Loc, PD) << /*zero-arg*/ 1 << IsMV << Range
2412 << (IsCallableWithAppend(E.get())
2413 ? FixItHint::CreateInsertion(ParenInsertionLoc, "()")
2414 : FixItHint());
2415 if (!IsMV)
2416 notePlausibleOverloads(*this, Loc, Overloads, IsPlausibleResult);
2417
2418 // FIXME: Try this before emitting the fixit, and suppress diagnostics
2419 // while doing so.
2420 E = BuildCallExpr(nullptr, E.get(), Range.getEnd(), None,
2421 Range.getEnd().getLocWithOffset(1));
2422 return true;
2423 }
2424
2425 if (!ForceComplain) return false;
2426
2427 bool IsMV = IsCPUDispatchCPUSpecificMultiVersion(E.get());
2428 Diag(Loc, PD) << /*not zero-arg*/ 0 << IsMV << Range;
2429 if (!IsMV)
2430 notePlausibleOverloads(*this, Loc, Overloads, IsPlausibleResult);
2431 E = ExprError();
2432 return true;
2433 }
2434
getSuperIdentifier() const2435 IdentifierInfo *Sema::getSuperIdentifier() const {
2436 if (!Ident_super)
2437 Ident_super = &Context.Idents.get("super");
2438 return Ident_super;
2439 }
2440
getFloat128Identifier() const2441 IdentifierInfo *Sema::getFloat128Identifier() const {
2442 if (!Ident___float128)
2443 Ident___float128 = &Context.Idents.get("__float128");
2444 return Ident___float128;
2445 }
2446
PushCapturedRegionScope(Scope * S,CapturedDecl * CD,RecordDecl * RD,CapturedRegionKind K,unsigned OpenMPCaptureLevel)2447 void Sema::PushCapturedRegionScope(Scope *S, CapturedDecl *CD, RecordDecl *RD,
2448 CapturedRegionKind K,
2449 unsigned OpenMPCaptureLevel) {
2450 auto *CSI = new CapturedRegionScopeInfo(
2451 getDiagnostics(), S, CD, RD, CD->getContextParam(), K,
2452 (getLangOpts().OpenMP && K == CR_OpenMP) ? getOpenMPNestingLevel() : 0,
2453 OpenMPCaptureLevel);
2454 CSI->ReturnType = Context.VoidTy;
2455 FunctionScopes.push_back(CSI);
2456 }
2457
getCurCapturedRegion()2458 CapturedRegionScopeInfo *Sema::getCurCapturedRegion() {
2459 if (FunctionScopes.empty())
2460 return nullptr;
2461
2462 return dyn_cast<CapturedRegionScopeInfo>(FunctionScopes.back());
2463 }
2464
2465 const llvm::MapVector<FieldDecl *, Sema::DeleteLocs> &
getMismatchingDeleteExpressions() const2466 Sema::getMismatchingDeleteExpressions() const {
2467 return DeleteExprs;
2468 }
2469