1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "RecordInfo.h"
6 
7 #include <string>
8 
9 #include "Config.h"
10 #include "clang/Sema/Sema.h"
11 
12 using namespace clang;
13 using std::string;
14 
// Wraps |record| with lazily-computed GC-related properties. All cached
// booleans start as kNotComputed and the base/field collections start
// null; they are filled in on first use by the accessors below.
RecordInfo::RecordInfo(CXXRecordDecl* record, RecordCache* cache)
    : cache_(cache),
      record_(record),
      name_(record->getName()),
      fields_need_tracing_(TracingStatus::Unknown()),
      bases_(0),   // Lazily allocated by GetBases()/CollectBases().
      fields_(0),  // Lazily allocated by GetFields()/CollectFields().
      is_stack_allocated_(kNotComputed),
      is_non_newable_(kNotComputed),
      is_only_placement_newable_(kNotComputed),
      does_need_finalization_(kNotComputed),
      has_gc_mixin_methods_(kNotComputed),
      is_declaring_local_trace_(kNotComputed),
      determined_trace_methods_(false),
      trace_method_(0),
      trace_dispatch_method_(0),
      finalize_dispatch_method_(0),
      is_gc_derived_(false),
      directly_derived_gc_base_(nullptr) {}
34 
RecordInfo::~RecordInfo() {
  // fields_ and bases_ are raw owning pointers allocated lazily by
  // CollectFields()/CollectBases(); deleting null is harmless if they
  // were never computed.
  delete fields_;
  delete bases_;
}
39 
40 // Get |count| number of template arguments. Returns false if there
41 // are fewer than |count| arguments or any of the arguments are not
42 // of a valid Type structure. If |count| is non-positive, all
43 // arguments are collected.
GetTemplateArgs(size_t count,TemplateArgs * output_args)44 bool RecordInfo::GetTemplateArgs(size_t count, TemplateArgs* output_args) {
45   ClassTemplateSpecializationDecl* tmpl =
46       dyn_cast<ClassTemplateSpecializationDecl>(record_);
47   if (!tmpl)
48     return false;
49   const TemplateArgumentList& args = tmpl->getTemplateArgs();
50   if (args.size() < count)
51     return false;
52   if (count <= 0)
53     count = args.size();
54   for (unsigned i = 0; i < count; ++i) {
55     TemplateArgument arg = args[i];
56     if (arg.getKind() == TemplateArgument::Type && !arg.getAsType().isNull()) {
57       output_args->push_back(arg.getAsType().getTypePtr());
58     } else {
59       return false;
60     }
61   }
62   return true;
63 }
64 
65 // Test if a record is a HeapAllocated collection.
IsHeapAllocatedCollection()66 bool RecordInfo::IsHeapAllocatedCollection() {
67   if (!Config::IsGCCollection(name_) && !Config::IsWTFCollection(name_))
68     return false;
69 
70   TemplateArgs args;
71   if (GetTemplateArgs(0, &args)) {
72     for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
73       if (CXXRecordDecl* decl = (*it)->getAsCXXRecordDecl())
74         if (decl->getName() == kHeapAllocatorName)
75           return true;
76     }
77   }
78 
79   return Config::IsGCCollection(name_);
80 }
81 
// Returns true if this is a heap collection whose finalizer is optional,
// i.e. may be delayed until FinalizeGarbageCollectedObject() is called.
bool RecordInfo::HasOptionalFinalizer() {
  if (!IsHeapAllocatedCollection())
    return false;
  // Heap collections may have a finalizer but it is optional (i.e. may be
  // delayed until FinalizeGarbageCollectedObject() gets called), unless there
  // is an inline buffer. Vector and Deque can have an inline
  // buffer.
  if (name_ != "Vector" && name_ != "Deque" && name_ != "HeapVector" &&
      name_ != "HeapDeque")
    return true;
  ClassTemplateSpecializationDecl* tmpl =
      dyn_cast<ClassTemplateSpecializationDecl>(record_);
  // These collections require template specialization so tmpl should always be
  // non-null for valid code.
  if (!tmpl)
    return false;
  const TemplateArgumentList& args = tmpl->getTemplateArgs();
  // With fewer than two arguments the inline-capacity parameter was not
  // specified, so there is no inline buffer.
  if (args.size() < 2)
    return true;
  TemplateArgument arg = args[1];
  // The second template argument must be void or 0 so there is no inline
  // buffer.
  return (arg.getKind() == TemplateArgument::Type &&
          arg.getAsType()->isVoidType()) ||
         (arg.getKind() == TemplateArgument::Integral &&
          arg.getAsIntegral().getExtValue() == 0);
}
109 
// Test if a record is derived from a garbage collected base.
bool RecordInfo::IsGCDerived() {
  // If already computed, return the known result. A non-empty
  // gc_base_names_ is the marker that walkBases() has already run.
  if (gc_base_names_.size())
    return is_gc_derived_;

  if (!record_->hasDefinition())
    return false;

  // The base classes are not themselves considered garbage collected objects.
  if (Config::IsGCBase(name_))
    return false;

  // Walk the inheritance tree to find GC base classes. This sets
  // is_gc_derived_ and populates gc_base_names_ as a side effect.
  walkBases();
  return is_gc_derived_;
}
127 
128 // Test if a record is directly derived from a garbage collected base.
IsGCDirectlyDerived()129 bool RecordInfo::IsGCDirectlyDerived() {
130   // If already computed, return the known result.
131   if (directly_derived_gc_base_)
132     return true;
133 
134   if (!record_->hasDefinition())
135     return false;
136 
137   // The base classes are not themselves considered garbage collected objects.
138   if (Config::IsGCBase(name_))
139     return false;
140 
141   for (const auto& it : record()->bases()) {
142     const CXXRecordDecl* base = it.getType()->getAsCXXRecordDecl();
143     if (!base)
144       continue;
145 
146     if (Config::IsGCSimpleBase(base->getName())) {
147       directly_derived_gc_base_ = &it;
148       break;
149     }
150   }
151 
152   return directly_derived_gc_base_;
153 }
154 
GetDependentTemplatedDecl(const Type & type)155 CXXRecordDecl* RecordInfo::GetDependentTemplatedDecl(const Type& type) {
156   const TemplateSpecializationType* tmpl_type =
157       type.getAs<TemplateSpecializationType>();
158   if (!tmpl_type)
159     return 0;
160 
161   TemplateDecl* tmpl_decl = tmpl_type->getTemplateName().getAsTemplateDecl();
162   if (!tmpl_decl)
163     return 0;
164 
165   if (CXXRecordDecl* record_decl =
166           dyn_cast_or_null<CXXRecordDecl>(tmpl_decl->getTemplatedDecl()))
167     return record_decl;
168 
169   // Type is an alias.
170   TypeAliasDecl* alias_decl =
171       dyn_cast<TypeAliasDecl>(tmpl_decl->getTemplatedDecl());
172   assert(alias_decl);
173   const Type* alias_type = alias_decl->getUnderlyingType().getTypePtr();
174   if (CXXRecordDecl* record_decl = alias_type->getAsCXXRecordDecl())
175     return record_decl;
176   return GetDependentTemplatedDecl(*alias_type);
177 }
178 
// Traverses the full (transitive) inheritance tree, recording the names
// of all bases recognized as GC bases in gc_base_names_ and setting
// is_gc_derived_ if any are found.
void RecordInfo::walkBases() {
  // This traversal is akin to CXXRecordDecl::forallBases()'s,
  // but without stepping over dependent bases -- these might also
  // have a "GC base name", so are to be included and considered.
  SmallVector<const CXXRecordDecl*, 8> queue;

  const CXXRecordDecl* base_record = record();
  while (true) {
    for (const auto& it : base_record->bases()) {
      const RecordType* type = it.getType()->getAs<RecordType>();
      CXXRecordDecl* base;
      if (!type)
        // Dependent base: resolve through the template declaration.
        // Note that dependent bases are inspected but NOT queued, so
        // their own bases are not traversed.
        base = GetDependentTemplatedDecl(*it.getType());
      else {
        base = cast_or_null<CXXRecordDecl>(type->getDecl()->getDefinition());
        if (base)
          queue.push_back(base);
      }
      if (!base)
        continue;

      llvm::StringRef name = base->getName();
      if (Config::IsGCBase(name)) {
        gc_base_names_.push_back(std::string(name));
        is_gc_derived_ = true;
      }
    }

    if (queue.empty())
      break;
    base_record = queue.pop_back_val(); // not actually a queue.
  }
}
212 
213 // A GC mixin is a class that inherits from a GC mixin base and has
214 // not yet been "mixed in" with another GC base class.
IsGCMixin()215 bool RecordInfo::IsGCMixin() {
216   if (!IsGCDerived() || !gc_base_names_.size())
217     return false;
218   for (const auto& gc_base : gc_base_names_) {
219       // If it is not a mixin base we are done.
220       if (!Config::IsGCMixinBase(gc_base))
221           return false;
222   }
223   // This is a mixin if all GC bases are mixins.
224   return true;
225 }
226 
227 // Test if a record is allocated on the managed heap.
IsGCAllocated()228 bool RecordInfo::IsGCAllocated() {
229   return IsGCDerived() || IsHeapAllocatedCollection();
230 }
231 
// Returns true if the wrapped record has a (visible) definition.
bool RecordInfo::HasDefinition() {
  return record_->hasDefinition();
}
235 
Lookup(CXXRecordDecl * record)236 RecordInfo* RecordCache::Lookup(CXXRecordDecl* record) {
237   // Ignore classes annotated with the GC_PLUGIN_IGNORE macro.
238   if (!record || Config::IsIgnoreAnnotated(record))
239     return 0;
240   Cache::iterator it = cache_.find(record);
241   if (it != cache_.end())
242     return &it->second;
243   return &cache_.insert(std::make_pair(record, RecordInfo(record, this)))
244               .first->second;
245 }
246 
HasTypeAlias(std::string marker_name) const247 bool RecordInfo::HasTypeAlias(std::string marker_name) const {
248   for (Decl* decl : record_->decls()) {
249     TypeAliasDecl* alias = dyn_cast<TypeAliasDecl>(decl);
250     if (!alias)
251       continue;
252     if (alias->getName() == marker_name)
253       return true;
254   }
255   return false;
256 }
257 
// Returns true if the record is marked stack allocated: either via the
// IsStackAllocatedTypeMarker alias, by inheriting from a stack-allocated
// base, or by a deleted, STACK_ALLOCATED-annotated operator new.
// The result is computed once and cached in is_stack_allocated_.
bool RecordInfo::IsStackAllocated() {
  if (is_stack_allocated_ == kNotComputed) {
    is_stack_allocated_ = kFalse;
    if (HasTypeAlias("IsStackAllocatedTypeMarker")) {
      is_stack_allocated_ = kTrue;
      return is_stack_allocated_;
    }
    // Stack-allocatedness is inherited from any base class.
    for (Bases::iterator it = GetBases().begin();
         it != GetBases().end();
         ++it) {
      if (it->second.info()->IsStackAllocated()) {
        is_stack_allocated_ = kTrue;
        return is_stack_allocated_;
      }
    }
    // A deleted operator new carrying the stack annotation also marks
    // the class as stack allocated.
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName &&
          it->isDeleted() &&
          Config::IsStackAnnotated(*it)) {
        is_stack_allocated_ = kTrue;
        return is_stack_allocated_;
      }
    }
  }
  return is_stack_allocated_;
}
286 
// Returns true if the record cannot be heap allocated: it declares at
// least one operator new and all declared operator new overloads are
// deleted. Cached in is_non_newable_.
bool RecordInfo::IsNonNewable() {
  if (is_non_newable_ == kNotComputed) {
    // |deleted| tracks the last operator new seen (and stays false if
    // none is declared); |all_deleted| requires every overload to be
    // deleted.
    bool deleted = false;
    bool all_deleted = true;
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName) {
        deleted = it->isDeleted();
        all_deleted = all_deleted && deleted;
      }
    }
    is_non_newable_ = (deleted && all_deleted) ? kTrue : kFalse;
  }
  return is_non_newable_;
}
303 
// Returns true if the only usable operator new is the two-parameter
// placement form, while the normal single-parameter operator new is
// deleted. Cached in is_only_placement_newable_.
bool RecordInfo::IsOnlyPlacementNewable() {
  if (is_only_placement_newable_ == kNotComputed) {
    bool placement = false;
    bool new_deleted = false;
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName) {
        if (it->getNumParams() == 1) {
          // Normal operator new (size only).
          new_deleted = it->isDeleted();
        } else if (it->getNumParams() == 2) {
          // Placement operator new (size + pointer).
          placement = !it->isDeleted();
        }
      }
    }
    is_only_placement_newable_ = (placement && new_deleted) ? kTrue : kFalse;
  }
  return is_only_placement_newable_;
}
323 
DeclaresNewOperator()324 CXXMethodDecl* RecordInfo::DeclaresNewOperator() {
325   for (CXXRecordDecl::method_iterator it = record_->method_begin();
326        it != record_->method_end();
327        ++it) {
328     if (it->getNameAsString() == kNewOperatorName && it->getNumParams() == 1)
329       return *it;
330   }
331   return 0;
332 }
333 
// An object requires a tracing method if it has any fields that need tracing
// or if it inherits from multiple bases that need tracing.
bool RecordInfo::RequiresTraceMethod() {
  // Stack-allocated objects are never traced.
  if (IsStackAllocated())
    return false;
  if (GetTraceMethod())
    return true;
  unsigned bases_with_trace = 0;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.NeedsTracing().IsNeeded())
      ++bases_with_trace;
  }
  // If a single base has a Trace method, this type can inherit the Trace
  // method from that base. If more than a single base has a Trace method,
  // this type needs its own Trace method which will delegate to each of
  // the bases' Trace methods.
  if (bases_with_trace > 1)
    return true;
  // GetFields() computes fields_need_tracing_ as a side effect.
  GetFields();
  return fields_need_tracing_.IsNeeded();
}
355 
// Get the actual tracing method (ie, can be traceAfterDispatch if there is a
// dispatch method). Lazily computed via DetermineTracingMethods().
CXXMethodDecl* RecordInfo::GetTraceMethod() {
  DetermineTracingMethods();
  return trace_method_;
}
362 
// Get the static trace dispatch method (possibly inherited from a base).
CXXMethodDecl* RecordInfo::GetTraceDispatchMethod() {
  DetermineTracingMethods();
  return trace_dispatch_method_;
}
368 
// Get the finalization dispatch method (possibly inherited from a base).
CXXMethodDecl* RecordInfo::GetFinalizeDispatchMethod() {
  DetermineTracingMethods();
  return finalize_dispatch_method_;
}
373 
GetDirectGCBase()374 const CXXBaseSpecifier* RecordInfo::GetDirectGCBase() {
375   if (!IsGCDirectlyDerived())
376     return nullptr;
377   return directly_derived_gc_base_;
378 }
379 
GetBases()380 RecordInfo::Bases& RecordInfo::GetBases() {
381   if (!bases_)
382     bases_ = CollectBases();
383   return *bases_;
384 }
385 
InheritsTrace()386 bool RecordInfo::InheritsTrace() {
387   if (GetTraceMethod())
388     return true;
389   for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
390     if (it->second.info()->InheritsTrace())
391       return true;
392   }
393   return false;
394 }
395 
InheritsNonVirtualTrace()396 CXXMethodDecl* RecordInfo::InheritsNonVirtualTrace() {
397   if (CXXMethodDecl* trace = GetTraceMethod())
398     return trace->isVirtual() ? 0 : trace;
399   for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
400     if (CXXMethodDecl* trace = it->second.info()->InheritsNonVirtualTrace())
401       return trace;
402   }
403   return 0;
404 }
405 
// Returns true if the class itself defines both GCMixin methods
// (AdjustAndMark and IsHeapObjectAlive); see DetermineTracingMethods().
bool RecordInfo::DeclaresGCMixinMethods() {
  DetermineTracingMethods();
  return has_gc_mixin_methods_;
}
410 
// Returns true if this record itself declares the trace method that
// DetermineTracingMethods() selected (as opposed to inheriting one).
bool RecordInfo::DeclaresLocalTraceMethod() {
  if (is_declaring_local_trace_ != kNotComputed)
    return is_declaring_local_trace_;
  DetermineTracingMethods();
  is_declaring_local_trace_ = trace_method_ ? kTrue : kFalse;
  if (is_declaring_local_trace_) {
    // NOTE(review): this loop only ever re-assigns kTrue and never resets
    // to kFalse when trace_method_ is not found among this record's own
    // methods — as written it is a no-op. Presumably the intent was to set
    // kFalse before the search; confirm against upstream history.
    for (auto it = record_->method_begin();
         it != record_->method_end(); ++it) {
      if (*it == trace_method_) {
        is_declaring_local_trace_ = kTrue;
        break;
      }
    }
  }
  return is_declaring_local_trace_;
}
427 
428 // A (non-virtual) class is considered abstract in Blink if it has
429 // no public constructors and no create methods.
IsConsideredAbstract()430 bool RecordInfo::IsConsideredAbstract() {
431   for (CXXRecordDecl::ctor_iterator it = record_->ctor_begin();
432        it != record_->ctor_end();
433        ++it) {
434     if (!it->isCopyOrMoveConstructor() && it->getAccess() == AS_public)
435       return false;
436   }
437   for (CXXRecordDecl::method_iterator it = record_->method_begin();
438        it != record_->method_end();
439        ++it) {
440     if (it->getNameAsString() == kCreateName)
441       return false;
442   }
443   return true;
444 }
445 
// Builds the Bases collection for this record: one entry per direct base
// that has a RecordInfo, annotated with whether the base needs tracing.
// Caller (GetBases()) takes ownership of the returned pointer.
RecordInfo::Bases* RecordInfo::CollectBases() {
  // Compute the collection locally to avoid inconsistent states.
  Bases* bases = new Bases;
  if (!record_->hasDefinition())
    return bases;
  for (CXXRecordDecl::base_class_iterator it = record_->bases_begin();
       it != record_->bases_end();
       ++it) {
    const CXXBaseSpecifier& spec = *it;
    RecordInfo* info = cache_->Lookup(spec.getType());
    // Bases without RecordInfo (ignored or non-record types) are skipped.
    if (!info)
      continue;
    CXXRecordDecl* base = info->record();
    TracingStatus status = info->InheritsTrace()
                               ? TracingStatus::Needed()
                               : TracingStatus::Unneeded();
    bases->push_back(std::make_pair(base, BasePoint(spec, info, status)));
  }
  return bases;
}
466 
GetFields()467 RecordInfo::Fields& RecordInfo::GetFields() {
468   if (!fields_)
469     fields_ = CollectFields();
470   return *fields_;
471 }
472 
// Builds the Fields collection: one FieldPoint per non-ignored field for
// which an Edge can be created. Also computes fields_need_tracing_ as the
// least upper bound of all field edges' tracing needs. Caller
// (GetFields()) takes ownership of the returned pointer.
RecordInfo::Fields* RecordInfo::CollectFields() {
  // Compute the collection locally to avoid inconsistent states.
  Fields* fields = new Fields;
  if (!record_->hasDefinition())
    return fields;
  TracingStatus fields_status = TracingStatus::Unneeded();
  for (RecordDecl::field_iterator it = record_->field_begin();
       it != record_->field_end();
       ++it) {
    FieldDecl* field = *it;
    // Ignore fields annotated with the GC_PLUGIN_IGNORE macro.
    if (Config::IsIgnoreAnnotated(field))
      continue;
    // Check if the unexpanded type should be recorded; needed
    // to track iterator aliases only
    const Type* unexpandedType = field->getType().getSplitUnqualifiedType().Ty;
    Edge* edge = CreateEdgeFromOriginalType(unexpandedType);
    if (!edge)
      // Not an iterator alias; build an edge from the canonical type.
      edge = CreateEdge(field->getType().getTypePtrOrNull());
    if (edge) {
      fields_status = fields_status.LUB(edge->NeedsTracing(Edge::kRecursive));
      fields->insert(std::make_pair(field, FieldPoint(field, edge)));
    }
  }
  fields_need_tracing_ = fields_status;
  return fields;
}
500 
// Determines (once) the trace, trace-dispatch, and finalize-dispatch
// methods for this record, inheriting dispatch methods from bases when
// this class does not define them. Also records whether the class defines
// the two GCMixin methods. Results are cached in trace_method_,
// trace_dispatch_method_, finalize_dispatch_method_, and
// has_gc_mixin_methods_.
void RecordInfo::DetermineTracingMethods() {
  if (determined_trace_methods_)
    return;
  determined_trace_methods_ = true;
  // GC base classes themselves are not assigned tracing methods.
  if (Config::IsGCBase(name_))
    return;
  CXXMethodDecl* trace = nullptr;
  CXXMethodDecl* trace_after_dispatch = nullptr;
  bool has_adjust_and_mark = false;
  bool has_is_heap_object_alive = false;
  for (Decl* decl : record_->decls()) {
    CXXMethodDecl* method = dyn_cast<CXXMethodDecl>(decl);
    if (!method) {
      // A trace method may be declared as a function template; use its
      // templated declaration in that case.
      if (FunctionTemplateDecl* func_template =
          dyn_cast<FunctionTemplateDecl>(decl))
        method = dyn_cast<CXXMethodDecl>(func_template->getTemplatedDecl());
    }
    if (!method)
      continue;

    switch (Config::GetTraceMethodType(method)) {
      case Config::TRACE_METHOD:
        trace = method;
        break;
      case Config::TRACE_AFTER_DISPATCH_METHOD:
        trace_after_dispatch = method;
        break;
      case Config::NOT_TRACE_METHOD:
        // Not a trace method, but may still be a dispatch or mixin method.
        if (method->getNameAsString() == kFinalizeName) {
          finalize_dispatch_method_ = method;
        } else if (method->getNameAsString() == kAdjustAndMarkName) {
          has_adjust_and_mark = true;
        } else if (method->getNameAsString() == kIsHeapObjectAliveName) {
          has_is_heap_object_alive = true;
        }
        break;
    }
  }

  // Record if class defines the two GCMixin methods.
  has_gc_mixin_methods_ =
      has_adjust_and_mark && has_is_heap_object_alive ? kTrue : kFalse;
  if (trace_after_dispatch) {
    // With a traceAfterDispatch, the plain trace method acts as the
    // dispatcher.
    trace_method_ = trace_after_dispatch;
    trace_dispatch_method_ = trace;
  } else {
    // TODO: Can we never have a dispatch method called trace without the same
    // class defining a traceAfterDispatch method?
    trace_method_ = trace;
    trace_dispatch_method_ = nullptr;
  }
  if (trace_dispatch_method_ && finalize_dispatch_method_)
    return;
  // If this class does not define dispatching methods inherit them.
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    // TODO: Does it make sense to inherit multiple dispatch methods?
    if (CXXMethodDecl* dispatch = it->second.info()->GetTraceDispatchMethod()) {
      assert(!trace_dispatch_method_ && "Multiple trace dispatching methods");
      trace_dispatch_method_ = dispatch;
    }
    if (CXXMethodDecl* dispatch =
            it->second.info()->GetFinalizeDispatchMethod()) {
      assert(!finalize_dispatch_method_ &&
             "Multiple finalize dispatching methods");
      finalize_dispatch_method_ = dispatch;
    }
  }
}
569 
// TODO: Add classes with a finalize() method that specialize FinalizerTrait.
//
// Returns true if the record needs finalization when garbage collected.
// Computed once and cached in does_need_finalization_; note that the
// early returns below deliberately return the current cached value
// (kTrue at that point).
bool RecordInfo::NeedsFinalization() {
  if (does_need_finalization_ == kNotComputed) {
    // Heap collections with an optional finalizer do not require one.
    if (HasOptionalFinalizer()) {
      does_need_finalization_ = kFalse;
      return does_need_finalization_;
    }

    // Rely on hasNonTrivialDestructor(), but if the only
    // identifiable reason for it being true is the presence
    // of a safely ignorable class as a direct base,
    // or we're processing such an 'ignorable' class, then it does
    // not need finalization.
    does_need_finalization_ =
        record_->hasNonTrivialDestructor() ? kTrue : kFalse;
    if (!does_need_finalization_)
      return does_need_finalization_;

    // A user-provided destructor always implies finalization.
    CXXDestructorDecl* dtor = record_->getDestructor();
    if (dtor && dtor->isUserProvided())
      return does_need_finalization_;
    // Otherwise keep kTrue only if a field or base actually needs
    // finalization.
    for (Fields::iterator it = GetFields().begin();
         it != GetFields().end();
         ++it) {
      if (it->second.edge()->NeedsFinalization())
        return does_need_finalization_;
    }

    for (Bases::iterator it = GetBases().begin();
         it != GetBases().end();
         ++it) {
      if (it->second.info()->NeedsFinalization())
        return does_need_finalization_;
    }
    // Destructor was non-trivial due to bases with destructors that
    // can be safely ignored. Hence, no need for finalization.
    does_need_finalization_ = kFalse;
  }
  return does_need_finalization_;
}
610 
// A class needs tracing if:
// - it is allocated on the managed heap,
// - it has a Trace method (i.e. the plugin assumes such a method was added for
//                          a reason).
// - it is derived from a class that needs tracing, or
// - it contains fields that need tracing.
//
TracingStatus RecordInfo::NeedsTracing(Edge::NeedsTracingOption option) {
  if (IsGCAllocated())
    return TracingStatus::Needed();

  // Stack-allocated objects are never traced.
  if (IsStackAllocated())
    return TracingStatus::Unneeded();

  if (GetTraceMethod())
    return TracingStatus::Needed();

  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.info()->NeedsTracing(option).IsNeeded())
      return TracingStatus::Needed();
  }

  // GetFields() computes fields_need_tracing_ as a side effect; only
  // recurse into fields when asked to.
  if (option == Edge::kRecursive)
    GetFields();

  return fields_need_tracing_;
}
638 
isInStdNamespace(clang::Sema & sema,NamespaceDecl * ns)639 static bool isInStdNamespace(clang::Sema& sema, NamespaceDecl* ns)
640 {
641   while (ns) {
642     if (sema.getStdNamespace()->InEnclosingNamespaceSetOf(ns))
643       return true;
644     ns = dyn_cast<NamespaceDecl>(ns->getParent());
645   }
646   return false;
647 }
648 
// Creates an Iterator edge when |type| is a typedef of the form
// "typedef ... iterator;" (or a reverse/const variant recognized by
// Config::IsIterator). Returns nullptr when |type| is not such an alias.
Edge* RecordInfo::CreateEdgeFromOriginalType(const Type* type) {
  if (!type)
    return nullptr;

  // look for "typedef ... iterator;"
  if (!isa<ElaboratedType>(type))
    return nullptr;
  const ElaboratedType* elaboratedType = cast<ElaboratedType>(type);
  if (!isa<TypedefType>(elaboratedType->getNamedType()))
    return nullptr;
  const TypedefType* typedefType =
      cast<TypedefType>(elaboratedType->getNamedType());
  std::string typeName = typedefType->getDecl()->getNameAsString();
  if (!Config::IsIterator(typeName))
    return nullptr;
  // Resolve the collection type the iterator belongs to (e.g. the
  // "HeapVector<T>" in "HeapVector<T>::iterator").
  // NOTE(review): getQualifier() is assumed non-null here for a qualified
  // iterator typedef — confirm unqualified uses cannot reach this point.
  RecordInfo* info =
      cache_->Lookup(elaboratedType->getQualifier()->getAsType());

  bool on_heap = false;
  // Silently handle unknown types; the on-heap collection types will
  // have to be in scope for the declaration to compile, though.
  if (info) {
    on_heap = Config::IsGCCollection(info->name());
  }
  return new Iterator(info, on_heap);
}
675 
// Builds the points-to Edge for |type|, recursively unwrapping pointers,
// references, and the known smart-pointer / collection templates
// (Member, WeakMember, Persistent, RefPtr/WeakPtr, unique_ptr,
// TraceWrapperV8Reference, GC/WTF collections). Returns 0 when no edge
// can be built (null type, non-record, or malformed template use);
// otherwise the caller owns the returned edge.
Edge* RecordInfo::CreateEdge(const Type* type) {
  if (!type) {
    return 0;
  }

  // Raw pointers and references wrap the pointee's edge.
  if (type->isPointerType() || type->isReferenceType()) {
    if (Edge* ptr = CreateEdge(type->getPointeeType().getTypePtrOrNull()))
      return new RawPtr(ptr, type->isReferenceType());
    return 0;
  }

  RecordInfo* info = cache_->Lookup(type);

  // If the type is neither a pointer or a C++ record we ignore it.
  if (!info) {
    return 0;
  }

  TemplateArgs args;

  if (Config::IsRefOrWeakPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new RefPtr(
          ptr, Config::IsRefPtr(info->name()) ? Edge::kStrong : Edge::kWeak);
    return 0;
  }

  if (Config::IsUniquePtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    // Check that this is std::unique_ptr
    NamespaceDecl* ns =
        dyn_cast<NamespaceDecl>(info->record()->getDeclContext());
    clang::Sema& sema = cache_->instance().getSema();
    if (!isInStdNamespace(sema, ns))
      return 0;
    if (Edge* ptr = CreateEdge(args[0]))
      return new UniquePtr(ptr);
    return 0;
  }

  // Find top-level namespace.
  NamespaceDecl* ns = dyn_cast<NamespaceDecl>(info->record()->getDeclContext());
  if (ns) {
    while (NamespaceDecl* outer_ns =
               dyn_cast<NamespaceDecl>(ns->getDeclContext())) {
      ns = outer_ns;
    }
  }
  auto ns_name = ns ? ns->getName() : "";

  // The Config::Is* helpers below also populate |args| with the
  // template arguments when they match.
  if (Config::IsMember(info->name(), ns_name, info, &args)) {
    if (Edge* ptr = CreateEdge(args[0])) {
      return new Member(ptr);
    }
    return 0;
  }

  if (Config::IsWeakMember(info->name(), ns_name, info, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new WeakMember(ptr);
    return 0;
  }

  bool is_persistent = Config::IsPersistent(info->name(), ns_name, info, &args);
  if (is_persistent ||
      Config::IsCrossThreadPersistent(info->name(), ns_name, info, &args)) {
    if (Edge* ptr = CreateEdge(args[0])) {
      if (is_persistent)
        return new Persistent(ptr);
      else
        return new CrossThreadPersistent(ptr);
    }
    return 0;
  }

  if (Config::IsGCCollection(info->name()) ||
      Config::IsWTFCollection(info->name())) {
    bool on_heap = info->IsHeapAllocatedCollection();
    // Collect one edge per element-type template argument.
    size_t count = Config::CollectionDimension(info->name());
    if (!info->GetTemplateArgs(count, &args))
      return 0;
    Collection* edge = new Collection(info, on_heap);
    for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
      if (Edge* member = CreateEdge(*it)) {
        edge->members().push_back(member);
      }
      // TODO: Handle the case where we fail to create an edge (eg, if the
      // argument is a primitive type or just not fully known yet).
    }
    return edge;
  }

  if (Config::IsTraceWrapperV8Reference(info->name(), ns_name, info, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new TraceWrapperV8Reference(ptr);
    return 0;
  }

  // Any other record is a plain value edge.
  return new Value(info);
}
775