// Copyright 2009-2010 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/profiler/heap-profiler.h"

#include "src/api/api-inl.h"
#include "src/debug/debug.h"
#include "src/heap/combined-heap.h"
#include "src/heap/heap-inl.h"
#include "src/profiler/allocation-tracker.h"
#include "src/profiler/heap-snapshot-generator-inl.h"
#include "src/profiler/sampling-heap-profiler.h"

namespace v8 {
namespace internal {

HeapProfiler::HeapProfiler(Heap* heap)
    : ids_(new HeapObjectsMap(heap)),
      names_(new StringsStorage()),
      is_tracking_object_moves_(false) {}

HeapProfiler::~HeapProfiler() = default;

void HeapProfiler::DeleteAllSnapshots() {
  snapshots_.clear();
  MaybeClearStringsStorage();
}

void HeapProfiler::MaybeClearStringsStorage() {
  if (snapshots_.empty() && !sampling_heap_profiler_ && !allocation_tracker_) {
    names_.reset(new StringsStorage());
  }
}

void HeapProfiler::RemoveSnapshot(HeapSnapshot* snapshot) {
  snapshots_.erase(
      std::find_if(snapshots_.begin(), snapshots_.end(),
                   [&](const std::unique_ptr<HeapSnapshot>& entry) {
                     return entry.get() == snapshot;
                   }));
}

void HeapProfiler::AddBuildEmbedderGraphCallback(
    v8::HeapProfiler::BuildEmbedderGraphCallback callback, void* data) {
  build_embedder_graph_callbacks_.push_back({callback, data});
}

void HeapProfiler::RemoveBuildEmbedderGraphCallback(
    v8::HeapProfiler::BuildEmbedderGraphCallback callback, void* data) {
  auto it = std::find(build_embedder_graph_callbacks_.begin(),
                      build_embedder_graph_callbacks_.end(),
                      std::make_pair(callback, data));
  if (it != build_embedder_graph_callbacks_.end())
    build_embedder_graph_callbacks_.erase(it);
}

void HeapProfiler::BuildEmbedderGraph(Isolate* isolate,
                                      v8::EmbedderGraph* graph) {
  for (const auto& cb : build_embedder_graph_callbacks_) {
    cb.first(reinterpret_cast<v8::Isolate*>(isolate), graph, cb.second);
  }
}

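// Generates a full heap snapshot. On success the snapshot is stored in and
// owned by |snapshots_|; if generation fails or is aborted via |control|,
// the partially built snapshot is discarded and nullptr is returned. Taking
// a snapshot also enables object-move tracking so that object ids stay
// stable across GCs.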
HeapSnapshot* HeapProfiler::TakeSnapshot(
    v8::ActivityControl* control,
    v8::HeapProfiler::ObjectNameResolver* resolver,
    bool treat_global_objects_as_roots) {
  HeapSnapshot* result = new HeapSnapshot(this, treat_global_objects_as_roots);
  {
    HeapSnapshotGenerator generator(result, control, resolver, heap());
    if (!generator.GenerateSnapshot()) {
      delete result;
      result = nullptr;
    } else {
      snapshots_.emplace_back(result);
    }
  }
  ids_->RemoveDeadEntries();
  is_tracking_object_moves_ = true;

  heap()->isolate()->debug()->feature_tracker()->Track(
      DebugFeatureTracker::kHeapSnapshot);

  return result;
}

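// Starts the sampling heap profiler with the given average sample interval
// (in bytes) and maximum stack depth. Only one sampling profiler can be
// active at a time; returns false if one is already running.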
bool HeapProfiler::StartSamplingHeapProfiler(
    uint64_t sample_interval, int stack_depth,
    v8::HeapProfiler::SamplingFlags flags) {
  if (sampling_heap_profiler_.get()) {
    return false;
  }
  sampling_heap_profiler_.reset(new SamplingHeapProfiler(
      heap(), names_.get(), sample_interval, stack_depth, flags));
  return true;
}

void HeapProfiler::StopSamplingHeapProfiler() {
  sampling_heap_profiler_.reset();
  MaybeClearStringsStorage();
}

v8::AllocationProfile* HeapProfiler::GetAllocationProfile() {
  if (sampling_heap_profiler_.get()) {
    return sampling_heap_profiler_->GetAllocationProfile();
  } else {
    return nullptr;
  }
}

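// Begins tracking heap object ids across GCs. If |track_allocations| is
// true, an AllocationTracker is installed as well and the profiler registers
// itself with the heap to receive allocation events.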
void HeapProfiler::StartHeapObjectsTracking(bool track_allocations) {
  ids_->UpdateHeapObjectsMap();
  is_tracking_object_moves_ = true;
  DCHECK(!allocation_tracker_);
  if (track_allocations) {
    allocation_tracker_.reset(new AllocationTracker(ids_.get(), names_.get()));
    heap()->AddHeapObjectAllocationTracker(this);
    heap()->isolate()->debug()->feature_tracker()->Track(
        DebugFeatureTracker::kAllocationTracking);
  }
}

SnapshotObjectId HeapProfiler::PushHeapObjectsStats(OutputStream* stream,
                                                    int64_t* timestamp_us) {
  return ids_->PushHeapObjectsStats(stream, timestamp_us);
}

void HeapProfiler::StopHeapObjectsTracking() {
  ids_->StopHeapObjectsTracking();
  if (allocation_tracker_) {
    allocation_tracker_.reset();
    MaybeClearStringsStorage();
    heap()->RemoveHeapObjectAllocationTracker(this);
  }
}

int HeapProfiler::GetSnapshotsCount() {
  return static_cast<int>(snapshots_.size());
}

HeapSnapshot* HeapProfiler::GetSnapshot(int index) {
  return snapshots_.at(index).get();
}

SnapshotObjectId HeapProfiler::GetSnapshotObjectId(Handle<Object> obj) {
  if (!obj->IsHeapObject())
    return v8::HeapProfiler::kUnknownObjectId;
  return ids_->FindEntry(HeapObject::cast(*obj).address());
}

SnapshotObjectId HeapProfiler::GetSnapshotObjectId(NativeObject obj) {
  // Try to find the id of a regular native node first.
  SnapshotObjectId id = ids_->FindEntry(reinterpret_cast<Address>(obj));
  // If no id has been found, check whether there exists an entry where the
  // native object has been merged into a V8 entry.
  if (id == v8::HeapProfiler::kUnknownObjectId) {
    id = ids_->FindMergedNativeEntry(obj);
  }
  return id;
}

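// Invoked by the GC whenever an object is relocated. Keeps the id map (and,
// for objects unknown to the map, the allocation tracker's traces) in sync
// with the new address. The mutex guards against concurrent move events.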
void HeapProfiler::ObjectMoveEvent(Address from, Address to, int size) {
  base::MutexGuard guard(&profiler_mutex_);
  bool known_object = ids_->MoveObject(from, to, size);
  if (!known_object && allocation_tracker_) {
    allocation_tracker_->address_to_trace()->MoveObject(from, to, size);
  }
}

void HeapProfiler::AllocationEvent(Address addr, int size) {
  DisallowHeapAllocation no_allocation;
  if (allocation_tracker_) {
    allocation_tracker_->AllocationEvent(addr, size);
  }
}

void HeapProfiler::UpdateObjectSizeEvent(Address addr, int size) {
  ids_->UpdateObjectSize(addr, size);
}

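// Maps a snapshot object id back to a live heap object by scanning the
// combined (read-only plus main) heap. Returns an empty handle if no
// reachable object currently has that id.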
Handle<HeapObject> HeapProfiler::FindHeapObjectById(SnapshotObjectId id) {
  HeapObject object;
  CombinedHeapObjectIterator iterator(heap(),
                                      HeapObjectIterator::kFilterUnreachable);
  // Make sure that the object with the given id is still reachable.
  for (HeapObject obj = iterator.Next(); !obj.is_null();
       obj = iterator.Next()) {
    if (ids_->FindEntry(obj.address()) == id) {
      DCHECK(object.is_null());
      object = obj;
      // Can't break -- kFilterUnreachable requires full heap traversal.
    }
  }

  return !object.is_null() ? Handle<HeapObject>(object, isolate())
                           : Handle<HeapObject>();
}

void HeapProfiler::ClearHeapObjectMap() {
  ids_.reset(new HeapObjectsMap(heap()));
  if (!allocation_tracker_) is_tracking_object_moves_ = false;
}

Heap* HeapProfiler::heap() const { return ids_->heap(); }

Isolate* HeapProfiler::isolate() const { return heap()->isolate(); }

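// Collects all live JSObjects matching |predicate|. Feedback vector slots
// are cleared and a full GC is forced first so that the result reflects only
// objects that are still live.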
void HeapProfiler::QueryObjects(Handle<Context> context,
                                debug::QueryObjectPredicate* predicate,
                                PersistentValueVector<v8::Object>* objects) {
  {
    CombinedHeapObjectIterator function_heap_iterator(
        heap(), HeapObjectIterator::kFilterUnreachable);
    for (HeapObject heap_obj = function_heap_iterator.Next();
         !heap_obj.is_null(); heap_obj = function_heap_iterator.Next()) {
      if (heap_obj.IsFeedbackVector()) {
        FeedbackVector::cast(heap_obj).ClearSlots(isolate());
      }
    }
  }
  // We should return accurate information about live objects, so we need to
  // collect all garbage first.
  heap()->CollectAllAvailableGarbage(GarbageCollectionReason::kHeapProfiler);
  CombinedHeapObjectIterator heap_iterator(
      heap(), HeapObjectIterator::kFilterUnreachable);
  for (HeapObject heap_obj = heap_iterator.Next(); !heap_obj.is_null();
       heap_obj = heap_iterator.Next()) {
    if (!heap_obj.IsJSObject() || heap_obj.IsExternal(isolate())) continue;
    v8::Local<v8::Object> v8_obj(
        Utils::ToLocal(handle(JSObject::cast(heap_obj), isolate())));
    if (!predicate->Filter(v8_obj)) continue;
    objects->Append(v8_obj);
  }
}

}  // namespace internal
}  // namespace v8