1 // Copyright 2018 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "third_party/blink/renderer/platform/heap/unified_heap_controller.h"
6
7 #include "base/macros.h"
8 #include "third_party/blink/public/common/features.h"
9 #include "third_party/blink/renderer/platform/bindings/dom_wrapper_world.h"
10 #include "third_party/blink/renderer/platform/bindings/script_forbidden_scope.h"
11 #include "third_party/blink/renderer/platform/bindings/script_wrappable.h"
12 #include "third_party/blink/renderer/platform/bindings/wrapper_type_info.h"
13 #include "third_party/blink/renderer/platform/heap/heap.h"
14 #include "third_party/blink/renderer/platform/heap/heap_stats_collector.h"
15 #include "third_party/blink/renderer/platform/heap/marking_visitor.h"
16 #include "third_party/blink/renderer/platform/heap/thread_state.h"
17 #include "third_party/blink/renderer/platform/runtime_enabled_features.h"
18
19 namespace blink {
20
21 namespace {
22
ToBlinkGCStackState(v8::EmbedderHeapTracer::EmbedderStackState stack_state)23 constexpr BlinkGC::StackState ToBlinkGCStackState(
24 v8::EmbedderHeapTracer::EmbedderStackState stack_state) {
25 return stack_state == v8::EmbedderHeapTracer::EmbedderStackState::kEmpty
26 ? BlinkGC::kNoHeapPointersOnStack
27 : BlinkGC::kHeapPointersOnStack;
28 }
29
30 } // namespace
31
UnifiedHeapController(ThreadState * thread_state)32 UnifiedHeapController::UnifiedHeapController(ThreadState* thread_state)
33 : thread_state_(thread_state) {
34 thread_state->Heap().stats_collector()->RegisterObserver(this);
35 }
36
~UnifiedHeapController()37 UnifiedHeapController::~UnifiedHeapController() {
38 thread_state_->Heap().stats_collector()->UnregisterObserver(this);
39 }
40
// v8::EmbedderHeapTracer override. Called by V8 when a new unified (V8 +
// Blink) garbage-collection cycle begins. Finalizes any Blink incremental
// marking still in flight and then starts a fresh incremental marking cycle
// on the Blink heap so both heaps are traced together.
void UnifiedHeapController::TracePrologue(
    v8::EmbedderHeapTracer::TraceFlags v8_flags) {
  VLOG(2) << "UnifiedHeapController::TracePrologue";
  // Attribute the work below to V8's GC in the Blink heap statistics.
  ThreadHeapStatsCollector::BlinkGCInV8Scope nested_scope(
      thread_state_->Heap().stats_collector());

  // Be conservative here as a new garbage collection gets started right away.
  thread_state_->FinishIncrementalMarkingIfRunning(
      BlinkGC::CollectionType::kMajor, BlinkGC::kHeapPointersOnStack,
      BlinkGC::kIncrementalAndConcurrentMarking,
      BlinkGC::kConcurrentAndLazySweeping,
      thread_state_->current_gc_data_.reason);

  thread_state_->SetGCState(ThreadState::kNoGCScheduled);
  // Honor a memory-reduction request from V8 by selecting the corresponding
  // Blink GC reason.
  BlinkGC::GCReason gc_reason =
      (v8_flags & v8::EmbedderHeapTracer::TraceFlags::kReduceMemory)
          ? BlinkGC::GCReason::kUnifiedHeapForMemoryReductionGC
          : BlinkGC::GCReason::kUnifiedHeapGC;
  thread_state_->StartIncrementalMarking(gc_reason);

  // A new cycle just started; marking is not done until reported otherwise
  // via IsTracingDone().
  is_tracing_done_ = false;
}
63
// v8::EmbedderHeapTracer override. Called by V8 when the cycle enters its
// atomic (stop-the-world) pause. Runs Blink's atomic-pause marking prologue
// and re-scans roots, using the stack state V8 reports to decide whether a
// conservative stack scan is required.
void UnifiedHeapController::EnterFinalPause(EmbedderStackState stack_state) {
  VLOG(2) << "UnifiedHeapController::EnterFinalPause";
  // Attribute the work below to V8's GC in the Blink heap statistics.
  ThreadHeapStatsCollector::BlinkGCInV8Scope nested_scope(
      thread_state_->Heap().stats_collector());
  thread_state_->AtomicPauseMarkPrologue(
      BlinkGC::CollectionType::kMajor, ToBlinkGCStackState(stack_state),
      BlinkGC::kIncrementalAndConcurrentMarking,
      thread_state_->current_gc_data_.reason);
  thread_state_->AtomicPauseMarkRoots(ToBlinkGCStackState(stack_state),
                                      BlinkGC::kIncrementalAndConcurrentMarking,
                                      thread_state_->current_gc_data_.reason);
}
76
// v8::EmbedderHeapTracer override. Called by V8 at the end of the unified GC
// cycle. Finishes Blink-side marking, kicks off sweeping/compaction, and
// reports marking statistics back to V8 through |summary|.
void UnifiedHeapController::TraceEpilogue(
    v8::EmbedderHeapTracer::TraceSummary* summary) {
  VLOG(2) << "UnifiedHeapController::TraceEpilogue";
  {
    // Attribute the marking/sweeping work to V8's GC; the scope must end
    // before AtomicPauseEpilogue() below.
    ThreadHeapStatsCollector::BlinkGCInV8Scope nested_scope(
        thread_state_->Heap().stats_collector());
    thread_state_->AtomicPauseMarkEpilogue(
        BlinkGC::kIncrementalAndConcurrentMarking);
    thread_state_->AtomicPauseSweepAndCompact(
        BlinkGC::CollectionType::kMajor,
        BlinkGC::kIncrementalAndConcurrentMarking,
        BlinkGC::kConcurrentAndLazySweeping);

    ThreadHeapStatsCollector* const stats_collector =
        thread_state_->Heap().stats_collector();
    // Hand V8 the total bytes marked and the marking time of this cycle.
    summary->allocated_size =
        static_cast<size_t>(stats_collector->marked_bytes());
    summary->time = stats_collector->marking_time_so_far().InMillisecondsF();
    // Deltas buffered during the cycle are superseded by the full
    // marked-bytes figure reported above.
    buffered_allocated_size_ = 0;
  }
  thread_state_->AtomicPauseEpilogue();
}
99
// v8::EmbedderHeapTracer override. Receives the two internal fields of
// wrapper objects V8 discovered during its marking — (wrapper type info,
// wrappable) pairs — and traces the Blink-side objects they refer to.
void UnifiedHeapController::RegisterV8References(
    const std::vector<std::pair<void*, void*>>&
        internal_fields_of_potential_wrappers) {
  VLOG(2) << "UnifiedHeapController::RegisterV8References";
  DCHECK(thread_state()->IsMarkingInProgress());

  // Tracing below happens inside an atomic pause; enter one temporarily when
  // not already paused, and leave it again at the end.
  // NOTE(review): mixes ThreadState::Current() with thread_state_ —
  // presumably identical on this thread; confirm before relying on it.
  const bool was_in_atomic_pause = thread_state()->in_atomic_pause();
  if (!was_in_atomic_pause)
    ThreadState::Current()->EnterAtomicPause();
  for (const auto& internal_fields : internal_fields_of_potential_wrappers) {
    const WrapperTypeInfo* wrapper_type_info =
        reinterpret_cast<const WrapperTypeInfo*>(internal_fields.first);
    // Other gin embedders also store data in these fields; only Blink
    // wrappers are traced here.
    if (wrapper_type_info->gin_embedder != gin::GinEmbedder::kEmbedderBlink) {
      continue;
    }
    // New references were found, so marking cannot be considered finished.
    is_tracing_done_ = false;
    wrapper_type_info->Trace(thread_state_->CurrentVisitor(),
                             internal_fields.second);
  }
  if (!was_in_atomic_pause)
    ThreadState::Current()->LeaveAtomicPause();
}
122
// v8::EmbedderHeapTracer override. Performs Blink marking work until the
// absolute deadline |deadline_in_ms| (milliseconds since the base::TimeTicks
// origin). Returns true when Blink has no marking work left.
bool UnifiedHeapController::AdvanceTracing(double deadline_in_ms) {
  VLOG(2) << "UnifiedHeapController::AdvanceTracing";
  ThreadHeapStatsCollector::BlinkGCInV8Scope nested_scope(
      thread_state_->Heap().stats_collector());
  if (!thread_state_->in_atomic_pause()) {
    ThreadHeapStatsCollector::EnabledScope advance_tracing_scope(
        thread_state_->Heap().stats_collector(),
        ThreadHeapStatsCollector::kUnifiedMarkingStep);
    // V8 calls into embedder tracing from its own marking to ensure
    // progress. Oilpan will additionally schedule marking steps.
    ThreadState::AtomicPauseScope atomic_pause_scope(thread_state_);
    ScriptForbiddenScope script_forbidden_scope;
    // |deadline_in_ms| is interpreted as an absolute point in time.
    base::TimeTicks deadline =
        base::TimeTicks() + base::TimeDelta::FromMillisecondsD(deadline_in_ms);
    is_tracing_done_ = thread_state_->MarkPhaseAdvanceMarking(deadline);
    if (!is_tracing_done_) {
      thread_state_->RestartIncrementalMarkingIfPaused();
    }
    // With concurrent marking enabled, tracing is only done once the
    // concurrent markers are finished as well.
    if (base::FeatureList::IsEnabled(
            blink::features::kBlinkHeapConcurrentMarking)) {
      is_tracing_done_ =
          thread_state_->ConcurrentMarkingStep() && is_tracing_done_;
    }
    return is_tracing_done_;
  }
  // Inside the atomic pause: finish marking completely, regardless of the
  // deadline.
  thread_state_->AtomicPauseMarkTransitiveClosure();
  is_tracing_done_ = true;
  return true;
}
152
IsTracingDone()153 bool UnifiedHeapController::IsTracingDone() {
154 return is_tracing_done_;
155 }
156
IsRootForNonTracingGC(const v8::TracedReference<v8::Value> & handle)157 bool UnifiedHeapController::IsRootForNonTracingGC(
158 const v8::TracedReference<v8::Value>& handle) {
159 if (thread_state()->IsIncrementalMarking()) {
160 // We have a non-tracing GC while unified GC is in progress. Treat all
161 // objects as roots to avoid stale pointers in the marking worklists.
162 return true;
163 }
164 const uint16_t class_id = handle.WrapperClassId();
165 // Stand-alone reference or kCustomWrappableId. Keep as root as
166 // we don't know better.
167 if (class_id != WrapperTypeInfo::kNodeClassId &&
168 class_id != WrapperTypeInfo::kObjectClassId)
169 return true;
170
171 const v8::TracedReference<v8::Object>& traced =
172 handle.template As<v8::Object>();
173 if (ToWrapperTypeInfo(traced)->IsActiveScriptWrappable() &&
174 ToScriptWrappable(traced)->HasPendingActivity()) {
175 return true;
176 }
177
178 if (ToScriptWrappable(traced)->HasEventListeners()) {
179 return true;
180 }
181
182 return false;
183 }
184
ResetHandleInNonTracingGC(const v8::TracedReference<v8::Value> & handle)185 void UnifiedHeapController::ResetHandleInNonTracingGC(
186 const v8::TracedReference<v8::Value>& handle) {
187 const uint16_t class_id = handle.WrapperClassId();
188 // Only consider handles that have not been treated as roots, see
189 // IsRootForNonTracingGCInternal.
190 if (class_id != WrapperTypeInfo::kNodeClassId &&
191 class_id != WrapperTypeInfo::kObjectClassId)
192 return;
193
194 // We should not reset any handles during an already running tracing
195 // collection. Resetting a handle could re-allocate a backing or trigger
196 // potential in place rehashing. Both operations may trigger write barriers by
197 // moving references. Such references may already be dead but not yet cleared
198 // which would result in reporting dead objects to V8.
199 DCHECK(!thread_state()->IsIncrementalMarking());
200 // Clearing the wrapper below adjusts the DOM wrapper store which may
201 // re-allocate its backing. We have to avoid report memory to V8 as that may
202 // trigger GC during GC.
203 ThreadState::GCForbiddenScope gc_forbidden(thread_state());
204 const v8::TracedReference<v8::Object>& traced = handle.As<v8::Object>();
205 bool success = DOMWrapperWorld::UnsetSpecificWrapperIfSet(
206 ToScriptWrappable(traced), traced);
207 // Since V8 found a handle, Blink needs to find it as well when trying to
208 // remove it.
209 CHECK(success);
210 }
211
IsRootForNonTracingGC(const v8::TracedGlobal<v8::Value> & handle)212 bool UnifiedHeapController::IsRootForNonTracingGC(
213 const v8::TracedGlobal<v8::Value>& handle) {
214 CHECK(false) << "Blink does not use v8::TracedGlobal.";
215 return false;
216 }
217
ReportBufferedAllocatedSizeIfPossible()218 void UnifiedHeapController::ReportBufferedAllocatedSizeIfPossible() {
219 // Avoid reporting to V8 in the following conditions as that may trigger GC
220 // finalizations where not allowed.
221 // - Recursive sweeping.
222 // - GC forbidden scope.
223 if ((thread_state()->IsSweepingInProgress() &&
224 thread_state()->SweepForbidden()) ||
225 thread_state()->IsGCForbidden()) {
226 return;
227 }
228
229 if (buffered_allocated_size_ < 0) {
230 DecreaseAllocatedSize(static_cast<size_t>(-buffered_allocated_size_));
231 } else {
232 IncreaseAllocatedSize(static_cast<size_t>(buffered_allocated_size_));
233 }
234 buffered_allocated_size_ = 0;
235 }
236
IncreaseAllocatedObjectSize(size_t delta_bytes)237 void UnifiedHeapController::IncreaseAllocatedObjectSize(size_t delta_bytes) {
238 buffered_allocated_size_ += static_cast<int64_t>(delta_bytes);
239 ReportBufferedAllocatedSizeIfPossible();
240 }
241
DecreaseAllocatedObjectSize(size_t delta_bytes)242 void UnifiedHeapController::DecreaseAllocatedObjectSize(size_t delta_bytes) {
243 buffered_allocated_size_ -= static_cast<int64_t>(delta_bytes);
244 ReportBufferedAllocatedSizeIfPossible();
245 }
246
247 } // namespace blink
248