// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/cppgc-js/cpp-heap.h"

#include <cstdint>
#include <memory>
#include <numeric>

#include "include/cppgc/heap-consistency.h"
#include "include/cppgc/platform.h"
#include "include/v8-local-handle.h"
#include "include/v8-platform.h"
#include "src/base/logging.h"
#include "src/base/macros.h"
#include "src/base/platform/platform.h"
#include "src/base/platform/time.h"
#include "src/execution/isolate-inl.h"
#include "src/flags/flags.h"
#include "src/handles/handles.h"
#include "src/heap/base/stack.h"
#include "src/heap/cppgc-js/cpp-snapshot.h"
#include "src/heap/cppgc-js/unified-heap-marking-state.h"
#include "src/heap/cppgc-js/unified-heap-marking-verifier.h"
#include "src/heap/cppgc-js/unified-heap-marking-visitor.h"
#include "src/heap/cppgc/concurrent-marker.h"
#include "src/heap/cppgc/gc-info-table.h"
#include "src/heap/cppgc/heap-base.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/marking-state.h"
#include "src/heap/cppgc/marking-visitor.h"
#include "src/heap/cppgc/metric-recorder.h"
#include "src/heap/cppgc/object-allocator.h"
#include "src/heap/cppgc/prefinalizer-handler.h"
#include "src/heap/cppgc/raw-heap.h"
#include "src/heap/cppgc/stats-collector.h"
#include "src/heap/cppgc/sweeper.h"
#include "src/heap/embedder-tracing.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/marking-worklist.h"
#include "src/heap/sweeper.h"
#include "src/init/v8.h"
#include "src/profiler/heap-profiler.h"

namespace v8 {

// static
constexpr uint16_t WrapperDescriptor::kUnknownEmbedderId;

// static
std::unique_ptr<CppHeap> CppHeap::Create(v8::Platform* platform,
                                         const CppHeapCreateParams& params) {
  return std::make_unique<internal::CppHeap>(platform, params.custom_spaces,
                                             params.wrapper_descriptor);
}

cppgc::AllocationHandle& CppHeap::GetAllocationHandle() {
  return internal::CppHeap::From(this)->object_allocator();
}

cppgc::HeapHandle& CppHeap::GetHeapHandle() {
  return *internal::CppHeap::From(this);
}

void CppHeap::Terminate() { internal::CppHeap::From(this)->Terminate(); }

cppgc::HeapStatistics CppHeap::CollectStatistics(
    cppgc::HeapStatistics::DetailLevel detail_level) {
  return internal::CppHeap::From(this)->AsBase().CollectStatistics(
      detail_level);
}

void CppHeap::CollectCustomSpaceStatisticsAtLastGC(
    std::vector<cppgc::CustomSpaceIndex> custom_spaces,
    std::unique_ptr<CustomSpaceStatisticsReceiver> receiver) {
  return internal::CppHeap::From(this)->CollectCustomSpaceStatisticsAtLastGC(
      std::move(custom_spaces), std::move(receiver));
}

void CppHeap::EnableDetachedGarbageCollectionsForTesting() {
  return internal::CppHeap::From(this)
      ->EnableDetachedGarbageCollectionsForTesting();
}

void CppHeap::CollectGarbageForTesting(cppgc::EmbedderStackState stack_state) {
  return internal::CppHeap::From(this)->CollectGarbageForTesting(stack_state);
}

void JSHeapConsistency::DijkstraMarkingBarrierSlow(
    cppgc::HeapHandle& heap_handle, const TracedReferenceBase& ref) {
  auto& heap_base = cppgc::internal::HeapBase::From(heap_handle);
  static_cast<JSVisitor*>(&heap_base.marker()->Visitor())->Trace(ref);
}

void JSHeapConsistency::CheckWrapper(v8::Local<v8::Object>& wrapper,
                                     int wrapper_index, const void* wrappable) {
  CHECK_EQ(wrappable,
           wrapper->GetAlignedPointerFromInternalField(wrapper_index));
}

namespace internal {

namespace {

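// Adapter that exposes a v8::Platform through the cppgc::Platform interface,
// letting the C++ heap share V8's page allocator, clock, task runners, and
// job infrastructure.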
class CppgcPlatformAdapter final : public cppgc::Platform {
 public:
  explicit CppgcPlatformAdapter(v8::Platform* platform)
      : platform_(platform) {}

  CppgcPlatformAdapter(const CppgcPlatformAdapter&) = delete;
  CppgcPlatformAdapter& operator=(const CppgcPlatformAdapter&) = delete;

  PageAllocator* GetPageAllocator() final {
    return platform_->GetPageAllocator();
  }

  double MonotonicallyIncreasingTime() final {
    return platform_->MonotonicallyIncreasingTime();
  }

  std::shared_ptr<TaskRunner> GetForegroundTaskRunner() final {
    // If no Isolate has been set, there's no task runner to leverage for
    // foreground tasks. In detached mode the original platform handles the
    // task runner retrieval.
    if (!isolate_ && !is_in_detached_mode_) return nullptr;

    return platform_->GetForegroundTaskRunner(isolate_);
  }

  std::unique_ptr<JobHandle> PostJob(TaskPriority priority,
                                     std::unique_ptr<JobTask> job_task) final {
    return platform_->PostJob(priority, std::move(job_task));
  }

  TracingController* GetTracingController() override {
    return platform_->GetTracingController();
  }

  void SetIsolate(v8::Isolate* isolate) { isolate_ = isolate; }
  void EnableDetachedModeForTesting() { is_in_detached_mode_ = true; }

 private:
  v8::Platform* platform_;
  v8::Isolate* isolate_ = nullptr;
  bool is_in_detached_mode_ = false;
};

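// Concurrent marker that produces unified-heap marking visitors, so that
// concurrent marking jobs can trace references into the V8 heap as well.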
class UnifiedHeapConcurrentMarker
    : public cppgc::internal::ConcurrentMarkerBase {
 public:
  UnifiedHeapConcurrentMarker(
      cppgc::internal::HeapBase& heap,
      cppgc::internal::MarkingWorklists& marking_worklists,
      cppgc::internal::IncrementalMarkingSchedule& incremental_marking_schedule,
      cppgc::Platform* platform,
      UnifiedHeapMarkingState& unified_heap_marking_state)
      : cppgc::internal::ConcurrentMarkerBase(
            heap, marking_worklists, incremental_marking_schedule, platform),
        unified_heap_marking_state_(unified_heap_marking_state) {}

  std::unique_ptr<cppgc::Visitor> CreateConcurrentMarkingVisitor(
      cppgc::internal::ConcurrentMarkingState&) const final;

 private:
  UnifiedHeapMarkingState& unified_heap_marking_state_;
};

std::unique_ptr<cppgc::Visitor>
UnifiedHeapConcurrentMarker::CreateConcurrentMarkingVisitor(
    cppgc::internal::ConcurrentMarkingState& marking_state) const {
  return std::make_unique<ConcurrentUnifiedHeapMarkingVisitor>(
      heap(), marking_state, unified_heap_marking_state_);
}

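// Main-thread marker for the unified heap. It plugs unified-heap mutator and
// conservative visitors into cppgc's MarkerBase and installs the unified
// concurrent marker above.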
class UnifiedHeapMarker final : public cppgc::internal::MarkerBase {
 public:
  UnifiedHeapMarker(Key, Heap* v8_heap, cppgc::internal::HeapBase& cpp_heap,
                    cppgc::Platform* platform, MarkingConfig config);

  ~UnifiedHeapMarker() final = default;

  void AddObject(void*);

 protected:
  cppgc::Visitor& visitor() final { return marking_visitor_; }
  cppgc::internal::ConservativeTracingVisitor& conservative_visitor() final {
    return conservative_marking_visitor_;
  }
  ::heap::base::StackVisitor& stack_visitor() final {
    return conservative_marking_visitor_;
  }

 private:
  UnifiedHeapMarkingState unified_heap_marking_state_;
  MutatorUnifiedHeapMarkingVisitor marking_visitor_;
  cppgc::internal::ConservativeMarkingVisitor conservative_marking_visitor_;
};

UnifiedHeapMarker::UnifiedHeapMarker(Key key, Heap* v8_heap,
                                     cppgc::internal::HeapBase& heap,
                                     cppgc::Platform* platform,
                                     MarkingConfig config)
    : cppgc::internal::MarkerBase(key, heap, platform, config),
      unified_heap_marking_state_(v8_heap),
      marking_visitor_(heap, mutator_marking_state_,
                       unified_heap_marking_state_),
      conservative_marking_visitor_(heap, mutator_marking_state_,
                                    marking_visitor_) {
  concurrent_marker_ = std::make_unique<UnifiedHeapConcurrentMarker>(
      heap_, marking_worklists_, schedule_, platform_,
      unified_heap_marking_state_);
}

void UnifiedHeapMarker::AddObject(void* object) {
  mutator_marking_state_.MarkAndPush(
      cppgc::internal::HeapObjectHeader::FromObject(object));
}

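// Custom OOM handler that funnels cppgc allocation failures into V8's
// process-wide OOM handling, attributing them to the attached isolate.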
void FatalOutOfMemoryHandlerImpl(const std::string& reason,
                                 const SourceLocation&, HeapBase* heap) {
  FatalProcessOutOfMemory(
      reinterpret_cast<v8::internal::Isolate*>(
          static_cast<v8::internal::CppHeap*>(heap)->isolate()),
      reason.c_str());
}

}  // namespace

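// MetricRecorderAdapter forwards cppgc's GC events to V8's metrics recorder.
// Full-cycle events are stashed until V8's GC tracer extracts them;
// incremental steps are either batched or, when nested in a V8 marking step,
// delegated to V8 for reporting.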
void CppHeap::MetricRecorderAdapter::AddMainThreadEvent(
    const FullCycle& cppgc_event) {
  last_full_gc_event_ = cppgc_event;
  GetIsolate()->heap()->tracer()->NotifyGCCompleted();
}

void CppHeap::MetricRecorderAdapter::AddMainThreadEvent(
    const MainThreadIncrementalMark& cppgc_event) {
  // Incremental marking steps might be nested in V8 marking steps. In such
  // cases, stash the relevant values and delegate to V8 to report them. For
  // non-nested steps, report to the Recorder directly.
  if (cpp_heap_.is_in_v8_marking_step_) {
    last_incremental_mark_event_ = cppgc_event;
    return;
  }
  // This is a standalone incremental marking step.
  const std::shared_ptr<metrics::Recorder>& recorder =
      GetIsolate()->metrics_recorder();
  DCHECK_NOT_NULL(recorder);
  if (!recorder->HasEmbedderRecorder()) return;
  incremental_mark_batched_events_.events.emplace_back();
  incremental_mark_batched_events_.events.back().cpp_wall_clock_duration_in_us =
      cppgc_event.duration_us;
  // TODO(chromium:1154636): Populate event.wall_clock_duration_in_us.
  if (incremental_mark_batched_events_.events.size() == kMaxBatchedEvents) {
    recorder->AddMainThreadEvent(std::move(incremental_mark_batched_events_),
                                 GetContextId());
    incremental_mark_batched_events_ = {};
  }
}

void CppHeap::MetricRecorderAdapter::AddMainThreadEvent(
    const MainThreadIncrementalSweep& cppgc_event) {
  // Incremental sweeping steps are never nested inside V8 sweeping steps, so
  // report to the Recorder directly.
  const std::shared_ptr<metrics::Recorder>& recorder =
      GetIsolate()->metrics_recorder();
  DCHECK_NOT_NULL(recorder);
  if (!recorder->HasEmbedderRecorder()) return;
  incremental_sweep_batched_events_.events.emplace_back();
  incremental_sweep_batched_events_.events.back()
      .cpp_wall_clock_duration_in_us = cppgc_event.duration_us;
  // TODO(chromium:1154636): Populate event.wall_clock_duration_in_us.
  if (incremental_sweep_batched_events_.events.size() == kMaxBatchedEvents) {
    recorder->AddMainThreadEvent(std::move(incremental_sweep_batched_events_),
                                 GetContextId());
    incremental_sweep_batched_events_ = {};
  }
}

void CppHeap::MetricRecorderAdapter::FlushBatchedIncrementalEvents() {
  const std::shared_ptr<metrics::Recorder>& recorder =
      GetIsolate()->metrics_recorder();
  DCHECK_NOT_NULL(recorder);
  if (!incremental_mark_batched_events_.events.empty()) {
    recorder->AddMainThreadEvent(std::move(incremental_mark_batched_events_),
                                 GetContextId());
    incremental_mark_batched_events_ = {};
  }
  if (!incremental_sweep_batched_events_.events.empty()) {
    recorder->AddMainThreadEvent(std::move(incremental_sweep_batched_events_),
                                 GetContextId());
    incremental_sweep_batched_events_ = {};
  }
}

bool CppHeap::MetricRecorderAdapter::MetricsReportPending() const {
  return last_full_gc_event_.has_value();
}

const base::Optional<cppgc::internal::MetricRecorder::FullCycle>
CppHeap::MetricRecorderAdapter::ExtractLastFullGcEvent() {
  return std::move(last_full_gc_event_);
}

const base::Optional<cppgc::internal::MetricRecorder::MainThreadIncrementalMark>
CppHeap::MetricRecorderAdapter::ExtractLastIncrementalMarkEvent() {
  return std::move(last_incremental_mark_event_);
}

Isolate* CppHeap::MetricRecorderAdapter::GetIsolate() const {
  DCHECK_NOT_NULL(cpp_heap_.isolate());
  return reinterpret_cast<Isolate*>(cpp_heap_.isolate());
}

v8::metrics::Recorder::ContextId CppHeap::MetricRecorderAdapter::GetContextId()
    const {
  DCHECK_NOT_NULL(GetIsolate());
  if (GetIsolate()->context().is_null())
    return v8::metrics::Recorder::ContextId::Empty();
  HandleScope scope(GetIsolate());
  return GetIsolate()->GetOrRegisterRecorderContextId(
      GetIsolate()->native_context());
}

CppHeap::CppHeap(
    v8::Platform* platform,
    const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>& custom_spaces,
    const v8::WrapperDescriptor& wrapper_descriptor)
    : cppgc::internal::HeapBase(
          std::make_shared<CppgcPlatformAdapter>(platform), custom_spaces,
          cppgc::internal::HeapBase::StackSupport::
              kSupportsConservativeStackScan),
      wrapper_descriptor_(wrapper_descriptor) {
  CHECK_NE(WrapperDescriptor::kUnknownEmbedderId,
           wrapper_descriptor_.embedder_id_for_garbage_collected);
  // Enter no GC scope. `AttachIsolate()` removes this and allows triggering
  // garbage collections.
  no_gc_scope_++;
  stats_collector()->RegisterObserver(this);
}

CppHeap::~CppHeap() {
  if (isolate_) {
    isolate_->heap()->DetachCppHeap();
  }
}

void CppHeap::Terminate() {
  // Must not be attached to a heap when invoking termination GCs.
  CHECK(!isolate_);
  // Gracefully terminate the C++ heap invoking destructors.
  HeapBase::Terminate();
}

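// Attaches the C++ heap to a V8 isolate: wires up the platform adapter, heap
// profiler, embedder heap tracer, and metric recorder, then leaves the
// initial no-GC scope so that garbage collections may be triggered.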
void CppHeap::AttachIsolate(Isolate* isolate) {
  CHECK(!in_detached_testing_mode_);
  CHECK_NULL(isolate_);
  isolate_ = isolate;
  static_cast<CppgcPlatformAdapter*>(platform())
      ->SetIsolate(reinterpret_cast<v8::Isolate*>(isolate_));
  if (isolate_->heap_profiler()) {
    isolate_->heap_profiler()->AddBuildEmbedderGraphCallback(
        &CppGraphBuilder::Run, this);
  }
  isolate_->heap()->SetEmbedderHeapTracer(this);
  isolate_->heap()->local_embedder_heap_tracer()->SetWrapperDescriptor(
      wrapper_descriptor_);
  SetMetricRecorder(std::make_unique<MetricRecorderAdapter>(*this));
  SetStackStart(base::Stack::GetStackStart());
  oom_handler().SetCustomHandler(&FatalOutOfMemoryHandlerImpl);
  no_gc_scope_--;
}

void CppHeap::DetachIsolate() {
  // TODO(chromium:1056170): Investigate whether this can be enforced with a
  // CHECK across all relevant embedders and setups.
  if (!isolate_) return;

  // Delegate to existing EmbedderHeapTracer API to finish any ongoing garbage
  // collection.
  FinalizeTracing();
  sweeper_.FinishIfRunning();

  if (isolate_->heap_profiler()) {
    isolate_->heap_profiler()->RemoveBuildEmbedderGraphCallback(
        &CppGraphBuilder::Run, this);
  }
  SetMetricRecorder(nullptr);
  isolate_ = nullptr;
  // Any future garbage collections will ignore the V8->C++ references.
  isolate()->SetEmbedderHeapTracer(nullptr);
  oom_handler().SetCustomHandler(nullptr);
  // Enter no GC scope.
  no_gc_scope_++;
}

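// EmbedderHeapTracer interface: receives wrapper objects discovered by V8
// during marking and pushes their wrappables onto the C++ marking worklist.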
void CppHeap::RegisterV8References(
    const std::vector<std::pair<void*, void*>>& embedder_fields) {
  DCHECK(marker_);
  for (auto& tuple : embedder_fields) {
    // First field points to type.
    // Second field points to object.
    static_cast<UnifiedHeapMarker*>(marker_.get())->AddObject(tuple.second);
  }
  marking_done_ = false;
}

namespace {

bool ShouldReduceMemory(CppHeap::TraceFlags flags) {
  return (flags == CppHeap::TraceFlags::kReduceMemory) ||
         (flags == CppHeap::TraceFlags::kForced);
}

}  // namespace

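// EmbedderHeapTracer interface: starts a unified-heap marking cycle. Forced
// GCs mark atomically (unless incremental marking is forced for testing);
// memory-reducing GCs additionally enable compaction.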
void CppHeap::TracePrologue(TraceFlags flags) {
  CHECK(!sweeper_.IsSweepingInProgress());

  current_flags_ = flags;
  const UnifiedHeapMarker::MarkingConfig marking_config{
      UnifiedHeapMarker::MarkingConfig::CollectionType::kMajor,
      cppgc::Heap::StackState::kNoHeapPointers,
      ((current_flags_ & TraceFlags::kForced) &&
       !force_incremental_marking_for_testing_)
          ? UnifiedHeapMarker::MarkingConfig::MarkingType::kAtomic
          : UnifiedHeapMarker::MarkingConfig::MarkingType::
                kIncrementalAndConcurrent,
      flags & TraceFlags::kForced
          ? UnifiedHeapMarker::MarkingConfig::IsForcedGC::kForced
          : UnifiedHeapMarker::MarkingConfig::IsForcedGC::kNotForced};
  DCHECK_IMPLIES(!isolate_, (cppgc::Heap::MarkingType::kAtomic ==
                             marking_config.marking_type) ||
                                force_incremental_marking_for_testing_);
  if (ShouldReduceMemory(flags)) {
    // Only enable compaction when in a memory reduction garbage collection as
    // it may significantly increase the final garbage collection pause.
    compactor_.InitializeIfShouldCompact(marking_config.marking_type,
                                         marking_config.stack_state);
  }
  marker_ =
      cppgc::internal::MarkerFactory::CreateAndStartMarking<UnifiedHeapMarker>(
          isolate_ ? isolate_->heap() : nullptr, AsBase(), platform_.get(),
          marking_config);
  marking_done_ = false;
}

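// Performs a single marking step. In the atomic pause marking must finish,
// so both the deadline and the marked-bytes limit are effectively unbounded;
// otherwise the step is bounded by the deadline that V8 passes in (a
// marked-bytes limit of 0 defers to cppgc's incremental marking schedule).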
bool CppHeap::AdvanceTracing(double deadline_in_ms) {
  is_in_v8_marking_step_ = true;
  cppgc::internal::StatsCollector::EnabledScope stats_scope(
      stats_collector(),
      in_atomic_pause_ ? cppgc::internal::StatsCollector::kAtomicMark
                       : cppgc::internal::StatsCollector::kIncrementalMark);
  const v8::base::TimeDelta deadline =
      in_atomic_pause_ ? v8::base::TimeDelta::Max()
                       : v8::base::TimeDelta::FromMillisecondsD(deadline_in_ms);
  const size_t marked_bytes_limit = in_atomic_pause_ ? SIZE_MAX : 0;
  // TODO(chromium:1056170): Replace when unified heap transitions to
  // bytes-based deadline.
  marking_done_ =
      marker_->AdvanceMarkingWithLimits(deadline, marked_bytes_limit);
  DCHECK_IMPLIES(in_atomic_pause_, marking_done_);
  is_in_v8_marking_step_ = false;
  return marking_done_;
}

bool CppHeap::IsTracingDone() { return marking_done_; }

void CppHeap::EnterFinalPause(EmbedderStackState stack_state) {
  CHECK(!in_disallow_gc_scope());
  in_atomic_pause_ = true;
  if (override_stack_state_) {
    stack_state = *override_stack_state_;
  }
  marker_->EnterAtomicPause(stack_state);
  if (compactor_.CancelIfShouldNotCompact(cppgc::Heap::MarkingType::kAtomic,
                                          stack_state)) {
    marker_->NotifyCompactionCancelled();
  }
}

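// Finalizes marking and transitions into sweeping. Prefinalizers run after
// the atomic pause is left; with CPPGC_VERIFY_HEAP the marking verifier runs
// before sweeping starts. Forced GCs also finalize sweeping atomically.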
void CppHeap::TraceEpilogue(TraceSummary* trace_summary) {
  CHECK(in_atomic_pause_);
  CHECK(marking_done_);
  {
    cppgc::subtle::DisallowGarbageCollectionScope disallow_gc_scope(*this);
    marker_->LeaveAtomicPause();
  }
  marker_.reset();
  if (isolate_) {
    auto* tracer = isolate_->heap()->local_embedder_heap_tracer();
    DCHECK_NOT_NULL(tracer);
    tracer->UpdateRemoteStats(
        stats_collector_->marked_bytes(),
        stats_collector_->marking_time().InMillisecondsF());
  }
  // The allocated bytes counter in v8 was reset to the current marked bytes,
  // so any pending allocated bytes updates should be discarded.
  buffered_allocated_bytes_ = 0;
  const size_t bytes_allocated_in_prefinalizers = ExecutePreFinalizers();
#if CPPGC_VERIFY_HEAP
  UnifiedHeapMarkingVerifier verifier(*this);
  verifier.Run(
      stack_state_of_prev_gc(), stack_end_of_current_gc(),
      stats_collector()->marked_bytes() + bytes_allocated_in_prefinalizers);
#endif  // CPPGC_VERIFY_HEAP
  USE(bytes_allocated_in_prefinalizers);

  {
    cppgc::subtle::NoGarbageCollectionScope no_gc(*this);
    cppgc::internal::Sweeper::SweepingConfig::CompactableSpaceHandling
        compactable_space_handling = compactor_.CompactSpacesIfEnabled();
    const cppgc::internal::Sweeper::SweepingConfig sweeping_config{
        // In case the GC was forced, also finalize sweeping right away.
        current_flags_ & TraceFlags::kForced
            ? cppgc::internal::Sweeper::SweepingConfig::SweepingType::kAtomic
            : cppgc::internal::Sweeper::SweepingConfig::SweepingType::
                  kIncrementalAndConcurrent,
        compactable_space_handling,
        ShouldReduceMemory(current_flags_)
            ? cppgc::internal::Sweeper::SweepingConfig::FreeMemoryHandling::
                  kDiscardWherePossible
            : cppgc::internal::Sweeper::SweepingConfig::FreeMemoryHandling::
                  kDoNotDiscard};
    DCHECK_IMPLIES(
        !isolate_,
        cppgc::internal::Sweeper::SweepingConfig::SweepingType::kAtomic ==
            sweeping_config.sweeping_type);
    sweeper().Start(sweeping_config);
  }
  DCHECK_NOT_NULL(trace_summary);
  trace_summary->allocated_size = SIZE_MAX;
  trace_summary->time = 0;
  in_atomic_pause_ = false;
  sweeper().NotifyDoneIfNeeded();
}

void CppHeap::AllocatedObjectSizeIncreased(size_t bytes) {
  buffered_allocated_bytes_ += static_cast<int64_t>(bytes);
  ReportBufferedAllocationSizeIfPossible();
}

void CppHeap::AllocatedObjectSizeDecreased(size_t bytes) {
  buffered_allocated_bytes_ -= static_cast<int64_t>(bytes);
  ReportBufferedAllocationSizeIfPossible();
}

void CppHeap::ReportBufferedAllocationSizeIfPossible() {
  // Avoid reporting to V8 in the following conditions as that may trigger GC
  // finalizations where not allowed.
  // - Recursive sweeping.
  // - GC forbidden scope.
  if (sweeper().IsSweepingOnMutatorThread() || in_no_gc_scope()) {
    return;
  }

  // The calls below may trigger full GCs that are synchronous and also execute
  // epilogue callbacks. Since such callbacks may allocate, the counter must
  // already be zeroed by that time.
  const int64_t bytes_to_report = buffered_allocated_bytes_;
  buffered_allocated_bytes_ = 0;

  if (bytes_to_report < 0) {
    DecreaseAllocatedSize(static_cast<size_t>(-bytes_to_report));
  } else {
    IncreaseAllocatedSize(static_cast<size_t>(bytes_to_report));
  }
}

void CppHeap::CollectGarbageForTesting(
    cppgc::internal::GarbageCollector::Config::StackState stack_state) {
  if (in_no_gc_scope()) return;

  // Finish sweeping in case it is still running.
  sweeper().FinishIfRunning();

  SetStackEndOfCurrentGC(v8::base::Stack::GetCurrentStackPosition());

  if (isolate_) {
    // Go through EmbedderHeapTracer API and perform a unified heap collection.
    GarbageCollectionForTesting(stack_state);
  } else {
    // Perform an atomic GC: start incremental/concurrent marking if needed,
    // then immediately finalize the garbage collection.
    if (!IsMarking()) TracePrologue(TraceFlags::kForced);
    EnterFinalPause(stack_state);
    AdvanceTracing(std::numeric_limits<double>::infinity());
    TraceSummary trace_summary;
    TraceEpilogue(&trace_summary);
    DCHECK_EQ(SIZE_MAX, trace_summary.allocated_size);
  }
}

void CppHeap::EnableDetachedGarbageCollectionsForTesting() {
  CHECK(!in_detached_testing_mode_);
  CHECK_NULL(isolate_);
  no_gc_scope_--;
  in_detached_testing_mode_ = true;
  static_cast<CppgcPlatformAdapter*>(platform())
      ->EnableDetachedModeForTesting();
}

void CppHeap::StartIncrementalGarbageCollectionForTesting() {
  DCHECK(!in_no_gc_scope());
  DCHECK_NULL(isolate_);
  if (IsMarking()) return;
  force_incremental_marking_for_testing_ = true;
  TracePrologue(TraceFlags::kForced);
  force_incremental_marking_for_testing_ = false;
}

void CppHeap::FinalizeIncrementalGarbageCollectionForTesting(
    EmbedderStackState stack_state) {
  DCHECK(!in_no_gc_scope());
  DCHECK_NULL(isolate_);
  DCHECK(IsMarking());
  if (IsMarking()) {
    CollectGarbageForTesting(stack_state);
  }
  sweeper_.FinishIfRunning();
}


namespace {

void ReportCustomSpaceStatistics(
    cppgc::internal::RawHeap& raw_heap,
    std::vector<cppgc::CustomSpaceIndex> custom_spaces,
    std::unique_ptr<CustomSpaceStatisticsReceiver> receiver) {
  for (auto custom_space_index : custom_spaces) {
    const cppgc::internal::BaseSpace* space =
        raw_heap.CustomSpace(custom_space_index);
    // Use a size_t accumulator; an `int` literal here would make
    // std::accumulate sum (and potentially truncate) in `int`.
    size_t allocated_bytes = std::accumulate(
        space->begin(), space->end(), size_t{0}, [](size_t sum, auto* page) {
          return sum + page->AllocatedBytesAtLastGC();
        });
    receiver->AllocatedBytes(custom_space_index, allocated_bytes);
  }
}

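// Task that defers reporting until sweeping has finished. Each invocation
// sweeps on the mutator thread for a bounded step; if sweeping is still in
// progress afterwards, the task re-posts itself with a small delay.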
class CollectCustomSpaceStatisticsAtLastGCTask final : public v8::Task {
 public:
  static constexpr v8::base::TimeDelta kTaskDelayMs =
      v8::base::TimeDelta::FromMilliseconds(10);

  CollectCustomSpaceStatisticsAtLastGCTask(
      cppgc::internal::HeapBase& heap,
      std::vector<cppgc::CustomSpaceIndex> custom_spaces,
      std::unique_ptr<CustomSpaceStatisticsReceiver> receiver)
      : heap_(heap),
        custom_spaces_(std::move(custom_spaces)),
        receiver_(std::move(receiver)) {}

  void Run() final {
    cppgc::internal::Sweeper& sweeper = heap_.sweeper();
    if (sweeper.PerformSweepOnMutatorThread(
            heap_.platform()->MonotonicallyIncreasingTime() +
            kStepSizeMs.InSecondsF())) {
      // Sweeping is done.
      DCHECK(!sweeper.IsSweepingInProgress());
      ReportCustomSpaceStatistics(heap_.raw_heap(), std::move(custom_spaces_),
                                  std::move(receiver_));
    } else {
      heap_.platform()->GetForegroundTaskRunner()->PostDelayedTask(
          std::make_unique<CollectCustomSpaceStatisticsAtLastGCTask>(
              heap_, std::move(custom_spaces_), std::move(receiver_)),
          kTaskDelayMs.InSecondsF());
    }
  }

 private:
  static constexpr v8::base::TimeDelta kStepSizeMs =
      v8::base::TimeDelta::FromMilliseconds(5);

  cppgc::internal::HeapBase& heap_;
  std::vector<cppgc::CustomSpaceIndex> custom_spaces_;
  std::unique_ptr<CustomSpaceStatisticsReceiver> receiver_;
};

constexpr v8::base::TimeDelta
    CollectCustomSpaceStatisticsAtLastGCTask::kTaskDelayMs;
constexpr v8::base::TimeDelta
    CollectCustomSpaceStatisticsAtLastGCTask::kStepSizeMs;

}  // namespace


void CppHeap::CollectCustomSpaceStatisticsAtLastGC(
    std::vector<cppgc::CustomSpaceIndex> custom_spaces,
    std::unique_ptr<CustomSpaceStatisticsReceiver> receiver) {
  if (sweeper().IsSweepingInProgress()) {
    platform()->GetForegroundTaskRunner()->PostDelayedTask(
        std::make_unique<CollectCustomSpaceStatisticsAtLastGCTask>(
            AsBase(), std::move(custom_spaces), std::move(receiver)),
        CollectCustomSpaceStatisticsAtLastGCTask::kTaskDelayMs.InSecondsF());
    return;
  }
  ReportCustomSpaceStatistics(raw_heap(), std::move(custom_spaces),
                              std::move(receiver));
}

CppHeap::MetricRecorderAdapter* CppHeap::GetMetricRecorder() const {
  return static_cast<MetricRecorderAdapter*>(
      stats_collector_->GetMetricRecorder());
}

void CppHeap::FinishSweepingIfRunning() { sweeper_.FinishIfRunning(); }

}  // namespace internal
}  // namespace v8