// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/gc-tracer.h"

#include <cstdarg>

#include "include/v8-metrics.h"
#include "src/base/atomic-utils.h"
#include "src/base/strings.h"
#include "src/common/globals.h"
#include "src/execution/isolate.h"
#include "src/execution/thread-id.h"
#include "src/heap/cppgc-js/cpp-heap.h"
#include "src/heap/cppgc/metric-recorder.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/spaces.h"
#include "src/logging/counters.h"
#include "src/logging/metrics.h"
#include "src/logging/tracing-flags.h"
#include "src/tracing/tracing-category-observer.h"

namespace v8 {
namespace internal {

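// Returns the total size of "holes": memory on pages of the paged spaces
// that is wasted or still available, i.e. not occupied by live objects.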
static size_t CountTotalHolesSize(Heap* heap) {
  size_t holes_size = 0;
  PagedSpaceIterator spaces(heap);
  for (PagedSpace* space = spaces.Next(); space != nullptr;
       space = spaces.Next()) {
    DCHECK_GE(holes_size + space->Waste() + space->Available(), holes_size);
    holes_size += space->Waste() + space->Available();
  }
  return holes_size;
}

#ifdef V8_RUNTIME_CALL_STATS
WorkerThreadRuntimeCallStats* GCTracer::worker_thread_runtime_call_stats() {
  return heap_->isolate()->counters()->worker_thread_runtime_call_stats();
}

RuntimeCallCounterId GCTracer::RCSCounterFromScope(Scope::ScopeId id) {
  STATIC_ASSERT(Scope::FIRST_SCOPE == Scope::MC_INCREMENTAL);
  return static_cast<RuntimeCallCounterId>(
      static_cast<int>(RuntimeCallCounterId::kGC_MC_INCREMENTAL) +
      static_cast<int>(id));
}
#endif  // defined(V8_RUNTIME_CALL_STATS)

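// With --predictable, time is routed through the heap so that it stays
// deterministic across runs; otherwise the real monotonic clock is sampled.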
double GCTracer::MonotonicallyIncreasingTimeInMs() {
  if (V8_UNLIKELY(FLAG_predictable)) {
    return heap_->MonotonicallyIncreasingTimeInMs();
  } else {
    return base::TimeTicks::Now().ToInternalValue() /
           static_cast<double>(base::Time::kMicrosecondsPerMillisecond);
  }
}

CollectionEpoch GCTracer::CurrentEpoch(Scope::ScopeId scope_id) {
  if (Scope::NeedsYoungEpoch(scope_id)) {
    return heap_->epoch_young();
  } else {
    return heap_->epoch_full();
  }
}

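// A Scope times a single tracer phase: the constructor records the start
// time (and enters the matching RuntimeCallStats counter when runtime stats
// are enabled); the destructor attributes the elapsed time to the scope.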
GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind)
    : tracer_(tracer), scope_(scope), thread_kind_(thread_kind) {
  start_time_ = tracer_->MonotonicallyIncreasingTimeInMs();
#ifdef V8_RUNTIME_CALL_STATS
  if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
  if (thread_kind_ == ThreadKind::kMain) {
    DCHECK_EQ(tracer_->heap_->isolate()->thread_id(), ThreadId::Current());
    runtime_stats_ =
        tracer_->heap_->isolate()->counters()->runtime_call_stats();
    runtime_stats_->Enter(&timer_, GCTracer::RCSCounterFromScope(scope));
  } else {
    runtime_call_stats_scope_.emplace(
        tracer->worker_thread_runtime_call_stats());
    runtime_stats_ = runtime_call_stats_scope_->Get();
    runtime_stats_->Enter(&timer_, GCTracer::RCSCounterFromScope(scope));
  }
#endif  // defined(V8_RUNTIME_CALL_STATS)
}

GCTracer::Scope::~Scope() {
  double duration_ms = tracer_->MonotonicallyIncreasingTimeInMs() - start_time_;

  if (thread_kind_ == ThreadKind::kMain) {
    DCHECK_EQ(tracer_->heap_->isolate()->thread_id(), ThreadId::Current());
    tracer_->AddScopeSample(scope_, duration_ms);
    if (scope_ == ScopeId::MC_INCREMENTAL ||
        scope_ == ScopeId::MC_INCREMENTAL_START ||
        scope_ == ScopeId::MC_INCREMENTAL_FINALIZE) {
      auto* long_task_stats =
          tracer_->heap_->isolate()->GetCurrentLongTaskStats();
      long_task_stats->gc_full_incremental_wall_clock_duration_us +=
          static_cast<int64_t>(duration_ms *
                               base::Time::kMicrosecondsPerMillisecond);
    }
  } else {
    tracer_->AddScopeSampleBackground(scope_, duration_ms);
  }

#ifdef V8_RUNTIME_CALL_STATS
  if (V8_LIKELY(runtime_stats_ == nullptr)) return;
  runtime_stats_->Leave(&timer_);
#endif  // defined(V8_RUNTIME_CALL_STATS)
}

const char* GCTracer::Scope::Name(ScopeId id) {
#define CASE(scope)  \
  case Scope::scope: \
    return "V8.GC_" #scope;
  switch (id) {
    TRACER_SCOPES(CASE)
    TRACER_BACKGROUND_SCOPES(CASE)
    case Scope::NUMBER_OF_SCOPES:
      break;
  }
#undef CASE
  UNREACHABLE();
}

bool GCTracer::Scope::NeedsYoungEpoch(ScopeId id) {
#define CASE(scope)  \
  case Scope::scope: \
    return true;
  switch (id) {
    TRACER_YOUNG_EPOCH_SCOPES(CASE)
    default:
      return false;
  }
#undef CASE
  UNREACHABLE();
}

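// An Event aggregates the statistics of one GC cycle. All sizes and
// durations start out zeroed and are filled in as the cycle progresses.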
GCTracer::Event::Event(Type type, GarbageCollectionReason gc_reason,
                       const char* collector_reason)
    : type(type),
      gc_reason(gc_reason),
      collector_reason(collector_reason),
      start_time(0.0),
      end_time(0.0),
      reduce_memory(false),
      start_object_size(0),
      end_object_size(0),
      start_memory_size(0),
      end_memory_size(0),
      start_holes_size(0),
      end_holes_size(0),
      young_object_size(0),
      survived_young_object_size(0),
      incremental_marking_bytes(0),
      incremental_marking_duration(0.0) {
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    scopes[i] = 0;
  }
}

const char* GCTracer::Event::TypeName(bool short_name) const {
  switch (type) {
    case SCAVENGER:
      return (short_name) ? "s" : "Scavenge";
    case MARK_COMPACTOR:
    case INCREMENTAL_MARK_COMPACTOR:
      return (short_name) ? "ms" : "Mark-sweep";
    case MINOR_MARK_COMPACTOR:
      return (short_name) ? "mmc" : "Minor Mark-Compact";
    case START:
      return (short_name) ? "st" : "Start";
  }
  return "Unknown Event Type";
}

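// The tracer begins with a synthetic START event so that current_ and
// previous_ are always valid to read.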
GCTracer::GCTracer(Heap* heap)
    : heap_(heap),
      current_(Event::START, GarbageCollectionReason::kUnknown, nullptr),
      previous_(current_),
      incremental_marking_bytes_(0),
      incremental_marking_duration_(0.0),
      incremental_marking_start_time_(0.0),
      recorded_incremental_marking_speed_(0.0),
      allocation_time_ms_(0.0),
      new_space_allocation_counter_bytes_(0),
      old_generation_allocation_counter_bytes_(0),
      embedder_allocation_counter_bytes_(0),
      allocation_duration_since_gc_(0.0),
      new_space_allocation_in_bytes_since_gc_(0),
      old_generation_allocation_in_bytes_since_gc_(0),
      embedder_allocation_in_bytes_since_gc_(0),
      combined_mark_compact_speed_cache_(0.0),
      start_counter_(0),
      average_mutator_duration_(0),
      average_mark_compact_duration_(0),
      current_mark_compact_mutator_utilization_(1.0),
      previous_mark_compact_end_time_(0) {
  // All accesses to incremental_marking_scope assume that incremental marking
  // scopes come first.
  STATIC_ASSERT(0 == Scope::FIRST_INCREMENTAL_SCOPE);
  // We assume that MC_INCREMENTAL is the first scope so that we can properly
  // map it to RuntimeCallStats.
  STATIC_ASSERT(0 == Scope::MC_INCREMENTAL);
  current_.end_time = MonotonicallyIncreasingTimeInMs();
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    background_counter_[i].total_duration_ms = 0;
  }
}

void GCTracer::ResetForTesting() {
  current_ = Event(Event::START, GarbageCollectionReason::kTesting, nullptr);
  current_.end_time = MonotonicallyIncreasingTimeInMs();
  previous_ = current_;
  ResetIncrementalMarkingCounters();
  allocation_time_ms_ = 0.0;
  new_space_allocation_counter_bytes_ = 0;
  old_generation_allocation_counter_bytes_ = 0;
  allocation_duration_since_gc_ = 0.0;
  new_space_allocation_in_bytes_since_gc_ = 0;
  old_generation_allocation_in_bytes_since_gc_ = 0;
  combined_mark_compact_speed_cache_ = 0.0;
  recorded_minor_gcs_total_.Reset();
  recorded_minor_gcs_survived_.Reset();
  recorded_compactions_.Reset();
  recorded_mark_compacts_.Reset();
  recorded_incremental_mark_compacts_.Reset();
  recorded_new_generation_allocations_.Reset();
  recorded_old_generation_allocations_.Reset();
  recorded_embedder_generation_allocations_.Reset();
  recorded_survival_ratios_.Reset();
  start_counter_ = 0;
  average_mutator_duration_ = 0;
  average_mark_compact_duration_ = 0;
  current_mark_compact_mutator_utilization_ = 1.0;
  previous_mark_compact_end_time_ = 0;
  base::MutexGuard guard(&background_counter_mutex_);
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    background_counter_[i].total_duration_ms = 0;
  }
}

void GCTracer::NotifyYoungGenerationHandling(
    YoungGenerationHandling young_generation_handling) {
  DCHECK(current_.type == Event::SCAVENGER || start_counter_ > 1);
  heap_->isolate()->counters()->young_generation_handling()->AddSample(
      static_cast<int>(young_generation_handling));
}

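// Start() may be re-entered while a GC is already being traced; only the
// outermost call (start_counter_ == 1) opens a new event.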
void GCTracer::Start(GarbageCollector collector,
                     GarbageCollectionReason gc_reason,
                     const char* collector_reason) {
  start_counter_++;
  if (start_counter_ != 1) return;

  previous_ = current_;

  switch (collector) {
    case GarbageCollector::SCAVENGER:
      current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
      break;
    case GarbageCollector::MINOR_MARK_COMPACTOR:
      current_ =
          Event(Event::MINOR_MARK_COMPACTOR, gc_reason, collector_reason);
      break;
    case GarbageCollector::MARK_COMPACTOR:
      if (heap_->incremental_marking()->WasActivated()) {
        current_ = Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason,
                         collector_reason);
      } else {
        current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);
      }
      break;
  }

  current_.reduce_memory = heap_->ShouldReduceMemory();
  current_.start_time = MonotonicallyIncreasingTimeInMs();
  current_.start_object_size = 0;
  current_.start_memory_size = 0;
  current_.start_holes_size = 0;
  current_.young_object_size = 0;

  current_.incremental_marking_bytes = 0;
  current_.incremental_marking_duration = 0;

  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    current_.scopes[i] = 0;
  }

  Counters* counters = heap_->isolate()->counters();

  if (Heap::IsYoungGenerationCollector(collector)) {
    counters->scavenge_reason()->AddSample(static_cast<int>(gc_reason));
  } else {
    counters->mark_compact_reason()->AddSample(static_cast<int>(gc_reason));

    if (FLAG_trace_gc_freelists) {
      PrintIsolate(heap_->isolate(),
                   "FreeLists statistics before collection:\n");
      heap_->PrintFreeListsStats();
    }
  }
}

void GCTracer::StartInSafepoint() {
  SampleAllocation(current_.start_time, heap_->NewSpaceAllocationCounter(),
                   heap_->OldGenerationAllocationCounter(),
                   heap_->EmbedderAllocationCounter());

  current_.start_object_size = heap_->SizeOfObjects();
  current_.start_memory_size = heap_->memory_allocator()->Size();
  current_.start_holes_size = CountTotalHolesSize(heap_);
  size_t new_space_size = (heap_->new_space() ? heap_->new_space()->Size() : 0);
  size_t new_lo_space_size =
      (heap_->new_lo_space() ? heap_->new_lo_space()->SizeOfObjects() : 0);
  current_.young_object_size = new_space_size + new_lo_space_size;
}

void GCTracer::ResetIncrementalMarkingCounters() {
  incremental_marking_bytes_ = 0;
  incremental_marking_duration_ = 0;
  for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
    incremental_marking_scopes_[i].ResetCurrentCycle();
  }
}

void GCTracer::StopInSafepoint() {
  current_.end_object_size = heap_->SizeOfObjects();
  current_.end_memory_size = heap_->memory_allocator()->Size();
  current_.end_holes_size = CountTotalHolesSize(heap_);
  current_.survived_young_object_size = heap_->SurvivedYoungObjectSize();
}

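// Stop() closes the current event: it records end statistics, feeds the
// speed ring buffers and long-task stats, and emits trace/NVP output
// according to the active tracing flags.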
void GCTracer::Stop(GarbageCollector collector) {
  start_counter_--;
  if (start_counter_ != 0) {
    if (FLAG_trace_gc_verbose) {
      heap_->isolate()->PrintWithTimestamp(
          "[Finished reentrant %s during %s.]\n",
          Heap::CollectorName(collector), current_.TypeName(false));
    }
    return;
  }

  DCHECK_LE(0, start_counter_);
  DCHECK((collector == GarbageCollector::SCAVENGER &&
          current_.type == Event::SCAVENGER) ||
         (collector == GarbageCollector::MINOR_MARK_COMPACTOR &&
          current_.type == Event::MINOR_MARK_COMPACTOR) ||
         (collector == GarbageCollector::MARK_COMPACTOR &&
          (current_.type == Event::MARK_COMPACTOR ||
           current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));

  current_.end_time = MonotonicallyIncreasingTimeInMs();

  AddAllocation(current_.end_time);

  double duration = current_.end_time - current_.start_time;
  int64_t duration_us =
      static_cast<int64_t>(duration * base::Time::kMicrosecondsPerMillisecond);
  auto* long_task_stats = heap_->isolate()->GetCurrentLongTaskStats();

  switch (current_.type) {
    case Event::SCAVENGER:
    case Event::MINOR_MARK_COMPACTOR:
      recorded_minor_gcs_total_.Push(
          MakeBytesAndDuration(current_.young_object_size, duration));
      recorded_minor_gcs_survived_.Push(
          MakeBytesAndDuration(current_.survived_young_object_size, duration));
      FetchBackgroundMinorGCCounters();
      long_task_stats->gc_young_wall_clock_duration_us += duration_us;
      break;
    case Event::INCREMENTAL_MARK_COMPACTOR:
      current_.incremental_marking_bytes = incremental_marking_bytes_;
      current_.incremental_marking_duration = incremental_marking_duration_;
      for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
        current_.incremental_marking_scopes[i] = incremental_marking_scopes_[i];
        current_.scopes[i] = incremental_marking_scopes_[i].duration;
      }

      RecordMutatorUtilization(
          current_.end_time, duration + current_.incremental_marking_duration);
      RecordIncrementalMarkingSpeed(current_.incremental_marking_bytes,
                                    current_.incremental_marking_duration);
      recorded_incremental_mark_compacts_.Push(
          MakeBytesAndDuration(current_.end_object_size, duration));
      RecordGCSumCounters(duration);
      ResetIncrementalMarkingCounters();
      combined_mark_compact_speed_cache_ = 0.0;
      FetchBackgroundMarkCompactCounters();
      long_task_stats->gc_full_atomic_wall_clock_duration_us += duration_us;
      break;
    case Event::MARK_COMPACTOR:
      DCHECK_EQ(0u, current_.incremental_marking_bytes);
      DCHECK_EQ(0, current_.incremental_marking_duration);
      RecordMutatorUtilization(
          current_.end_time, duration + current_.incremental_marking_duration);
      recorded_mark_compacts_.Push(
          MakeBytesAndDuration(current_.end_object_size, duration));
      RecordGCSumCounters(duration);
      ResetIncrementalMarkingCounters();
      combined_mark_compact_speed_cache_ = 0.0;
      FetchBackgroundMarkCompactCounters();
      long_task_stats->gc_full_atomic_wall_clock_duration_us += duration_us;
      break;
    case Event::START:
      UNREACHABLE();
  }
  FetchBackgroundGeneralCounters();

  heap_->UpdateTotalGCTime(duration);

  if ((current_.type == Event::SCAVENGER ||
       current_.type == Event::MINOR_MARK_COMPACTOR) &&
      FLAG_trace_gc_ignore_scavenger)
    return;

  if (FLAG_trace_gc_nvp) {
    PrintNVP();
  } else {
    Print();
  }

  if (FLAG_trace_gc) {
    heap_->PrintShortHeapStatistics();
  }

  if (V8_UNLIKELY(TracingFlags::gc.load(std::memory_order_relaxed) &
                  v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING)) {
    std::stringstream heap_stats;
    heap_->DumpJSONHeapStatistics(heap_stats);

    TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("v8.gc"), "V8.GC_Heap_Stats",
                         TRACE_EVENT_SCOPE_THREAD, "stats",
                         TRACE_STR_COPY(heap_stats.str().c_str()));
  }
}

void GCTracer::NotifySweepingCompleted() {
  if (FLAG_trace_gc_freelists) {
    PrintIsolate(heap_->isolate(),
                 "FreeLists statistics after sweeping completed:\n");
    heap_->PrintFreeListsStats();
  }
  if (FLAG_trace_allocations_origins) {
    heap_->new_space()->PrintAllocationsOrigins();
    heap_->old_space()->PrintAllocationsOrigins();
    heap_->code_space()->PrintAllocationsOrigins();
    heap_->map_space()->PrintAllocationsOrigins();
  }
  metrics_report_pending_ = true;
  NotifyGCCompleted();
}

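// Accumulates the bytes allocated in new space, the old generation, and the
// embedder since the previous GC, based on monotonically increasing
// allocation counters.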
void GCTracer::SampleAllocation(double current_ms,
                                size_t new_space_counter_bytes,
                                size_t old_generation_counter_bytes,
                                size_t embedder_counter_bytes) {
  if (allocation_time_ms_ == 0) {
    // It is the first sample.
    allocation_time_ms_ = current_ms;
    new_space_allocation_counter_bytes_ = new_space_counter_bytes;
    old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
    embedder_allocation_counter_bytes_ = embedder_counter_bytes;
    return;
  }
  // This assumes that counters are unsigned integers so that the subtraction
  // below works even if the new counter is less than the old counter.
  size_t new_space_allocated_bytes =
      new_space_counter_bytes - new_space_allocation_counter_bytes_;
  size_t old_generation_allocated_bytes =
      old_generation_counter_bytes - old_generation_allocation_counter_bytes_;
  size_t embedder_allocated_bytes =
      embedder_counter_bytes - embedder_allocation_counter_bytes_;
  double duration = current_ms - allocation_time_ms_;
  allocation_time_ms_ = current_ms;
  new_space_allocation_counter_bytes_ = new_space_counter_bytes;
  old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
  embedder_allocation_counter_bytes_ = embedder_counter_bytes;
  allocation_duration_since_gc_ += duration;
  new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes;
  old_generation_allocation_in_bytes_since_gc_ +=
      old_generation_allocated_bytes;
  embedder_allocation_in_bytes_since_gc_ += embedder_allocated_bytes;
}

void GCTracer::AddAllocation(double current_ms) {
  allocation_time_ms_ = current_ms;
  if (allocation_duration_since_gc_ > 0) {
    recorded_new_generation_allocations_.Push(
        MakeBytesAndDuration(new_space_allocation_in_bytes_since_gc_,
                             allocation_duration_since_gc_));
    recorded_old_generation_allocations_.Push(
        MakeBytesAndDuration(old_generation_allocation_in_bytes_since_gc_,
                             allocation_duration_since_gc_));
    recorded_embedder_generation_allocations_.Push(MakeBytesAndDuration(
        embedder_allocation_in_bytes_since_gc_, allocation_duration_since_gc_));
  }
  allocation_duration_since_gc_ = 0;
  new_space_allocation_in_bytes_since_gc_ = 0;
  old_generation_allocation_in_bytes_since_gc_ = 0;
  embedder_allocation_in_bytes_since_gc_ = 0;
}

void GCTracer::AddCompactionEvent(double duration,
                                  size_t live_bytes_compacted) {
  recorded_compactions_.Push(
      MakeBytesAndDuration(live_bytes_compacted, duration));
}


void GCTracer::AddSurvivalRatio(double promotion_ratio) {
  recorded_survival_ratios_.Push(promotion_ratio);
}

void GCTracer::AddIncrementalMarkingStep(double duration, size_t bytes) {
  if (bytes > 0) {
    incremental_marking_bytes_ += bytes;
    incremental_marking_duration_ += duration;
  }
  ReportIncrementalMarkingStepToRecorder();
}

void GCTracer::Output(const char* format, ...) const {
  if (FLAG_trace_gc) {
    va_list arguments;
    va_start(arguments, format);
    base::OS::VPrint(format, arguments);
    va_end(arguments);
  }

  const int kBufferSize = 256;
  char raw_buffer[kBufferSize];
  base::Vector<char> buffer(raw_buffer, kBufferSize);
  va_list arguments2;
  va_start(arguments2, format);
  base::VSNPrintF(buffer, format, arguments2);
  va_end(arguments2);

  heap_->AddToRingBuffer(buffer.begin());
}

void GCTracer::Print() const {
  double duration = current_.end_time - current_.start_time;
  const size_t kIncrementalStatsSize = 128;
  char incremental_buffer[kIncrementalStatsSize] = {0};

  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    base::OS::SNPrintF(
        incremental_buffer, kIncrementalStatsSize,
        " (+ %.1f ms in %d steps since start of marking, "
        "biggest step %.1f ms, walltime since start of marking %.f ms)",
        current_.scopes[Scope::MC_INCREMENTAL],
        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].longest_step,
        current_.end_time - incremental_marking_start_time_);
  }

  // Avoid PrintF as Output also appends the string to the tracing ring buffer
  // that gets printed on OOM failures.
  Output(
      "[%d:%p] "
      "%8.0f ms: "
      "%s%s %.1f (%.1f) -> %.1f (%.1f) MB, "
      "%.1f / %.1f ms %s (average mu = %.3f, current mu = %.3f) %s %s\n",
      base::OS::GetCurrentProcessId(),
      reinterpret_cast<void*>(heap_->isolate()),
      heap_->isolate()->time_millis_since_init(), current_.TypeName(false),
      current_.reduce_memory ? " (reduce)" : "",
      static_cast<double>(current_.start_object_size) / MB,
      static_cast<double>(current_.start_memory_size) / MB,
      static_cast<double>(current_.end_object_size) / MB,
      static_cast<double>(current_.end_memory_size) / MB, duration,
      TotalExternalTime(), incremental_buffer,
      AverageMarkCompactMutatorUtilization(),
      CurrentMarkCompactMutatorUtilization(),
      Heap::GarbageCollectionReasonToString(current_.gc_reason),
      current_.collector_reason != nullptr ? current_.collector_reason : "");
}


void GCTracer::PrintNVP() const {
  double duration = current_.end_time - current_.start_time;
  double spent_in_mutator = current_.start_time - previous_.end_time;
  size_t allocated_since_last_gc =
      current_.start_object_size - previous_.end_object_size;

  double incremental_walltime_duration = 0;

  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    incremental_walltime_duration =
        current_.end_time - incremental_marking_start_time_;
  }

  switch (current_.type) {
    case Event::SCAVENGER:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "time_to_safepoint=%.2f "
          "heap.prologue=%.2f "
          "heap.epilogue=%.2f "
          "heap.epilogue.reduce_new_space=%.2f "
          "heap.external.prologue=%.2f "
          "heap.external.epilogue=%.2f "
          "heap.external_weak_global_handles=%.2f "
          "fast_promote=%.2f "
          "complete.sweep_array_buffers=%.2f "
          "scavenge=%.2f "
          "scavenge.free_remembered_set=%.2f "
          "scavenge.roots=%.2f "
          "scavenge.weak=%.2f "
          "scavenge.weak_global_handles.identify=%.2f "
          "scavenge.weak_global_handles.process=%.2f "
          "scavenge.parallel=%.2f "
          "scavenge.update_refs=%.2f "
          "scavenge.sweep_array_buffers=%.2f "
          "background.scavenge.parallel=%.2f "
          "background.unmapper=%.2f "
          "unmapper=%.2f "
          "incremental.steps_count=%d "
          "incremental.steps_took=%.1f "
          "scavenge_throughput=%.f "
          "total_size_before=%zu "
          "total_size_after=%zu "
          "holes_size_before=%zu "
          "holes_size_after=%zu "
          "allocated=%zu "
          "promoted=%zu "
          "semi_space_copied=%zu "
          "nodes_died_in_new=%d "
          "nodes_copied_in_new=%d "
          "nodes_promoted=%d "
          "promotion_ratio=%.1f%% "
          "average_survival_ratio=%.1f%% "
          "promotion_rate=%.1f%% "
          "semi_space_copy_rate=%.1f%% "
          "new_space_allocation_throughput=%.1f "
          "unmapper_chunks=%d\n",
          duration, spent_in_mutator, current_.TypeName(true),
          current_.reduce_memory, current_.scopes[Scope::TIME_TO_SAFEPOINT],
          current_.scopes[Scope::HEAP_PROLOGUE],
          current_.scopes[Scope::HEAP_EPILOGUE],
          current_.scopes[Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE],
          current_.scopes[Scope::HEAP_EXTERNAL_PROLOGUE],
          current_.scopes[Scope::HEAP_EXTERNAL_EPILOGUE],
          current_.scopes[Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES],
          current_.scopes[Scope::SCAVENGER_FAST_PROMOTE],
          current_.scopes[Scope::SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS],
          current_.scopes[Scope::SCAVENGER_SCAVENGE],
          current_.scopes[Scope::SCAVENGER_FREE_REMEMBERED_SET],
          current_.scopes[Scope::SCAVENGER_SCAVENGE_ROOTS],
          current_.scopes[Scope::SCAVENGER_SCAVENGE_WEAK],
          current_
              .scopes[Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_IDENTIFY],
          current_
              .scopes[Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_PROCESS],
          current_.scopes[Scope::SCAVENGER_SCAVENGE_PARALLEL],
          current_.scopes[Scope::SCAVENGER_SCAVENGE_UPDATE_REFS],
          current_.scopes[Scope::SCAVENGER_SWEEP_ARRAY_BUFFERS],
          current_.scopes[Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL],
          current_.scopes[Scope::BACKGROUND_UNMAPPER],
          current_.scopes[Scope::UNMAPPER],
          current_.incremental_marking_scopes[GCTracer::Scope::MC_INCREMENTAL]
              .steps,
          current_.scopes[Scope::MC_INCREMENTAL],
          ScavengeSpeedInBytesPerMillisecond(), current_.start_object_size,
          current_.end_object_size, current_.start_holes_size,
          current_.end_holes_size, allocated_since_last_gc,
          heap_->promoted_objects_size(),
          heap_->semi_space_copied_object_size(),
          heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
          heap_->nodes_promoted_, heap_->promotion_ratio_,
          AverageSurvivalRatio(), heap_->promotion_rate_,
          heap_->semi_space_copied_rate_,
          NewSpaceAllocationThroughputInBytesPerMillisecond(),
          heap_->memory_allocator()->unmapper()->NumberOfChunks());
      break;
    case Event::MINOR_MARK_COMPACTOR:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "minor_mc=%.2f "
          "finish_sweeping=%.2f "
          "time_to_safepoint=%.2f "
          "mark=%.2f "
          "mark.seed=%.2f "
          "mark.roots=%.2f "
          "mark.weak=%.2f "
          "mark.global_handles=%.2f "
          "clear=%.2f "
          "clear.string_table=%.2f "
          "clear.weak_lists=%.2f "
          "evacuate=%.2f "
          "evacuate.copy=%.2f "
          "evacuate.update_pointers=%.2f "
          "evacuate.update_pointers.to_new_roots=%.2f "
          "evacuate.update_pointers.slots=%.2f "
          "background.mark=%.2f "
          "background.evacuate.copy=%.2f "
          "background.evacuate.update_pointers=%.2f "
          "background.unmapper=%.2f "
          "unmapper=%.2f "
          "update_marking_deque=%.2f "
          "reset_liveness=%.2f\n",
          duration, spent_in_mutator, "mmc", current_.reduce_memory,
          current_.scopes[Scope::MINOR_MC],
          current_.scopes[Scope::MINOR_MC_SWEEPING],
          current_.scopes[Scope::TIME_TO_SAFEPOINT],
          current_.scopes[Scope::MINOR_MC_MARK],
          current_.scopes[Scope::MINOR_MC_MARK_SEED],
          current_.scopes[Scope::MINOR_MC_MARK_ROOTS],
          current_.scopes[Scope::MINOR_MC_MARK_WEAK],
          current_.scopes[Scope::MINOR_MC_MARK_GLOBAL_HANDLES],
          current_.scopes[Scope::MINOR_MC_CLEAR],
          current_.scopes[Scope::MINOR_MC_CLEAR_STRING_TABLE],
          current_.scopes[Scope::MINOR_MC_CLEAR_WEAK_LISTS],
          current_.scopes[Scope::MINOR_MC_EVACUATE],
          current_.scopes[Scope::MINOR_MC_EVACUATE_COPY],
          current_.scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS],
          current_
              .scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS],
          current_.scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS],
          current_.scopes[Scope::MINOR_MC_BACKGROUND_MARKING],
          current_.scopes[Scope::MINOR_MC_BACKGROUND_EVACUATE_COPY],
          current_.scopes[Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS],
          current_.scopes[Scope::BACKGROUND_UNMAPPER],
          current_.scopes[Scope::UNMAPPER],
          current_.scopes[Scope::MINOR_MC_MARKING_DEQUE],
          current_.scopes[Scope::MINOR_MC_RESET_LIVENESS]);
      break;
    case Event::MARK_COMPACTOR:
    case Event::INCREMENTAL_MARK_COMPACTOR:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "time_to_safepoint=%.2f "
          "heap.prologue=%.2f "
          "heap.embedder_tracing_epilogue=%.2f "
          "heap.epilogue=%.2f "
          "heap.epilogue.reduce_new_space=%.2f "
          "heap.external.prologue=%.1f "
          "heap.external.epilogue=%.1f "
          "heap.external.weak_global_handles=%.1f "
          "clear=%.1f "
          "clear.dependent_code=%.1f "
          "clear.maps=%.1f "
          "clear.slots_buffer=%.1f "
          "clear.string_table=%.1f "
          "clear.weak_collections=%.1f "
          "clear.weak_lists=%.1f "
          "clear.weak_references=%.1f "
          "complete.sweep_array_buffers=%.1f "
          "epilogue=%.1f "
          "evacuate=%.1f "
          "evacuate.candidates=%.1f "
          "evacuate.clean_up=%.1f "
          "evacuate.copy=%.1f "
          "evacuate.prologue=%.1f "
          "evacuate.epilogue=%.1f "
          "evacuate.rebalance=%.1f "
          "evacuate.update_pointers=%.1f "
          "evacuate.update_pointers.to_new_roots=%.1f "
          "evacuate.update_pointers.slots.main=%.1f "
          "evacuate.update_pointers.weak=%.1f "
          "finish=%.1f "
          "finish.sweep_array_buffers=%.1f "
          "mark=%.1f "
          "mark.finish_incremental=%.1f "
          "mark.roots=%.1f "
          "mark.main=%.1f "
          "mark.weak_closure=%.1f "
          "mark.weak_closure.ephemeron=%.1f "
          "mark.weak_closure.ephemeron.marking=%.1f "
          "mark.weak_closure.ephemeron.linear=%.1f "
          "mark.weak_closure.weak_handles=%.1f "
          "mark.weak_closure.weak_roots=%.1f "
          "mark.weak_closure.harmony=%.1f "
          "mark.embedder_prologue=%.1f "
          "mark.embedder_tracing=%.1f "
          "prologue=%.1f "
          "sweep=%.1f "
          "sweep.code=%.1f "
          "sweep.map=%.1f "
          "sweep.old=%.1f "
          "incremental=%.1f "
          "incremental.finalize=%.1f "
          "incremental.finalize.body=%.1f "
          "incremental.finalize.external.prologue=%.1f "
          "incremental.finalize.external.epilogue=%.1f "
          "incremental.layout_change=%.1f "
          "incremental.sweep_array_buffers=%.1f "
          "incremental.sweeping=%.1f "
          "incremental.embedder_prologue=%.1f "
          "incremental.embedder_tracing=%.1f "
          "incremental_wrapper_tracing_longest_step=%.1f "
          "incremental_finalize_longest_step=%.1f "
          "incremental_finalize_steps_count=%d "
          "incremental_longest_step=%.1f "
          "incremental_steps_count=%d "
          "incremental_marking_throughput=%.f "
          "incremental_walltime_duration=%.f "
          "background.mark=%.1f "
          "background.sweep=%.1f "
          "background.evacuate.copy=%.1f "
          "background.evacuate.update_pointers=%.1f "
          "background.unmapper=%.1f "
          "unmapper=%.1f "
          "total_size_before=%zu "
          "total_size_after=%zu "
          "holes_size_before=%zu "
          "holes_size_after=%zu "
          "allocated=%zu "
          "promoted=%zu "
          "semi_space_copied=%zu "
          "nodes_died_in_new=%d "
          "nodes_copied_in_new=%d "
          "nodes_promoted=%d "
          "promotion_ratio=%.1f%% "
          "average_survival_ratio=%.1f%% "
          "promotion_rate=%.1f%% "
          "semi_space_copy_rate=%.1f%% "
          "new_space_allocation_throughput=%.1f "
          "unmapper_chunks=%d "
          "compaction_speed=%.f\n",
          duration, spent_in_mutator, current_.TypeName(true),
          current_.reduce_memory, current_.scopes[Scope::TIME_TO_SAFEPOINT],
          current_.scopes[Scope::HEAP_PROLOGUE],
          current_.scopes[Scope::HEAP_EMBEDDER_TRACING_EPILOGUE],
          current_.scopes[Scope::HEAP_EPILOGUE],
          current_.scopes[Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE],
          current_.scopes[Scope::HEAP_EXTERNAL_PROLOGUE],
          current_.scopes[Scope::HEAP_EXTERNAL_EPILOGUE],
          current_.scopes[Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES],
          current_.scopes[Scope::MC_CLEAR],
          current_.scopes[Scope::MC_CLEAR_DEPENDENT_CODE],
          current_.scopes[Scope::MC_CLEAR_MAPS],
          current_.scopes[Scope::MC_CLEAR_SLOTS_BUFFER],
          current_.scopes[Scope::MC_CLEAR_STRING_TABLE],
          current_.scopes[Scope::MC_CLEAR_WEAK_COLLECTIONS],
          current_.scopes[Scope::MC_CLEAR_WEAK_LISTS],
          current_.scopes[Scope::MC_CLEAR_WEAK_REFERENCES],
          current_.scopes[Scope::MC_COMPLETE_SWEEP_ARRAY_BUFFERS],
          current_.scopes[Scope::MC_EPILOGUE],
          current_.scopes[Scope::MC_EVACUATE],
          current_.scopes[Scope::MC_EVACUATE_CANDIDATES],
          current_.scopes[Scope::MC_EVACUATE_CLEAN_UP],
          current_.scopes[Scope::MC_EVACUATE_COPY],
          current_.scopes[Scope::MC_EVACUATE_PROLOGUE],
          current_.scopes[Scope::MC_EVACUATE_EPILOGUE],
          current_.scopes[Scope::MC_EVACUATE_REBALANCE],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAIN],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK],
          current_.scopes[Scope::MC_FINISH],
          current_.scopes[Scope::MC_FINISH_SWEEP_ARRAY_BUFFERS],
          current_.scopes[Scope::MC_MARK],
          current_.scopes[Scope::MC_MARK_FINISH_INCREMENTAL],
          current_.scopes[Scope::MC_MARK_ROOTS],
          current_.scopes[Scope::MC_MARK_MAIN],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_MARKING],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_LINEAR],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_HARMONY],
          current_.scopes[Scope::MC_MARK_EMBEDDER_PROLOGUE],
          current_.scopes[Scope::MC_MARK_EMBEDDER_TRACING],
          current_.scopes[Scope::MC_PROLOGUE], current_.scopes[Scope::MC_SWEEP],
          current_.scopes[Scope::MC_SWEEP_CODE],
          current_.scopes[Scope::MC_SWEEP_MAP],
          current_.scopes[Scope::MC_SWEEP_OLD],
          current_.scopes[Scope::MC_INCREMENTAL],
          current_.scopes[Scope::MC_INCREMENTAL_FINALIZE],
          current_.scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY],
          current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_LAYOUT_CHANGE],
          current_.scopes[Scope::MC_INCREMENTAL_START],
          current_.scopes[Scope::MC_INCREMENTAL_SWEEPING],
          current_.scopes[Scope::MC_INCREMENTAL_EMBEDDER_PROLOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_EMBEDDER_TRACING],
          current_
              .incremental_marking_scopes
                  [Scope::MC_INCREMENTAL_EMBEDDER_TRACING]
              .longest_step,
          current_
              .incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY]
              .longest_step,
          current_
              .incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY]
              .steps,
          current_.incremental_marking_scopes[Scope::MC_INCREMENTAL]
              .longest_step,
          current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
          IncrementalMarkingSpeedInBytesPerMillisecond(),
          incremental_walltime_duration,
          current_.scopes[Scope::MC_BACKGROUND_MARKING],
          current_.scopes[Scope::MC_BACKGROUND_SWEEPING],
          current_.scopes[Scope::MC_BACKGROUND_EVACUATE_COPY],
          current_.scopes[Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS],
          current_.scopes[Scope::BACKGROUND_UNMAPPER],
          current_.scopes[Scope::UNMAPPER], current_.start_object_size,
          current_.end_object_size, current_.start_holes_size,
          current_.end_holes_size, allocated_since_last_gc,
          heap_->promoted_objects_size(),
          heap_->semi_space_copied_object_size(),
          heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
          heap_->nodes_promoted_, heap_->promotion_ratio_,
          AverageSurvivalRatio(), heap_->promotion_rate_,
          heap_->semi_space_copied_rate_,
          NewSpaceAllocationThroughputInBytesPerMillisecond(),
          heap_->memory_allocator()->unmapper()->NumberOfChunks(),
          CompactionSpeedInBytesPerMillisecond());
      break;
    case Event::START:
      break;
    default:
      UNREACHABLE();
  }
}

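// Averages the speed (bytes/ms) over the buffered samples, accumulating
// samples until at least time_ms worth of duration is covered (time_ms == 0
// means all samples). The result is clamped to [1, 1024 * MB] to keep
// outliers from producing nonsensical estimates.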
double GCTracer::AverageSpeed(const base::RingBuffer<BytesAndDuration>& buffer,
                              const BytesAndDuration& initial, double time_ms) {
  BytesAndDuration sum = buffer.Sum(
      [time_ms](BytesAndDuration a, BytesAndDuration b) {
        if (time_ms != 0 && a.second >= time_ms) return a;
        return std::make_pair(a.first + b.first, a.second + b.second);
      },
      initial);
  uint64_t bytes = sum.first;
  double durations = sum.second;
  if (durations == 0.0) return 0;
  double speed = bytes / durations;
  const int max_speed = 1024 * MB;
  const int min_speed = 1;
  if (speed >= max_speed) return max_speed;
  if (speed <= min_speed) return min_speed;
  return speed;
}

double GCTracer::AverageSpeed(
    const base::RingBuffer<BytesAndDuration>& buffer) {
  return AverageSpeed(buffer, MakeBytesAndDuration(0, 0), 0);
}

void GCTracer::RecordIncrementalMarkingSpeed(size_t bytes, double duration) {
  if (duration == 0 || bytes == 0) return;
  double current_speed = bytes / duration;
  if (recorded_incremental_marking_speed_ == 0) {
    recorded_incremental_marking_speed_ = current_speed;
  } else {
    recorded_incremental_marking_speed_ =
        (recorded_incremental_marking_speed_ + current_speed) / 2;
  }
}

void GCTracer::RecordTimeToIncrementalMarkingTask(double time_to_task) {
  if (average_time_to_incremental_marking_task_ == 0.0) {
    average_time_to_incremental_marking_task_ = time_to_task;
  } else {
    average_time_to_incremental_marking_task_ =
        (average_time_to_incremental_marking_task_ + time_to_task) / 2;
  }
}

double GCTracer::AverageTimeToIncrementalMarkingTask() const {
  return average_time_to_incremental_marking_task_;
}

void GCTracer::RecordEmbedderSpeed(size_t bytes, double duration) {
  if (duration == 0 || bytes == 0) return;
  double current_speed = bytes / duration;
  if (recorded_embedder_speed_ == 0.0) {
    recorded_embedder_speed_ = current_speed;
  } else {
    recorded_embedder_speed_ = (recorded_embedder_speed_ + current_speed) / 2;
  }
}

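// Tracks how much wall-clock time the mutator gets between consecutive
// mark-compact events. Averages are exponentially smoothed with factor 1/2:
// new_average = (old_average + sample) / 2.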
void GCTracer::RecordMutatorUtilization(double mark_compact_end_time,
                                        double mark_compact_duration) {
  if (previous_mark_compact_end_time_ == 0) {
    // The first event only contributes to previous_mark_compact_end_time_,
    // because we cannot compute the mutator duration.
    previous_mark_compact_end_time_ = mark_compact_end_time;
  } else {
    double total_duration =
        mark_compact_end_time - previous_mark_compact_end_time_;
    double mutator_duration = total_duration - mark_compact_duration;
    if (average_mark_compact_duration_ == 0 && average_mutator_duration_ == 0) {
      // This is the first event with mutator and mark-compact durations.
      average_mark_compact_duration_ = mark_compact_duration;
      average_mutator_duration_ = mutator_duration;
    } else {
      average_mark_compact_duration_ =
          (average_mark_compact_duration_ + mark_compact_duration) / 2;
      average_mutator_duration_ =
          (average_mutator_duration_ + mutator_duration) / 2;
    }
    current_mark_compact_mutator_utilization_ =
        total_duration ? mutator_duration / total_duration : 0;
    previous_mark_compact_end_time_ = mark_compact_end_time;
  }
}

double GCTracer::AverageMarkCompactMutatorUtilization() const {
  double average_total_duration =
      average_mark_compact_duration_ + average_mutator_duration_;
  if (average_total_duration == 0) return 1.0;
  return average_mutator_duration_ / average_total_duration;
}

double GCTracer::CurrentMarkCompactMutatorUtilization() const {
  return current_mark_compact_mutator_utilization_;
}

double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
  if (recorded_incremental_marking_speed_ != 0) {
    return recorded_incremental_marking_speed_;
  }
  if (incremental_marking_duration_ != 0.0) {
    return incremental_marking_bytes_ / incremental_marking_duration_;
  }
  return kConservativeSpeedInBytesPerMillisecond;
}

double GCTracer::EmbedderSpeedInBytesPerMillisecond() const {
  // Note: Returning 0 is ok here as callers check for whether embedder speeds
  // have been recorded at all.
  return recorded_embedder_speed_;
}

double GCTracer::ScavengeSpeedInBytesPerMillisecond(
    ScavengeSpeedMode mode) const {
  if (mode == kForAllObjects) {
    return AverageSpeed(recorded_minor_gcs_total_);
  } else {
    return AverageSpeed(recorded_minor_gcs_survived_);
  }
}

double GCTracer::CompactionSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_compactions_);
}

double GCTracer::MarkCompactSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_mark_compacts_);
}

double GCTracer::FinalIncrementalMarkCompactSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_incremental_mark_compacts_);
}

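// Two sequential phases with speeds speed1 and speed2 combine harmonically:
// processing a byte takes 1/speed1 + 1/speed2 ms, giving
// speed1 * speed2 / (speed1 + speed2). E.g. 400 and 100 bytes/ms combine to
// 80 bytes/ms.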
double GCTracer::CombinedMarkCompactSpeedInBytesPerMillisecond() {
  const double kMinimumMarkingSpeed = 0.5;
  if (combined_mark_compact_speed_cache_ > 0)
    return combined_mark_compact_speed_cache_;
  // MarkCompact speed is more stable than incremental marking speed, because
  // there might not be many incremental marking steps because of concurrent
  // marking.
  combined_mark_compact_speed_cache_ = MarkCompactSpeedInBytesPerMillisecond();
  if (combined_mark_compact_speed_cache_ > 0)
    return combined_mark_compact_speed_cache_;
  double speed1 = IncrementalMarkingSpeedInBytesPerMillisecond();
  double speed2 = FinalIncrementalMarkCompactSpeedInBytesPerMillisecond();
  if (speed1 < kMinimumMarkingSpeed || speed2 < kMinimumMarkingSpeed) {
    // No data for the incremental marking speed.
    // Return the non-incremental mark-compact speed.
    combined_mark_compact_speed_cache_ =
        MarkCompactSpeedInBytesPerMillisecond();
  } else {
    // Combine the speed of incremental step and the speed of the final step.
    // 1 / (1 / speed1 + 1 / speed2) = speed1 * speed2 / (speed1 + speed2).
    combined_mark_compact_speed_cache_ = speed1 * speed2 / (speed1 + speed2);
  }
  return combined_mark_compact_speed_cache_;
}

double GCTracer::CombineSpeedsInBytesPerMillisecond(double default_speed,
                                                    double optional_speed) {
  constexpr double kMinimumSpeed = 0.5;
  if (optional_speed < kMinimumSpeed) {
    return default_speed;
  }
  return default_speed * optional_speed / (default_speed + optional_speed);
}

double GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = new_space_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  return AverageSpeed(recorded_new_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
}

double GCTracer::OldGenerationAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = old_generation_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  return AverageSpeed(recorded_old_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
}

double GCTracer::EmbedderAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = embedder_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  return AverageSpeed(recorded_embedder_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
}

double GCTracer::AllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  return NewSpaceAllocationThroughputInBytesPerMillisecond(time_ms) +
         OldGenerationAllocationThroughputInBytesPerMillisecond(time_ms);
}

double GCTracer::CurrentAllocationThroughputInBytesPerMillisecond() const {
  return AllocationThroughputInBytesPerMillisecond(kThroughputTimeFrameMs);
}

double GCTracer::CurrentOldGenerationAllocationThroughputInBytesPerMillisecond()
    const {
  return OldGenerationAllocationThroughputInBytesPerMillisecond(
      kThroughputTimeFrameMs);
}

double GCTracer::CurrentEmbedderAllocationThroughputInBytesPerMillisecond()
    const {
  return EmbedderAllocationThroughputInBytesPerMillisecond(
      kThroughputTimeFrameMs);
}

double GCTracer::AverageSurvivalRatio() const {
  if (recorded_survival_ratios_.Count() == 0) return 0.0;
  double sum = recorded_survival_ratios_.Sum(
      [](double a, double b) { return a + b; }, 0.0);
  return sum / recorded_survival_ratios_.Count();
}

bool GCTracer::SurvivalEventsRecorded() const {
  return recorded_survival_ratios_.Count() > 0;
}

void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); }

void GCTracer::NotifyIncrementalMarkingStart() {
  incremental_marking_start_time_ = MonotonicallyIncreasingTimeInMs();
}

void GCTracer::FetchBackgroundMarkCompactCounters() {
  FetchBackgroundCounters(Scope::FIRST_MC_BACKGROUND_SCOPE,
                          Scope::LAST_MC_BACKGROUND_SCOPE);
  heap_->isolate()->counters()->background_marking()->AddSample(
      static_cast<int>(current_.scopes[Scope::MC_BACKGROUND_MARKING]));
  heap_->isolate()->counters()->background_sweeping()->AddSample(
      static_cast<int>(current_.scopes[Scope::MC_BACKGROUND_SWEEPING]));
}

void GCTracer::FetchBackgroundMinorGCCounters() {
  FetchBackgroundCounters(Scope::FIRST_MINOR_GC_BACKGROUND_SCOPE,
                          Scope::LAST_MINOR_GC_BACKGROUND_SCOPE);
  heap_->isolate()->counters()->background_scavenger()->AddSample(
      static_cast<int>(
          current_.scopes[Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL]));
}

void GCTracer::FetchBackgroundGeneralCounters() {
  FetchBackgroundCounters(Scope::FIRST_GENERAL_BACKGROUND_SCOPE,
                          Scope::LAST_GENERAL_BACKGROUND_SCOPE);
}

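// Background threads accumulate their scope samples under a mutex; the main
// thread later folds them into the current event's scopes and resets the
// per-scope totals.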
FetchBackgroundCounters(int first_scope,int last_scope)1183 void GCTracer::FetchBackgroundCounters(int first_scope, int last_scope) {
1184   base::MutexGuard guard(&background_counter_mutex_);
1185   for (int i = first_scope; i <= last_scope; i++) {
1186     current_.scopes[i] += background_counter_[i].total_duration_ms;
1187     background_counter_[i].total_duration_ms = 0;
1188   }
1189 }
1190 
AddScopeSampleBackground(Scope::ScopeId scope,double duration)1191 void GCTracer::AddScopeSampleBackground(Scope::ScopeId scope, double duration) {
1192   base::MutexGuard guard(&background_counter_mutex_);
1193   BackgroundCounter& counter = background_counter_[scope];
1194   counter.total_duration_ms += duration;
1195 }
1196 
void GCTracer::RecordGCPhasesHistograms(TimedHistogram* gc_timer) {
  Counters* counters = heap_->isolate()->counters();
  if (gc_timer == counters->gc_finalize()) {
    DCHECK_EQ(Scope::FIRST_TOP_MC_SCOPE, Scope::MC_CLEAR);
    counters->gc_finalize_clear()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_CLEAR]));
    counters->gc_finalize_epilogue()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_EPILOGUE]));
    counters->gc_finalize_evacuate()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_EVACUATE]));
    counters->gc_finalize_finish()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_FINISH]));
    counters->gc_finalize_mark()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_MARK]));
    counters->gc_finalize_prologue()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_PROLOGUE]));
    counters->gc_finalize_sweep()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_SWEEP]));
    if (incremental_marking_duration_ > 0) {
      heap_->isolate()->counters()->incremental_marking_sum()->AddSample(
          static_cast<int>(incremental_marking_duration_));
    }
    const double overall_marking_time =
        incremental_marking_duration_ + current_.scopes[Scope::MC_MARK];
    heap_->isolate()->counters()->gc_marking_sum()->AddSample(
        static_cast<int>(overall_marking_time));

    // Filter out samples where
    // - we don't have high-resolution timers;
    // - the size of marked objects is very small;
    // - the marking time was rounded down to 0.
    constexpr size_t kMinObjectSizeForReportingThroughput = 1024 * 1024;
    if (base::TimeTicks::IsHighResolution() &&
        heap_->SizeOfObjects() > kMinObjectSizeForReportingThroughput &&
        overall_marking_time > 0) {
      const double overall_v8_marking_time =
          overall_marking_time -
          current_.scopes[Scope::MC_MARK_EMBEDDER_TRACING];
      if (overall_v8_marking_time > 0) {
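        // SizeOfObjects() is in bytes and overall_v8_marking_time in ms:
        // bytes/ms * 1000 gives bytes/s, divided by 1024 * 1024 for MB/s.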
        const int main_thread_marking_throughput_mb_per_s =
            static_cast<int>(static_cast<double>(heap_->SizeOfObjects()) /
                             overall_v8_marking_time * 1000 / 1024 / 1024);
        heap_->isolate()
            ->counters()
            ->gc_main_thread_marking_throughput()
            ->AddSample(main_thread_marking_throughput_mb_per_s);
      }
    }

    DCHECK_EQ(Scope::LAST_TOP_MC_SCOPE, Scope::MC_SWEEP);
  } else if (gc_timer == counters->gc_scavenger()) {
    counters->gc_scavenger_scavenge_main()->AddSample(
        static_cast<int>(current_.scopes[Scope::SCAVENGER_SCAVENGE_PARALLEL]));
    counters->gc_scavenger_scavenge_roots()->AddSample(
        static_cast<int>(current_.scopes[Scope::SCAVENGER_SCAVENGE_ROOTS]));
  }
}

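// Aggregates the whole mark-compact cycle: the incremental marking scopes
// plus the atomic pause on the main thread, and the corresponding
// background-thread work, reported both as a UMA sample and as trace event
// counters.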
void GCTracer::RecordGCSumCounters(double atomic_pause_duration) {
  base::MutexGuard guard(&background_counter_mutex_);

  const double overall_duration =
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_LAYOUT_CHANGE]
          .duration +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_START]
          .duration +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_SWEEPING]
          .duration +
      incremental_marking_duration_ +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE]
          .duration +
      atomic_pause_duration;
  const double background_duration =
      background_counter_[Scope::MC_BACKGROUND_EVACUATE_COPY]
          .total_duration_ms +
      background_counter_[Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS]
          .total_duration_ms +
      background_counter_[Scope::MC_BACKGROUND_MARKING].total_duration_ms +
      background_counter_[Scope::MC_BACKGROUND_SWEEPING].total_duration_ms;

  const double marking_duration =
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_LAYOUT_CHANGE]
          .duration +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_START]
          .duration +
      incremental_marking_duration_ +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE]
          .duration +
      current_.scopes[Scope::MC_MARK];
  const double marking_background_duration =
      background_counter_[Scope::MC_BACKGROUND_MARKING].total_duration_ms;

  // UMA.
  heap_->isolate()->counters()->gc_mark_compactor()->AddSample(
      static_cast<int>(overall_duration));

  // Emit trace event counters.
  TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
                       "V8.GCMarkCompactorSummary", TRACE_EVENT_SCOPE_THREAD,
                       "duration", overall_duration, "background_duration",
                       background_duration);
  TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
                       "V8.GCMarkCompactorMarkingSummary",
                       TRACE_EVENT_SCOPE_THREAD, "duration", marking_duration,
                       "background_duration", marking_background_duration);
}

void GCTracer::NotifyGCCompleted() {
  // Report the full GC cycle metric to the recorder only when both the V8 and
  // cppgc (if available) GCs have finished. This method is invoked by both V8
  // and cppgc.
  if (!metrics_report_pending_) {
    // V8 sweeping is not done yet.
    return;
  }
  const auto* cpp_heap = heap_->cpp_heap();
  if (cpp_heap &&
      !CppHeap::From(cpp_heap)->GetMetricRecorder()->MetricsReportPending()) {
    // cppgc sweeping is not done yet.
    return;
  }
  ReportFullCycleToRecorder();
}

namespace {

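// Helpers for translating cppgc's MetricRecorder data into the public
// v8::metrics event structs. cppgc reports -1 for values that were never
// recorded, hence the DCHECKs below.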
void CopyTimeMetrics(
    ::v8::metrics::GarbageCollectionPhases& metrics,
    const cppgc::internal::MetricRecorder::FullCycle::IncrementalPhases&
        cppgc_metrics) {
  DCHECK_NE(-1, cppgc_metrics.mark_duration_us);
  metrics.mark_wall_clock_duration_in_us = cppgc_metrics.mark_duration_us;
  DCHECK_NE(-1, cppgc_metrics.sweep_duration_us);
  metrics.sweep_wall_clock_duration_in_us = cppgc_metrics.sweep_duration_us;
}

void CopyTimeMetrics(
    ::v8::metrics::GarbageCollectionPhases& metrics,
    const cppgc::internal::MetricRecorder::FullCycle::Phases& cppgc_metrics) {
  DCHECK_NE(-1, cppgc_metrics.compact_duration_us);
  metrics.compact_wall_clock_duration_in_us = cppgc_metrics.compact_duration_us;
  DCHECK_NE(-1, cppgc_metrics.mark_duration_us);
  metrics.mark_wall_clock_duration_in_us = cppgc_metrics.mark_duration_us;
  DCHECK_NE(-1, cppgc_metrics.sweep_duration_us);
  metrics.sweep_wall_clock_duration_in_us = cppgc_metrics.sweep_duration_us;
  DCHECK_NE(-1, cppgc_metrics.weak_duration_us);
  metrics.weak_wall_clock_duration_in_us = cppgc_metrics.weak_duration_us;
}

void CopySizeMetrics(
    ::v8::metrics::GarbageCollectionSizes& metrics,
    const cppgc::internal::MetricRecorder::FullCycle::Sizes& cppgc_metrics) {
  DCHECK_NE(-1, cppgc_metrics.after_bytes);
  metrics.bytes_after = cppgc_metrics.after_bytes;
  DCHECK_NE(-1, cppgc_metrics.before_bytes);
  metrics.bytes_before = cppgc_metrics.before_bytes;
  DCHECK_NE(-1, cppgc_metrics.freed_bytes);
  metrics.bytes_freed = cppgc_metrics.freed_bytes;
}

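// Resolves the metrics context id for the isolate's current native context;
// returns the empty id when no context is entered.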
::v8::metrics::Recorder::ContextId GetContextId(
    v8::internal::Isolate* isolate) {
  DCHECK_NOT_NULL(isolate);
  if (isolate->context().is_null())
    return v8::metrics::Recorder::ContextId::Empty();
  HandleScope scope(isolate);
  return isolate->GetOrRegisterRecorderContextId(isolate->native_context());
}

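// Flushes a non-empty batch of incremental marking events to the embedder's
// metrics recorder under the current context id.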
void FlushBatchedIncrementalEvents(
    v8::metrics::GarbageCollectionFullMainThreadBatchedIncrementalMark&
        batched_events,
    Isolate* isolate) {
  DCHECK_NOT_NULL(isolate->metrics_recorder());
  DCHECK(!batched_events.events.empty());
  isolate->metrics_recorder()->AddMainThreadEvent(std::move(batched_events),
                                                  GetContextId(isolate));
}

}  // namespace

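// Builds a GarbageCollectionFullCycle event and reports it to the embedder's
// recorder. Any batched incremental events are flushed first so they arrive
// before the cycle summary; cppgc metrics are merged in when a CppHeap is
// attached.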
void GCTracer::ReportFullCycleToRecorder() {
  const std::shared_ptr<metrics::Recorder>& recorder =
      heap_->isolate()->metrics_recorder();
  DCHECK_NOT_NULL(recorder);
  if (!recorder->HasEmbedderRecorder()) return;
  if (!incremental_mark_batched_events_.events.empty()) {
    FlushBatchedIncrementalEvents(incremental_mark_batched_events_,
                                  heap_->isolate());
  }
  v8::metrics::GarbageCollectionFullCycle event;
  if (heap_->cpp_heap()) {
    auto* cpp_heap = v8::internal::CppHeap::From(heap_->cpp_heap());
    cpp_heap->GetMetricRecorder()->FlushBatchedIncrementalEvents();
    const base::Optional<cppgc::internal::MetricRecorder::FullCycle>
        optional_cppgc_event =
            cpp_heap->GetMetricRecorder()->ExtractLastFullGcEvent();
    DCHECK(optional_cppgc_event.has_value());
    const cppgc::internal::MetricRecorder::FullCycle& cppgc_event =
        optional_cppgc_event.value();
    CopyTimeMetrics(event.total_cpp, cppgc_event.total);
    CopyTimeMetrics(event.main_thread_cpp, cppgc_event.main_thread);
    CopyTimeMetrics(event.main_thread_atomic_cpp,
                    cppgc_event.main_thread_atomic);
    CopyTimeMetrics(event.main_thread_incremental_cpp,
                    cppgc_event.main_thread_incremental);
    CopySizeMetrics(event.objects_cpp, cppgc_event.objects);
    CopySizeMetrics(event.memory_cpp, cppgc_event.memory);
    DCHECK_NE(-1, cppgc_event.collection_rate_in_percent);
    event.collection_rate_cpp_in_percent =
        cppgc_event.collection_rate_in_percent;
    DCHECK_NE(-1, cppgc_event.efficiency_in_bytes_per_us);
    event.efficiency_cpp_in_bytes_per_us =
        cppgc_event.efficiency_in_bytes_per_us;
    DCHECK_NE(-1, cppgc_event.main_thread_efficiency_in_bytes_per_us);
    event.main_thread_efficiency_cpp_in_bytes_per_us =
        cppgc_event.main_thread_efficiency_in_bytes_per_us;
  }
  // TODO(chromium:1154636): Populate v8 metrics.
  recorder->AddMainThreadEvent(event, GetContextId(heap_->isolate()));
  metrics_report_pending_ = false;
}

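// Appends one event per incremental marking step and flushes the batch to
// the embedder's recorder once kMaxBatchedEvents entries have accumulated;
// batching amortizes the per-event recorder overhead during marking.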
void GCTracer::ReportIncrementalMarkingStepToRecorder() {
  static constexpr int kMaxBatchedEvents =
      CppHeap::MetricRecorderAdapter::kMaxBatchedEvents;
  const std::shared_ptr<metrics::Recorder>& recorder =
      heap_->isolate()->metrics_recorder();
  DCHECK_NOT_NULL(recorder);
  if (!recorder->HasEmbedderRecorder()) return;
  incremental_mark_batched_events_.events.emplace_back();
  if (heap_->cpp_heap()) {
    const base::Optional<
        cppgc::internal::MetricRecorder::MainThreadIncrementalMark>
        cppgc_event = v8::internal::CppHeap::From(heap_->cpp_heap())
                          ->GetMetricRecorder()
                          ->ExtractLastIncrementalMarkEvent();
    if (cppgc_event.has_value()) {
      DCHECK_NE(-1, cppgc_event.value().duration_us);
      incremental_mark_batched_events_.events.back()
          .cpp_wall_clock_duration_in_us = cppgc_event.value().duration_us;
    }
  }
  // TODO(chromium:1154636): Populate event.wall_clock_duration_in_us.
  if (incremental_mark_batched_events_.events.size() == kMaxBatchedEvents) {
    FlushBatchedIncrementalEvents(incremental_mark_batched_events_,
                                  heap_->isolate());
  }
}

}  // namespace internal
}  // namespace v8