1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2  * vim: set ts=8 sts=4 et sw=4 tw=99:
3  * This Source Code Form is subject to the terms of the Mozilla Public
4  * License, v. 2.0. If a copy of the MPL was not distributed with this
5  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 
7 #include "js/MemoryMetrics.h"
8 
9 #include "mozilla/DebugOnly.h"
10 
11 #include "jsapi.h"
12 #include "jscompartment.h"
13 #include "jsgc.h"
14 #include "jsobj.h"
15 #include "jsscript.h"
16 
17 #include "jit/BaselineJIT.h"
18 #include "jit/Ion.h"
19 #include "vm/ArrayObject.h"
20 #include "vm/Runtime.h"
21 #include "vm/Shape.h"
22 #include "vm/String.h"
23 #include "vm/Symbol.h"
24 #include "vm/WrapperObject.h"
25 
26 using mozilla::DebugOnly;
27 using mozilla::MallocSizeOf;
28 using mozilla::Move;
29 using mozilla::PodCopy;
30 using mozilla::PodEqual;
31 
32 using namespace js;
33 
34 using JS::RuntimeStats;
35 using JS::ObjectPrivateVisitor;
36 using JS::ZoneStats;
37 using JS::CompartmentStats;
38 
39 namespace js {
40 
41 JS_FRIEND_API(size_t)
MemoryReportingSundriesThreshold()42 MemoryReportingSundriesThreshold()
43 {
44     return 8 * 1024;
45 }
46 
// Hash a string's characters without flattening it.  Linear strings are
// hashed in place; ropes are first copied into a temporary heap buffer.
// CharT must match the string's character width (Latin1Char or char16_t).
template <typename CharT>
static uint32_t
HashStringChars(JSString* s)
{
    ScopedJSFreePtr<CharT> ownedChars;
    const CharT* chars;
    JS::AutoCheckCannotGC nogc;
    if (s->isLinear()) {
        chars = s->asLinear().chars<CharT>(nogc);
    } else {
        // Slowest hash function evar!
        if (!s->asRope().copyChars<CharT>(/* tcx */ nullptr, ownedChars))
            MOZ_CRASH("oom");
        chars = ownedChars;
    }

    return mozilla::HashString(chars, s->length());
}
65 
66 /* static */ HashNumber
hash(const Lookup & l)67 InefficientNonFlatteningStringHashPolicy::hash(const Lookup& l)
68 {
69     return l->hasLatin1Chars()
70            ? HashStringChars<Latin1Char>(l)
71            : HashStringChars<char16_t>(l);
72 }
73 
// Compare two strings character-by-character without flattening them, for
// any combination of Latin1/two-byte representations.  Rope inputs are
// copied into temporary buffers first; linear inputs are read in place.
template <typename Char1, typename Char2>
static bool
EqualStringsPure(JSString* s1, JSString* s2)
{
    // Lengths differ: cannot be equal, and we can skip the char compare.
    if (s1->length() != s2->length())
        return false;

    const Char1* c1;
    ScopedJSFreePtr<Char1> ownedChars1;
    JS::AutoCheckCannotGC nogc;
    if (s1->isLinear()) {
        c1 = s1->asLinear().chars<Char1>(nogc);
    } else {
        if (!s1->asRope().copyChars<Char1>(/* tcx */ nullptr, ownedChars1))
            MOZ_CRASH("oom");
        c1 = ownedChars1;
    }

    const Char2* c2;
    ScopedJSFreePtr<Char2> ownedChars2;
    if (s2->isLinear()) {
        c2 = s2->asLinear().chars<Char2>(nogc);
    } else {
        if (!s2->asRope().copyChars<Char2>(/* tcx */ nullptr, ownedChars2))
            MOZ_CRASH("oom");
        c2 = ownedChars2;
    }

    return EqualChars(c1, c2, s1->length());
}
104 
105 /* static */ bool
match(const JSString * const & k,const Lookup & l)106 InefficientNonFlatteningStringHashPolicy::match(const JSString* const& k, const Lookup& l)
107 {
108     // We can't use js::EqualStrings, because that flattens our strings.
109     JSString* s1 = const_cast<JSString*>(k);
110     if (k->hasLatin1Chars()) {
111         return l->hasLatin1Chars()
112                ? EqualStringsPure<Latin1Char, Latin1Char>(s1, l)
113                : EqualStringsPure<Latin1Char, char16_t>(s1, l);
114     }
115 
116     return l->hasLatin1Chars()
117            ? EqualStringsPure<char16_t, Latin1Char>(s1, l)
118            : EqualStringsPure<char16_t, char16_t>(s1, l);
119 }
120 
121 /* static */ HashNumber
hash(const Lookup & l)122 CStringHashPolicy::hash(const Lookup& l)
123 {
124     return mozilla::HashString(l);
125 }
126 
127 /* static */ bool
match(const char * const & k,const Lookup & l)128 CStringHashPolicy::match(const char* const& k, const Lookup& l)
129 {
130     return strcmp(k, l) == 0;
131 }
132 
133 } // namespace js
134 
135 namespace JS {
136 
NotableStringInfo()137 NotableStringInfo::NotableStringInfo()
138   : StringInfo(),
139     buffer(0),
140     length(0)
141 {
142 }
143 
// Copy an escaped sample of |str|'s characters into |buffer| (capacity
// |bufferSize|, including the terminating NUL).  Ropes are copied into a
// temporary buffer first; linear strings are read in place.
template <typename CharT>
static void
StoreStringChars(char* buffer, size_t bufferSize, JSString* str)
{
    const CharT* chars;
    ScopedJSFreePtr<CharT> ownedChars;
    JS::AutoCheckCannotGC nogc;
    if (str->isLinear()) {
        chars = str->asLinear().chars<CharT>(nogc);
    } else {
        if (!str->asRope().copyChars<CharT>(/* tcx */ nullptr, ownedChars))
            MOZ_CRASH("oom");
        chars = ownedChars;
    }

    // We might truncate |str| even if it's much shorter than 1024 chars, if
    // |str| contains unicode chars.  Since this is just for a memory reporter,
    // we don't care.
    PutEscapedString(buffer, bufferSize, chars, str->length(), /* quote */ 0);
}
164 
// Construct a notable-string record from |str|: copy |info|'s sizes, record
// the full length, and save an escaped sample of up to MAX_SAVED_CHARS
// characters for reporting.
NotableStringInfo::NotableStringInfo(JSString* str, const StringInfo& info)
  : StringInfo(info),
    length(str->length())
{
    // +1 for the terminating NUL; cap the sample at MAX_SAVED_CHARS.
    size_t bufferSize = Min(str->length() + 1, size_t(MAX_SAVED_CHARS));
    buffer = js_pod_malloc<char>(bufferSize);
    if (!buffer) {
        MOZ_CRASH("oom");
    }

    if (str->hasLatin1Chars())
        StoreStringChars<Latin1Char>(buffer, bufferSize, str);
    else
        StoreStringChars<char16_t>(buffer, bufferSize, str);
}
180 
NotableStringInfo(NotableStringInfo && info)181 NotableStringInfo::NotableStringInfo(NotableStringInfo&& info)
182   : StringInfo(Move(info)),
183     length(info.length)
184 {
185     buffer = info.buffer;
186     info.buffer = nullptr;
187 }
188 
// Move-assignment via destroy-then-placement-new: releases this instance's
// buffer, then move-constructs in place from |info|.
NotableStringInfo& NotableStringInfo::operator=(NotableStringInfo&& info)
{
    MOZ_ASSERT(this != &info, "self-move assignment is prohibited");
    this->~NotableStringInfo();
    new (this) NotableStringInfo(Move(info));
    return *this;
}
196 
NotableClassInfo()197 NotableClassInfo::NotableClassInfo()
198   : ClassInfo(),
199     className_(nullptr)
200 {
201 }
202 
NotableClassInfo(const char * className,const ClassInfo & info)203 NotableClassInfo::NotableClassInfo(const char* className, const ClassInfo& info)
204   : ClassInfo(info)
205 {
206     size_t bytes = strlen(className) + 1;
207     className_ = js_pod_malloc<char>(bytes);
208     if (!className_)
209         MOZ_CRASH("oom");
210     PodCopy(className_, className, bytes);
211 }
212 
NotableClassInfo(NotableClassInfo && info)213 NotableClassInfo::NotableClassInfo(NotableClassInfo&& info)
214   : ClassInfo(Move(info))
215 {
216     className_ = info.className_;
217     info.className_ = nullptr;
218 }
219 
// Move-assignment via destroy-then-placement-new: releases this instance's
// owned class name, then move-constructs in place from |info|.
NotableClassInfo& NotableClassInfo::operator=(NotableClassInfo&& info)
{
    MOZ_ASSERT(this != &info, "self-move assignment is prohibited");
    this->~NotableClassInfo();
    new (this) NotableClassInfo(Move(info));
    return *this;
}
227 
NotableScriptSourceInfo()228 NotableScriptSourceInfo::NotableScriptSourceInfo()
229   : ScriptSourceInfo(),
230     filename_(nullptr)
231 {
232 }
233 
NotableScriptSourceInfo(const char * filename,const ScriptSourceInfo & info)234 NotableScriptSourceInfo::NotableScriptSourceInfo(const char* filename, const ScriptSourceInfo& info)
235   : ScriptSourceInfo(info)
236 {
237     size_t bytes = strlen(filename) + 1;
238     filename_ = js_pod_malloc<char>(bytes);
239     if (!filename_)
240         MOZ_CRASH("oom");
241     PodCopy(filename_, filename, bytes);
242 }
243 
NotableScriptSourceInfo(NotableScriptSourceInfo && info)244 NotableScriptSourceInfo::NotableScriptSourceInfo(NotableScriptSourceInfo&& info)
245   : ScriptSourceInfo(Move(info))
246 {
247     filename_ = info.filename_;
248     info.filename_ = nullptr;
249 }
250 
// Move-assignment via destroy-then-placement-new: releases this instance's
// owned filename, then move-constructs in place from |info|.
NotableScriptSourceInfo& NotableScriptSourceInfo::operator=(NotableScriptSourceInfo&& info)
{
    MOZ_ASSERT(this != &info, "self-move assignment is prohibited");
    this->~NotableScriptSourceInfo();
    new (this) NotableScriptSourceInfo(Move(info));
    return *this;
}
258 
259 
260 } // namespace JS
261 
// Set of script sources already measured, so each source is counted once
// even when many scripts share it (see StatsCellCallback's Script case).
typedef HashSet<ScriptSource*, DefaultHasher<ScriptSource*>, SystemAllocPolicy> SourceSet;
263 
// Bundle of state threaded through the Iterate* callbacks below.
struct StatsClosure
{
    RuntimeStats* rtStats;      // Destination for all measurements.
    ObjectPrivateVisitor* opv;  // Optional embedder hook for objects' private data.
    SourceSet seenSources;      // Script sources measured so far (each counted once).
    bool anonymize;             // If true, skip notable-string bookkeeping.

    StatsClosure(RuntimeStats* rt, ObjectPrivateVisitor* v, bool anon)
      : rtStats(rt),
        opv(v),
        anonymize(anon)
    {}

    // Must be called (and must succeed) before the closure is used.
    bool init() {
        return seenSources.init();
    }
};
281 
282 static void
DecommittedArenasChunkCallback(JSRuntime * rt,void * data,gc::Chunk * chunk)283 DecommittedArenasChunkCallback(JSRuntime* rt, void* data, gc::Chunk* chunk)
284 {
285     // This case is common and fast to check.  Do it first.
286     if (chunk->decommittedArenas.isAllClear())
287         return;
288 
289     size_t n = 0;
290     for (size_t i = 0; i < gc::ArenasPerChunk; i++) {
291         if (chunk->decommittedArenas.get(i))
292             n += gc::ArenaSize;
293     }
294     MOZ_ASSERT(n > 0);
295     *static_cast<size_t*>(data) += n;
296 }
297 
// Per-zone callback: start a fresh ZoneStats for |zone| and take the
// zone-level measurements.
static void
StatsZoneCallback(JSRuntime* rt, void* data, Zone* zone)
{
    // Append a new ZoneStats to the vector.
    RuntimeStats* rtStats = static_cast<StatsClosure*>(data)->rtStats;

    // CollectRuntimeStats reserves enough space.
    MOZ_ALWAYS_TRUE(rtStats->zoneStatsVector.growBy(1));
    ZoneStats& zStats = rtStats->zoneStatsVector.back();
    if (!zStats.initStrings(rt))
        MOZ_CRASH("oom");
    rtStats->initExtraZoneStats(zone, &zStats);
    // Remember the current zone's stats so the arena/cell callbacks can
    // attribute their measurements to it.
    rtStats->currZoneStats = &zStats;

    zone->addSizeOfIncludingThis(rtStats->mallocSizeOf_,
                                 &zStats.typePool,
                                 &zStats.baselineStubsOptimized,
                                 &zStats.uniqueIdMap);
}
317 
// Per-compartment callback: start a fresh CompartmentStats for |compartment|
// and take the compartment-level measurements.
static void
StatsCompartmentCallback(JSRuntime* rt, void* data, JSCompartment* compartment)
{
    // Append a new CompartmentStats to the vector.
    RuntimeStats* rtStats = static_cast<StatsClosure*>(data)->rtStats;

    // CollectRuntimeStats reserves enough space.
    MOZ_ALWAYS_TRUE(rtStats->compartmentStatsVector.growBy(1));
    CompartmentStats& cStats = rtStats->compartmentStatsVector.back();
    if (!cStats.initClasses(rt))
        MOZ_CRASH("oom");
    rtStats->initExtraCompartmentStats(compartment, &cStats);

    // Stash the stats on the compartment so GetCompartmentStats can find
    // them from the cell callback; cleared again after iteration.
    compartment->compartmentStats = &cStats;

    // Measure the compartment object itself, and things hanging off it.
    compartment->addSizeOfIncludingThis(rtStats->mallocSizeOf_,
                                        &cStats.typeInferenceAllocationSiteTables,
                                        &cStats.typeInferenceArrayTypeTables,
                                        &cStats.typeInferenceObjectTypeTables,
                                        &cStats.compartmentObject,
                                        &cStats.compartmentTables,
                                        &cStats.innerViewsTable,
                                        &cStats.lazyArrayBuffersTable,
                                        &cStats.objectMetadataTable,
                                        &cStats.crossCompartmentWrappersTable,
                                        &cStats.regexpCompartment,
                                        &cStats.savedStacksSet,
                                        &cStats.nonSyntacticLexicalScopesTable);
}
348 
// Per-arena callback: account for the arena's admin overhead and seed the
// unused-GC-things tally for later subtraction in StatsCellCallback.
static void
StatsArenaCallback(JSRuntime* rt, void* data, gc::Arena* arena,
                   JS::TraceKind traceKind, size_t thingSize)
{
    RuntimeStats* rtStats = static_cast<StatsClosure*>(data)->rtStats;

    // The admin space includes (a) the header and (b) the padding between the
    // end of the header and the start of the first GC thing.
    size_t allocationSpace = arena->thingsSpan(thingSize);
    rtStats->currZoneStats->gcHeapArenaAdmin += gc::ArenaSize - allocationSpace;

    // We don't call the callback on unused things.  So we compute the
    // unused space like this:  arenaUnused = maxArenaUnused - arenaUsed.
    // We do this by setting arenaUnused to maxArenaUnused here, and then
    // subtracting thingSize for every used cell, in StatsCellCallback().
    rtStats->currZoneStats->unusedGCThings.addToKind(traceKind, allocationSpace);
}
366 
// Recover the CompartmentStats that StatsCompartmentCallback stashed on the
// compartment as an untyped pointer.
static CompartmentStats*
GetCompartmentStats(JSCompartment* comp)
{
    return static_cast<CompartmentStats*>(comp->compartmentStats);
}
372 
// FineGrained is used for normal memory reporting.  CoarseGrained is used by
// AddSizeOfTab(), which aggregates all the measurements into a handful of
// high-level numbers, which means that fine-grained reporting would be a waste
// of effort.
enum Granularity {
    FineGrained,    // Keep per-class and notable string/source breakdowns.
    CoarseGrained   // Totals only; skip the expensive hashing.
};
381 
382 static void
AddClassInfo(Granularity granularity,CompartmentStats * cStats,const char * className,JS::ClassInfo & info)383 AddClassInfo(Granularity granularity, CompartmentStats* cStats, const char* className,
384              JS::ClassInfo& info)
385 {
386     if (granularity == FineGrained) {
387         if (!className)
388             className = "<no class name>";
389         CompartmentStats::ClassesHashMap::AddPtr p =
390             cStats->allClasses->lookupForAdd(className);
391         if (!p) {
392             // Ignore failure -- we just won't record the
393             // object/shape/base-shape as notable.
394             (void)cStats->allClasses->add(p, className, info);
395         } else {
396             p->value().add(info);
397         }
398     }
399 }
400 
// The various kinds of hashing are expensive, and the results are unused when
// doing coarse-grained measurements. Skipping them more than doubles the
// profile speed for complex pages such as gmail.com.
//
// Per-cell callback: attribute one GC thing's GC-heap and malloc-heap sizes
// to the current zone/compartment stats, dispatching on its trace kind.
template <Granularity granularity>
static void
StatsCellCallback(JSRuntime* rt, void* data, void* thing, JS::TraceKind traceKind,
                  size_t thingSize)
{
    StatsClosure* closure = static_cast<StatsClosure*>(data);
    RuntimeStats* rtStats = closure->rtStats;
    ZoneStats* zStats = rtStats->currZoneStats;
    switch (traceKind) {
      case JS::TraceKind::Object: {
        JSObject* obj = static_cast<JSObject*>(thing);
        CompartmentStats* cStats = GetCompartmentStats(obj->compartment());
        JS::ClassInfo info;        // This zeroes all the sizes.
        info.objectsGCHeap += thingSize;
        obj->addSizeOfExcludingThis(rtStats->mallocSizeOf_, &info);

        cStats->classInfo.add(info);

        const Class* clasp = obj->getClass();
        const char* className = clasp->name;
        AddClassInfo(granularity, cStats, className, info);

        // Give the embedding a chance to measure the object's private data.
        if (ObjectPrivateVisitor* opv = closure->opv) {
            nsISupports* iface;
            if (opv->getISupports_(obj, &iface) && iface)
                cStats->objectsPrivate += opv->sizeOfIncludingThis(iface);
        }
        break;
      }

      case JS::TraceKind::Script: {
        JSScript* script = static_cast<JSScript*>(thing);
        CompartmentStats* cStats = GetCompartmentStats(script->compartment());
        cStats->scriptsGCHeap += thingSize;
        cStats->scriptsMallocHeapData += script->sizeOfData(rtStats->mallocSizeOf_);
        cStats->typeInferenceTypeScripts += script->sizeOfTypeScript(rtStats->mallocSizeOf_);
        jit::AddSizeOfBaselineData(script, rtStats->mallocSizeOf_, &cStats->baselineData,
                                   &cStats->baselineStubsFallback);
        cStats->ionData += jit::SizeOfIonData(script, rtStats->mallocSizeOf_);

        // Measure each script source only once, even if shared by many
        // scripts; |seenSources| tracks what we've already counted.
        ScriptSource* ss = script->scriptSource();
        SourceSet::AddPtr entry = closure->seenSources.lookupForAdd(ss);
        if (!entry) {
            (void)closure->seenSources.add(entry, ss); // Not much to be done on failure.

            JS::ScriptSourceInfo info;  // This zeroes all the sizes.
            ss->addSizeOfIncludingThis(rtStats->mallocSizeOf_, &info);
            MOZ_ASSERT(info.compressed == 0 || info.uncompressed == 0);

            rtStats->runtime.scriptSourceInfo.add(info);

            if (granularity == FineGrained) {
                const char* filename = ss->filename();
                if (!filename)
                    filename = "<no filename>";

                JS::RuntimeSizes::ScriptSourcesHashMap::AddPtr p =
                    rtStats->runtime.allScriptSources->lookupForAdd(filename);
                if (!p) {
                    // Ignore failure -- we just won't record the script source as notable.
                    (void)rtStats->runtime.allScriptSources->add(p, filename, info);
                } else {
                    p->value().add(info);
                }
            }
        }

        break;
      }

      case JS::TraceKind::String: {
        JSString* str = static_cast<JSString*>(thing);

        // Tally under the Latin1 or two-byte buckets per representation.
        JS::StringInfo info;
        if (str->hasLatin1Chars()) {
            info.gcHeapLatin1 = thingSize;
            info.mallocHeapLatin1 = str->sizeOfExcludingThis(rtStats->mallocSizeOf_);
        } else {
            info.gcHeapTwoByte = thingSize;
            info.mallocHeapTwoByte = str->sizeOfExcludingThis(rtStats->mallocSizeOf_);
        }
        info.numCopies = 1;

        zStats->stringInfo.add(info);

        // The primary use case for anonymization is automated crash submission
        // (to help detect OOM crashes). In that case, we don't want to pay the
        // memory cost required to do notable string detection.
        if (granularity == FineGrained && !closure->anonymize) {
            ZoneStats::StringsHashMap::AddPtr p = zStats->allStrings->lookupForAdd(str);
            if (!p) {
                // Ignore failure -- we just won't record the string as notable.
                (void)zStats->allStrings->add(p, str, info);
            } else {
                p->value().add(info);
            }
        }
        break;
      }

      case JS::TraceKind::Symbol:
        zStats->symbolsGCHeap += thingSize;
        break;

      case JS::TraceKind::BaseShape: {
        BaseShape* base = static_cast<BaseShape*>(thing);
        CompartmentStats* cStats = GetCompartmentStats(base->compartment());

        JS::ClassInfo info;        // This zeroes all the sizes.
        info.shapesGCHeapBase += thingSize;
        // No malloc-heap measurements.

        cStats->classInfo.add(info);

        const Class* clasp = base->clasp();
        const char* className = clasp->name;
        AddClassInfo(granularity, cStats, className, info);
        break;
      }

      case JS::TraceKind::JitCode: {
        zStats->jitCodesGCHeap += thingSize;
        // The code for a script is counted in ExecutableAllocator::sizeOfCode().
        break;
      }

      case JS::TraceKind::LazyScript: {
        LazyScript* lazy = static_cast<LazyScript*>(thing);
        zStats->lazyScriptsGCHeap += thingSize;
        zStats->lazyScriptsMallocHeap += lazy->sizeOfExcludingThis(rtStats->mallocSizeOf_);
        break;
      }

      case JS::TraceKind::Shape: {
        Shape* shape = static_cast<Shape*>(thing);
        CompartmentStats* cStats = GetCompartmentStats(shape->compartment());
        JS::ClassInfo info;        // This zeroes all the sizes.
        if (shape->inDictionary())
            info.shapesGCHeapDict += thingSize;
        else
            info.shapesGCHeapTree += thingSize;
        shape->addSizeOfExcludingThis(rtStats->mallocSizeOf_, &info);
        cStats->classInfo.add(info);

        const BaseShape* base = shape->base();
        const Class* clasp = base->clasp();
        const char* className = clasp->name;
        AddClassInfo(granularity, cStats, className, info);
        break;
      }

      case JS::TraceKind::ObjectGroup: {
        ObjectGroup* group = static_cast<ObjectGroup*>(thing);
        zStats->objectGroupsGCHeap += thingSize;
        zStats->objectGroupsMallocHeap += group->sizeOfExcludingThis(rtStats->mallocSizeOf_);
        break;
      }

      default:
        MOZ_CRASH("invalid traceKind in StatsCellCallback");
    }

    // Yes, this is a subtraction:  see StatsArenaCallback() for details.
    zStats->unusedGCThings.addToKind(traceKind, -thingSize);
}
569 
570 bool
initStrings(JSRuntime * rt)571 ZoneStats::initStrings(JSRuntime* rt)
572 {
573     isTotals = false;
574     allStrings = rt->new_<StringsHashMap>();
575     if (!allStrings || !allStrings->init()) {
576         js_delete(allStrings);
577         allStrings = nullptr;
578         return false;
579     }
580     return true;
581 }
582 
583 bool
initClasses(JSRuntime * rt)584 CompartmentStats::initClasses(JSRuntime* rt)
585 {
586     isTotals = false;
587     allClasses = rt->new_<ClassesHashMap>();
588     if (!allClasses || !allClasses->init()) {
589         js_delete(allClasses);
590         allClasses = nullptr;
591         return false;
592     }
593     return true;
594 }
595 
// Move each notable string out of the zone's aggregate tallies into
// zStats.notableStrings, then free the per-string table.  Returns false on
// OOM growing the notable vector.
static bool
FindNotableStrings(ZoneStats& zStats)
{
    using namespace JS;

    // We should only run FindNotableStrings once per ZoneStats object.
    MOZ_ASSERT(zStats.notableStrings.empty());

    for (ZoneStats::StringsHashMap::Range r = zStats.allStrings->all(); !r.empty(); r.popFront()) {

        JSString* str = r.front().key();
        StringInfo& info = r.front().value();

        if (!info.isNotable())
            continue;

        if (!zStats.notableStrings.growBy(1))
            return false;

        zStats.notableStrings.back() = NotableStringInfo(str, info);

        // We're moving this string from a non-notable to a notable bucket, so
        // subtract it out of the non-notable tallies.
        zStats.stringInfo.subtract(info);
    }
    // Delete |allStrings| now, rather than waiting for zStats's destruction,
    // to reduce peak memory consumption during reporting.
    js_delete(zStats.allStrings);
    zStats.allStrings = nullptr;
    return true;
}
627 
// Move each notable class out of the compartment's aggregate tallies into
// cStats.notableClasses, then free the per-class table.  Returns false on
// OOM growing the notable vector.
static bool
FindNotableClasses(CompartmentStats& cStats)
{
    using namespace JS;

    // We should only run FindNotableClasses once per CompartmentStats object.
    MOZ_ASSERT(cStats.notableClasses.empty());

    for (CompartmentStats::ClassesHashMap::Range r = cStats.allClasses->all();
         !r.empty();
         r.popFront())
    {
        const char* className = r.front().key();
        ClassInfo& info = r.front().value();

        // If this class isn't notable, skip it.
        if (!info.isNotable())
            continue;

        if (!cStats.notableClasses.growBy(1))
            return false;

        cStats.notableClasses.back() = NotableClassInfo(className, info);

        // We're moving this class from a non-notable to a notable bucket, so
        // subtract it out of the non-notable tallies.
        cStats.classInfo.subtract(info);
    }
    // Delete |allClasses| now, rather than waiting for cStats's destruction,
    // to reduce peak memory consumption during reporting.
    js_delete(cStats.allClasses);
    cStats.allClasses = nullptr;
    return true;
}
663 
// Move each notable script source out of the runtime's aggregate tallies
// into runtime.notableScriptSources, then free the per-source table.
// Returns false on OOM growing the notable vector.
static bool
FindNotableScriptSources(JS::RuntimeSizes& runtime)
{
    using namespace JS;

    // We should only run FindNotableScriptSources once per RuntimeSizes.
    MOZ_ASSERT(runtime.notableScriptSources.empty());

    for (RuntimeSizes::ScriptSourcesHashMap::Range r = runtime.allScriptSources->all();
         !r.empty();
         r.popFront())
    {
        const char* filename = r.front().key();
        ScriptSourceInfo& info = r.front().value();

        if (!info.isNotable())
            continue;

        if (!runtime.notableScriptSources.growBy(1))
            return false;

        runtime.notableScriptSources.back() = NotableScriptSourceInfo(filename, info);

        // We're moving this script source from a non-notable to a notable
        // bucket, so subtract its sizes from the non-notable tallies.
        runtime.scriptSourceInfo.subtract(info);
    }
    // Delete |allScriptSources| now, rather than waiting for the
    // RuntimeSizes' destruction, to reduce peak memory consumption during
    // reporting.
    js_delete(runtime.allScriptSources);
    runtime.allScriptSources = nullptr;
    return true;
}
697 
// Shared implementation of CollectRuntimeStats and AddServoSizeOf: walk
// every zone, compartment, arena and cell of |rt|, filling in |rtStats|.
// |statsCellCallback| selects fine- or coarse-grained per-cell measurement.
// Returns false on OOM.
static bool
CollectRuntimeStatsHelper(JSRuntime* rt, RuntimeStats* rtStats, ObjectPrivateVisitor* opv,
                          bool anonymize, IterateCellCallback statsCellCallback)
{
    // Pre-reserve the vectors so the iteration callbacks' growBy(1) calls
    // cannot fail (they MOZ_ALWAYS_TRUE the result).
    if (!rtStats->compartmentStatsVector.reserve(rt->numCompartments))
        return false;

    if (!rtStats->zoneStatsVector.reserve(rt->gc.zones.length()))
        return false;

    rtStats->gcHeapChunkTotal =
        size_t(JS_GetGCParameter(rt, JSGC_TOTAL_CHUNKS)) * gc::ChunkSize;

    rtStats->gcHeapUnusedChunks =
        size_t(JS_GetGCParameter(rt, JSGC_UNUSED_CHUNKS)) * gc::ChunkSize;

    // Sum the decommitted arenas across all chunks.
    IterateChunks(rt, &rtStats->gcHeapDecommittedArenas,
                  DecommittedArenasChunkCallback);

    // Take the per-compartment measurements.
    StatsClosure closure(rtStats, opv, anonymize);
    if (!closure.init())
        return false;
    IterateZonesCompartmentsArenasCells(rt, &closure,
                                        StatsZoneCallback,
                                        StatsCompartmentCallback,
                                        StatsArenaCallback,
                                        statsCellCallback);

    // Take the "explicit/js/runtime/" measurements.
    rt->addSizeOfIncludingThis(rtStats->mallocSizeOf_, &rtStats->runtime);

    if (!FindNotableScriptSources(rtStats->runtime))
        return false;

    JS::ZoneStatsVector& zs = rtStats->zoneStatsVector;
    ZoneStats& zTotals = rtStats->zTotals;

    // We don't look for notable strings for zTotals. So we first sum all the
    // zones' measurements to get the totals. Then we find the notable strings
    // within each zone.
    for (size_t i = 0; i < zs.length(); i++)
        zTotals.addSizes(zs[i]);

    for (size_t i = 0; i < zs.length(); i++)
        if (!FindNotableStrings(zs[i]))
            return false;

    MOZ_ASSERT(!zTotals.allStrings);

    JS::CompartmentStatsVector& cs = rtStats->compartmentStatsVector;
    CompartmentStats& cTotals = rtStats->cTotals;

    // As with the zones, we sum all compartments first, and then get the
    // notable classes within each zone.
    for (size_t i = 0; i < cs.length(); i++)
        cTotals.addSizes(cs[i]);

    for (size_t i = 0; i < cs.length(); i++) {
        if (!FindNotableClasses(cs[i]))
            return false;
    }

    MOZ_ASSERT(!cTotals.allClasses);

    rtStats->gcHeapGCThings = rtStats->zTotals.sizeOfLiveGCThings() +
                              rtStats->cTotals.sizeOfLiveGCThings();

#ifdef DEBUG
    // Check that the in-arena measurements look ok.
    size_t totalArenaSize = rtStats->zTotals.gcHeapArenaAdmin +
                            rtStats->zTotals.unusedGCThings.totalSize() +
                            rtStats->gcHeapGCThings;
    MOZ_ASSERT(totalArenaSize % gc::ArenaSize == 0);
#endif

    // Clear the per-compartment stats pointers stashed by
    // StatsCompartmentCallback; the vector they point into may not outlive
    // the compartments.
    for (CompartmentsIter comp(rt, WithAtoms); !comp.done(); comp.next())
        comp->compartmentStats = nullptr;

    size_t numDirtyChunks =
        (rtStats->gcHeapChunkTotal - rtStats->gcHeapUnusedChunks) / gc::ChunkSize;
    size_t perChunkAdmin =
        sizeof(gc::Chunk) - (sizeof(gc::Arena) * gc::ArenasPerChunk);
    rtStats->gcHeapChunkAdmin = numDirtyChunks * perChunkAdmin;

    // |gcHeapUnusedArenas| is the only thing left.  Compute it in terms of
    // all the others.  See the comment in RuntimeStats for explanation.
    rtStats->gcHeapUnusedArenas = rtStats->gcHeapChunkTotal -
                                  rtStats->gcHeapDecommittedArenas -
                                  rtStats->gcHeapUnusedChunks -
                                  rtStats->zTotals.unusedGCThings.totalSize() -
                                  rtStats->gcHeapChunkAdmin -
                                  rtStats->zTotals.gcHeapArenaAdmin -
                                  rtStats->gcHeapGCThings;
    return true;
}
794 
// Public entry point: collect fine-grained runtime memory statistics,
// including per-class and notable string/source breakdowns.
JS_PUBLIC_API(bool)
JS::CollectRuntimeStats(JSRuntime *rt, RuntimeStats *rtStats, ObjectPrivateVisitor *opv,
                        bool anonymize)
{
    return CollectRuntimeStatsHelper(rt, rtStats, opv, anonymize, StatsCellCallback<FineGrained>);
}
801 
// Count the compartments (iterated with WithAtoms) flagged as system
// compartments.
JS_PUBLIC_API(size_t)
JS::SystemCompartmentCount(JSRuntime* rt)
{
    size_t count = 0;
    for (CompartmentsIter comp(rt, WithAtoms); !comp.done(); comp.next()) {
        if (comp->isSystem())
            count++;
    }
    return count;
}
812 
// Count the compartments (iterated with WithAtoms) NOT flagged as system
// compartments.
JS_PUBLIC_API(size_t)
JS::UserCompartmentCount(JSRuntime* rt)
{
    size_t count = 0;
    for (CompartmentsIter comp(rt, WithAtoms); !comp.done(); comp.next())
        count += comp->isSystem() ? 0 : 1;
    return count;
}
823 
// Report the peak size of the runtime's temporary LifoAlloc arena,
// excluding the allocator object itself.
JS_PUBLIC_API(size_t)
JS::PeakSizeOfTemporary(const JSRuntime* rt)
{
    return rt->tempLifoAlloc.peakSizeOfExcludingThis();
}
829 
830 namespace JS {
831 
// RuntimeStats subclass that records no extra per-zone or per-compartment
// data; used by the aggregate measurers AddSizeOfTab and AddServoSizeOf.
class SimpleJSRuntimeStats : public JS::RuntimeStats
{
  public:
    explicit SimpleJSRuntimeStats(MallocSizeOf mallocSizeOf)
      : JS::RuntimeStats(mallocSizeOf)
    {}

    // No extra zone data to initialize.
    virtual void initExtraZoneStats(JS::Zone* zone, JS::ZoneStats* zStats)
        override
    {}

    // No extra compartment data to initialize.
    virtual void initExtraCompartmentStats(
        JSCompartment* c, JS::CompartmentStats* cStats) override
    {}
};
847 
// Measure the single zone containing |obj| (and all its compartments),
// coarse-grained, and add the results into |sizes|.  Returns false on OOM.
JS_PUBLIC_API(bool)
AddSizeOfTab(JSRuntime* rt, HandleObject obj, MallocSizeOf mallocSizeOf, ObjectPrivateVisitor* opv,
             TabSizes* sizes)
{
    SimpleJSRuntimeStats rtStats(mallocSizeOf);

    JS::Zone* zone = GetObjectZone(obj);

    // Pre-reserve so the iteration callbacks' growBy(1) calls cannot fail.
    if (!rtStats.compartmentStatsVector.reserve(zone->compartments.length()))
        return false;

    if (!rtStats.zoneStatsVector.reserve(1))
        return false;

    // Take the per-compartment measurements. No need to anonymize because
    // these measurements will be aggregated.
    StatsClosure closure(&rtStats, opv, /* anonymize = */ false);
    if (!closure.init())
        return false;
    IterateZoneCompartmentsArenasCells(rt, zone, &closure,
                                       StatsZoneCallback,
                                       StatsCompartmentCallback,
                                       StatsArenaCallback,
                                       StatsCellCallback<CoarseGrained>);

    MOZ_ASSERT(rtStats.zoneStatsVector.length() == 1);
    rtStats.zTotals.addSizes(rtStats.zoneStatsVector[0]);

    for (size_t i = 0; i < rtStats.compartmentStatsVector.length(); i++)
        rtStats.cTotals.addSizes(rtStats.compartmentStatsVector[i]);

    // Clear the stats pointers stashed on the compartments during iteration.
    for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next())
        comp->compartmentStats = nullptr;

    rtStats.zTotals.addToTabSizes(sizes);
    rtStats.cTotals.addToTabSizes(sizes);

    return true;
}
887 
// Measure the whole runtime, coarse-grained, and add the results into
// |sizes|.  Returns false on OOM.
JS_PUBLIC_API(bool)
AddServoSizeOf(JSRuntime *rt, MallocSizeOf mallocSizeOf, ObjectPrivateVisitor *opv,
               ServoSizes *sizes)
{
    SimpleJSRuntimeStats rtStats(mallocSizeOf);

    // No need to anonymize because the results will be aggregated.
    if (!CollectRuntimeStatsHelper(rt, &rtStats, opv, /* anonymize = */ false,
                                   StatsCellCallback<CoarseGrained>))
        return false;

#ifdef DEBUG
    // Remember the incoming gc-heap total so we can verify, below, that the
    // amount this call added equals the runtime's gcHeapChunkTotal.
    size_t gcHeapTotalOriginal = sizes->gcHeapUsed +
                                 sizes->gcHeapUnused +
                                 sizes->gcHeapAdmin +
                                 sizes->gcHeapDecommitted;
#endif

    rtStats.addToServoSizes(sizes);
    rtStats.zTotals.addToServoSizes(sizes);
    rtStats.cTotals.addToServoSizes(sizes);

#ifdef DEBUG
    size_t gcHeapTotal = sizes->gcHeapUsed +
                         sizes->gcHeapUnused +
                         sizes->gcHeapAdmin +
                         sizes->gcHeapDecommitted;
    MOZ_ASSERT(rtStats.gcHeapChunkTotal == gcHeapTotal - gcHeapTotalOriginal);
#endif

    return true;
}
920 
921 } // namespace JS
922 
923