/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

// We're dividing JS objects into 3 categories:
//
// 1. "real" roots, held by the JS engine itself or rooted through the root
//    and lock JS APIs. Roots from this category are considered black in the
//    cycle collector, any cycle they participate in is uncollectable.
//
// 2. certain roots held by C++ objects that are guaranteed to be alive.
//    Roots from this category are considered black in the cycle collector,
//    and any cycle they participate in is uncollectable. These roots are
//    traced from TraceNativeBlackRoots.
//
// 3. all other roots held by C++ objects that participate in cycle collection,
//    held by us (see TraceNativeGrayRoots). Roots from this category are
//    considered gray in the cycle collector; whether or not they are collected
//    depends on the objects that hold them.
//
// Note that if a root is in multiple categories, membership in category 1 or 2
// takes precedence, so it will be considered black.
//
// During garbage collection we switch to an additional mark color (gray) when
// tracing inside TraceNativeGrayRoots. This allows us to walk those roots later
// on and add all objects reachable only from them to the cycle collector.
//
// Phases:
//
// 1. marking of the roots in category 1 by having the JS GC do its marking
// 2. marking of the roots in category 2 by having the JS GC call us back
//    (via JS_SetExtraGCRootsTracer) and running TraceNativeBlackRoots
// 3. marking of the roots in category 3 by
//    TraceNativeGrayRootsInCollectingZones using an additional color (gray)
// 4. end of GC; the GC can sweep its heap
//
// At some later point, when the cycle collector runs:
//
// 5. walk the gray objects and add them to the cycle collector, then cycle
//    collect
//
// JS objects that are part of cycles the cycle collector breaks will be
// collected by the next JS GC.
//
// If WantAllTraces() is false, the cycle collector will not traverse roots
// from category 1 or any JS objects held by them. Any JS objects they hold
// will already be marked by the JS GC and will thus be colored black
// themselves. Any C++ objects they hold will have a missing (untraversed)
// edge from the JS object to the C++ object and so will be marked black too.
// This decreases the number of objects that the cycle collector has to deal
// with.
// To improve debugging, if WantAllTraces() is true all JS objects are
// traversed.
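//
// As a rough illustration (with a hypothetical class Foo, not something
// defined in this file), a cycle-collected C++ object typically ends up in
// category 3 by keeping its JS members in traceable wrappers and registering
// itself as a JS holder:
//
//   class Foo final : public nsISupports {
//     JS::Heap<JSObject*> mJSObject;
//    public:
//     Foo() { mozilla::HoldJSObjects(this); }   // become a JS holder
//    private:
//     ~Foo() { mozilla::DropJSObjects(this); }  // stop being a JS holder
//   };
//
// The class's cycle collection participant then traces mJSObject, so the
// object's JS children are reported through TraceNativeGrayRoots.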

#include "mozilla/CycleCollectedJSRuntime.h"

#include <algorithm>
#include <utility>

#include "GeckoProfiler.h"
#include "js/Debug.h"
#include "js/GCAPI.h"
#include "js/HeapAPI.h"
#include "js/Warnings.h"  // JS::SetWarningReporter
#include "jsfriendapi.h"
#include "mozilla/ArrayUtils.h"
#include "mozilla/AutoRestore.h"
#include "mozilla/CycleCollectedJSContext.h"
#include "mozilla/DebuggerOnGCRunnable.h"
#include "mozilla/MemoryReporting.h"
#include "mozilla/Sprintf.h"
#include "mozilla/Telemetry.h"
#include "mozilla/TimelineConsumers.h"
#include "mozilla/TimelineMarker.h"
#include "mozilla/Unused.h"
#include "mozilla/dom/DOMJSClass.h"
#include "mozilla/dom/JSExecutionManager.h"
#include "mozilla/dom/ProfileTimelineMarkerBinding.h"
#include "mozilla/dom/Promise.h"
#include "mozilla/dom/PromiseBinding.h"
#include "mozilla/dom/PromiseDebugging.h"
#include "mozilla/dom/ScriptSettings.h"
#include "nsContentUtils.h"
#include "nsCycleCollectionNoteRootCallback.h"
#include "nsCycleCollectionParticipant.h"
#include "nsCycleCollector.h"
#include "nsDOMJSUtils.h"
#include "nsExceptionHandler.h"
#include "nsJSUtils.h"
#include "nsStringBuffer.h"
#include "nsWrapperCache.h"

#ifdef MOZ_GECKO_PROFILER
#  include "ProfilerMarkerPayload.h"
#endif

#if defined(XP_MACOSX)
#  include "nsMacUtilsImpl.h"
#endif

#include "nsThread.h"
#include "nsThreadUtils.h"
#include "xpcpublic.h"

#ifdef NIGHTLY_BUILD
// For performance reasons, we make the JS Dev Error Interceptor a Nightly-only
// feature.
#  define MOZ_JS_DEV_ERROR_INTERCEPTOR = 1
#endif  // NIGHTLY_BUILD

using namespace mozilla;
using namespace mozilla::dom;

namespace mozilla {

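// A single deferred finalization callback together with the data it should be
// run on.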
struct DeferredFinalizeFunctionHolder {
  DeferredFinalizeFunction run;
  void* data;
};

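// Runnable that runs the deferred finalizers gathered during a GC. It can
// release objects in limited slices (roughly SliceMillis at a time) so that
// finalization does not block the thread for too long.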
class IncrementalFinalizeRunnable : public CancelableRunnable {
  typedef AutoTArray<DeferredFinalizeFunctionHolder, 16> DeferredFinalizeArray;
  typedef CycleCollectedJSRuntime::DeferredFinalizerTable
      DeferredFinalizerTable;

  CycleCollectedJSRuntime* mRuntime;
  DeferredFinalizeArray mDeferredFinalizeFunctions;
  uint32_t mFinalizeFunctionToRun;
  bool mReleasing;

  static const PRTime SliceMillis = 5; /* ms */

 public:
  IncrementalFinalizeRunnable(CycleCollectedJSRuntime* aRt,
                              DeferredFinalizerTable& aFinalizerTable);
  virtual ~IncrementalFinalizeRunnable();

  void ReleaseNow(bool aLimited);

  NS_DECL_NSIRUNNABLE
};

}  // namespace mozilla

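// Tracer used by NoteWeakMapsTracer for weak map values that cannot be
// represented directly in the cycle collector graph: it traces the value's
// children and reports each gray, CC-participating child as a weak mapping
// for the current key.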
struct NoteWeakMapChildrenTracer : public JS::CallbackTracer {
  NoteWeakMapChildrenTracer(JSRuntime* aRt,
                            nsCycleCollectionNoteRootCallback& aCb)
      : JS::CallbackTracer(aRt),
        mCb(aCb),
        mTracedAny(false),
        mMap(nullptr),
        mKey(nullptr),
        mKeyDelegate(nullptr) {
    setCanSkipJsids(true);
  }
  bool onChild(const JS::GCCellPtr& aThing) override;
  nsCycleCollectionNoteRootCallback& mCb;
  bool mTracedAny;
  JSObject* mMap;
  JS::GCCellPtr mKey;
  JSObject* mKeyDelegate;
};

bool NoteWeakMapChildrenTracer::onChild(const JS::GCCellPtr& aThing) {
  if (aThing.is<JSString>()) {
    return true;
  }

  if (!JS::GCThingIsMarkedGray(aThing) && !mCb.WantAllTraces()) {
    return true;
  }

  if (JS::IsCCTraceKind(aThing.kind())) {
    mCb.NoteWeakMapping(mMap, mKey, mKeyDelegate, aThing);
    mTracedAny = true;
  } else {
    JS::TraceChildren(this, aThing);
  }
  return true;
}

struct NoteWeakMapsTracer : public js::WeakMapTracer {
  NoteWeakMapsTracer(JSRuntime* aRt, nsCycleCollectionNoteRootCallback& aCccb)
      : js::WeakMapTracer(aRt), mCb(aCccb), mChildTracer(aRt, aCccb) {}
  void trace(JSObject* aMap, JS::GCCellPtr aKey, JS::GCCellPtr aValue) override;
  nsCycleCollectionNoteRootCallback& mCb;
  NoteWeakMapChildrenTracer mChildTracer;
};

void NoteWeakMapsTracer::trace(JSObject* aMap, JS::GCCellPtr aKey,
                               JS::GCCellPtr aValue) {
  // If nothing that could be held alive by this entry is marked gray, return.
  if ((!aKey || !JS::GCThingIsMarkedGray(aKey)) &&
      MOZ_LIKELY(!mCb.WantAllTraces())) {
    if (!aValue || !JS::GCThingIsMarkedGray(aValue) || aValue.is<JSString>()) {
      return;
    }
  }

  // The cycle collector can only properly reason about weak maps if it can
  // reason about the liveness of their keys, which in turn requires that
  // the key can be represented in the cycle collector graph.  All existing
  // uses of weak maps use either objects or scripts as keys, which are okay.
  MOZ_ASSERT(JS::IsCCTraceKind(aKey.kind()));

  // As an emergency fallback for non-debug builds, if the key is not
  // representable in the cycle collector graph, we treat it as marked.  This
  // can cause leaks, but is preferable to ignoring the binding, which could
  // cause the cycle collector to free live objects.
  if (!JS::IsCCTraceKind(aKey.kind())) {
    aKey = nullptr;
  }

  JSObject* kdelegate = nullptr;
  if (aKey.is<JSObject>()) {
    kdelegate = js::UncheckedUnwrapWithoutExpose(&aKey.as<JSObject>());
  }

  if (JS::IsCCTraceKind(aValue.kind())) {
    mCb.NoteWeakMapping(aMap, aKey, kdelegate, aValue);
  } else {
    mChildTracer.mTracedAny = false;
    mChildTracer.mMap = aMap;
    mChildTracer.mKey = aKey;
    mChildTracer.mKeyDelegate = kdelegate;

    if (!aValue.is<JSString>()) {
      JS::TraceChildren(&mChildTracer, aValue);
    }

    // The delegate could hold the key alive, so report something to the CC
    // if we haven't already.
    if (!mChildTracer.mTracedAny && aKey && JS::GCThingIsMarkedGray(aKey) &&
        kdelegate) {
      mCb.NoteWeakMapping(aMap, aKey, kdelegate, nullptr);
    }
  }
}

// Report whether the key or value of a weak mapping entry is gray and needs
// to be marked black.
static void ShouldWeakMappingEntryBeBlack(JSObject* aMap, JS::GCCellPtr aKey,
                                          JS::GCCellPtr aValue,
                                          bool* aKeyShouldBeBlack,
                                          bool* aValueShouldBeBlack) {
  *aKeyShouldBeBlack = false;
  *aValueShouldBeBlack = false;

  // If nothing that could be held alive by this entry is marked gray, return.
  bool keyMightNeedMarking = aKey && JS::GCThingIsMarkedGray(aKey);
  bool valueMightNeedMarking = aValue && JS::GCThingIsMarkedGray(aValue) &&
                               aValue.kind() != JS::TraceKind::String;
  if (!keyMightNeedMarking && !valueMightNeedMarking) {
    return;
  }

  if (!JS::IsCCTraceKind(aKey.kind())) {
    aKey = nullptr;
  }

  if (keyMightNeedMarking && aKey.is<JSObject>()) {
    JSObject* kdelegate =
        js::UncheckedUnwrapWithoutExpose(&aKey.as<JSObject>());
    if (kdelegate && !JS::ObjectIsMarkedGray(kdelegate) &&
        (!aMap || !JS::ObjectIsMarkedGray(aMap))) {
      *aKeyShouldBeBlack = true;
    }
  }

  if (aValue && JS::GCThingIsMarkedGray(aValue) &&
      (!aKey || !JS::GCThingIsMarkedGray(aKey)) &&
      (!aMap || !JS::ObjectIsMarkedGray(aMap)) &&
      aValue.kind() != JS::TraceKind::Shape) {
    *aValueShouldBeBlack = true;
  }
}

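// Tracer that marks black (unmarks gray) any weak map keys and values that
// ShouldWeakMappingEntryBeBlack reports should be black. FixAll loops until
// no further entries are unmarked, since unmarking one entry can affect
// others.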
struct FixWeakMappingGrayBitsTracer : public js::WeakMapTracer {
  explicit FixWeakMappingGrayBitsTracer(JSRuntime* aRt)
      : js::WeakMapTracer(aRt) {}

  void FixAll() {
    do {
      mAnyMarked = false;
      js::TraceWeakMaps(this);
    } while (mAnyMarked);
  }

  void trace(JSObject* aMap, JS::GCCellPtr aKey,
             JS::GCCellPtr aValue) override {
    bool keyShouldBeBlack;
    bool valueShouldBeBlack;
    ShouldWeakMappingEntryBeBlack(aMap, aKey, aValue, &keyShouldBeBlack,
                                  &valueShouldBeBlack);
    if (keyShouldBeBlack && JS::UnmarkGrayGCThingRecursively(aKey)) {
      mAnyMarked = true;
    }

    if (valueShouldBeBlack && JS::UnmarkGrayGCThingRecursively(aValue)) {
      mAnyMarked = true;
    }
  }

  MOZ_INIT_OUTSIDE_CTOR bool mAnyMarked;
};

#ifdef DEBUG
// Check whether weak maps are marked correctly according to the logic above.
struct CheckWeakMappingGrayBitsTracer : public js::WeakMapTracer {
  explicit CheckWeakMappingGrayBitsTracer(JSRuntime* aRt)
      : js::WeakMapTracer(aRt), mFailed(false) {}

  static bool Check(JSRuntime* aRt) {
    CheckWeakMappingGrayBitsTracer tracer(aRt);
    js::TraceWeakMaps(&tracer);
    return !tracer.mFailed;
  }

  void trace(JSObject* aMap, JS::GCCellPtr aKey,
             JS::GCCellPtr aValue) override {
    bool keyShouldBeBlack;
    bool valueShouldBeBlack;
    ShouldWeakMappingEntryBeBlack(aMap, aKey, aValue, &keyShouldBeBlack,
                                  &valueShouldBeBlack);

    if (keyShouldBeBlack) {
      fprintf(stderr, "Weak mapping key %p of map %p should be black\n",
              aKey.asCell(), aMap);
      mFailed = true;
    }

    if (valueShouldBeBlack) {
      fprintf(stderr, "Weak mapping value %p of map %p should be black\n",
              aValue.asCell(), aMap);
      mFailed = true;
    }
  }

  bool mFailed;
};
#endif  // DEBUG

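// Trace callback that sets the bool pointed to by aClosure to true if the
// traced thing is a gray GC thing of a kind the cycle collector can handle.
// Used below to decide whether a JS holder needs to be reported as a root.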
static void CheckParticipatesInCycleCollection(JS::GCCellPtr aThing,
                                               const char* aName,
                                               void* aClosure) {
  bool* cycleCollectionEnabled = static_cast<bool*>(aClosure);

  if (*cycleCollectionEnabled) {
    return;
  }

  if (JS::IsCCTraceKind(aThing.kind()) && JS::GCThingIsMarkedGray(aThing)) {
    *cycleCollectionEnabled = true;
  }
}

NS_IMETHODIMP
JSGCThingParticipant::TraverseNative(void* aPtr,
                                     nsCycleCollectionTraversalCallback& aCb) {
  auto runtime = reinterpret_cast<CycleCollectedJSRuntime*>(
      reinterpret_cast<char*>(this) -
      offsetof(CycleCollectedJSRuntime, mGCThingCycleCollectorGlobal));

  JS::GCCellPtr cellPtr(aPtr, JS::GCThingTraceKind(aPtr));
  runtime->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_FULL, cellPtr,
                           aCb);
  return NS_OK;
}

// NB: This is only used to initialize the participant in
// CycleCollectedJSRuntime. It should never be used directly.
static JSGCThingParticipant sGCThingCycleCollectorGlobal;

NS_IMETHODIMP
JSZoneParticipant::TraverseNative(void* aPtr,
                                  nsCycleCollectionTraversalCallback& aCb) {
  auto runtime = reinterpret_cast<CycleCollectedJSRuntime*>(
      reinterpret_cast<char*>(this) -
      offsetof(CycleCollectedJSRuntime, mJSZoneCycleCollectorGlobal));

  MOZ_ASSERT(!aCb.WantAllTraces());
  JS::Zone* zone = static_cast<JS::Zone*>(aPtr);

  runtime->TraverseZone(zone, aCb);
  return NS_OK;
}

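// Tracer that adds the JS children of a GC thing to the cycle collector graph
// via the traversal callback.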
struct TraversalTracer : public JS::CallbackTracer {
  TraversalTracer(JSRuntime* aRt, nsCycleCollectionTraversalCallback& aCb)
      : JS::CallbackTracer(aRt, DoNotTraceWeakMaps), mCb(aCb) {
    setCanSkipJsids(true);
  }
  bool onChild(const JS::GCCellPtr& aThing) override;
  nsCycleCollectionTraversalCallback& mCb;
};

bool TraversalTracer::onChild(const JS::GCCellPtr& aThing) {
  // Checking strings and symbols for being gray is rather slow, and we don't
  // need either of them for the cycle collector.
  if (aThing.is<JSString>() || aThing.is<JS::Symbol>()) {
    return true;
  }

  // Don't traverse non-gray objects, unless we want all traces.
  if (!JS::GCThingIsMarkedGray(aThing) && !mCb.WantAllTraces()) {
    return true;
  }

  /*
   * This function needs to be careful to avoid stack overflow. Normally, when
   * IsCCTraceKind is true, the recursion terminates immediately as we just add
   * |thing| to the CC graph. So overflow is only possible when there are long
   * or cyclic chains of non-IsCCTraceKind GC things. Places where this can
   * occur use special APIs to handle such chains iteratively.
   */
  if (JS::IsCCTraceKind(aThing.kind())) {
    if (MOZ_UNLIKELY(mCb.WantDebugInfo())) {
      char buffer[200];
      getTracingEdgeName(buffer, sizeof(buffer));
      mCb.NoteNextEdgeName(buffer);
    }
    mCb.NoteJSChild(aThing);
  } else if (aThing.is<js::Shape>()) {
    // The maximum depth of traversal when tracing a Shape is unbounded, due to
    // the parent pointers on the shape.
    JS_TraceShapeCycleCollectorChildren(this, aThing);
  } else if (aThing.is<js::ObjectGroup>()) {
    // The maximum depth of traversal when tracing an ObjectGroup is unbounded,
    // due to information attached to the groups which can lead other groups to
    // be traced.
    JS_TraceObjectGroupCycleCollectorChildren(this, aThing);
  } else {
    JS::TraceChildren(this, aThing);
  }
  return true;
}

static void NoteJSChildGrayWrapperShim(void* aData, JS::GCCellPtr aThing) {
  TraversalTracer* trc = static_cast<TraversalTracer*>(aData);
  trc->onChild(aThing);
}

/*
 * The cycle collection participant for a Zone is intended to produce the same
 * results as if all of the gray GCthings in a zone were merged into a single
 * node, except for self-edges. This avoids the overhead of representing all of
 * the GCthings in the zone in the cycle collector graph, which should be much
 * faster if many of the GCthings in the zone are gray.
 *
 * Zone merging should not always be used, because it is a conservative
 * approximation of the true cycle collector graph that can incorrectly
 * identify some garbage objects as being live. For instance, consider two
 * cycles that pass through a zone, where one is garbage and the other is live.
 * If we merge the entire zone, the cycle collector will think that both are
 * alive.
 *
 * We don't have to worry about losing track of a garbage cycle, because any
 * such garbage cycle incorrectly identified as live must contain at least one
 * C++ to JS edge, and XPConnect will always add the C++ object to the CC graph.
 * (This is in contrast to pure C++ garbage cycles, which must always be
 * properly identified, because we clear the purple buffer during every CC,
 * which may contain the last reference to a garbage cycle.)
 */

// NB: This is only used to initialize the participant in
// CycleCollectedJSRuntime. It should never be used directly.
static const JSZoneParticipant sJSZoneCycleCollectorGlobal;

static void JSObjectsTenuredCb(JSContext* aContext, void* aData) {
  static_cast<CycleCollectedJSRuntime*>(aData)->JSObjectsTenured();
}

static void MozCrashWarningReporter(JSContext*, JSErrorReport*) {
  MOZ_CRASH("Why is someone touching JSAPI without an AutoJSAPI?");
}

JSHolderMap::Entry::Entry() : Entry(nullptr, nullptr, nullptr) {}

JSHolderMap::Entry::Entry(void* aHolder, nsScriptObjectTracer* aTracer,
                          JS::Zone* aZone)
    : mHolder(aHolder),
      mTracer(aTracer)
#ifdef DEBUG
      ,
      mZone(aZone)
#endif
{
}

JSHolderMap::JSHolderMap() : mJSHolderMap(256) {}

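// Iterate over all JS holders, or only over holders in zones that are
// currently being collected, calling f(holder, tracer, zone) for each one.
// Entries that have been cleared by GetAndRemove are pruned as we go, and
// per-zone vectors that become empty are removed from the map.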
template <typename F>
inline void JSHolderMap::ForEach(F&& f, WhichHolders aWhich) {
  // Multi-zone JS holders must always be considered.
  ForEach(mAnyZoneJSHolders, f, nullptr);

  for (auto i = mPerZoneJSHolders.modIter(); !i.done(); i.next()) {
    if (aWhich == HoldersInCollectingZones &&
        !JS::ZoneIsCollecting(i.get().key())) {
      continue;
    }

    EntryVector* holders = i.get().value().get();
    ForEach(*holders, f, i.get().key());
    if (holders->IsEmpty()) {
      i.remove();
    }
  }
}

template <typename F>
inline void JSHolderMap::ForEach(EntryVector& aJSHolders, const F& f,
                                 JS::Zone* aZone) {
  for (auto iter = aJSHolders.Iter(); !iter.Done(); iter.Next()) {
    Entry* entry = &iter.Get();

    // If the entry has been cleared, remove it and shrink the vector.
    if (!entry->mHolder && !RemoveEntry(aJSHolders, entry)) {
      break;  // Removed the last entry.
    }

    MOZ_ASSERT_IF(aZone, entry->mZone == aZone);
    f(entry->mHolder, entry->mTracer, aZone);
  }
}

bool JSHolderMap::RemoveEntry(EntryVector& aJSHolders, Entry* aEntry) {
  MOZ_ASSERT(aEntry);
  MOZ_ASSERT(!aEntry->mHolder);

  // Remove all dead entries from the end of the vector.
  while (!aJSHolders.GetLast().mHolder && &aJSHolders.GetLast() != aEntry) {
    aJSHolders.PopLast();
  }

  // Swap the element we want to remove with the last one and update the hash
  // table.
  Entry* lastEntry = &aJSHolders.GetLast();
  if (aEntry != lastEntry) {
    MOZ_ASSERT(lastEntry->mHolder);
    *aEntry = *lastEntry;
    MOZ_ASSERT(mJSHolderMap.has(aEntry->mHolder));
    MOZ_ALWAYS_TRUE(mJSHolderMap.put(aEntry->mHolder, aEntry));
  }

  aJSHolders.PopLast();

  // Return whether aEntry is still in the vector.
  return aEntry != lastEntry;
}

inline bool JSHolderMap::Has(void* aHolder) const {
  return mJSHolderMap.has(aHolder);
}

inline nsScriptObjectTracer* JSHolderMap::Get(void* aHolder) const {
  auto ptr = mJSHolderMap.lookup(aHolder);
  if (!ptr) {
    return nullptr;
  }

  Entry* entry = ptr->value();
  MOZ_ASSERT(entry->mHolder == aHolder);
  return entry->mTracer;
}

inline nsScriptObjectTracer* JSHolderMap::GetAndRemove(void* aHolder) {
  MOZ_ASSERT(aHolder);

  auto ptr = mJSHolderMap.lookup(aHolder);
  if (!ptr) {
    return nullptr;
  }

  Entry* entry = ptr->value();
  MOZ_ASSERT(entry->mHolder == aHolder);
  nsScriptObjectTracer* tracer = entry->mTracer;

  // Clear the entry's contents. It will be removed during the next iteration.
  *entry = Entry();

  mJSHolderMap.remove(ptr);

  return tracer;
}

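// Register a holder, or update its tracer if it is already present.
// Single-zone holders are stored in a per-zone vector so they only need to be
// traced when their zone is collected; multi-zone holders always live in
// mAnyZoneJSHolders.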
inline void JSHolderMap::Put(void* aHolder, nsScriptObjectTracer* aTracer,
                             JS::Zone* aZone) {
  MOZ_ASSERT(aHolder);
  MOZ_ASSERT(aTracer);

  // Don't associate multi-zone holders with a zone, even if one is supplied.
  if (aTracer->IsMultiZoneJSHolder()) {
    aZone = nullptr;
  }

  auto ptr = mJSHolderMap.lookupForAdd(aHolder);
  if (ptr) {
    Entry* entry = ptr->value();
#ifdef DEBUG
    MOZ_ASSERT(entry->mHolder == aHolder);
    MOZ_ASSERT(entry->mTracer == aTracer,
               "Don't call HoldJSObjects in superclass ctors");
    if (aZone) {
      if (entry->mZone) {
        MOZ_ASSERT(entry->mZone == aZone);
      } else {
        entry->mZone = aZone;
      }
    }
#endif
    entry->mTracer = aTracer;
    return;
  }

  EntryVector* vector = &mAnyZoneJSHolders;
  if (aZone) {
    auto ptr = mPerZoneJSHolders.lookupForAdd(aZone);
    if (!ptr) {
      MOZ_ALWAYS_TRUE(
          mPerZoneJSHolders.add(ptr, aZone, MakeUnique<EntryVector>()));
    }
    vector = ptr->value().get();
  }

  vector->InfallibleAppend(Entry{aHolder, aTracer, aZone});
  MOZ_ALWAYS_TRUE(mJSHolderMap.add(ptr, aHolder, &vector->GetLast()));
}

size_t JSHolderMap::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const {
  size_t n = 0;

  // We're deliberately not measuring anything hanging off the entries in
  // mJSHolders.
  n += mJSHolderMap.shallowSizeOfExcludingThis(aMallocSizeOf);
  n += mAnyZoneJSHolders.SizeOfExcludingThis(aMallocSizeOf);
  n += mPerZoneJSHolders.shallowSizeOfExcludingThis(aMallocSizeOf);
  for (auto i = mPerZoneJSHolders.iter(); !i.done(); i.next()) {
    n += i.get().value()->SizeOfExcludingThis(aMallocSizeOf);
  }

  return n;
}

CycleCollectedJSRuntime::CycleCollectedJSRuntime(JSContext* aCx)
    : mContext(nullptr),
      mGCThingCycleCollectorGlobal(sGCThingCycleCollectorGlobal),
      mJSZoneCycleCollectorGlobal(sJSZoneCycleCollectorGlobal),
      mJSRuntime(JS_GetRuntime(aCx)),
      mHasPendingIdleGCTask(false),
      mPrevGCSliceCallback(nullptr),
      mPrevGCNurseryCollectionCallback(nullptr),
      mOutOfMemoryState(OOMState::OK),
      mLargeAllocationFailureState(OOMState::OK)
#ifdef DEBUG
      ,
      mShutdownCalled(false)
#endif
{
  MOZ_COUNT_CTOR(CycleCollectedJSRuntime);
  MOZ_ASSERT(aCx);
  MOZ_ASSERT(mJSRuntime);

#if defined(XP_MACOSX)
  if (!XRE_IsParentProcess()) {
    nsMacUtilsImpl::EnableTCSMIfAvailable();
  }
#endif

  if (!JS_AddExtraGCRootsTracer(aCx, TraceBlackJS, this)) {
    MOZ_CRASH("JS_AddExtraGCRootsTracer failed");
  }
  JS_SetGrayGCRootsTracer(aCx, TraceGrayJS, this);
  JS_SetGCCallback(aCx, GCCallback, this);
  mPrevGCSliceCallback = JS::SetGCSliceCallback(aCx, GCSliceCallback);

  if (NS_IsMainThread()) {
    // We would like to support all threads here, but the way timeline
    // consumers are currently set up, you can either add a marker for one
    // specific docshell or for every consumer globally. We would like to add
    // a marker for every consumer observing anything on this thread, but that
    // is not currently possible. For now, add global markers only when we are
    // on the main thread, since the UI for this tracing data only displays
    // data relevant to the main thread.
    mPrevGCNurseryCollectionCallback =
        JS::SetGCNurseryCollectionCallback(aCx, GCNurseryCollectionCallback);
  }

  JS_SetObjectsTenuredCallback(aCx, JSObjectsTenuredCb, this);
  JS::SetOutOfMemoryCallback(aCx, OutOfMemoryCallback, this);
  JS::SetWaitCallback(mJSRuntime, BeforeWaitCallback, AfterWaitCallback,
                      sizeof(dom::AutoYieldJSThreadExecution));
  JS::SetWarningReporter(aCx, MozCrashWarningReporter);

  js::AutoEnterOOMUnsafeRegion::setAnnotateOOMAllocationSizeCallback(
      CrashReporter::AnnotateOOMAllocationSize);

  static js::DOMCallbacks DOMcallbacks = {InstanceClassHasProtoAtDepth};
  SetDOMCallbacks(aCx, &DOMcallbacks);
  js::SetScriptEnvironmentPreparer(aCx, &mEnvironmentPreparer);

  JS::dbg::SetDebuggerMallocSizeOf(aCx, moz_malloc_size_of);

#ifdef MOZ_JS_DEV_ERROR_INTERCEPTOR
  JS_SetErrorInterceptorCallback(mJSRuntime, &mErrorInterceptor);
#endif  // MOZ_JS_DEV_ERROR_INTERCEPTOR

  JS_SetDestroyZoneCallback(aCx, OnZoneDestroyed);
}

#ifdef NS_BUILD_REFCNT_LOGGING
class JSLeakTracer : public JS::CallbackTracer {
 public:
  explicit JSLeakTracer(JSRuntime* aRuntime)
      : JS::CallbackTracer(aRuntime, TraceWeakMapKeysValues) {}

 private:
  bool onChild(const JS::GCCellPtr& thing) override {
    const char* kindName = JS::GCTraceKindToAscii(thing.kind());
    size_t size = JS::GCTraceKindSize(thing.kind());
    MOZ_LOG_CTOR(thing.asCell(), kindName, size);
    return true;
  }
};
#endif

void CycleCollectedJSRuntime::Shutdown(JSContext* cx) {
#ifdef MOZ_JS_DEV_ERROR_INTERCEPTOR
  mErrorInterceptor.Shutdown(mJSRuntime);
#endif  // MOZ_JS_DEV_ERROR_INTERCEPTOR

  // There should not be any roots left to trace at this point. Ensure that
  // any that remain are flagged as leaks.
#ifdef NS_BUILD_REFCNT_LOGGING
  JSLeakTracer tracer(Runtime());
  TraceNativeBlackRoots(&tracer);
  TraceNativeGrayRoots(&tracer, JSHolderMap::AllHolders);
#endif

#ifdef DEBUG
  mShutdownCalled = true;
#endif

  JS_SetDestroyZoneCallback(cx, nullptr);
}

CycleCollectedJSRuntime::~CycleCollectedJSRuntime() {
  MOZ_COUNT_DTOR(CycleCollectedJSRuntime);
  MOZ_ASSERT(!mDeferredFinalizerTable.Count());
  MOZ_ASSERT(!mFinalizeRunnable);
  MOZ_ASSERT(mShutdownCalled);
}

void CycleCollectedJSRuntime::SetContext(CycleCollectedJSContext* aContext) {
  MOZ_ASSERT(!mContext || !aContext, "Don't replace the context!");
  mContext = aContext;
}

size_t CycleCollectedJSRuntime::SizeOfExcludingThis(
    MallocSizeOf aMallocSizeOf) const {
  return mJSHolders.SizeOfExcludingThis(aMallocSizeOf);
}

void CycleCollectedJSRuntime::UnmarkSkippableJSHolders() {
  mJSHolders.ForEach([](void* holder, nsScriptObjectTracer* tracer,
                        JS::Zone* zone) { tracer->CanSkip(holder, true); });
}

void CycleCollectedJSRuntime::DescribeGCThing(
    bool aIsMarked, JS::GCCellPtr aThing,
    nsCycleCollectionTraversalCallback& aCb) const {
  if (!aCb.WantDebugInfo()) {
    aCb.DescribeGCedNode(aIsMarked, "JS Object");
    return;
  }

  char name[72];
  uint64_t compartmentAddress = 0;
  if (aThing.is<JSObject>()) {
    JSObject* obj = &aThing.as<JSObject>();
    compartmentAddress = (uint64_t)js::GetObjectCompartment(obj);
    const JSClass* clasp = js::GetObjectClass(obj);

    // Give the subclass a chance to do something.
    if (DescribeCustomObjects(obj, clasp, name)) {
      // Nothing else to do!
    } else if (js::IsFunctionObject(obj)) {
      JSFunction* fun = JS_GetObjectFunction(obj);
      JSString* str = JS_GetFunctionDisplayId(fun);
      if (str) {
        JSLinearString* linear = JS_ASSERT_STRING_IS_LINEAR(str);
        nsAutoString chars;
        AssignJSLinearString(chars, linear);
        NS_ConvertUTF16toUTF8 fname(chars);
        SprintfLiteral(name, "JS Object (Function - %s)", fname.get());
      } else {
        SprintfLiteral(name, "JS Object (Function)");
      }
    } else {
      SprintfLiteral(name, "JS Object (%s)", clasp->name);
    }
  } else {
    SprintfLiteral(name, "%s", JS::GCTraceKindToAscii(aThing.kind()));
  }

  // Disable printing global for objects while we figure out ObjShrink fallout.
  aCb.DescribeGCedNode(aIsMarked, name, compartmentAddress);
}

void CycleCollectedJSRuntime::NoteGCThingJSChildren(
    JS::GCCellPtr aThing, nsCycleCollectionTraversalCallback& aCb) const {
  TraversalTracer trc(mJSRuntime, aCb);
  JS::TraceChildren(&trc, aThing);
}

void CycleCollectedJSRuntime::NoteGCThingXPCOMChildren(
    const JSClass* aClasp, JSObject* aObj,
    nsCycleCollectionTraversalCallback& aCb) const {
  MOZ_ASSERT(aClasp);
  MOZ_ASSERT(aClasp == js::GetObjectClass(aObj));

  JS::Rooted<JSObject*> obj(RootingCx(), aObj);

  if (NoteCustomGCThingXPCOMChildren(aClasp, obj, aCb)) {
    // Nothing else to do!
    return;
  }

  // XXX This test does seem fragile; we should probably whitelist classes
  //     that do hold a strong reference, but that might not be possible.
  if (aClasp->flags & JSCLASS_HAS_PRIVATE &&
      aClasp->flags & JSCLASS_PRIVATE_IS_NSISUPPORTS) {
    NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(aCb, "js::GetObjectPrivate(obj)");
    aCb.NoteXPCOMChild(static_cast<nsISupports*>(js::GetObjectPrivate(obj)));
    return;
  }

  const DOMJSClass* domClass = GetDOMClass(aClasp);
  if (domClass) {
    NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(aCb, "UnwrapDOMObject(obj)");
    // It's possible that our object is an unforgeable holder object, in
    // which case it doesn't actually have a C++ DOM object associated with
    // it.  Use UnwrapPossiblyNotInitializedDOMObject, which produces null in
    // that case, since NoteXPCOMChild/NoteNativeChild are null-safe.
    if (domClass->mDOMObjectIsISupports) {
      aCb.NoteXPCOMChild(
          UnwrapPossiblyNotInitializedDOMObject<nsISupports>(obj));
    } else if (domClass->mParticipant) {
      aCb.NoteNativeChild(UnwrapPossiblyNotInitializedDOMObject<void>(obj),
                          domClass->mParticipant);
    }
    return;
  }

  if (IsRemoteObjectProxy(obj)) {
    auto handler =
        static_cast<const RemoteObjectProxyBase*>(js::GetProxyHandler(obj));
    return handler->NoteChildren(obj, aCb);
  }

  JS::Value value = js::MaybeGetScriptPrivate(obj);
  if (!value.isUndefined()) {
    aCb.NoteXPCOMChild(static_cast<nsISupports*>(value.toPrivate()));
  }
}

void CycleCollectedJSRuntime::TraverseGCThing(
    TraverseSelect aTs, JS::GCCellPtr aThing,
    nsCycleCollectionTraversalCallback& aCb) {
  bool isMarkedGray = JS::GCThingIsMarkedGray(aThing);

  if (aTs == TRAVERSE_FULL) {
    DescribeGCThing(!isMarkedGray, aThing, aCb);
  }

  // If this object is alive, then all of its children are alive. For JS
  // objects, the black-gray invariant ensures the children are also marked
  // black. For C++ objects, the ref count from this object will keep them
  // alive. Thus we don't need to trace our children, unless we are debugging
  // using WantAllTraces.
  if (!isMarkedGray && !aCb.WantAllTraces()) {
    return;
  }

  if (aTs == TRAVERSE_FULL) {
    NoteGCThingJSChildren(aThing, aCb);
  }

  if (aThing.is<JSObject>()) {
    JSObject* obj = &aThing.as<JSObject>();
    NoteGCThingXPCOMChildren(js::GetObjectClass(obj), obj, aCb);
  }
}

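// Closure passed to TraverseObjectShim when iterating the gray objects of a
// zone in TraverseZone.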
struct TraverseObjectShimClosure {
  nsCycleCollectionTraversalCallback& cb;
  CycleCollectedJSRuntime* self;
};

void CycleCollectedJSRuntime::TraverseZone(
    JS::Zone* aZone, nsCycleCollectionTraversalCallback& aCb) {
  /*
   * We treat the zone as being gray. We handle non-gray GCthings in the
   * zone by not reporting their children to the CC. The black-gray invariant
   * ensures that any JS children will also be non-gray, and thus don't need to
   * be added to the graph. For C++ children, not representing the edge from the
   * non-gray JS GCthings to the C++ object will keep the child alive.
   *
   * We don't allow zone merging in a WantAllTraces CC, because then these
   * assumptions don't hold.
   */
  aCb.DescribeGCedNode(false, "JS Zone");

  /*
   * Every JS child of everything in the zone is either in the zone
   * or is a cross-compartment wrapper. In the former case, we don't need to
   * represent these edges in the CC graph because JS objects are not ref
   * counted. In the latter case, the JS engine keeps a map of these wrappers,
   * which we iterate over. Edges between compartments in the same zone will add
   * unnecessary loop edges to the graph (bug 842137).
   */
  TraversalTracer trc(mJSRuntime, aCb);
  js::VisitGrayWrapperTargets(aZone, NoteJSChildGrayWrapperShim, &trc);

  /*
   * To find C++ children of things in the zone, we scan every JS Object in
   * the zone. Only JS Objects can have C++ children.
   */
  TraverseObjectShimClosure closure = {aCb, this};
  js::IterateGrayObjects(aZone, TraverseObjectShim, &closure);
}

/* static */
void CycleCollectedJSRuntime::TraverseObjectShim(void* aData,
                                                 JS::GCCellPtr aThing) {
  TraverseObjectShimClosure* closure =
      static_cast<TraverseObjectShimClosure*>(aData);

  MOZ_ASSERT(aThing.is<JSObject>());
  closure->self->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_CPP, aThing,
                                 closure->cb);
}

void CycleCollectedJSRuntime::TraverseNativeRoots(
    nsCycleCollectionNoteRootCallback& aCb) {
  // NB: This is here just to preserve the existing XPConnect order. I doubt it
  // would hurt to do this after the JS holders.
  TraverseAdditionalNativeRoots(aCb);

  mJSHolders.ForEach(
      [&aCb](void* holder, nsScriptObjectTracer* tracer, JS::Zone* zone) {
        bool noteRoot = false;
        if (MOZ_UNLIKELY(aCb.WantAllTraces())) {
          noteRoot = true;
        } else {
          tracer->Trace(holder,
                        TraceCallbackFunc(CheckParticipatesInCycleCollection),
                        &noteRoot);
        }

        if (noteRoot) {
          aCb.NoteNativeRoot(holder, tracer);
        }
      });
}

/* static */
void CycleCollectedJSRuntime::TraceBlackJS(JSTracer* aTracer, void* aData) {
  CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);

  self->TraceNativeBlackRoots(aTracer);
}

/* static */
void CycleCollectedJSRuntime::TraceGrayJS(JSTracer* aTracer, void* aData) {
  CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);

  // Mark these roots as gray so the CC can walk them later.
  self->TraceNativeGrayRoots(aTracer, JSHolderMap::HoldersInCollectingZones);
}

/* static */
void CycleCollectedJSRuntime::GCCallback(JSContext* aContext,
                                         JSGCStatus aStatus,
                                         JS::GCReason aReason, void* aData) {
  CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);

  MOZ_ASSERT(CycleCollectedJSContext::Get()->Context() == aContext);
  MOZ_ASSERT(CycleCollectedJSContext::Get()->Runtime() == self);

  self->OnGC(aContext, aStatus, aReason);
}

/* static */
void CycleCollectedJSRuntime::GCSliceCallback(JSContext* aContext,
                                              JS::GCProgress aProgress,
                                              const JS::GCDescription& aDesc) {
  CycleCollectedJSRuntime* self = CycleCollectedJSRuntime::Get();
  MOZ_ASSERT(CycleCollectedJSContext::Get()->Context() == aContext);

#ifdef MOZ_GECKO_PROFILER
  if (profiler_thread_is_being_profiled()) {
    if (aProgress == JS::GC_CYCLE_END) {
      PROFILER_ADD_MARKER_WITH_PAYLOAD(
          "GCMajor", GCCC, GCMajorMarkerPayload,
          (aDesc.startTime(aContext), aDesc.endTime(aContext),
           aDesc.formatJSONProfiler(aContext)));
    } else if (aProgress == JS::GC_SLICE_END) {
      PROFILER_ADD_MARKER_WITH_PAYLOAD(
          "GCSlice", GCCC, GCSliceMarkerPayload,
          (aDesc.lastSliceStart(aContext), aDesc.lastSliceEnd(aContext),
           aDesc.sliceToJSONProfiler(aContext)));
    }
  }
#endif

  if (aProgress == JS::GC_CYCLE_END &&
      JS::dbg::FireOnGarbageCollectionHookRequired(aContext)) {
    JS::GCReason reason = aDesc.reason_;
    Unused << NS_WARN_IF(
        NS_FAILED(DebuggerOnGCRunnable::Enqueue(aContext, aDesc)) &&
        reason != JS::GCReason::SHUTDOWN_CC &&
        reason != JS::GCReason::DESTROY_RUNTIME &&
        reason != JS::GCReason::XPCONNECT_SHUTDOWN);
  }

  if (self->mPrevGCSliceCallback) {
    self->mPrevGCSliceCallback(aContext, aProgress, aDesc);
  }
}

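// Timeline marker recording the start or end of a nursery (minor) GC along
// with the reason that triggered it.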
class MinorGCMarker : public TimelineMarker {
 private:
  JS::GCReason mReason;

 public:
  MinorGCMarker(MarkerTracingType aTracingType, JS::GCReason aReason)
      : TimelineMarker("MinorGC", aTracingType, MarkerStackRequest::NO_STACK),
        mReason(aReason) {
    MOZ_ASSERT(aTracingType == MarkerTracingType::START ||
               aTracingType == MarkerTracingType::END);
  }

  MinorGCMarker(JS::GCNurseryProgress aProgress, JS::GCReason aReason)
      : TimelineMarker(
            "MinorGC",
            aProgress == JS::GCNurseryProgress::GC_NURSERY_COLLECTION_START
                ? MarkerTracingType::START
                : MarkerTracingType::END,
            MarkerStackRequest::NO_STACK),
        mReason(aReason) {}

  virtual void AddDetails(JSContext* aCx,
                          dom::ProfileTimelineMarker& aMarker) override {
    TimelineMarker::AddDetails(aCx, aMarker);

    if (GetTracingType() == MarkerTracingType::START) {
      auto reason = JS::ExplainGCReason(mReason);
      aMarker.mCauseName.Construct(NS_ConvertUTF8toUTF16(reason));
    }
  }

  virtual UniquePtr<AbstractTimelineMarker> Clone() override {
    auto clone = MakeUnique<MinorGCMarker>(GetTracingType(), mReason);
    clone->SetCustomTime(GetTime());
    return UniquePtr<AbstractTimelineMarker>(std::move(clone));
  }
};

/* static */
void CycleCollectedJSRuntime::GCNurseryCollectionCallback(
    JSContext* aContext, JS::GCNurseryProgress aProgress,
    JS::GCReason aReason) {
  CycleCollectedJSRuntime* self = CycleCollectedJSRuntime::Get();
  MOZ_ASSERT(CycleCollectedJSContext::Get()->Context() == aContext);
  MOZ_ASSERT(NS_IsMainThread());

  RefPtr<TimelineConsumers> timelines = TimelineConsumers::Get();
  if (timelines && !timelines->IsEmpty()) {
    UniquePtr<AbstractTimelineMarker> abstractMarker(
        MakeUnique<MinorGCMarker>(aProgress, aReason));
    timelines->AddMarkerForAllObservedDocShells(abstractMarker);
  }

  if (aProgress == JS::GCNurseryProgress::GC_NURSERY_COLLECTION_START) {
    self->mLatestNurseryCollectionStart = TimeStamp::Now();
  }
#ifdef MOZ_GECKO_PROFILER
  else if (aProgress == JS::GCNurseryProgress::GC_NURSERY_COLLECTION_END &&
           profiler_thread_is_being_profiled()) {
    PROFILER_ADD_MARKER_WITH_PAYLOAD(
        "GCMinor", GCCC, GCMinorMarkerPayload,
        (self->mLatestNurseryCollectionStart, TimeStamp::Now(),
         JS::MinorGcToJSON(aContext)));
  }
#endif

  if (self->mPrevGCNurseryCollectionCallback) {
    self->mPrevGCNurseryCollectionCallback(aContext, aProgress, aReason);
  }
}

/* static */
void CycleCollectedJSRuntime::OutOfMemoryCallback(JSContext* aContext,
                                                  void* aData) {
  CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);

  MOZ_ASSERT(CycleCollectedJSContext::Get()->Context() == aContext);
  MOZ_ASSERT(CycleCollectedJSContext::Get()->Runtime() == self);

  self->OnOutOfMemory();
}

/* static */
void* CycleCollectedJSRuntime::BeforeWaitCallback(uint8_t* aMemory) {
  MOZ_ASSERT(aMemory);

  // aMemory is stack-allocated memory to contain our RAII object. This allows
  // us to avoid allocations on the heap during this callback.
  return new (aMemory) dom::AutoYieldJSThreadExecution;
}

/* static */
void CycleCollectedJSRuntime::AfterWaitCallback(void* aCookie) {
  MOZ_ASSERT(aCookie);
  static_cast<dom::AutoYieldJSThreadExecution*>(aCookie)
      ->~AutoYieldJSThreadExecution();
}

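// TraceCallbacks implementation that forwards every edge held by a JS holder
// to the JSTracer passed in aClosure. Used to trace holders' JS children
// during GC.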
struct JsGcTracer : public TraceCallbacks {
  virtual void Trace(JS::Heap<JS::Value>* aPtr, const char* aName,
                     void* aClosure) const override {
    JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
  }
  virtual void Trace(JS::Heap<jsid>* aPtr, const char* aName,
                     void* aClosure) const override {
    JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
  }
  virtual void Trace(JS::Heap<JSObject*>* aPtr, const char* aName,
                     void* aClosure) const override {
    JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
  }
  virtual void Trace(nsWrapperCache* aPtr, const char* aName,
                     void* aClosure) const override {
    aPtr->TraceWrapper(static_cast<JSTracer*>(aClosure), aName);
  }
  virtual void Trace(JS::TenuredHeap<JSObject*>* aPtr, const char* aName,
                     void* aClosure) const override {
    JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
  }
  virtual void Trace(JS::Heap<JSString*>* aPtr, const char* aName,
                     void* aClosure) const override {
    JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
  }
  virtual void Trace(JS::Heap<JSScript*>* aPtr, const char* aName,
                     void* aClosure) const override {
    JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
  }
  virtual void Trace(JS::Heap<JSFunction*>* aPtr, const char* aName,
                     void* aClosure) const override {
    JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
  }
};

void mozilla::TraceScriptHolder(nsISupports* aHolder, JSTracer* aTracer) {
  nsXPCOMCycleCollectionParticipant* participant = nullptr;
  CallQueryInterface(aHolder, &participant);
  participant->Trace(aHolder, JsGcTracer(), aTracer);
}

#if defined(NIGHTLY_BUILD) || defined(MOZ_DEV_EDITION) || defined(DEBUG)
#  define CHECK_SINGLE_ZONE_JS_HOLDERS
#endif

#ifdef CHECK_SINGLE_ZONE_JS_HOLDERS

// A tracer that checks that a JS holder only holds JS GC things in a single
// JS::Zone.
struct CheckZoneTracer : public TraceCallbacks {
  const char* mClassName;
  mutable JS::Zone* mZone;

  explicit CheckZoneTracer(const char* aClassName, JS::Zone* aZone = nullptr)
      : mClassName(aClassName), mZone(aZone) {}

  void checkZone(JS::Zone* aZone, const char* aName) const {
    if (!mZone) {
      mZone = aZone;
      return;
    }

    if (aZone == mZone) {
      return;
    }

    // Most JS holders only contain pointers to GC things in a single zone. In
    // the future this will allow us to improve GC performance by only tracing
    // holders in zones that are being collected.
    //
    // If you added a holder that has pointers into multiple zones please try
    // to remedy this. Some options are:
    //
    //  - wrap all JS GC things into the same compartment
    //  - split GC thing pointers between separate cycle collected objects
    //
    // If all else fails, flag the class as containing pointers into multiple
    // zones by using NS_IMPL_CYCLE_COLLECTION_MULTI_ZONE_JSHOLDER_CLASS.
    MOZ_CRASH_UNSAFE_PRINTF(
        "JS holder %s contains pointers to GC things in more than one zone ("
        "found in %s)\n",
        mClassName, aName);
  }

  virtual void Trace(JS::Heap<JS::Value>* aPtr, const char* aName,
                     void* aClosure) const override {
    JS::Value value = aPtr->unbarrieredGet();
    if (value.isGCThing()) {
      checkZone(JS::GetGCThingZone(value.toGCCellPtr()), aName);
    }
  }
  virtual void Trace(JS::Heap<jsid>* aPtr, const char* aName,
                     void* aClosure) const override {
    jsid id = aPtr->unbarrieredGet();
    if (id.isGCThing()) {
      checkZone(JS::GetTenuredGCThingZone(id.toGCCellPtr()), aName);
    }
  }
  virtual void Trace(JS::Heap<JSObject*>* aPtr, const char* aName,
                     void* aClosure) const override {
    JSObject* obj = aPtr->unbarrieredGet();
    if (obj) {
      checkZone(js::GetObjectZoneFromAnyThread(obj), aName);
    }
  }
  virtual void Trace(nsWrapperCache* aPtr, const char* aName,
                     void* aClosure) const override {
    JSObject* obj = aPtr->GetWrapperPreserveColor();
    if (obj) {
      checkZone(js::GetObjectZoneFromAnyThread(obj), aName);
    }
  }
  virtual void Trace(JS::TenuredHeap<JSObject*>* aPtr, const char* aName,
                     void* aClosure) const override {
    JSObject* obj = aPtr->unbarrieredGetPtr();
    if (obj) {
      checkZone(js::GetObjectZoneFromAnyThread(obj), aName);
    }
  }
  virtual void Trace(JS::Heap<JSString*>* aPtr, const char* aName,
                     void* aClosure) const override {
    JSString* str = aPtr->unbarrieredGet();
    if (str) {
      checkZone(JS::GetStringZone(str), aName);
    }
  }
  virtual void Trace(JS::Heap<JSScript*>* aPtr, const char* aName,
                     void* aClosure) const override {
    JSScript* script = aPtr->unbarrieredGet();
    if (script) {
      checkZone(JS::GetTenuredGCThingZone(JS::GCCellPtr(script)), aName);
    }
  }
  virtual void Trace(JS::Heap<JSFunction*>* aPtr, const char* aName,
                     void* aClosure) const override {
    JSFunction* fun = aPtr->unbarrieredGet();
    if (fun) {
      checkZone(js::GetObjectZoneFromAnyThread(JS_GetFunctionObject(fun)),
                aName);
    }
  }
};

static inline void CheckHolderIsSingleZone(
    void* aHolder, nsCycleCollectionParticipant* aParticipant,
    JS::Zone* aZone) {
  CheckZoneTracer tracer(aParticipant->ClassName(), aZone);
  aParticipant->Trace(aHolder, tracer, nullptr);
}

#endif

static inline bool ShouldCheckSingleZoneHolders() {
#if defined(DEBUG)
  return true;
#elif defined(NIGHTLY_BUILD) || defined(MOZ_DEV_EDITION)
  // Don't check every time to avoid performance impact.
  return rand() % 256 == 0;
#else
  return false;
#endif
}

void CycleCollectedJSRuntime::TraceNativeGrayRoots(
    JSTracer* aTracer, JSHolderMap::WhichHolders aWhich) {
  // NB: This is here just to preserve the existing XPConnect order. I doubt it
  // would hurt to do this after the JS holders.
  TraceAdditionalNativeGrayRoots(aTracer);

  bool checkSingleZoneHolders = ShouldCheckSingleZoneHolders();
  mJSHolders.ForEach(
      [aTracer, checkSingleZoneHolders](
          void* holder, nsScriptObjectTracer* tracer, JS::Zone* zone) {
#ifdef CHECK_SINGLE_ZONE_JS_HOLDERS
        if (checkSingleZoneHolders && !tracer->IsMultiZoneJSHolder()) {
          CheckHolderIsSingleZone(holder, tracer, zone);
        }
#else
        Unused << checkSingleZoneHolders;
#endif
        tracer->Trace(holder, JsGcTracer(), aTracer);
      },
      aWhich);
}

void CycleCollectedJSRuntime::AddJSHolder(void* aHolder,
                                          nsScriptObjectTracer* aTracer,
                                          JS::Zone* aZone) {
  mJSHolders.Put(aHolder, aTracer, aZone);
}

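// TraceCallbacks implementation that nulls out every JS reference held by a
// holder. Used by RemoveJSHolder so a removed holder no longer points at GC
// things.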
struct ClearJSHolder : public TraceCallbacks {
  virtual void Trace(JS::Heap<JS::Value>* aPtr, const char*,
                     void*) const override {
    aPtr->setUndefined();
  }

  virtual void Trace(JS::Heap<jsid>* aPtr, const char*, void*) const override {
    *aPtr = JSID_VOID;
  }

  virtual void Trace(JS::Heap<JSObject*>* aPtr, const char*,
                     void*) const override {
    *aPtr = nullptr;
  }

  virtual void Trace(nsWrapperCache* aPtr, const char* aName,
                     void* aClosure) const override {
    aPtr->ClearWrapper();
  }

  virtual void Trace(JS::TenuredHeap<JSObject*>* aPtr, const char*,
                     void*) const override {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::Heap<JSString*>* aPtr, const char*,
                     void*) const override {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::Heap<JSScript*>* aPtr, const char*,
                     void*) const override {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::Heap<JSFunction*>* aPtr, const char*,
                     void*) const override {
    *aPtr = nullptr;
  }
};

void CycleCollectedJSRuntime::RemoveJSHolder(void* aHolder) {
  nsScriptObjectTracer* tracer = mJSHolders.GetAndRemove(aHolder);
  if (tracer) {
    // Bug 1531951: The analysis can't see through the virtual call but we know
    // that the ClearJSHolder tracer will never GC.
    JS::AutoSuppressGCAnalysis nogc;
    tracer->Trace(aHolder, ClearJSHolder(), nullptr);
  }
}

#ifdef DEBUG
static void AssertNoGcThing(JS::GCCellPtr aGCThing, const char* aName,
                            void* aClosure) {
  MOZ_ASSERT(!aGCThing);
}

void CycleCollectedJSRuntime::AssertNoObjectsToTrace(void* aPossibleJSHolder) {
  nsScriptObjectTracer* tracer = mJSHolders.Get(aPossibleJSHolder);
  if (tracer) {
    tracer->Trace(aPossibleJSHolder, TraceCallbackFunc(AssertNoGcThing),
                  nullptr);
  }
}
#endif

nsCycleCollectionParticipant* CycleCollectedJSRuntime::GCThingParticipant() {
  return &mGCThingCycleCollectorGlobal;
}

nsCycleCollectionParticipant* CycleCollectedJSRuntime::ZoneParticipant() {
  return &mJSZoneCycleCollectorGlobal;
}

nsresult CycleCollectedJSRuntime::TraverseRoots(
    nsCycleCollectionNoteRootCallback& aCb) {
  TraverseNativeRoots(aCb);

  NoteWeakMapsTracer trc(mJSRuntime, aCb);
  js::TraceWeakMaps(&trc);

  return NS_OK;
}

bool CycleCollectedJSRuntime::UsefulToMergeZones() const { return false; }

void CycleCollectedJSRuntime::FixWeakMappingGrayBits() const {
  MOZ_ASSERT(!JS::IsIncrementalGCInProgress(mJSRuntime),
             "Don't call FixWeakMappingGrayBits during a GC.");
  FixWeakMappingGrayBitsTracer fixer(mJSRuntime);
  fixer.FixAll();
}

void CycleCollectedJSRuntime::CheckGrayBits() const {
  MOZ_ASSERT(!JS::IsIncrementalGCInProgress(mJSRuntime),
             "Don't call CheckGrayBits during a GC.");

#ifndef ANDROID
  // Bug 1346874 - The gray state check is expensive. Android tests are already
  // slow enough that this check can easily push them over the threshold to a
  // timeout.

  MOZ_ASSERT(js::CheckGrayMarkingState(mJSRuntime));
  MOZ_ASSERT(CheckWeakMappingGrayBitsTracer::Check(mJSRuntime));
#endif
}

bool CycleCollectedJSRuntime::AreGCGrayBitsValid() const {
  return js::AreGCGrayBitsValid(mJSRuntime);
}

void CycleCollectedJSRuntime::GarbageCollect(JS::GCReason aReason) const {
  JSContext* cx = CycleCollectedJSContext::Get()->Context();
  JS::PrepareForFullGC(cx);
  JS::NonIncrementalGC(cx, GC_NORMAL, aReason);
}

void CycleCollectedJSRuntime::JSObjectsTenured() {
  JSContext* cx = CycleCollectedJSContext::Get()->Context();
  for (auto iter = mNurseryObjects.Iter(); !iter.Done(); iter.Next()) {
    nsWrapperCache* cache = iter.Get();
    JSObject* wrapper = cache->GetWrapperMaybeDead();
    MOZ_DIAGNOSTIC_ASSERT(wrapper);
    if (!JS::ObjectIsTenured(wrapper)) {
      MOZ_ASSERT(!cache->PreservingWrapper());
      js::gc::FinalizeDeadNurseryObject(cx, wrapper);
    }
  }

#ifdef DEBUG
  for (auto iter = mPreservedNurseryObjects.Iter(); !iter.Done(); iter.Next()) {
    MOZ_ASSERT(JS::ObjectIsTenured(iter.Get().get()));
  }
#endif

  mNurseryObjects.Clear();
  mPreservedNurseryObjects.Clear();
}

void CycleCollectedJSRuntime::NurseryWrapperAdded(nsWrapperCache* aCache) {
  MOZ_ASSERT(aCache);
  MOZ_ASSERT(aCache->GetWrapperMaybeDead());
  MOZ_ASSERT(!JS::ObjectIsTenured(aCache->GetWrapperMaybeDead()));
  mNurseryObjects.InfallibleAppend(aCache);
}

void CycleCollectedJSRuntime::NurseryWrapperPreserved(JSObject* aWrapper) {
  mPreservedNurseryObjects.InfallibleAppend(
      JS::PersistentRooted<JSObject*>(mJSRuntime, aWrapper));
}

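// Deferred finalization batches native objects for release after a GC.
// mDeferredFinalizerTable maps each DeferredFinalizeFunction to an opaque
// data pointer; aAppendFunc either creates that data (when passed nullptr)
// or appends aThing to the existing data for that function.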
void CycleCollectedJSRuntime::DeferredFinalize(
    DeferredFinalizeAppendFunction aAppendFunc, DeferredFinalizeFunction aFunc,
    void* aThing) {
  // Tell the analysis that the function pointers will not GC.
  JS::AutoSuppressGCAnalysis suppress;
  if (auto entry = mDeferredFinalizerTable.LookupForAdd(aFunc)) {
    aAppendFunc(entry.Data(), aThing);
  } else {
    entry.OrInsert(
        [aAppendFunc, aThing]() { return aAppendFunc(nullptr, aThing); });
  }
}

void CycleCollectedJSRuntime::DeferredFinalize(nsISupports* aSupports) {
  typedef DeferredFinalizerImpl<nsISupports> Impl;
  DeferredFinalize(Impl::AppendDeferredFinalizePointer, Impl::DeferredFinalize,
                   aSupports);
}

void CycleCollectedJSRuntime::DumpJSHeap(FILE* aFile) {
  JSContext* cx = CycleCollectedJSContext::Get()->Context();

  mozilla::MallocSizeOf mallocSizeOf =
      PR_GetEnv("MOZ_GC_LOG_SIZE") ? moz_malloc_size_of : nullptr;
  js::DumpHeap(cx, aFile, js::CollectNurseryBeforeDump, mallocSizeOf);
}

IncrementalFinalizeRunnable::IncrementalFinalizeRunnable(
    CycleCollectedJSRuntime* aRt, DeferredFinalizerTable& aFinalizers)
    : CancelableRunnable("IncrementalFinalizeRunnable"),
      mRuntime(aRt),
      mFinalizeFunctionToRun(0),
      mReleasing(false) {
  for (auto iter = aFinalizers.Iter(); !iter.Done(); iter.Next()) {
    DeferredFinalizeFunction& function = iter.Key();
    void*& data = iter.Data();

    DeferredFinalizeFunctionHolder* holder =
        mDeferredFinalizeFunctions.AppendElement();
    holder->run = function;
    holder->data = data;

    iter.Remove();
  }
  MOZ_ASSERT(mDeferredFinalizeFunctions.Length());
}

IncrementalFinalizeRunnable::~IncrementalFinalizeRunnable() {
  MOZ_ASSERT(!mDeferredFinalizeFunctions.Length());
  MOZ_ASSERT(!mRuntime);
}

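// With aLimited == true, finalizers run in time-boxed slices of SliceMillis
// and we stop once the budget is spent, leaving the rest for a later call.
// With aLimited == false, every remaining finalizer runs to completion now.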
void IncrementalFinalizeRunnable::ReleaseNow(bool aLimited) {
  if (mReleasing) {
    NS_WARNING("Re-entering ReleaseNow");
    return;
  }
  {
    mozilla::AutoRestore<bool> ar(mReleasing);
    mReleasing = true;
    MOZ_ASSERT(mDeferredFinalizeFunctions.Length() != 0,
               "We should have at least ReleaseSliceNow to run");
    MOZ_ASSERT(mFinalizeFunctionToRun < mDeferredFinalizeFunctions.Length(),
               "No more finalizers to run?");

    TimeDuration sliceTime = TimeDuration::FromMilliseconds(SliceMillis);
    TimeStamp started = aLimited ? TimeStamp::Now() : TimeStamp();
    bool timeout = false;
    do {
      const DeferredFinalizeFunctionHolder& function =
          mDeferredFinalizeFunctions[mFinalizeFunctionToRun];
      if (aLimited) {
        bool done = false;
        while (!timeout && !done) {
          /*
           * We don't want to read the clock too often, so we try to
           * release slices of 100 items.
           */
          done = function.run(100, function.data);
          timeout = TimeStamp::Now() - started >= sliceTime;
        }
        if (done) {
          ++mFinalizeFunctionToRun;
        }
        if (timeout) {
          break;
        }
      } else {
        while (!function.run(UINT32_MAX, function.data))
          ;
        ++mFinalizeFunctionToRun;
      }
    } while (mFinalizeFunctionToRun < mDeferredFinalizeFunctions.Length());
  }

  if (mFinalizeFunctionToRun == mDeferredFinalizeFunctions.Length()) {
    MOZ_ASSERT(mRuntime->mFinalizeRunnable == this);
    mDeferredFinalizeFunctions.Clear();
    CycleCollectedJSRuntime* runtime = mRuntime;
    mRuntime = nullptr;
    // NB: This may delete this!
    runtime->mFinalizeRunnable = nullptr;
  }
}

NS_IMETHODIMP
IncrementalFinalizeRunnable::Run() {
  AUTO_PROFILER_LABEL("IncrementalFinalizeRunnable::Run", GCCC);

  if (!mDeferredFinalizeFunctions.Length()) {
    /* These items were already processed synchronously in JSGC_END. */
    MOZ_ASSERT(!mRuntime);
    return NS_OK;
  }

  MOZ_ASSERT(mRuntime->mFinalizeRunnable == this);
  TimeStamp start = TimeStamp::Now();
  ReleaseNow(true);

  if (mDeferredFinalizeFunctions.Length()) {
    nsresult rv = NS_DispatchToCurrentThread(this);
    if (NS_FAILED(rv)) {
      ReleaseNow(false);
    }
  } else {
    MOZ_ASSERT(!mRuntime);
  }

  uint32_t duration = (uint32_t)((TimeStamp::Now() - start).ToMilliseconds());
  Telemetry::Accumulate(Telemetry::DEFERRED_FINALIZE_ASYNC, duration);

  return NS_OK;
}

void CycleCollectedJSRuntime::FinalizeDeferredThings(
    CycleCollectedJSContext::DeferredFinalizeType aType) {
  /*
   * If the previous GC created a runnable to finalize objects
   * incrementally, and if it hasn't finished yet, finish it now. We
   * don't want these to build up. We also don't want to allow any
   * existing incremental finalize runnables to run after a
   * non-incremental GC, since they are often used to detect leaks.
   */
  if (mFinalizeRunnable) {
    mFinalizeRunnable->ReleaseNow(false);
    if (mFinalizeRunnable) {
      // If we re-entered ReleaseNow, we couldn't delete mFinalizeRunnable and
      // we need to just continue processing it.
      return;
    }
  }

  if (mDeferredFinalizerTable.Count() == 0) {
    return;
  }

  mFinalizeRunnable =
      new IncrementalFinalizeRunnable(this, mDeferredFinalizerTable);

  // Everything should be gone now.
  MOZ_ASSERT(mDeferredFinalizerTable.Count() == 0);

  if (aType == CycleCollectedJSContext::FinalizeIncrementally) {
    NS_DispatchToCurrentThreadQueue(do_AddRef(mFinalizeRunnable), 2500,
                                    EventQueuePriority::Idle);
  } else {
    mFinalizeRunnable->ReleaseNow(false);
    MOZ_ASSERT(!mFinalizeRunnable);
  }
}

const char* CycleCollectedJSRuntime::OOMStateToString(
    const OOMState aOomState) const {
  switch (aOomState) {
    case OOMState::OK:
      return "OK";
    case OOMState::Reporting:
      return "Reporting";
    case OOMState::Reported:
      return "Reported";
    case OOMState::Recovered:
      return "Recovered";
    default:
      MOZ_ASSERT_UNREACHABLE("OOMState holds an invalid value");
      return "Unknown";
  }
}

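// Record the new OOM state in *aStatePtr and mirror it into the matching
// crash report annotation (JSOutOfMemory or JSLargeAllocationFailure), so
// crash dumps show whether an OOM was being handled at the time.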
void CycleCollectedJSRuntime::AnnotateAndSetOutOfMemory(OOMState* aStatePtr,
                                                        OOMState aNewState) {
  *aStatePtr = aNewState;
  CrashReporter::Annotation annotation =
      (aStatePtr == &mOutOfMemoryState)
          ? CrashReporter::Annotation::JSOutOfMemory
          : CrashReporter::Annotation::JSLargeAllocationFailure;

  CrashReporter::AnnotateCrashReport(
      annotation, nsDependentCString(OOMStateToString(aNewState)));
}

void CycleCollectedJSRuntime::OnGC(JSContext* aContext, JSGCStatus aStatus,
                                   JS::GCReason aReason) {
  switch (aStatus) {
    case JSGC_BEGIN:
      nsCycleCollector_prepareForGarbageCollection();
      PrepareWaitingZonesForGC();
      break;
    case JSGC_END: {
      if (mOutOfMemoryState == OOMState::Reported) {
        AnnotateAndSetOutOfMemory(&mOutOfMemoryState, OOMState::Recovered);
      }
      if (mLargeAllocationFailureState == OOMState::Reported) {
        AnnotateAndSetOutOfMemory(&mLargeAllocationFailureState,
                                  OOMState::Recovered);
      }

      // Do any deferred finalization of native objects. We will run the
      // finalizer later after we've returned to the event loop if any of
      // three conditions hold:
      // a) The GC is incremental. In this case, we probably care about pauses.
      // b) There is a pending exception. The finalizers are not set up to run
      // in that state.
      // c) The GC was triggered for internal JS engine reasons. If this is the
      // case, then we may be in the middle of running some code that the JIT
      // has assumed can't have certain kinds of side effects. Finalizers can do
      // all sorts of things, such as run JS, so we want to run them later.
      // However, if we're shutting down, we need to destroy things immediately.
      //
      // Why do we ever bother finalizing things immediately if that's so
      // questionable? In some situations, such as while testing or in low
      // memory situations, we really want to free things right away.
      bool finalizeIncrementally = JS::WasIncrementalGC(mJSRuntime) ||
                                   JS_IsExceptionPending(aContext) ||
                                   (JS::InternalGCReason(aReason) &&
                                    aReason != JS::GCReason::DESTROY_RUNTIME);

      FinalizeDeferredThings(
          finalizeIncrementally ? CycleCollectedJSContext::FinalizeIncrementally
                                : CycleCollectedJSContext::FinalizeNow);

      break;
    }
    default:
      MOZ_CRASH();
  }

  CustomGCCallback(aStatus);
}

void CycleCollectedJSRuntime::OnOutOfMemory() {
  AnnotateAndSetOutOfMemory(&mOutOfMemoryState, OOMState::Reporting);
  CustomOutOfMemoryCallback();
  AnnotateAndSetOutOfMemory(&mOutOfMemoryState, OOMState::Reported);
}

void CycleCollectedJSRuntime::SetLargeAllocationFailure(OOMState aNewState) {
  AnnotateAndSetOutOfMemory(&mLargeAllocationFailureState, aNewState);
}

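// Called at JSGC_BEGIN: if specific zones have been queued for collection,
// prepare only those zones for GC; otherwise prepare a full GC.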
void CycleCollectedJSRuntime::PrepareWaitingZonesForGC() {
  JSContext* cx = CycleCollectedJSContext::Get()->Context();
  if (mZonesWaitingForGC.Count() == 0) {
    JS::PrepareForFullGC(cx);
  } else {
    for (auto iter = mZonesWaitingForGC.Iter(); !iter.Done(); iter.Next()) {
      JS::PrepareZoneForGC(cx, iter.Get()->GetKey());
    }
    mZonesWaitingForGC.Clear();
  }
}

/* static */
void CycleCollectedJSRuntime::OnZoneDestroyed(JSFreeOp* aFop, JS::Zone* aZone) {
  // Remove the zone from the set of zones waiting for GC, if present. This can
  // happen if a zone is added to the set during an incremental GC in which it
  // is later destroyed.
  CycleCollectedJSRuntime* runtime = Get();
  runtime->mZonesWaitingForGC.RemoveEntry(aZone);
}

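// Implementation of js::ScriptEnvironmentPreparer: the JS engine calls
// invoke() when it needs to run an engine-initiated callback with no script
// environment entered; we enter the supplied global via AutoEntryScript so
// any pending exception is reported when it comes off the stack.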
void CycleCollectedJSRuntime::EnvironmentPreparer::invoke(
    JS::HandleObject global, js::ScriptEnvironmentPreparer::Closure& closure) {
  MOZ_ASSERT(JS_IsGlobalObject(global));
  nsIGlobalObject* nativeGlobal = xpc::NativeGlobal(global);

  // Not much we can do if we simply don't have a usable global here...
  NS_ENSURE_TRUE_VOID(nativeGlobal && nativeGlobal->HasJSGlobal());

  AutoEntryScript aes(nativeGlobal, "JS-engine-initiated execution");

  MOZ_ASSERT(!JS_IsExceptionPending(aes.cx()));

  DebugOnly<bool> ok = closure(aes.cx());

  MOZ_ASSERT_IF(ok, !JS_IsExceptionPending(aes.cx()));

  // The AutoEntryScript will check for JS_IsExceptionPending on the
  // JSContext and report it as needed as it comes off the stack.
}

/* static */
CycleCollectedJSRuntime* CycleCollectedJSRuntime::Get() {
  auto context = CycleCollectedJSContext::Get();
  if (context) {
    return context->Runtime();
  }
  return nullptr;
}

#ifdef MOZ_JS_DEV_ERROR_INTERCEPTOR

namespace js {
extern void DumpValue(const JS::Value& val);
}

void CycleCollectedJSRuntime::ErrorInterceptor::Shutdown(JSRuntime* rt) {
  JS_SetErrorInterceptorCallback(rt, nullptr);
  mThrownError.reset();
}

/* virtual */
void CycleCollectedJSRuntime::ErrorInterceptor::interceptError(
    JSContext* cx, JS::HandleValue exn) {
  if (mThrownError) {
    // We already have an error, we don't need anything more.
    return;
  }

  if (!nsContentUtils::ThreadsafeIsSystemCaller(cx)) {
    // We are only interested in chrome code.
    return;
  }

  const auto type = JS_GetErrorType(exn);
  if (!type) {
    // This is not one of the primitive error types.
    return;
  }

  switch (*type) {
    case JSExnType::JSEXN_REFERENCEERR:
    case JSExnType::JSEXN_SYNTAXERR:
    case JSExnType::JSEXN_TYPEERR:
      break;
    default:
      // Not one of the errors we are interested in.
      return;
  }

  // Now copy the details of the exception locally. While copying the details
  // of an exception could be expensive, in most runs this will be done at
  // most once during the execution of the process, so the total cost should
  // be reasonable.

  ErrorDetails details;
  details.mType = *type;
  // If `exn` isn't an exception object, `ExtractErrorValues` could end up
  // calling `toString()`, which could in turn end up throwing an error. While
  // this should work, we want to avoid that complex use case. Fortunately, we
  // have already checked above that `exn` is an exception object, so that
  // should not happen here.
  nsContentUtils::ExtractErrorValues(cx, exn, details.mFilename, &details.mLine,
                                     &details.mColumn, details.mMessage);

  JS::UniqueChars buf =
      JS::FormatStackDump(cx, /* showArgs = */ false, /* showLocals = */ false,
                          /* showThisProps = */ false);
  CopyUTF8toUTF16(mozilla::MakeStringSpan(buf.get()), details.mStack);

  mThrownError.emplace(std::move(details));
}

void CycleCollectedJSRuntime::ClearRecentDevError() {
  mErrorInterceptor.mThrownError.reset();
}

bool CycleCollectedJSRuntime::GetRecentDevError(
    JSContext* cx, JS::MutableHandle<JS::Value> error) {
  if (!mErrorInterceptor.mThrownError) {
    return true;
  }

  // Create a copy of the exception.
  JS::RootedObject obj(cx, JS_NewPlainObject(cx));
  if (!obj) {
    return false;
  }

  JS::RootedValue message(cx);
  JS::RootedValue filename(cx);
  JS::RootedValue stack(cx);
  if (!ToJSValue(cx, mErrorInterceptor.mThrownError->mMessage, &message) ||
      !ToJSValue(cx, mErrorInterceptor.mThrownError->mFilename, &filename) ||
      !ToJSValue(cx, mErrorInterceptor.mThrownError->mStack, &stack)) {
    return false;
  }

  // Build the object.
  const auto FLAGS = JSPROP_READONLY | JSPROP_ENUMERATE | JSPROP_PERMANENT;
  if (!JS_DefineProperty(cx, obj, "message", message, FLAGS) ||
      !JS_DefineProperty(cx, obj, "fileName", filename, FLAGS) ||
      !JS_DefineProperty(cx, obj, "lineNumber",
                         mErrorInterceptor.mThrownError->mLine, FLAGS) ||
      !JS_DefineProperty(cx, obj, "stack", stack, FLAGS)) {
    return false;
  }

  // Pass the result.
  error.setObject(*obj);
  return true;
}
#endif  // MOZ_JS_DEV_ERROR_INTERCEPTOR

#undef MOZ_JS_DEV_ERROR_INTERCEPTOR