1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2  * vim: set ts=8 sts=2 et sw=2 tw=80:
3  * This Source Code Form is subject to the terms of the Mozilla Public
4  * License, v. 2.0. If a copy of the MPL was not distributed with this
5  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 
7 #include "gc/Zone-inl.h"
8 #include "js/shadow/Zone.h"  // JS::shadow::Zone
9 
10 #include <type_traits>
11 
12 #include "gc/FreeOp.h"
13 #include "gc/GCLock.h"
14 #include "gc/Policy.h"
15 #include "gc/PublicIterators.h"
16 #include "jit/BaselineIC.h"
17 #include "jit/BaselineJIT.h"
18 #include "jit/Invalidation.h"
19 #include "jit/Ion.h"
20 #include "jit/JitZone.h"
21 #include "vm/Runtime.h"
22 #include "wasm/WasmInstance.h"
23 
24 #include "debugger/DebugAPI-inl.h"
25 #include "gc/GC-inl.h"
26 #include "gc/Marking-inl.h"
27 #include "gc/Nursery-inl.h"
28 #include "gc/WeakMap-inl.h"
29 #include "vm/JSScript-inl.h"
30 #include "vm/Realm-inl.h"
31 
32 using namespace js;
33 using namespace js::gc;
34 
// Sentinel stored in Zone::listNext_ to mean "this zone is not on any
// ZoneList". Value 1 is never a valid Zone pointer.
Zone* const Zone::NotOnList = reinterpret_cast<Zone*>(1);
36 
// Set up per-zone memory accounting. The GC heap counter rolls up into the
// runtime-wide heap size; the malloc and JIT counters are standalone.
ZoneAllocator::ZoneAllocator(JSRuntime* rt, Kind kind)
    : JS::shadow::Zone(rt, &rt->gc.barrierTracer, kind),
      gcHeapSize(&rt->gc.heapSize),
      mallocHeapSize(nullptr),
      jitHeapSize(nullptr),
      // Allow a single zone's JIT code to use at most 80% of the
      // process-wide executable code budget.
      jitHeapThreshold(jit::MaxCodeBytesPerProcess * 0.8) {}
43 
ZoneAllocator::~ZoneAllocator() {
#ifdef DEBUG
  // All memory attributed to this zone must have been released by now.
  mallocTracker.checkEmptyOnDestroy();
  MOZ_ASSERT(gcHeapSize.bytes() == 0);
  MOZ_ASSERT(mallocHeapSize.bytes() == 0);
  MOZ_ASSERT(jitHeapSize.bytes() == 0);
#endif
}
52 
// Update bookkeeping after a compacting GC may have moved tracked cells.
// Only the debug-mode malloc tracker holds cell pointers here.
void ZoneAllocator::fixupAfterMovingGC() {
#ifdef DEBUG
  mallocTracker.fixupAfterMovingGC();
#endif
}
58 
// Snapshot heap counters at the start of a major GC so retained-bytes
// figures can be computed when the collection finishes.
void js::ZoneAllocator::updateMemoryCountersOnGCStart() {
  gcHeapSize.updateOnGCStart();
  mallocHeapSize.updateOnGCStart();
}
63 
// Recompute the thresholds at which the next GC is triggered, based on the
// bytes retained by the previous collection and the current GC tunables.
void js::ZoneAllocator::updateGCStartThresholds(GCRuntime& gc,
                                                JS::GCOptions options,
                                                const js::AutoLockGC& lock) {
  bool isAtomsZone = JS::Zone::from(this)->isAtomsZone();
  gcHeapThreshold.updateStartThreshold(gcHeapSize.retainedBytes(), options,
                                       gc.tunables, gc.schedulingState,
                                       isAtomsZone, lock);
  mallocHeapThreshold.updateStartThreshold(mallocHeapSize.retainedBytes(),
                                           gc.tunables, lock);
}
74 
// Set per-slice trigger thresholds for an incremental GC on all three heaps.
void js::ZoneAllocator::setGCSliceThresholds(GCRuntime& gc) {
  gcHeapThreshold.setSliceThreshold(this, gcHeapSize, gc.tunables);
  mallocHeapThreshold.setSliceThreshold(this, mallocHeapSize, gc.tunables);
  jitHeapThreshold.setSliceThreshold(this, jitHeapSize, gc.tunables);
}
80 
// Clear the per-slice thresholds, e.g. once an incremental GC has finished.
void js::ZoneAllocator::clearGCSliceThresholds() {
  gcHeapThreshold.clearSliceThreshold();
  mallocHeapThreshold.clearSliceThreshold();
  jitHeapThreshold.clearSliceThreshold();
}
86 
// Record a reference from this zone to shared memory (e.g. a
// SharedArrayBuffer's data), charging this zone's malloc heap for any growth
// in the allocation's size. Returns false on OOM adding the table entry.
bool ZoneAllocator::addSharedMemory(void* mem, size_t nbytes, MemoryUse use) {
  // nbytes can be zero here for SharedArrayBuffers.

  MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));

  auto ptr = sharedMemoryUseCounts.lookupForAdd(mem);
  MOZ_ASSERT_IF(ptr, ptr->value().use == use);

  if (!ptr && !sharedMemoryUseCounts.add(ptr, mem, gc::SharedMemoryUse(use))) {
    return false;
  }

  // Each call adds one reference from this zone to the allocation; the
  // matching removeSharedMemory call drops it.
  ptr->value().count++;

  // Allocations can grow, so add any increase over the previous size and record
  // the new size.
  if (nbytes > ptr->value().nbytes) {
    mallocHeapSize.addBytes(nbytes - ptr->value().nbytes);
    ptr->value().nbytes = nbytes;
  }

  maybeTriggerGCOnMalloc();

  return true;
}
112 
// Drop one reference from this zone to shared memory previously registered
// with addSharedMemory. When the last reference goes away the recorded bytes
// are uncharged from the malloc heap. Only called during GC finalization.
void ZoneAllocator::removeSharedMemory(void* mem, size_t nbytes,
                                       MemoryUse use) {
  // nbytes can be zero here for SharedArrayBuffers.

  MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
  MOZ_ASSERT(CurrentThreadIsGCFinalizing());

  auto ptr = sharedMemoryUseCounts.lookup(mem);

  MOZ_ASSERT(ptr);
  MOZ_ASSERT(ptr->value().count != 0);
  MOZ_ASSERT(ptr->value().use == use);
  MOZ_ASSERT(ptr->value().nbytes >= nbytes);

  ptr->value().count--;
  if (ptr->value().count == 0) {
    // Last reference: uncharge the full recorded size, which may exceed
    // |nbytes| if the allocation grew.
    mallocHeapSize.removeBytes(ptr->value().nbytes, true);
    sharedMemoryUseCounts.remove(ptr);
  }
}
133 
// Uncharge |nbytes| of malloc memory attributed to this policy's zone.
void ZoneAllocPolicy::decMemory(size_t nbytes) {
  // Unfortunately we don't have enough context here to know whether we're being
  // called on behalf of the collector so we have to do a TLS lookup to find
  // out.
  JSContext* cx = TlsContext.get();
  zone_->decNonGCMemory(this, nbytes, MemoryUse::ZoneAllocPolicy,
                        cx->defaultFreeOp()->isCollecting());
}
142 
// Construct a Zone. Member initialization order follows declaration order;
// several members take |this| so the zone pointer is recorded for checking.
JS::Zone::Zone(JSRuntime* rt, Kind kind)
    : ZoneAllocator(rt, kind),
      // Note: don't use |this| before initializing helperThreadUse_!
      // ProtectedData checks in CheckZone::check may read this field.
      helperThreadUse_(HelperThreadUse::None),
      helperThreadOwnerContext_(nullptr),
      arenas(this),
      data(this, nullptr),
      tenuredBigInts(this, 0),
      nurseryAllocatedStrings(this, 0),
      markedStrings(this, 0),
      finalizedStrings(this, 0),
      allocNurseryStrings(this, true),
      allocNurseryBigInts(this, true),
      suppressAllocationMetadataBuilder(this, false),
      previousGCStringStats(this),
      stringStats(this),
      pretenuring(this),
      uniqueIds_(this),
      tenuredAllocsSinceMinorGC_(0),
      gcWeakMapList_(this),
      compartments_(),
      crossZoneStringWrappers_(this),
      gcGrayRoots_(this),
      weakCaches_(this),
      gcEphemeronEdges_(this, SystemAllocPolicy(),
                        rt->randomHashCodeScrambler()),
      gcNurseryEphemeronEdges_(this, SystemAllocPolicy(),
                               rt->randomHashCodeScrambler()),
      rttValueObjects_(this, this),
      markedAtoms_(this),
      atomCache_(this),
      externalStringCache_(this),
      functionToStringCache_(this),
      shapeZone_(this, this),
      finalizationRegistries_(this, this),
      finalizationRecordMap_(this, this),
      jitZone_(this, nullptr),
      gcScheduled_(false),
      gcScheduledSaved_(false),
      gcPreserveCode_(false),
      keepPropMapTables_(this, false),
      wasCollected_(false),
      listNext_(NotOnList),
      weakRefMap_(this, this),
      keptObjects(this, this) {
  /* Ensure that there are no vtables to mess us up here. */
  MOZ_ASSERT(reinterpret_cast<JS::shadow::Zone*>(this) ==
             static_cast<JS::shadow::Zone*>(this));
  MOZ_ASSERT_IF(isAtomsZone(), !rt->unsafeAtomsZone());
  MOZ_ASSERT_IF(isSelfHostingZone(), !rt->hasInitializedSelfHosting());

  // We can't call updateGCStartThresholds until the Zone has been constructed.
  AutoLockGC lock(rt);
  updateGCStartThresholds(rt->gc, JS::GCOptions::Normal, lock);
}
199 
Zone::~Zone() {
  MOZ_ASSERT(helperThreadUse_ == HelperThreadUse::None);
  MOZ_ASSERT_IF(regExps_.ref(), regExps().empty());

  DebugAPI::deleteDebugScriptMap(debugScriptMap);

  MOZ_ASSERT(gcWeakMapList().isEmpty());

  // Clear the runtime's pointer to this zone if it was the system zone.
  JSRuntime* rt = runtimeFromAnyThread();
  if (this == rt->gc.systemZone) {
    MOZ_ASSERT(isSystemZone());
    rt->gc.systemZone = nullptr;
  }

  js_delete(jitZone_.ref());
}
216 
// Fallible one-time initialization, performed after construction.
// Returns false on OOM.
bool Zone::init() {
  regExps_.ref() = make_unique<RegExpZone>(this);
  return regExps_.ref() && gcEphemeronEdges().init() &&
         gcNurseryEphemeronEdges().init();
}
222 
// Enable or disable incremental write barriers for this zone.
void Zone::setNeedsIncrementalBarrier(bool needs) {
  needsIncrementalBarrier_ = needs;
}
226 
// Transition this zone's GC state from |prev| to |next|, keeping the
// incremental-barrier flag in sync with whether the zone is being marked.
void Zone::changeGCState(GCState prev, GCState next) {
  MOZ_ASSERT(RuntimeHeapIsBusy());
  MOZ_ASSERT(canCollect());
  MOZ_ASSERT(gcState() == prev);

  // This can be called when barriers have been temporarily disabled by
  // AutoDisableBarriers. In that case, don't update needsIncrementalBarrier_
  // and barriers will be re-enabled by ~AutoDisableBarriers() if necessary.
  bool barriersDisabled = isGCMarking() && !needsIncrementalBarrier();

  gcState_ = next;

  // Update the barriers state when we transition between marking and
  // non-marking states, unless barriers have been disabled.
  if (!barriersDisabled) {
    needsIncrementalBarrier_ = isGCMarking();
  }
}
245 
246 template <class Pred>
EraseIf(js::gc::EphemeronEdgeVector & entries,Pred pred)247 static void EraseIf(js::gc::EphemeronEdgeVector& entries, Pred pred) {
248   auto* begin = entries.begin();
249   auto* const end = entries.end();
250 
251   auto* newEnd = begin;
252   for (auto* p = begin; p != end; p++) {
253     if (!pred(*p)) {
254       *newEnd++ = *p;
255     }
256   }
257 
258   size_t removed = end - newEnd;
259   entries.shrinkBy(removed);
260 }
261 
// Drop ephemeron edges whose target is a nursery cell that died in the
// minor GC being swept.
static void SweepEphemeronEdgesWhileMinorSweeping(
    js::gc::EphemeronEdgeVector& entries) {
  EraseIf(entries, [](js::gc::EphemeronEdge& edge) -> bool {
    return IsAboutToBeFinalizedDuringMinorSweep(&edge.target);
  });
}
268 
// Per-zone sweeping performed after a minor (nursery) collection.
void Zone::sweepAfterMinorGC(JSTracer* trc) {
  sweepEphemeronTablesAfterMinorGC();
  crossZoneStringWrappers().sweepAfterMinorGC(trc);
}
273 
// After a minor GC, migrate ephemeron edges keyed on surviving (tenured)
// nursery cells from gcNurseryEphemeronEdges into gcEphemeronEdges, discard
// edges for dead keys, and sweep dead edges out of affected entry vectors.
void Zone::sweepEphemeronTablesAfterMinorGC() {
  for (EphemeronEdgeTable::Range r = gcNurseryEphemeronEdges().all();
       !r.empty(); r.popFront()) {
    // Sweep gcNurseryEphemeronEdges to move live (forwarded) keys to
    // gcEphemeronEdges, scanning through all the entries for such keys to
    // update them.
    //
    // Forwarded and dead keys may also appear in their delegates' entries,
    // so sweep those too (see below.)

    // The tricky case is when the key has a delegate that was already
    // tenured. Then it will be in its compartment's gcEphemeronEdges, but we
    // still need to update the key (which will be in the entries
    // associated with it.)
    gc::Cell* key = r.front().key;
    MOZ_ASSERT(!key->isTenured());
    if (!Nursery::getForwardedPointer(&key)) {
      // Dead nursery cell => discard.
      continue;
    }

    // Key been moved. The value is an array of <map,key> pairs; update all
    // keys in that array.
    EphemeronEdgeVector& entries = r.front().value;
    SweepEphemeronEdgesWhileMinorSweeping(entries);

    // Live (moved) nursery cell. Append entries to gcEphemeronEdges.
    auto* entry = gcEphemeronEdges().get(key);
    if (!entry) {
      // No existing entry for the tenured key: create an empty one first.
      if (!gcEphemeronEdges().put(key, gc::EphemeronEdgeVector())) {
        AutoEnterOOMUnsafeRegion oomUnsafe;
        oomUnsafe.crash("Failed to tenure weak keys entry");
      }
      entry = gcEphemeronEdges().get(key);
    }

    for (auto& markable : entries) {
      if (!entry->value.append(markable)) {
        AutoEnterOOMUnsafeRegion oomUnsafe;
        oomUnsafe.crash("Failed to tenure weak keys entry");
      }
    }

    // If the key has a delegate, then it will map to a WeakKeyEntryVector
    // containing the key that needs to be updated.

    JSObject* delegate = gc::detail::GetDelegate(key->as<JSObject>());
    if (!delegate) {
      continue;
    }
    MOZ_ASSERT(delegate->isTenured());

    // If delegate was formerly nursery-allocated, we will sweep its entries
    // when we visit its gcNurseryEphemeronEdges (if we haven't already). Note
    // that we don't know the nursery address of the delegate, since the
    // location it was stored in has already been updated.
    //
    // Otherwise, it will be in gcEphemeronEdges and we sweep it here.
    auto* p = delegate->zone()->gcEphemeronEdges().get(delegate);
    if (p) {
      SweepEphemeronEdgesWhileMinorSweeping(p->value);
    }
  }

  // All nursery-keyed edges have been migrated or dropped; empty the table.
  if (!gcNurseryEphemeronEdges().clear()) {
    AutoEnterOOMUnsafeRegion oomUnsafe;
    oomUnsafe.crash("OOM while clearing gcNurseryEphemeronEdges.");
  }
}
343 
// Sweep dead entries from the zone-level string wrapper map and from every
// compartment's object wrapper map.
void Zone::sweepAllCrossCompartmentWrappers() {
  crossZoneStringWrappers().sweep();
  for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next()) {
    comp->sweepCrossCompartmentObjectWrappers();
  }
}
350 
351 /* static */
/* static */
// During compacting GC, fix up wrapper-map keys in every zone that may point
// at cells which have been moved.
void Zone::fixupAllCrossCompartmentWrappersAfterMovingGC(JSTracer* trc) {
  MOZ_ASSERT(trc->runtime()->gc.isHeapCompacting());

  for (ZonesIter zone(trc->runtime(), WithAtoms); !zone.done(); zone.next()) {
    // Sweep the wrapper map to update keys (wrapped values) in other
    // compartments that may have been moved.
    zone->crossZoneStringWrappers().sweep();

    for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
      comp->fixupCrossCompartmentObjectWrappersAfterMovingGC(trc);
    }
  }
}
365 
// Discard all cross-zone string wrappers; only valid while collecting.
void Zone::dropStringWrappersOnGC() {
  MOZ_ASSERT(JS::RuntimeHeapIsCollecting());
  crossZoneStringWrappers().clear();
}
370 
371 #ifdef JSGC_HASH_TABLE_CHECKS
372 
// Debug-only consistency check of all wrapper maps after a moving GC.
void Zone::checkAllCrossCompartmentWrappersAfterMovingGC() {
  checkStringWrappersAfterMovingGC();
  for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next()) {
    comp->checkObjectWrappersAfterMovingGC();
  }
}
379 
// Debug-only check that the string wrapper map survived a moving GC intact.
void Zone::checkStringWrappersAfterMovingGC() {
  for (StringWrapperMap::Enum e(crossZoneStringWrappers()); !e.empty();
       e.popFront()) {
    // Assert that the postbarriers have worked and that nothing is left in the
    // wrapper map that points into the nursery, and that the hash table entries
    // are discoverable.
    auto key = e.front().key();
    CheckGCThingAfterMovingGC(key);

    auto ptr = crossZoneStringWrappers().lookup(key);
    MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &e.front());
  }
}
393 #endif
394 
void Zone::sweepWeakMaps() {
  /* Finalize unreachable (key,value) pairs in all weak maps. */
  WeakMapBase::sweepZone(this);
}
399 
// Discard JIT code for this zone as directed by |options|: invalidate all
// Ion code, optionally discard inactive baseline code and JitScripts, reset
// allocation sites, and purge stub spaces. No-op when the zone has no JIT
// code or is preserving code.
void Zone::discardJitCode(JSFreeOp* fop, const DiscardOptions& options) {
  if (!jitZone()) {
    return;
  }

  if (isPreservingCode()) {
    return;
  }

  if (options.discardBaselineCode || options.discardJitScripts) {
#ifdef DEBUG
    // Assert no JitScripts are marked as active.
    for (auto iter = cellIter<BaseScript>(); !iter.done(); iter.next()) {
      BaseScript* base = iter.unbarrieredGet();
      if (jit::JitScript* jitScript = base->maybeJitScript()) {
        MOZ_ASSERT(!jitScript->active());
      }
    }
#endif

    // Mark JitScripts on the stack as active.
    jit::MarkActiveJitScripts(this);
  }

  // Invalidate all Ion code in this zone.
  jit::InvalidateAll(fop, this);

  for (auto base = cellIterUnsafe<BaseScript>(); !base.done(); base.next()) {
    jit::JitScript* jitScript = base->maybeJitScript();
    if (!jitScript) {
      continue;
    }

    JSScript* script = base->asJSScript();
    jit::FinishInvalidation(fop, script);

    // Discard baseline script if it's not marked as active.
    if (options.discardBaselineCode) {
      if (jitScript->hasBaselineScript() && !jitScript->active()) {
        jit::FinishDiscardBaselineScript(fop, script);
      }
    }

#ifdef JS_CACHEIR_SPEW
    maybeUpdateWarmUpCount(script);
#endif

    // Warm-up counter for scripts are reset on GC. After discarding code we
    // need to let it warm back up to get information such as which
    // opcodes are setting array holes or accessing getter properties.
    script->resetWarmUpCounterForGC();

    // Try to release the script's JitScript. This should happen after
    // releasing JIT code because we can't do this when the script still has
    // JIT code.
    if (options.discardJitScripts) {
      script->maybeReleaseJitScript(fop);
      jitScript = script->maybeJitScript();
      if (!jitScript) {
        // Try to discard the ScriptCounts too.
        if (!script->realm()->collectCoverageForDebug() &&
            !fop->runtime()->profilingScripts) {
          script->destroyScriptCounts();
        }
        continue;
      }
    }

    // If we did not release the JitScript, we need to purge optimized IC
    // stubs because the optimizedStubSpace will be purged below.
    if (options.discardBaselineCode) {
      jitScript->purgeOptimizedStubs(script);
    }

    if (options.resetNurseryAllocSites || options.resetPretenuredAllocSites) {
      jitScript->resetAllocSites(options.resetNurseryAllocSites,
                                 options.resetPretenuredAllocSites);
    }

    // Finally, reset the active flag.
    jitScript->resetActive();
  }

  /*
   * When scripts contains pointers to nursery things, the store buffer
   * can contain entries that point into the optimized stub space. Since
   * this method can be called outside the context of a GC, this situation
   * could result in us trying to mark invalid store buffer entries.
   *
   * Defer freeing any allocated blocks until after the next minor GC.
   */
  if (options.discardBaselineCode) {
    jitZone()->optimizedStubSpace()->freeAllAfterMinorGC(this);
    jitZone()->purgeIonCacheIRStubInfo();
  }
}
496 
// Reset the requested kinds of allocation sites in every JitScript in the
// zone and invalidate the Ion code of any script whose sites changed, since
// that code may bake in stale allocation-site decisions.
void JS::Zone::resetAllocSitesAndInvalidate(bool resetNurserySites,
                                            bool resetPretenuredSites) {
  MOZ_ASSERT(resetNurserySites || resetPretenuredSites);

  if (!jitZone()) {
    return;
  }

  JSContext* cx = runtime_->mainContextFromOwnThread();
  for (auto base = cellIterUnsafe<BaseScript>(); !base.done(); base.next()) {
    jit::JitScript* jitScript = base->maybeJitScript();
    if (!jitScript) {
      continue;
    }

    // Skip scripts whose sites did not actually change.
    if (!jitScript->resetAllocSites(resetNurserySites, resetPretenuredSites)) {
      continue;
    }

    JSScript* script = base->asJSScript();
    CancelOffThreadIonCompile(script);

    if (!script->hasIonScript()) {
      continue;
    }

    jit::Invalidate(cx, script,
                    /* resetUses = */ true,
                    /* cancelOffThread = */ true);
  }
}
528 
// Barrier called before a weak-map key's delegate is cleared during
// incremental marking, so the marker can sever the weak edge first.
void JS::Zone::beforeClearDelegateInternal(JSObject* wrapper,
                                           JSObject* delegate) {
  MOZ_ASSERT(js::gc::detail::GetDelegate(wrapper) == delegate);
  MOZ_ASSERT(needsIncrementalBarrier());
  MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(this));
  runtimeFromMainThread()->gc.marker.severWeakDelegate(wrapper, delegate);
}
536 
// Barrier called after a weak-map key gains a delegate, restoring the weak
// edge in the marker if one now exists.
void JS::Zone::afterAddDelegateInternal(JSObject* wrapper) {
  MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(this));
  JSObject* delegate = js::gc::detail::GetDelegate(wrapper);
  if (delegate) {
    runtimeFromMainThread()->gc.marker.restoreWeakDelegate(wrapper, delegate);
  }
}
544 
545 #ifdef JSGC_HASH_TABLE_CHECKS
// Debug-only check that no unique-id table key was left pointing at a moved
// or dead cell after a moving GC.
void JS::Zone::checkUniqueIdTableAfterMovingGC() {
  for (auto r = uniqueIds().all(); !r.empty(); r.popFront()) {
    js::gc::CheckGCThingAfterMovingGC(r.front().key());
  }
}
551 #endif
552 
uint64_t Zone::gcNumber() {
  // Zones in use by exclusive threads are not collected, and threads using
  // them cannot access the main runtime's gcNumber without racing.
  return usedByHelperThread() ? 0 : runtimeFromMainThread()->gc.gcNumber();
}
558 
// Lazily create this zone's JitZone. Returns null (with an error reported
// via cx->new_) on OOM. The JitZone is owned by the Zone and deleted in
// ~Zone.
js::jit::JitZone* Zone::createJitZone(JSContext* cx) {
  MOZ_ASSERT(!jitZone_);
  MOZ_ASSERT(cx->runtime()->hasJitRuntime());

  UniquePtr<jit::JitZone> jitZone(cx->new_<js::jit::JitZone>());
  if (!jitZone) {
    return nullptr;
  }

  jitZone_ = jitZone.release();
  return jitZone_;
}
571 
hasMarkedRealms()572 bool Zone::hasMarkedRealms() {
573   for (RealmsInZoneIter realm(this); !realm.done(); realm.next()) {
574     if (realm->marked()) {
575       return true;
576     }
577   }
578   return false;
579 }
580 
// Whether this zone may participate in a GC at the moment.
bool Zone::canCollect() {
  // The atoms zone cannot be collected while off-thread parsing is taking
  // place.
  if (isAtomsZone()) {
    return !runtimeFromAnyThread()->hasHelperThreadZones();
  }

  // We don't collect the self hosting zone after it has been initialized.
  if (isSelfHostingZone()) {
    return !runtimeFromAnyThread()->gc.isSelfHostingZoneFrozen();
  }

  // Zones that will be or are currently used by other threads cannot be
  // collected.
  return !createdForHelperThread();
}
597 
// Tell debuggers observing any realm in this zone that the realm's global
// participated in the current GC.
void Zone::notifyObservingDebuggers() {
  AutoAssertNoGC nogc;
  MOZ_ASSERT(JS::RuntimeHeapIsCollecting(),
             "This method should be called during GC.");

  JSRuntime* rt = runtimeFromMainThread();

  for (RealmsInZoneIter realms(this); !realms.done(); realms.next()) {
    GlobalObject* global = realms->unsafeUnbarrieredMaybeGlobal();
    if (!global) {
      // Realm has no live global; nothing to notify.
      continue;
    }

    DebugAPI::notifyParticipatesInGC(global, rt->gc.majorGCCount());
  }
}
614 
// True when this zone is linked into some ZoneList (see NotOnList sentinel).
bool Zone::isOnList() const { return listNext_ != NotOnList; }
616 
// The next zone on the ZoneList this zone belongs to, or null at the tail.
Zone* Zone::nextZone() const {
  MOZ_ASSERT(isOnList());
  return listNext_;
}
621 
// Clear per-zone tables during zone teardown.
void Zone::clearTables() {
  MOZ_ASSERT(regExps().empty());

  shapeZone().clearTables(runtimeFromMainThread()->defaultFreeOp());
}
627 
// Fix up zone data structures holding cell pointers after a compacting GC.
void Zone::fixupAfterMovingGC() {
  ZoneAllocator::fixupAfterMovingGC();
  shapeZone().fixupPropMapShapeTableAfterMovingGC();
}
632 
// Register a wasm RTT value object with this zone. Reports and returns false
// on OOM.
bool Zone::addRttValueObject(JSContext* cx, HandleObject obj) {
  // Type descriptor objects are always tenured so we don't need post barriers
  // on the set.
  MOZ_ASSERT(!IsInsideNursery(obj));

  if (!rttValueObjects().put(obj)) {
    ReportOutOfMemory(cx);
    return false;
  }

  return true;
}
645 
// Destroy |comp|, which must be this zone's only compartment and must
// contain exactly one realm, leaving the zone with no compartments.
void Zone::deleteEmptyCompartment(JS::Compartment* comp) {
  MOZ_ASSERT(comp->zone() == this);
  arenas.checkEmptyArenaLists();

  MOZ_ASSERT(compartments().length() == 1);
  MOZ_ASSERT(compartments()[0] == comp);
  MOZ_ASSERT(comp->realms().length() == 1);

  Realm* realm = comp->realms()[0];
  JSFreeOp* fop = runtimeFromMainThread()->defaultFreeOp();
  // Destroy the realm before its owning compartment.
  realm->destroy(fop);
  comp->destroy(fop);

  compartments().clear();
}
661 
// Record which helper-thread context currently owns this zone (or null to
// release ownership). Must be called from that context's thread.
void Zone::setHelperThreadOwnerContext(JSContext* cx) {
  MOZ_ASSERT_IF(cx, TlsContext.get() == cx);
  helperThreadOwnerContext_ = cx;
}
666 
// True if the calling helper thread's context owns this zone.
bool Zone::ownedByCurrentHelperThread() {
  MOZ_ASSERT(usedByHelperThread());
  MOZ_ASSERT(TlsContext.get());
  return helperThreadOwnerContext_ == TlsContext.get();
}
672 
// Release memory held by the atom cache and related per-realm caches.
void Zone::purgeAtomCache() {
  atomCache().clearAndCompact();

  // Also purge the dtoa caches so that subsequent lookups populate atom
  // cache too.
  for (RealmsInZoneIter r(this); !r.done(); r.next()) {
    r->dtoaCache.purge();
  }
}
682 
// Accumulate memory-reporter figures for this zone and its compartments into
// the supplied out-parameters. Each counter is added to, not overwritten.
void Zone::addSizeOfIncludingThis(
    mozilla::MallocSizeOf mallocSizeOf, JS::CodeSizes* code, size_t* regexpZone,
    size_t* jitZone, size_t* baselineStubsOptimized, size_t* uniqueIdMap,
    size_t* initialPropMapTable, size_t* shapeTables, size_t* atomsMarkBitmaps,
    size_t* compartmentObjects, size_t* crossCompartmentWrappersTables,
    size_t* compartmentsPrivateData, size_t* scriptCountsMapArg) {
  *regexpZone += regExps().sizeOfIncludingThis(mallocSizeOf);
  if (jitZone_) {
    jitZone_->addSizeOfIncludingThis(mallocSizeOf, code, jitZone,
                                     baselineStubsOptimized);
  }
  *uniqueIdMap += uniqueIds().shallowSizeOfExcludingThis(mallocSizeOf);
  shapeZone().addSizeOfExcludingThis(mallocSizeOf, initialPropMapTable,
                                     shapeTables);
  *atomsMarkBitmaps += markedAtoms().sizeOfExcludingThis(mallocSizeOf);
  *crossCompartmentWrappersTables +=
      crossZoneStringWrappers().sizeOfExcludingThis(mallocSizeOf);

  for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next()) {
    comp->addSizeOfIncludingThis(mallocSizeOf, compartmentObjects,
                                 crossCompartmentWrappersTables,
                                 compartmentsPrivateData);
  }

  if (scriptCountsMap) {
    // Count both the map itself and each ScriptCounts value it owns.
    *scriptCountsMapArg +=
        scriptCountsMap->shallowSizeOfIncludingThis(mallocSizeOf);
    for (auto r = scriptCountsMap->all(); !r.empty(); r.popFront()) {
      *scriptCountsMapArg +=
          r.front().value()->sizeOfIncludingThis(mallocSizeOf);
    }
  }
}
716 
// OOM hook: ask the runtime to recover memory (e.g. by purging caches) and
// retry the allocation. Returns null when called off the runtime's thread.
void* ZoneAllocator::onOutOfMemory(js::AllocFunction allocFunc,
                                   arena_id_t arena, size_t nbytes,
                                   void* reallocPtr) {
  if (!js::CurrentThreadCanAccessRuntime(runtime_)) {
    return nullptr;
  }
  // The analysis sees that JSRuntime::onOutOfMemory could report an error,
  // which with a JSErrorInterceptor could GC. But we're passing a null cx (to
  // a default parameter) so the error will not be reported.
  JS::AutoSuppressGCAnalysis suppress;
  return runtimeFromMainThread()->onOutOfMemory(allocFunc, arena, nbytes,
                                                reallocPtr);
}
730 
// Report an allocation-size overflow; passes a null context so no exception
// is set on any JSContext.
void ZoneAllocator::reportAllocationOverflow() const {
  js::ReportAllocationOverflow(nullptr);
}
734 
// Construct an empty list.
ZoneList::ZoneList() : head(nullptr), tail(nullptr) {}
736 
// Construct a singleton list containing |zone|, which must not already be on
// a list.
ZoneList::ZoneList(Zone* zone) : head(zone), tail(zone) {
  MOZ_RELEASE_ASSERT(!zone->isOnList());
  zone->listNext_ = nullptr;
}
741 
// Lists must be emptied (see clear()/transferFrom()) before destruction.
ZoneList::~ZoneList() { MOZ_ASSERT(isEmpty()); }
743 
check() const744 void ZoneList::check() const {
745 #ifdef DEBUG
746   MOZ_ASSERT((head == nullptr) == (tail == nullptr));
747   if (!head) {
748     return;
749   }
750 
751   Zone* zone = head;
752   for (;;) {
753     MOZ_ASSERT(zone && zone->isOnList());
754     if (zone == tail) break;
755     zone = zone->listNext_;
756   }
757   MOZ_ASSERT(!zone->listNext_);
758 #endif
759 }
760 
isEmpty() const761 bool ZoneList::isEmpty() const { return head == nullptr; }
762 
// The first zone on the list; the list must be non-empty.
Zone* ZoneList::front() const {
  MOZ_ASSERT(!isEmpty());
  MOZ_ASSERT(head->isOnList());
  return head;
}
768 
// Append a single zone to the end of the list.
void ZoneList::append(Zone* zone) {
  ZoneList singleZone(zone);
  transferFrom(singleZone);
}
773 
// Splice all zones from |other| onto the end of this list, leaving |other|
// empty.
void ZoneList::transferFrom(ZoneList& other) {
  check();
  other.check();
  if (!other.head) {
    return;
  }

  MOZ_ASSERT(tail != other.tail);

  if (tail) {
    tail->listNext_ = other.head;
  } else {
    head = other.head;
  }
  tail = other.tail;

  other.head = nullptr;
  other.tail = nullptr;
}
793 
removeFront()794 Zone* ZoneList::removeFront() {
795   MOZ_ASSERT(!isEmpty());
796   check();
797 
798   Zone* front = head;
799   head = head->listNext_;
800   if (!head) {
801     tail = nullptr;
802   }
803 
804   front->listNext_ = Zone::NotOnList;
805 
806   return front;
807 }
808 
// Remove every zone from the list, leaving it empty.
void ZoneList::clear() {
  while (!isEmpty()) {
    removeFront();
  }
}
814 
// Public API shim: register a weak cache with the given zone so it gets
// swept during GC.
JS_PUBLIC_API void JS::shadow::RegisterWeakCache(
    JS::Zone* zone, detail::WeakCacheBase* cachep) {
  zone->registerWeakCache(cachep);
}
819 
// Trace script-table roots that must stay alive: profiled-script counts
// (when profiling) and the debugger's DebugScript map. Skipped entirely for
// minor GCs since script keys are never nursery-allocated.
void Zone::traceScriptTableRoots(JSTracer* trc) {
  static_assert(std::is_convertible_v<BaseScript*, gc::TenuredCell*>,
                "BaseScript must not be nursery-allocated for script-table "
                "tracing to work");

  // Performance optimization: the script-table keys are JSScripts, which
  // cannot be in the nursery, so we can skip this tracing if we are only in a
  // minor collection. We static-assert this fact above.
  if (JS::RuntimeHeapIsMinorCollecting()) {
    return;
  }

  // N.B.: the script-table keys are weak *except* in an exceptional case: when
  // then --dump-bytecode command line option or the PCCount JSFriend API is
  // used, then the scripts for all counts must remain alive. We only trace
  // when the `trc->runtime()->profilingScripts` flag is set. This flag is
  // cleared in JSRuntime::destroyRuntime() during shutdown to ensure that
  // scripts are collected before the runtime goes away completely.
  if (scriptCountsMap && trc->runtime()->profilingScripts) {
    for (ScriptCountsMap::Range r = scriptCountsMap->all(); !r.empty();
         r.popFront()) {
      BaseScript* script = r.front().key();
      MOZ_ASSERT(script->hasScriptCounts());
      TraceRoot(trc, &script, "profilingScripts");
    }
  }

  // Trace the debugger's DebugScript weak map.
  if (debugScriptMap) {
    DebugAPI::traceDebugScriptMap(trc, debugScriptMap);
  }
}
852 
// After a moving GC, the BaseScript keys of the script tables may have been
// relocated; rekey every entry whose key pointer changed.
void Zone::fixupScriptMapsAfterMovingGC(JSTracer* trc) {
  // Map entries are removed by BaseScript::finalize, but we need to update the
  // script pointers here in case they are moved by the GC.

  if (scriptCountsMap) {
    for (ScriptCountsMap::Enum e(*scriptCountsMap); !e.empty(); e.popFront()) {
      BaseScript* script = e.front().key();
      // Unlike the maps below, these keys are traced as roots (see
      // traceScriptTableRoots), so trace the edge rather than checking for
      // finalization; |script| is updated in place if the key moved.
      TraceManuallyBarrieredEdge(trc, &script, "Realm::scriptCountsMap::key");
      if (script != e.front().key()) {
        e.rekeyFront(script);
      }
    }
  }

  if (scriptLCovMap) {
    for (ScriptLCovMap::Enum e(*scriptLCovMap); !e.empty(); e.popFront()) {
      BaseScript* script = e.front().key();
      // Skip dying scripts (their entries are removed by finalization);
      // IsAboutToBeFinalizedUnbarriered updates |script| if it moved, so a
      // changed pointer means the entry must be rekeyed.
      if (!IsAboutToBeFinalizedUnbarriered(&script) &&
          script != e.front().key()) {
        e.rekeyFront(script);
      }
    }
  }

#ifdef MOZ_VTUNE
  // Same pattern as scriptLCovMap above.
  if (scriptVTuneIdMap) {
    for (ScriptVTuneIdMap::Enum e(*scriptVTuneIdMap); !e.empty();
         e.popFront()) {
      BaseScript* script = e.front().key();
      if (!IsAboutToBeFinalizedUnbarriered(&script) &&
          script != e.front().key()) {
        e.rekeyFront(script);
      }
    }
  }
#endif

#ifdef JS_CACHEIR_SPEW
  // Same pattern as scriptLCovMap above.
  if (scriptFinalWarmUpCountMap) {
    for (ScriptFinalWarmUpCountMap::Enum e(*scriptFinalWarmUpCountMap);
         !e.empty(); e.popFront()) {
      BaseScript* script = e.front().key();
      if (!IsAboutToBeFinalizedUnbarriered(&script) &&
          script != e.front().key()) {
        e.rekeyFront(script);
      }
    }
  }
#endif
}
903 
#ifdef JSGC_HASH_TABLE_CHECKS
// Debug-build integrity check run after a moving GC: every script-table key
// must belong to this zone, be a valid (non-relocated) GC thing, and still be
// findable in its map (i.e. the table was correctly rekeyed).
void Zone::checkScriptMapsAfterMovingGC() {
  if (scriptCountsMap) {
    for (auto r = scriptCountsMap->all(); !r.empty(); r.popFront()) {
      BaseScript* script = r.front().key();
      MOZ_ASSERT(script->zone() == this);
      CheckGCThingAfterMovingGC(script);
      // A lookup by key must land on this exact entry.
      auto ptr = scriptCountsMap->lookup(script);
      MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front());
    }
  }

  if (scriptLCovMap) {
    for (auto r = scriptLCovMap->all(); !r.empty(); r.popFront()) {
      BaseScript* script = r.front().key();
      MOZ_ASSERT(script->zone() == this);
      CheckGCThingAfterMovingGC(script);
      auto ptr = scriptLCovMap->lookup(script);
      MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front());
    }
  }

#  ifdef MOZ_VTUNE
  if (scriptVTuneIdMap) {
    for (auto r = scriptVTuneIdMap->all(); !r.empty(); r.popFront()) {
      BaseScript* script = r.front().key();
      MOZ_ASSERT(script->zone() == this);
      CheckGCThingAfterMovingGC(script);
      auto ptr = scriptVTuneIdMap->lookup(script);
      MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front());
    }
  }
#  endif  // MOZ_VTUNE

#  ifdef JS_CACHEIR_SPEW
  if (scriptFinalWarmUpCountMap) {
    for (auto r = scriptFinalWarmUpCountMap->all(); !r.empty(); r.popFront()) {
      BaseScript* script = r.front().key();
      MOZ_ASSERT(script->zone() == this);
      CheckGCThingAfterMovingGC(script);
      auto ptr = scriptFinalWarmUpCountMap->lookup(script);
      MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front());
    }
  }
#  endif  // JS_CACHEIR_SPEW
}
#endif
951 
clearScriptCounts(Realm * realm)952 void Zone::clearScriptCounts(Realm* realm) {
953   if (!scriptCountsMap) {
954     return;
955   }
956 
957   // Clear all hasScriptCounts_ flags of BaseScript, in order to release all
958   // ScriptCounts entries of the given realm.
959   for (auto i = scriptCountsMap->modIter(); !i.done(); i.next()) {
960     BaseScript* script = i.get().key();
961     if (script->realm() != realm) {
962       continue;
963     }
964     // We can't destroy the ScriptCounts yet if the script has Baseline code,
965     // because Baseline code bakes in pointers to the counters. The ScriptCounts
966     // will be destroyed in Zone::discardJitCode when discarding the JitScript.
967     if (script->hasBaselineScript()) {
968       continue;
969     }
970     script->clearHasScriptCounts();
971     i.remove();
972   }
973 }
974 
clearScriptLCov(Realm * realm)975 void Zone::clearScriptLCov(Realm* realm) {
976   if (!scriptLCovMap) {
977     return;
978   }
979 
980   for (auto i = scriptLCovMap->modIter(); !i.done(); i.next()) {
981     BaseScript* script = i.get().key();
982     if (script->realm() == realm) {
983       i.remove();
984     }
985   }
986 }
987 
// Drop roots that must not keep things alive during a shutdown GC.
void Zone::clearRootsForShutdownGC() {
  // Finalization callbacks are not called if we're shutting down.
  finalizationRecordMap().clear();

  // Objects registered via keepDuringJob no longer need to be retained.
  clearKeptObjects();
}
994 
// Forward finishRoots() to every realm in this zone.
void Zone::finishRoots() {
  for (RealmsInZoneIter r(this); !r.done(); r.next()) {
    r->finishRoots();
  }
}
1000 
traceKeptObjects(JSTracer * trc)1001 void Zone::traceKeptObjects(JSTracer* trc) { keptObjects.ref().trace(trc); }
1002 
keepDuringJob(HandleObject target)1003 bool Zone::keepDuringJob(HandleObject target) {
1004   return keptObjects.ref().put(target);
1005 }
1006 
clearKeptObjects()1007 void Zone::clearKeptObjects() { keptObjects.ref().clear(); }
1008