1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6
7 /*
8 * Implementation of GC sweeping.
9 *
10 * In the SpiderMonkey GC, 'sweeping' is used to mean two things:
11 * - updating data structures to remove pointers to dead GC things and updating
12 * pointers to moved GC things
13 * - finalizing dead GC things
14 *
15 * Furthermore, the GC carries out gray and weak marking after the start of the
16 * sweep phase. This is also implemented in this file.
17 */
18
19 #include "mozilla/Maybe.h"
20 #include "mozilla/ScopeExit.h"
21
22 #include "builtin/FinalizationRegistryObject.h"
23 #include "builtin/WeakRefObject.h"
24 #include "debugger/DebugAPI.h"
25 #include "gc/AllocKind.h"
26 #include "gc/FinalizationObservers.h"
27 #include "gc/GCInternals.h"
28 #include "gc/GCLock.h"
29 #include "gc/GCProbes.h"
30 #include "gc/GCRuntime.h"
31 #include "gc/ParallelWork.h"
32 #include "gc/Statistics.h"
33 #include "gc/WeakMap.h"
34 #include "gc/Zone.h"
35 #include "jit/JitRuntime.h"
36 #include "jit/JitZone.h"
37 #include "proxy/DeadObjectProxy.h"
38 #include "vm/HelperThreads.h"
39 #include "vm/JSContext.h"
40 #include "vm/TraceLogging.h"
41 #include "vm/WrapperObject.h"
42
43 #include "gc/PrivateIterators-inl.h"
44 #include "vm/GeckoProfiler-inl.h"
45 #include "vm/JSObject-inl.h"
46 #include "vm/JSScript-inl.h"
47 #include "vm/PropMap-inl.h"
48 #include "vm/Stack-inl.h"
49 #include "vm/StringType-inl.h"
50
51 using namespace js;
52 using namespace js::gc;
53
// A phase of finalization: the set of alloc kinds swept together, paired with
// the stats phase that accounts for the work.
struct js::gc::FinalizePhase {
  gcstats::PhaseKind statsPhase;
  AllocKinds kinds;
};

/*
 * Finalization order for objects swept incrementally on the main thread.
 */
static constexpr FinalizePhase ForegroundObjectFinalizePhase = {
    gcstats::PhaseKind::FINALIZE_OBJECT,
    {AllocKind::OBJECT0, AllocKind::OBJECT2, AllocKind::OBJECT4,
     AllocKind::OBJECT8, AllocKind::OBJECT12, AllocKind::OBJECT16}};

/*
 * Finalization order for GC things swept incrementally on the main thread.
 */
static constexpr FinalizePhase ForegroundNonObjectFinalizePhase = {
    gcstats::PhaseKind::FINALIZE_NON_OBJECT,
    {AllocKind::SCRIPT, AllocKind::JITCODE}};

/*
 * Finalization order for GC things swept on the background thread.
 * sweepBackgroundThings() processes these phases in array order.
 */
static constexpr FinalizePhase BackgroundFinalizePhases[] = {
    {gcstats::PhaseKind::FINALIZE_OBJECT,
     {AllocKind::FUNCTION, AllocKind::FUNCTION_EXTENDED,
      AllocKind::OBJECT0_BACKGROUND, AllocKind::OBJECT2_BACKGROUND,
      AllocKind::ARRAYBUFFER4, AllocKind::OBJECT4_BACKGROUND,
      AllocKind::ARRAYBUFFER8, AllocKind::OBJECT8_BACKGROUND,
      AllocKind::ARRAYBUFFER12, AllocKind::OBJECT12_BACKGROUND,
      AllocKind::ARRAYBUFFER16, AllocKind::OBJECT16_BACKGROUND}},
    {gcstats::PhaseKind::FINALIZE_NON_OBJECT,
     {AllocKind::SCOPE, AllocKind::REGEXP_SHARED, AllocKind::FAT_INLINE_STRING,
      AllocKind::STRING, AllocKind::EXTERNAL_STRING, AllocKind::FAT_INLINE_ATOM,
      AllocKind::ATOM, AllocKind::SYMBOL, AllocKind::BIGINT, AllocKind::SHAPE,
      AllocKind::BASE_SHAPE, AllocKind::GETTER_SETTER,
      AllocKind::COMPACT_PROP_MAP, AllocKind::NORMAL_PROP_MAP,
      AllocKind::DICT_PROP_MAP}}};
92
/*
 * Finalize the dead (unmarked) cells of type T in this arena and rebuild the
 * arena's free list from the gaps between surviving cells.
 *
 * Returns the number of cells that remain live. If zero, the free list is not
 * rebuilt; the caller is expected to release or recycle the arena.
 */
template <typename T>
inline size_t Arena::finalize(JSFreeOp* fop, AllocKind thingKind,
                              size_t thingSize) {
  /* Enforce requirements on size of T. */
  MOZ_ASSERT(thingSize % CellAlignBytes == 0);
  MOZ_ASSERT(thingSize >= MinCellSize);
  MOZ_ASSERT(thingSize <= 255);

  MOZ_ASSERT(allocated());
  MOZ_ASSERT(thingKind == getAllocKind());
  MOZ_ASSERT(thingSize == getThingSize());
  MOZ_ASSERT(!onDelayedMarkingList_);

  // Offsets within the arena, tracked as we scan cells in address order.
  uint_fast16_t firstThing = firstThingOffset(thingKind);
  uint_fast16_t firstThingOrSuccessorOfLastMarkedThing = firstThing;
  uint_fast16_t lastThing = ArenaSize - thingSize;

  // The new free list is built incrementally: one FreeSpan per run of dead
  // cells between marked cells.
  FreeSpan newListHead;
  FreeSpan* newListTail = &newListHead;
  size_t nmarked = 0, nfinalized = 0;

  for (ArenaCellIterUnderFinalize cell(this); !cell.done(); cell.next()) {
    T* t = cell.as<T>();
    if (t->asTenured().isMarkedAny()) {
      uint_fast16_t thing = uintptr_t(t) & ArenaMask;
      if (thing != firstThingOrSuccessorOfLastMarkedThing) {
        // We just finished passing over one or more free things,
        // so record a new FreeSpan.
        newListTail->initBounds(firstThingOrSuccessorOfLastMarkedThing,
                                thing - thingSize, this);
        newListTail = newListTail->nextSpanUnchecked(this);
      }
      firstThingOrSuccessorOfLastMarkedThing = thing + thingSize;
      nmarked++;
    } else {
      t->finalize(fop);
      // Poison the swept cell so use-after-free is detectable.
      AlwaysPoison(t, JS_SWEPT_TENURED_PATTERN, thingSize,
                   MemCheckKind::MakeUndefined);
      gcprobes::TenuredFinalize(t);
      nfinalized++;
    }
  }

  // Report cell survival counts for object arenas allocated since the last
  // collection; used by the pretenuring machinery.
  if constexpr (std::is_same_v<T, JSObject>) {
    if (isNewlyCreated) {
      zone->pretenuring.updateCellCountsInNewlyCreatedArenas(
          nmarked + nfinalized, nmarked);
    }
  }
  isNewlyCreated = 0;

  // Accumulate per-zone string survival counts. NOTE(review): presumably
  // consumed by nursery string allocation heuristics — confirm.
  if (thingKind == AllocKind::STRING ||
      thingKind == AllocKind::FAT_INLINE_STRING) {
    zone->markedStrings += nmarked;
    zone->finalizedStrings += nfinalized;
  }

  if (nmarked == 0) {
    // Do nothing. The caller will update the arena appropriately.
    MOZ_ASSERT(newListTail == &newListHead);
    DebugOnlyPoison(data, JS_SWEPT_TENURED_PATTERN, sizeof(data),
                    MemCheckKind::MakeUndefined);
    return nmarked;
  }

  MOZ_ASSERT(firstThingOrSuccessorOfLastMarkedThing != firstThing);
  uint_fast16_t lastMarkedThing =
      firstThingOrSuccessorOfLastMarkedThing - thingSize;
  if (lastThing == lastMarkedThing) {
    // If the last thing was marked, we will have already set the bounds of
    // the final span, and we just need to terminate the list.
    newListTail->initAsEmpty();
  } else {
    // Otherwise, end the list with a span that covers the final stretch of free
    // things.
    newListTail->initFinal(firstThingOrSuccessorOfLastMarkedThing, lastThing,
                           this);
  }

  firstFreeSpan = newListHead;
#ifdef DEBUG
  size_t nfree = numFreeThings(thingSize);
  MOZ_ASSERT(nfree + nmarked == thingsPerArena(thingKind));
#endif
  return nmarked;
}
179
// Finalize arenas from the |src| list, recycling completely empty arenas and
// inserting the others into the appropriate destination size bins.
//
// Returns false if the slice budget was exhausted before all arenas were
// processed; the remaining arenas stay on |src| for a later slice.
template <typename T>
static inline bool FinalizeTypedArenas(JSFreeOp* fop, ArenaList& src,
                                       SortedArenaList& dest,
                                       AllocKind thingKind,
                                       SliceBudget& budget) {
  AutoSetThreadIsFinalizing setThreadUse;

  size_t thingSize = Arena::thingSize(thingKind);
  size_t thingsPerArena = Arena::thingsPerArena(thingKind);

  while (Arena* arena = src.takeFirstArena()) {
    size_t nmarked = arena->finalize<T>(fop, thingKind, thingSize);
    size_t nfree = thingsPerArena - nmarked;

    if (nmarked) {
      // Bin the arena by how many free cells it has.
      dest.insertAt(arena, nfree);
    } else {
      // The arena is completely dead; hand it back to its chunk.
      arena->chunk()->recycleArena(arena, dest, thingsPerArena);
    }

    // Each arena costs a full arena's worth of cells against the budget,
    // regardless of how many cells actually needed finalizing.
    budget.step(thingsPerArena);
    if (budget.isOverBudget()) {
      return false;
    }
  }

  return true;
}
211
/*
 * Finalize the list of arenas, dispatching to FinalizeTypedArenas
 * instantiated with the static type corresponding to |thingKind|.
 */
static bool FinalizeArenas(JSFreeOp* fop, ArenaList& src, SortedArenaList& dest,
                           AllocKind thingKind, SliceBudget& budget) {
  switch (thingKind) {
#define EXPAND_CASE(allocKind, traceKind, type, sizedType, bgFinal, nursery, \
                    compact)                                                 \
  case AllocKind::allocKind:                                                 \
    return FinalizeTypedArenas<type>(fop, src, dest, thingKind, budget);
    FOR_EACH_ALLOCKIND(EXPAND_CASE)
#undef EXPAND_CASE

    default:
      MOZ_CRASH("Invalid alloc kind");
  }
}
229
initBackgroundSweep(Zone * zone,JSFreeOp * fop,const FinalizePhase & phase)230 void GCRuntime::initBackgroundSweep(Zone* zone, JSFreeOp* fop,
231 const FinalizePhase& phase) {
232 gcstats::AutoPhase ap(stats(), phase.statsPhase);
233 for (auto kind : phase.kinds) {
234 zone->arenas.initBackgroundSweep(kind);
235 }
236 }
237
initBackgroundSweep(AllocKind thingKind)238 void ArenaLists::initBackgroundSweep(AllocKind thingKind) {
239 MOZ_ASSERT(IsBackgroundFinalized(thingKind));
240 MOZ_ASSERT(concurrentUse(thingKind) == ConcurrentUse::None);
241
242 if (!collectingArenaList(thingKind).isEmpty()) {
243 concurrentUse(thingKind) = ConcurrentUse::BackgroundFinalize;
244 }
245 }
246
// Finalize all arenas of |kind| for |zone| (runs on the background sweep
// thread), appending now-empty arenas to |*empty| and merging survivors back
// into the zone's main arena list under the GC lock.
void GCRuntime::backgroundFinalize(JSFreeOp* fop, Zone* zone, AllocKind kind,
                                   Arena** empty) {
  MOZ_ASSERT(empty);

  ArenaLists* lists = &zone->arenas;
  ArenaList& arenas = lists->collectingArenaList(kind);
  if (arenas.isEmpty()) {
    MOZ_ASSERT(lists->concurrentUse(kind) == ArenaLists::ConcurrentUse::None);
    return;
  }

  SortedArenaList finalizedSorted(Arena::thingsPerArena(kind));

  // Finalization here runs to completion; incremental budgets only apply to
  // foreground sweeping.
  auto unlimited = SliceBudget::unlimited();
  FinalizeArenas(fop, arenas, finalizedSorted, kind, unlimited);
  MOZ_ASSERT(arenas.isEmpty());

  finalizedSorted.extractEmpty(empty);

  // When marking begins, all arenas are moved from arenaLists to
  // collectingArenaLists. When the mutator runs, new arenas are allocated in
  // arenaLists. Now that finalization is complete, we want to merge these lists
  // back together.

  // We must take the GC lock to be able to safely modify the ArenaList;
  // however, this does not by itself make the changes visible to all threads,
  // as not all threads take the GC lock to read the ArenaLists.
  // That safety is provided by the ReleaseAcquire memory ordering of the
  // background finalize state, which we explicitly set as the final step.
  {
    AutoLockGC lock(rt);
    MOZ_ASSERT(lists->concurrentUse(kind) ==
               ArenaLists::ConcurrentUse::BackgroundFinalize);
    lists->mergeFinalizedArenas(kind, finalizedSorted);
  }

  // Publish the merged lists; must happen after the merge above.
  lists->concurrentUse(kind) = ArenaLists::ConcurrentUse::None;
}
285
// After finalizing arenas, merge the following to get the final state of an
// arena list:
// - arenas allocated during marking
// - arenas allocated during sweeping
// - finalized arenas
//
// The resulting list has the finalized arenas first, followed by the arenas
// allocated during collection.
void ArenaLists::mergeFinalizedArenas(AllocKind kind,
                                      SortedArenaList& finalizedArenas) {
#ifdef DEBUG
  // Updating arena lists off-thread requires taking the GC lock because the
  // main thread uses these when allocating.
  if (IsBackgroundFinalized(kind)) {
    runtimeFromAnyThread()->gc.assertCurrentThreadHasLockedGC();
  }
#endif

  ArenaList& arenas = arenaList(kind);

  ArenaList allocatedDuringCollection = std::move(arenas);
  arenas = finalizedArenas.toArenaList();
  arenas.insertListWithCursorAtEnd(allocatedDuringCollection);

  // The collecting list is now fully consumed.
  collectingArenaList(kind).clear();
}
309
queueForegroundThingsForSweep()310 void ArenaLists::queueForegroundThingsForSweep() {
311 gcCompactPropMapArenasToUpdate =
312 collectingArenaList(AllocKind::COMPACT_PROP_MAP).head();
313 gcNormalPropMapArenasToUpdate =
314 collectingArenaList(AllocKind::NORMAL_PROP_MAP).head();
315 }
316
// Finalize background-swept alloc kinds for each queued zone and release the
// resulting empty arenas. Runs on the background sweep thread (or on the main
// thread when background sweeping is disabled).
void GCRuntime::sweepBackgroundThings(ZoneList& zones) {
  if (zones.isEmpty()) {
    return;
  }

  JSFreeOp fop(nullptr);

  // Sweep zones in order. The atoms zone must be finalized last as other
  // zones may have direct pointers into it.
  while (!zones.isEmpty()) {
    Zone* zone = zones.removeFront();
    MOZ_ASSERT(zone->isGCFinished());

    Arena* emptyArenas = zone->arenas.takeSweptEmptyArenas();

    AutoSetThreadIsSweeping threadIsSweeping(zone);

    // We must finalize thing kinds in the order specified by
    // BackgroundFinalizePhases.
    for (auto phase : BackgroundFinalizePhases) {
      for (auto kind : phase.kinds) {
        backgroundFinalize(&fop, zone, kind, &emptyArenas);
      }
    }

    // Release any arenas that are now empty.
    //
    // Empty arenas are only released after everything has been finalized so
    // that it's still possible to get a thing's zone after the thing has been
    // finalized. The HeapPtr destructor depends on this, and this allows
    // HeapPtrs between things of different alloc kind regardless of
    // finalization order.
    //
    // Periodically drop and reacquire the GC lock every so often to avoid
    // blocking the main thread from allocating chunks.
    static const size_t LockReleasePeriod = 32;

    while (emptyArenas) {
      AutoLockGC lock(this);
      for (size_t i = 0; i < LockReleasePeriod && emptyArenas; i++) {
        Arena* arena = emptyArenas;
        emptyArenas = emptyArenas->next;
        releaseArena(arena, lock);
      }
    }
  }
}
364
// Debug-only check that no background sweeping work remains queued and that
// every zone's collecting lists have been drained.
void GCRuntime::assertBackgroundSweepingFinished() {
#ifdef DEBUG
  {
    AutoLockHelperThreadState lock;
    MOZ_ASSERT(backgroundSweepZones.ref().isEmpty());
  }

  for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
    for (auto kind : AllAllocKinds()) {
      // Collecting lists may legitimately be non-empty mid-collection.
      MOZ_ASSERT_IF(state() != State::Prepare && state() != State::Mark &&
                        state() != State::Sweep,
                    zone->arenas.collectingArenaList(kind).isEmpty());
      MOZ_ASSERT(zone->arenas.doneBackgroundFinalize(kind));
    }
  }
#endif
}
382
// Queue |zones| for sweeping and start the background sweep task. When
// background sweeping is disabled, run the task synchronously on this thread
// instead (outside the helper-thread lock).
void GCRuntime::queueZonesAndStartBackgroundSweep(ZoneList& zones) {
  {
    AutoLockHelperThreadState lock;
    MOZ_ASSERT(!requestSliceAfterBackgroundTask);
    backgroundSweepZones.ref().transferFrom(zones);
    if (sweepOnBackgroundThread) {
      sweepTask.startOrRunIfIdle(lock);
    }
  }
  if (!sweepOnBackgroundThread) {
    sweepTask.join();
    sweepTask.runFromMainThread();
  }
}
397
// Background sweeping is accounted to the SWEEP stats phase.
BackgroundSweepTask::BackgroundSweepTask(GCRuntime* gc)
    : GCParallelTask(gc, gcstats::PhaseKind::SWEEP) {}
400
// Task entry point: log the sweep and delegate to the GC runtime.
void BackgroundSweepTask::run(AutoLockHelperThreadState& lock) {
  AutoTraceLog logSweeping(TraceLoggerForCurrentThread(),
                           TraceLogger_GCSweeping);

  gc->sweepFromBackgroundThread(lock);
}
407
// Drain the queue of zones awaiting background sweeping, dropping the helper
// thread lock while the actual sweeping runs.
void GCRuntime::sweepFromBackgroundThread(AutoLockHelperThreadState& lock) {
  do {
    // Take the whole current queue; more zones may be appended while we are
    // sweeping with the lock released.
    ZoneList zones;
    zones.transferFrom(backgroundSweepZones.ref());

    AutoUnlockHelperThreadState unlock(lock);
    sweepBackgroundThings(zones);

    // The main thread may call queueZonesAndStartBackgroundSweep() while this
    // is running so we must check there is no more work after releasing the
    // lock.
  } while (!backgroundSweepZones.ref().isEmpty());

  maybeRequestGCAfterBackgroundTask(lock);
}
423
waitBackgroundSweepEnd()424 void GCRuntime::waitBackgroundSweepEnd() {
425 sweepTask.join();
426 if (state() != State::Sweep) {
427 assertBackgroundSweepingFinished();
428 }
429 }
430
// Start the background free task if it is not already running.
void GCRuntime::startBackgroundFree() {
  AutoLockHelperThreadState lock;
  freeTask.startOrRunIfIdle(lock);
}
435
BackgroundFreeTask::BackgroundFreeTask(GCRuntime* gc)
    : GCParallelTask(gc, gcstats::PhaseKind::NONE) {
  // This can occur outside GCs so doesn't have a stats phase.
}
440
// Task entry point: log the free operation and delegate to the GC runtime.
void BackgroundFreeTask::run(AutoLockHelperThreadState& lock) {
  AutoTraceLog logFreeing(TraceLoggerForCurrentThread(), TraceLogger_GCFree);

  gc->freeFromBackgroundThread(lock);
}
446
// Free queued LifoAlloc blocks and nursery buffers, releasing the helper
// thread lock while the actual freeing runs. Loops until both queues are
// empty, since more work may be queued while the lock is released.
void GCRuntime::freeFromBackgroundThread(AutoLockHelperThreadState& lock) {
  do {
    // Take ownership of the queued work under the lock.
    LifoAlloc lifoBlocks(JSContext::TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE);
    lifoBlocks.transferFrom(&lifoBlocksToFree.ref());

    Nursery::BufferSet buffers;
    std::swap(buffers, buffersToFreeAfterMinorGC.ref());

    AutoUnlockHelperThreadState unlock(lock);

    lifoBlocks.freeAll();

    JSFreeOp* fop = TlsContext.get()->defaultFreeOp();
    for (Nursery::BufferSet::Range r = buffers.all(); !r.empty();
         r.popFront()) {
      // Malloc memory associated with nursery objects is not tracked as these
      // are assumed to be short lived.
      fop->freeUntracked(r.front());
    }
  } while (!lifoBlocksToFree.ref().isEmpty() ||
           !buffersToFreeAfterMinorGC.ref().empty());
}
469
// Block until any in-progress background free task completes.
void GCRuntime::waitBackgroundFreeEnd() { freeTask.join(); }
471
/*
 * Mark weak references (weak map entries and ephemeron edges) for the zones
 * produced by ZoneIterT, iterating to a fixed point. Returns NotFinished only
 * when incremental weak map marking is enabled and the budget runs out;
 * otherwise runs to completion and returns Finished.
 */
template <class ZoneIterT>
IncrementalProgress GCRuntime::markWeakReferences(
    SliceBudget& incrementalBudget) {
  MOZ_ASSERT(!marker.isWeakMarking());

  gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::SWEEP_MARK_WEAK);

  // Only honor the incremental budget when incremental weak map marking is
  // enabled; otherwise this phase must not yield.
  auto unlimited = SliceBudget::unlimited();
  SliceBudget& budget =
      marker.incrementalWeakMapMarkingEnabled ? incrementalBudget : unlimited;

  // Ensure we don't return to the mutator while we're still in weak marking
  // mode.
  auto leaveOnExit =
      mozilla::MakeScopeExit([&] { marker.leaveWeakMarkingMode(); });

  if (marker.enterWeakMarkingMode()) {
    // Do not rely on the information about not-yet-marked weak keys that have
    // been collected by barriers. Clear out the gcEphemeronEdges entries and
    // rebuild the full table. Note that this is a cross-zone operation;
    // delegate zone entries will be populated by map zone traversals, so
    // everything needs to be cleared first, then populated.
    if (!marker.incrementalWeakMapMarkingEnabled) {
      for (ZoneIterT zone(this); !zone.done(); zone.next()) {
        AutoEnterOOMUnsafeRegion oomUnsafe;
        if (!zone->gcEphemeronEdges().clear()) {
          oomUnsafe.crash("clearing weak keys when entering weak marking mode");
        }
      }
    }

    for (ZoneIterT zone(this); !zone.done(); zone.next()) {
      if (zone->enterWeakMarkingMode(&marker, budget) == NotFinished) {
        return NotFinished;
      }
    }
  }

  // Iterate to a fixed point: newly marked things can make further weak map
  // entries live.
  bool markedAny = true;
  while (markedAny) {
    if (!marker.markUntilBudgetExhausted(budget)) {
      MOZ_ASSERT(marker.incrementalWeakMapMarkingEnabled);
      return NotFinished;
    }

    markedAny = false;

    // Outside weak marking mode, weak map entries must be marked by
    // explicitly iterating each zone's weak maps.
    if (!marker.isWeakMarking()) {
      for (ZoneIterT zone(this); !zone.done(); zone.next()) {
        markedAny |= WeakMapBase::markZoneIteratively(zone, &marker);
      }
    }

    markedAny |= jit::JitRuntime::MarkJitcodeGlobalTableIteratively(&marker);
  }
  MOZ_ASSERT(marker.isDrained());

  return Finished;
}
531
// Mark weak references for the zones in the current sweep group only.
IncrementalProgress GCRuntime::markWeakReferencesInCurrentGroup(
    SliceBudget& budget) {
  return markWeakReferences<SweepGroupZonesIter>(budget);
}
536
/*
 * Mark gray roots for the zones produced by ZoneIterT. Returns NotFinished
 * if the budget is exhausted while tracing the embedding's gray roots.
 */
template <class ZoneIterT>
IncrementalProgress GCRuntime::markGrayRoots(SliceBudget& budget,
                                             gcstats::PhaseKind phase) {
  MOZ_ASSERT(marker.markColor() == MarkColor::Gray);

  gcstats::AutoPhase ap(stats(), phase);

  AutoUpdateLiveCompartments updateLive(this);

  if (traceEmbeddingGrayRoots(&marker, budget) == NotFinished) {
    return NotFinished;
  }

  // Also trace gray edges coming into the zones being collected from other
  // compartments.
  Compartment::traceIncomingCrossCompartmentEdgesForZoneGC(
      &marker, Compartment::GrayEdges);

  return Finished;
}
555
markAllWeakReferences()556 IncrementalProgress GCRuntime::markAllWeakReferences() {
557 SliceBudget budget = SliceBudget::unlimited();
558 return markWeakReferences<GCZonesIter>(budget);
559 }
560
markAllGrayReferences(gcstats::PhaseKind phase)561 void GCRuntime::markAllGrayReferences(gcstats::PhaseKind phase) {
562 SliceBudget budget = SliceBudget::unlimited();
563 markGrayRoots<GCZonesIter>(budget, phase);
564 drainMarkStack();
565 }
566
dropStringWrappers()567 void GCRuntime::dropStringWrappers() {
568 /*
569 * String "wrappers" are dropped on GC because their presence would require
570 * us to sweep the wrappers in all compartments every time we sweep a
571 * compartment group.
572 */
573 for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
574 zone->dropStringWrappersOnGC();
575 }
576 }
577
578 /*
579 * Group zones that must be swept at the same time.
580 *
581 * From the point of view of the mutator, groups of zones transition atomically
582 * from marking to sweeping. If compartment A has an edge to an unmarked object
583 * in compartment B, then we must not start sweeping A in a later slice than we
584 * start sweeping B. That's because a write barrier in A could lead to the
585 * unmarked object in B becoming marked. However, if we had already swept that
586 * object, we would be in trouble.
587 *
588 * If we consider these dependencies as a graph, then all the compartments in
589 * any strongly-connected component of this graph must start sweeping in the
590 * same slice.
591 *
592 * Tarjan's algorithm is used to calculate the components.
593 */
594
// Add sweep group edges from this compartment's zone to the zones of objects
// it has wrappers for, so that a wrapped zone is never swept before its
// wrapper zone has finished marking. Returns false on OOM.
bool Compartment::findSweepGroupEdges() {
  Zone* source = zone();
  for (WrappedObjectCompartmentEnum e(this); !e.empty(); e.popFront()) {
    Compartment* targetComp = e.front();
    Zone* target = targetComp->zone();

    // Skip zones not being collected and zones we already have an edge to.
    if (!target->isGCMarking() || source->hasSweepGroupEdgeTo(target)) {
      continue;
    }

    for (ObjectWrapperEnum e(this, targetComp); !e.empty(); e.popFront()) {
      JSObject* key = e.front().mutableKey();
      MOZ_ASSERT(key->zone() == target);

      // Add an edge to the wrapped object's zone to ensure that the wrapper
      // zone is not still being marked when we start sweeping the wrapped zone.
      // As an optimization, if the wrapped object is already marked black there
      // is no danger of later marking and we can skip this.
      if (key->isMarkedBlack()) {
        continue;
      }

      if (!source->addSweepGroupEdgeTo(target)) {
        return false;
      }

      // We don't need to consider any more wrappers for this target
      // compartment since we already added an edge.
      break;
    }
  }

  return true;
}
629
findSweepGroupEdges(Zone * atomsZone)630 bool Zone::findSweepGroupEdges(Zone* atomsZone) {
631 #ifdef DEBUG
632 if (FinalizationObservers* observers = finalizationObservers()) {
633 observers->checkTables();
634 }
635 #endif
636
637 // Any zone may have a pointer to an atom in the atoms zone, and these aren't
638 // in the cross compartment map.
639 if (atomsZone->wasGCStarted() && !addSweepGroupEdgeTo(atomsZone)) {
640 return false;
641 }
642
643 for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next()) {
644 if (!comp->findSweepGroupEdges()) {
645 return false;
646 }
647 }
648
649 return WeakMapBase::findSweepGroupEdgesForZone(this);
650 }
651
// Add sweep group edges between the zones of consecutive objects on the test
// mark queue (debug builds only; a no-op otherwise). Returns false on OOM.
static bool AddEdgesForMarkQueue(GCMarker& marker) {
#ifdef DEBUG
  // For testing only.
  //
  // Add edges between all objects mentioned in the test mark queue, since
  // otherwise they will get marked in a different order than their sweep
  // groups. Note that this is only done at the beginning of an incremental
  // collection, so it is possible for objects to be added later that do not
  // follow the sweep group ordering. These objects will wait until their sweep
  // group comes up, or will be skipped if their sweep group is already past.
  JS::Zone* prevZone = nullptr;
  for (size_t i = 0; i < marker.markQueue.length(); i++) {
    Value val = marker.markQueue[i].get();
    if (!val.isObject()) {
      continue;
    }
    JSObject* obj = &val.toObject();
    JS::Zone* zone = obj->zone();
    if (!zone->isGCMarking()) {
      continue;
    }
    // Chain each queued object's zone to the previous distinct zone so their
    // sweep groups are ordered like the queue.
    if (prevZone && prevZone != zone) {
      if (!prevZone->addSweepGroupEdgeTo(zone)) {
        return false;
      }
    }
    prevZone = zone;
  }
#endif
  return true;
}
683
findSweepGroupEdges()684 bool GCRuntime::findSweepGroupEdges() {
685 for (GCZonesIter zone(this); !zone.done(); zone.next()) {
686 if (!zone->findSweepGroupEdges(atomsZone)) {
687 return false;
688 }
689 }
690
691 if (!AddEdgesForMarkQueue(marker)) {
692 return false;
693 }
694
695 return DebugAPI::findSweepGroupEdges(rt);
696 }
697
// Partition the collected zones into sweep groups: strongly connected
// components of the zone dependency graph, all of whose members must start
// sweeping in the same slice. Non-incremental collections and two-slice zeal
// modes always use a single group.
void GCRuntime::groupZonesForSweeping(JS::GCReason reason) {
#ifdef DEBUG
  for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
    MOZ_ASSERT(zone->gcSweepGroupEdges().empty());
  }
#endif

  JSContext* cx = rt->mainContextFromOwnThread();
  ZoneComponentFinder finder(cx);
  // If edge-finding fails (e.g. OOM), sweeping everything in one group is
  // always a safe fallback.
  if (!isIncremental || !findSweepGroupEdges()) {
    finder.useOneComponent();
  }

  // Use one component for two-slice zeal modes.
  if (useZeal && hasIncrementalTwoSliceZealMode()) {
    finder.useOneComponent();
  }

  for (GCZonesIter zone(this); !zone.done(); zone.next()) {
    MOZ_ASSERT(zone->isGCMarking());
    finder.addNode(zone);
  }
  sweepGroups = finder.getResultsList();
  currentSweepGroup = sweepGroups;
  sweepGroupIndex = 1;

  // The edges were only needed to compute the components; drop them now.
  for (GCZonesIter zone(this); !zone.done(); zone.next()) {
    zone->clearSweepGroupEdges();
  }

#ifdef DEBUG
  // Record each zone's group index for assertions elsewhere.
  unsigned idx = sweepGroupIndex;
  for (Zone* head = currentSweepGroup; head; head = head->nextGroup()) {
    for (Zone* zone = head; zone; zone = zone->nextNodeInGroup()) {
      MOZ_ASSERT(zone->isGCMarking());
      zone->gcSweepGroupIndex = idx;
    }
    idx++;
  }

  MOZ_ASSERT_IF(!isIncremental, !currentSweepGroup->nextGroup());
  for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
    MOZ_ASSERT(zone->gcSweepGroupEdges().empty());
  }
#endif
}
744
// Advance to the next sweep group. If abortSweepAfterCurrentGroup was
// requested, unwind the remaining groups back to the unmarked state instead
// of sweeping them.
void GCRuntime::getNextSweepGroup() {
  currentSweepGroup = currentSweepGroup->nextGroup();
  ++sweepGroupIndex;
  if (!currentSweepGroup) {
    abortSweepAfterCurrentGroup = false;
    return;
  }

  // A non-incremental collection must sweep all remaining zones together.
  MOZ_ASSERT_IF(abortSweepAfterCurrentGroup, !isIncremental);
  if (!isIncremental) {
    ZoneComponentFinder::mergeGroups(currentSweepGroup);
  }

  for (Zone* zone = currentSweepGroup; zone; zone = zone->nextNodeInGroup()) {
    MOZ_ASSERT(zone->isGCMarkingBlackOnly());
    MOZ_ASSERT(!zone->isQueuedForBackgroundSweep());
  }

  if (abortSweepAfterCurrentGroup) {
    markTask.join();

    // Abort collection of subsequent sweep groups.
    for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
      MOZ_ASSERT(!zone->gcNextGraphComponent);
      zone->changeGCState(Zone::MarkBlackOnly, Zone::NoGC);
      zone->arenas.unmarkPreMarkedFreeCells();
      zone->arenas.mergeArenasFromCollectingLists();
      zone->clearGCSliceThresholds();
    }

    for (SweepGroupCompartmentsIter comp(rt); !comp.done(); comp.next()) {
      resetGrayList(comp);
    }

    abortSweepAfterCurrentGroup = false;
    currentSweepGroup = nullptr;
  }
}
783
784 /*
785 * Gray marking:
786 *
787 * At the end of collection, anything reachable from a gray root that has not
788 * otherwise been marked black must be marked gray.
789 *
790 * This means that when marking things gray we must not allow marking to leave
791 * the current compartment group, as that could result in things being marked
792 * gray when they might subsequently be marked black. To achieve this, when we
793 * find a cross compartment pointer we don't mark the referent but add it to a
794 * singly-linked list of incoming gray pointers that is stored with each
795 * compartment.
796 *
797 * The list head is stored in Compartment::gcIncomingGrayPointers and contains
798 * cross compartment wrapper objects. The next pointer is stored in the second
799 * extra slot of the cross compartment wrapper.
800 *
801 * The list is created during gray marking when one of the
802 * MarkCrossCompartmentXXX functions is called for a pointer that leaves the
803  * current compartment group. This calls DelayCrossCompartmentGrayMarking to
804 * push the referring object onto the list.
805 *
806 * The list is traversed and then unlinked in
807 * GCRuntime::markIncomingGrayCrossCompartmentPointers.
808 */
809
IsGrayListObject(JSObject * obj)810 static bool IsGrayListObject(JSObject* obj) {
811 MOZ_ASSERT(obj);
812 return obj->is<CrossCompartmentWrapperObject>() && !IsDeadProxyObject(obj);
813 }
814
/* static */
// Return the proxy reserved slot used to link |obj| into a compartment's
// incoming gray pointer list.
unsigned ProxyObject::grayLinkReservedSlot(JSObject* obj) {
  MOZ_ASSERT(IsGrayListObject(obj));
  return CrossCompartmentWrapperObject::GrayLinkReservedSlot;
}
820
#ifdef DEBUG
// Assert that |obj| is not linked into any compartment's incoming gray
// pointer list (i.e. its gray link slot is unset).
static void AssertNotOnGrayList(JSObject* obj) {
  MOZ_ASSERT_IF(
      IsGrayListObject(obj),
      GetProxyReservedSlot(obj, ProxyObject::grayLinkReservedSlot(obj))
          .isUndefined());
}
#endif
829
// Debug-only: check that every compartment's incoming gray pointer list is
// empty and that no wrapper is still linked into one.
static void AssertNoWrappersInGrayList(JSRuntime* rt) {
#ifdef DEBUG
  for (CompartmentsIter c(rt); !c.done(); c.next()) {
    MOZ_ASSERT(!c->gcIncomingGrayPointers);
    for (Compartment::ObjectWrapperEnum e(c); !e.empty(); e.popFront()) {
      AssertNotOnGrayList(e.front().value().unbarrieredGet());
    }
  }
#endif
}
840
CrossCompartmentPointerReferent(JSObject * obj)841 static JSObject* CrossCompartmentPointerReferent(JSObject* obj) {
842 MOZ_ASSERT(IsGrayListObject(obj));
843 return &obj->as<ProxyObject>().private_().toObject();
844 }
845
// Return the wrapper following |prev| on an incoming gray pointer list,
// optionally unlinking |prev|. The next pointer is read from |prev|'s gray
// link slot before the slot is cleared.
static JSObject* NextIncomingCrossCompartmentPointer(JSObject* prev,
                                                     bool unlink) {
  unsigned slot = ProxyObject::grayLinkReservedSlot(prev);
  JSObject* next = GetProxyReservedSlot(prev, slot).toObjectOrNull();
  MOZ_ASSERT_IF(next, IsGrayListObject(next));

  if (unlink) {
    SetProxyReservedSlot(prev, slot, UndefinedValue());
  }

  return next;
}
858
// Record a gray cross-compartment edge by pushing the wrapper |src| onto the
// incoming gray pointer list of its referent's compartment, unless it is
// already on the list. The referent is marked later by
// markIncomingGrayCrossCompartmentPointers().
void js::gc::DelayCrossCompartmentGrayMarking(JSObject* src) {
  MOZ_ASSERT(IsGrayListObject(src));
  MOZ_ASSERT(src->isMarkedGray());

  AutoTouchingGrayThings tgt;

  /* Called from MarkCrossCompartmentXXX functions. */
  unsigned slot = ProxyObject::grayLinkReservedSlot(src);
  JSObject* dest = CrossCompartmentPointerReferent(src);
  Compartment* comp = dest->compartment();

  // An undefined gray link slot means the wrapper is not yet on the list.
  if (GetProxyReservedSlot(src, slot).isUndefined()) {
    SetProxyReservedSlot(src, slot,
                         ObjectOrNullValue(comp->gcIncomingGrayPointers));
    comp->gcIncomingGrayPointers = src;
  } else {
    MOZ_ASSERT(GetProxyReservedSlot(src, slot).isObjectOrNull());
  }

#ifdef DEBUG
  /*
   * Assert that the object is in our list, also walking the list to check its
   * integrity.
   */
  JSObject* obj = comp->gcIncomingGrayPointers;
  bool found = false;
  while (obj) {
    if (obj == src) {
      found = true;
    }
    obj = NextIncomingCrossCompartmentPointer(obj, false);
  }
  MOZ_ASSERT(found);
#endif
}
894
// Mark the referents of delayed incoming gray cross-compartment pointers for
// the current sweep group, unlinking each list as it is traversed.
void GCRuntime::markIncomingGrayCrossCompartmentPointers() {
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_MARK_INCOMING_GRAY);

  for (SweepGroupCompartmentsIter c(rt); !c.done(); c.next()) {
    MOZ_ASSERT(c->zone()->isGCMarkingBlackAndGray());
    MOZ_ASSERT_IF(c->gcIncomingGrayPointers,
                  IsGrayListObject(c->gcIncomingGrayPointers));

    // Passing unlink=true clears each wrapper's gray link slot as we go.
    for (JSObject* src = c->gcIncomingGrayPointers; src;
         src = NextIncomingCrossCompartmentPointer(src, true)) {
      JSObject* dst = CrossCompartmentPointerReferent(src);
      MOZ_ASSERT(dst->compartment() == c);
      MOZ_ASSERT_IF(src->asTenured().isMarkedBlack(),
                    dst->asTenured().isMarkedBlack());

      // Only propagate gray marking; black wrappers were handled already
      // (see the assertion above).
      if (src->asTenured().isMarkedGray()) {
        TraceManuallyBarrieredEdge(&marker, &dst,
                                   "cross-compartment gray pointer");
      }
    }

    c->gcIncomingGrayPointers = nullptr;
  }
}
919
RemoveFromGrayList(JSObject * wrapper)920 static bool RemoveFromGrayList(JSObject* wrapper) {
921 AutoTouchingGrayThings tgt;
922
923 if (!IsGrayListObject(wrapper)) {
924 return false;
925 }
926
927 unsigned slot = ProxyObject::grayLinkReservedSlot(wrapper);
928 if (GetProxyReservedSlot(wrapper, slot).isUndefined()) {
929 return false; /* Not on our list. */
930 }
931
932 JSObject* tail = GetProxyReservedSlot(wrapper, slot).toObjectOrNull();
933 SetProxyReservedSlot(wrapper, slot, UndefinedValue());
934
935 Compartment* comp = CrossCompartmentPointerReferent(wrapper)->compartment();
936 JSObject* obj = comp->gcIncomingGrayPointers;
937 if (obj == wrapper) {
938 comp->gcIncomingGrayPointers = tail;
939 return true;
940 }
941
942 while (obj) {
943 unsigned slot = ProxyObject::grayLinkReservedSlot(obj);
944 JSObject* next = GetProxyReservedSlot(obj, slot).toObjectOrNull();
945 if (next == wrapper) {
946 js::detail::SetProxyReservedSlotUnchecked(obj, slot,
947 ObjectOrNullValue(tail));
948 return true;
949 }
950 obj = next;
951 }
952
953 MOZ_CRASH("object not found in gray link list");
954 }
955
resetGrayList(Compartment * comp)956 void GCRuntime::resetGrayList(Compartment* comp) {
957 JSObject* src = comp->gcIncomingGrayPointers;
958 while (src) {
959 src = NextIncomingCrossCompartmentPointer(src, true);
960 }
961 comp->gcIncomingGrayPointers = nullptr;
962 }
963
964 #ifdef DEBUG
HasIncomingCrossCompartmentPointers(JSRuntime * rt)965 static bool HasIncomingCrossCompartmentPointers(JSRuntime* rt) {
966 for (SweepGroupCompartmentsIter c(rt); !c.done(); c.next()) {
967 if (c->gcIncomingGrayPointers) {
968 return true;
969 }
970 }
971
972 return false;
973 }
974 #endif
975
// Called when a cross-compartment wrapper is about to be nuked (turned into
// a dead object proxy). Removes GC bookkeeping that refers to the wrapper:
// the gray list entry and any WeakRef/FinalizationRegistry map entries.
void js::NotifyGCNukeWrapper(JSContext* cx, JSObject* wrapper) {
  MOZ_ASSERT(IsCrossCompartmentWrapper(wrapper));

  /*
   * References to target of wrapper are being removed, we no longer have to
   * remember to mark it.
   */
  RemoveFromGrayList(wrapper);

  /*
   * Clean up WeakRef maps which might include this wrapper.
   */
  JSObject* target = UncheckedUnwrapWithoutExpose(wrapper);
  if (target->is<WeakRefObject>()) {
    WeakRefObject* weakRef = &target->as<WeakRefObject>();
    // Only live WeakRefs (those still holding a target) need cleanup.
    if (weakRef->target()) {
      cx->runtime()->gc.nukeWeakRefWrapper(wrapper, weakRef);
    }
  }

  /*
   * Clean up FinalizationRecord record objects which might be the target of
   * this wrapper.
   */
  if (target->is<FinalizationRecordObject>()) {
    auto* record = &target->as<FinalizationRecordObject>();
    cx->runtime()->gc.nukeFinalizationRecordWrapper(wrapper, record);
  }
}
1005
// Bitflags returned by NotifyGCPreSwap to record which of the two objects
// was removed from a gray pointer list, so NotifyGCPostSwap can re-add the
// corresponding (now swapped) object.
enum {
  JS_GC_SWAP_OBJECT_A_REMOVED = 1 << 0,
  JS_GC_SWAP_OBJECT_B_REMOVED = 1 << 1
};
1010
NotifyGCPreSwap(JSObject * a,JSObject * b)1011 unsigned js::NotifyGCPreSwap(JSObject* a, JSObject* b) {
1012 /*
1013 * Two objects in the same compartment are about to have had their contents
1014 * swapped. If either of them are in our gray pointer list, then we remove
1015 * them from the lists, returning a bitset indicating what happened.
1016 */
1017 return (RemoveFromGrayList(a) ? JS_GC_SWAP_OBJECT_A_REMOVED : 0) |
1018 (RemoveFromGrayList(b) ? JS_GC_SWAP_OBJECT_B_REMOVED : 0);
1019 }
1020
NotifyGCPostSwap(JSObject * a,JSObject * b,unsigned removedFlags)1021 void js::NotifyGCPostSwap(JSObject* a, JSObject* b, unsigned removedFlags) {
1022 /*
1023 * Two objects in the same compartment have had their contents swapped. If
1024 * either of them were in our gray pointer list, we re-add them again.
1025 */
1026 if (removedFlags & JS_GC_SWAP_OBJECT_A_REMOVED) {
1027 DelayCrossCompartmentGrayMarking(b);
1028 }
1029 if (removedFlags & JS_GC_SWAP_OBJECT_B_REMOVED) {
1030 DelayCrossCompartmentGrayMarking(a);
1031 }
1032 }
1033
// Verify weak map marking invariants for all zones in the current sweep
// group. Always runs in DEBUG builds; in zeal (non-DEBUG) builds it runs
// only when the CheckWeakMapMarking zeal mode is active. No-op otherwise.
static inline void MaybeCheckWeakMapMarking(GCRuntime* gc) {
#if defined(JS_GC_ZEAL) || defined(DEBUG)

  bool shouldCheck;
# if defined(DEBUG)
  shouldCheck = true;
# else
  shouldCheck = gc->hasZealMode(ZealMode::CheckWeakMapMarking);
# endif

  if (shouldCheck) {
    for (SweepGroupZonesIter zone(gc); !zone.done(); zone.next()) {
      MOZ_RELEASE_ASSERT(WeakMapBase::checkMarkingForZone(zone));
    }
  }

#endif
}
1052
// Transition the current sweep group's zones from black-only to
// black-and-gray marking and mark incoming gray cross-compartment pointers
// recorded by previously swept groups. Always completes in one slice.
IncrementalProgress GCRuntime::beginMarkingSweepGroup(JSFreeOp* fop,
                                                      SliceBudget& budget) {
  MOZ_ASSERT(!markOnBackgroundThreadDuringSweeping);
  MOZ_ASSERT(marker.isDrained());
  MOZ_ASSERT(marker.markColor() == MarkColor::Black);
  MOZ_ASSERT(cellsToAssertNotGray.ref().empty());

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_MARK);

  // Change state of current group to MarkBlackAndGray to restrict gray marking
  // to this group. Note that there may be pointers to the atoms zone, and these
  // will be marked through, as they are not marked with
  // TraceCrossCompartmentEdge.
  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    zone->changeGCState(Zone::MarkBlackOnly, Zone::MarkBlackAndGray);
  }

  // Gray marking below is restored to black when this goes out of scope.
  AutoSetMarkColor setColorGray(marker, MarkColor::Gray);

  // Mark incoming gray pointers from previously swept compartments.
  markIncomingGrayCrossCompartmentPointers();

  return Finished;
}
1077
// Mark gray roots for zones in the current sweep group, with the marker
// temporarily set to gray. May yield if the budget is exhausted (the result
// of markGrayRoots is forwarded).
IncrementalProgress GCRuntime::markGrayRootsInCurrentGroup(
    JSFreeOp* fop, SliceBudget& budget) {
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_MARK);

  AutoSetMarkColor setColorGray(marker, MarkColor::Gray);

  return markGrayRoots<SweepGroupZonesIter>(
      budget, gcstats::PhaseKind::SWEEP_MARK_GRAY);
}
1087
markGray(JSFreeOp * fop,SliceBudget & budget)1088 IncrementalProgress GCRuntime::markGray(JSFreeOp* fop, SliceBudget& budget) {
1089 gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_MARK);
1090
1091 if (markUntilBudgetExhausted(budget) == NotFinished) {
1092 return NotFinished;
1093 }
1094
1095 return Finished;
1096 }
1097
// Finish marking for the current sweep group: first complete black weak
// marking, then repeat weak marking in gray. After this returns Finished,
// no further yielding is allowed until sweeping of the group starts.
IncrementalProgress GCRuntime::endMarkingSweepGroup(JSFreeOp* fop,
                                                    SliceBudget& budget) {
  MOZ_ASSERT(!markOnBackgroundThreadDuringSweeping);
  MOZ_ASSERT(marker.isDrained());

  MOZ_ASSERT(marker.markColor() == MarkColor::Black);
  MOZ_ASSERT(!HasIncomingCrossCompartmentPointers(rt));

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_MARK);

  // First pass: weak marking with the marker still black.
  if (markWeakReferencesInCurrentGroup(budget) == NotFinished) {
    return NotFinished;
  }

  AutoSetMarkColor setColorGray(marker, MarkColor::Gray);

  // Mark transitively inside the current compartment group.
  // Second pass: the same weak marking, now in gray.
  if (markWeakReferencesInCurrentGroup(budget) == NotFinished) {
    return NotFinished;
  }

  MOZ_ASSERT(marker.isDrained());

  // We must not yield after this point before we start sweeping the group.
  safeToYield = false;

  MaybeCheckWeakMapMarking(this);

  return Finished;
}
1128
// Causes the given WeakCache to be swept when run. Used for weak caches that
// do not support incremental sweeping; instances are run in parallel on
// helper threads (or on the main thread as a fallback).
class ImmediateSweepWeakCacheTask : public GCParallelTask {
  Zone* zone;                        // Zone owning the cache (may be runtime-wide; see callers).
  JS::detail::WeakCacheBase& cache;  // The cache to sweep.

  ImmediateSweepWeakCacheTask(const ImmediateSweepWeakCacheTask&) = delete;

 public:
  ImmediateSweepWeakCacheTask(GCRuntime* gc, Zone* zone,
                              JS::detail::WeakCacheBase& wc)
      : GCParallelTask(gc, gcstats::PhaseKind::SWEEP_WEAK_CACHES),
        zone(zone),
        cache(wc) {}

  // Move constructor so tasks can live in a Vector.
  ImmediateSweepWeakCacheTask(ImmediateSweepWeakCacheTask&& other)
      : GCParallelTask(std::move(other)),
        zone(other.zone),
        cache(other.cache) {}

  // Sweep the cache with the helper-thread lock released.
  void run(AutoLockHelperThreadState& lock) override {
    AutoUnlockHelperThreadState unlock(lock);
    AutoSetThreadIsSweeping threadIsSweeping(zone);
    SweepingTracer trc(gc->rt);
    cache.traceWeak(&trc, &gc->storeBuffer());
  }
};
1155
// Refresh the per-zone atom marking bitmaps from the chunk mark bits after
// marking, then re-mark atoms still referenced by uncollected zones. Also
// sweeps the symbol registry non-incrementally.
void GCRuntime::updateAtomsBitmap() {
  DenseBitmap marked;
  if (atomMarking.computeBitmapFromChunkMarkBits(rt, marked)) {
    for (GCZonesIter zone(this); !zone.done(); zone.next()) {
      atomMarking.refineZoneBitmapForCollectedZone(zone, marked);
    }
  } else {
    // Ignore OOM in computeBitmapFromChunkMarkBits. The
    // refineZoneBitmapForCollectedZone call can only remove atoms from the
    // zone bitmap, so it is conservative to just not call it.
  }

  atomMarking.markAtomsUsedByUncollectedZones(rt);

  // For convenience sweep these tables non-incrementally as part of bitmap
  // sweeping; they are likely to be much smaller than the main atoms table.
  SweepingTracer trc(rt);
  rt->symbolRegistry().traceWeak(&trc);
}
1175
// Sweep cross-compartment wrapper edges for all zones in the current sweep
// group. Runs as a parallel task from beginSweepingSweepGroup.
void GCRuntime::sweepCCWrappers() {
  SweepingTracer trc(rt);
  AutoSetThreadIsSweeping threadIsSweeping;  // This can touch all zones.
  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    zone->traceWeakCCWEdges(&trc);
  }
}
1183
// Sweep each realm's weak reference to its global object for realms in the
// current sweep group. Must run before embedding weak pointer callbacks.
void GCRuntime::sweepRealmGlobals() {
  SweepingTracer trc(rt);
  for (SweepGroupRealmsIter r(this); !r.done(); r.next()) {
    AutoSetThreadIsSweeping threadIsSweeping(r->zone());
    r->traceWeakGlobalEdge(&trc);
  }
}
1191
// Sweep miscellaneous per-realm weak tables (saved stacks, object realm
// data, regexps) for realms in the current sweep group. Runs as a parallel
// task from beginSweepingSweepGroup.
void GCRuntime::sweepMisc() {
  SweepingTracer trc(rt);
  for (SweepGroupRealmsIter r(this); !r.done(); r.next()) {
    AutoSetThreadIsSweeping threadIsSweeping(r->zone());
    r->traceWeakSavedStacks(&trc);
    r->traceWeakObjectRealm(&trc);
    r->traceWeakRegExps(&trc);
  }
}
1201
sweepCompressionTasks()1202 void GCRuntime::sweepCompressionTasks() {
1203 JSRuntime* runtime = rt;
1204
1205 // Attach finished compression tasks.
1206 AutoLockHelperThreadState lock;
1207 AttachFinishedCompressions(runtime, lock);
1208 SweepPendingCompressions(lock);
1209 }
1210
// Sweep weak maps for all zones in the current sweep group, first discarding
// the ephemeron edge tables which are no longer needed once marking for the
// group is complete. Runs as a parallel task.
void GCRuntime::sweepWeakMaps() {
  SweepingTracer trc(rt);
  AutoSetThreadIsSweeping threadIsSweeping;  // This may touch any zone.
  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    /* No need to look up any more weakmap keys from this sweep group. */
    AutoEnterOOMUnsafeRegion oomUnsafe;
    if (!zone->gcEphemeronEdges().clear()) {
      oomUnsafe.crash("clearing weak keys in beginSweepingSweepGroup()");
    }

    // Lock the storebuffer since this may access it when rehashing or resizing
    // the tables.
    AutoLockStoreBuffer lock(&storeBuffer());
    zone->sweepWeakMaps(&trc);
  }
}
1227
// Sweep the unique ID tables of all zones in the current sweep group. Runs
// as a parallel task from beginSweepingSweepGroup.
void GCRuntime::sweepUniqueIds() {
  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    AutoSetThreadIsSweeping threadIsSweeping(zone);
    zone->sweepUniqueIds();
  }
}
1234
// Drop unique ID table entries whose cells died in this GC.
void JS::Zone::sweepUniqueIds() {
  SweepingTracer trc(runtimeFromAnyThread());
  uniqueIds().traceWeak(&trc);
}
1239
/* static */
// Keep a unique-ID table entry alive iff its key cell is marked. Only valid
// during sweeping, as asserted.
bool UniqueIdGCPolicy::traceWeak(JSTracer* trc, Cell** keyp, uint64_t* valuep) {
  // Since this is only ever used for sweeping, we can optimize it for that
  // case. (Compacting GC updates this table manually when it moves a cell.)
  MOZ_ASSERT(trc->kind() == JS::TracerKind::Sweeping);
  return (*keyp)->isMarkedAny();
}
1247
// Sweep FinalizationRegistry/WeakRef observer edges for zones in the current
// sweep group, on the main thread only.
void GCRuntime::sweepFinalizationObserversOnMainThread() {
  // This calls back into the browser which expects to be called from the main
  // thread.
  gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::SWEEP_COMPARTMENTS);
  gcstats::AutoPhase ap2(stats(),
                         gcstats::PhaseKind::SWEEP_FINALIZATION_OBSERVERS);
  SweepingTracer trc(rt);
  AutoLockStoreBuffer lock(&storeBuffer());
  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    traceWeakFinalizationObserverEdges(&trc, zone);
  }
}
1260
// Start a GC parallel task on a helper thread, or run it synchronously on
// the main thread (recording its time against the parallel phase) when
// helper threads are unavailable.
void GCRuntime::startTask(GCParallelTask& task,
                          AutoLockHelperThreadState& lock) {
  if (!CanUseExtraThreads()) {
    AutoUnlockHelperThreadState unlock(lock);
    task.runFromMainThread();
    stats().recordParallelPhase(task.phaseKind, task.duration());
    return;
  }

  task.startWithLockHeld(lock);
}
1272
// Wait for a previously started GC parallel task to complete, attributing
// the wait time to the JOIN_PARALLEL_TASKS phase.
void GCRuntime::joinTask(GCParallelTask& task,
                         AutoLockHelperThreadState& lock) {
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::JOIN_PARALLEL_TASKS);
  task.joinWithLockHeld(lock);
}
1278
// Main-thread-only debugger sweeping. Must run before weak map sweeping
// (DebugAPI::sweepAll can modify weak maps) and must not overlap unique ID
// table sweeping (debug environments look up unique IDs).
void GCRuntime::sweepDebuggerOnMainThread(JSFreeOp* fop) {
  SweepingTracer trc(rt);
  AutoLockStoreBuffer lock(&storeBuffer());

  // Detach unreachable debuggers and global objects from each other.
  // This can modify weakmaps and so must happen before weakmap sweeping.
  DebugAPI::sweepAll(fop);

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_COMPARTMENTS);

  // Sweep debug environment information. This performs lookups in the Zone's
  // unique IDs table and so must not happen in parallel with sweeping that
  // table.
  {
    gcstats::AutoPhase ap2(stats(), gcstats::PhaseKind::SWEEP_MISC);
    for (SweepGroupRealmsIter r(rt); !r.done(); r.next()) {
      r->traceWeakDebugEnvironmentEdges(&trc);
    }
  }
}
1299
// Main-thread-only JIT data sweeping: cancel off-thread Ion compilations,
// sweep the JitcodeGlobalTable, discard JIT code in swept zones, then sweep
// JitRealm/JitZone data (which must follow the discard; see below).
void GCRuntime::sweepJitDataOnMainThread(JSFreeOp* fop) {
  SweepingTracer trc(rt);
  {
    gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_JIT_DATA);

    if (initialState != State::NotActive) {
      // Cancel any active or pending off thread compilations. We also did
      // this before marking (in DiscardJITCodeForGC) so this is a no-op
      // for non-incremental GCs.
      js::CancelOffThreadIonCompile(rt, JS::Zone::Sweep);
    }

    // Bug 1071218: the following method has not yet been refactored to
    // work on a single zone-group at once.

    // Sweep entries containing about-to-be-finalized JitCode and
    // update relocated TypeSet::Types inside the JitcodeGlobalTable.
    jit::JitRuntime::TraceWeakJitcodeGlobalTable(rt, &trc);
  }

  if (initialState != State::NotActive) {
    gcstats::AutoPhase apdc(stats(), gcstats::PhaseKind::SWEEP_DISCARD_CODE);
    for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
      zone->discardJitCode(fop);
    }
  }

  // JitZone/JitRealm must be swept *after* discarding JIT code, because
  // Zone::discardJitCode might access CacheIRStubInfos deleted here.
  {
    gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_JIT_DATA);

    for (SweepGroupRealmsIter r(rt); !r.done(); r.next()) {
      r->traceWeakEdgesInJitRealm(&trc);
    }

    for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
      if (jit::JitZone* jitZone = zone->jitZone()) {
        jitZone->traceWeak(&trc);
      }
    }
  }
}
1343
// Tasks for sweeping weak caches that don't support incremental sweeping;
// started in parallel from beginSweepingSweepGroup.
using WeakCacheTaskVector =
    mozilla::Vector<ImmediateSweepWeakCacheTask, 0, SystemAllocPolicy>;
1346
// Call a functor for all weak caches that need to be swept in the current
// sweep group. The functor receives (cache, zone), where zone is nullptr for
// runtime-wide caches. Iteration stops early (returning false) if the
// functor returns false.
template <typename Functor>
static inline bool IterateWeakCaches(JSRuntime* rt, Functor f) {
  // Per-zone caches for zones in the current sweep group.
  for (SweepGroupZonesIter zone(rt); !zone.done(); zone.next()) {
    for (JS::detail::WeakCacheBase* cache : zone->weakCaches()) {
      if (!f(cache, zone.get())) {
        return false;
      }
    }
  }

  // Runtime-wide caches (no associated zone).
  for (JS::detail::WeakCacheBase* cache : rt->weakCaches()) {
    if (!f(cache, nullptr)) {
      return false;
    }
  }

  return true;
}
1367
// Prepare weak cache sweeping: caches that support incremental sweeping get
// a barrier tracer set (swept later via sweepWeakCaches); the rest are
// collected into |immediateTasks| for immediate parallel sweeping. Returns
// false on OOM, in which case the task vector is emptied and the caller
// falls back to SweepAllWeakCachesOnMainThread.
static bool PrepareWeakCacheTasks(JSRuntime* rt,
                                  WeakCacheTaskVector* immediateTasks) {
  // Start incremental sweeping for caches that support it or add to a vector
  // of sweep tasks to run on a helper thread.

  MOZ_ASSERT(immediateTasks->empty());

  GCRuntime* gc = &rt->gc;
  bool ok =
      IterateWeakCaches(rt, [&](JS::detail::WeakCacheBase* cache, Zone* zone) {
        // Empty caches need no sweeping at all.
        if (cache->empty()) {
          return true;
        }

        // Caches that support incremental sweeping will be swept later.
        if (zone && cache->setIncrementalBarrierTracer(&gc->sweepingTracer)) {
          return true;
        }

        return immediateTasks->emplaceBack(gc, zone, *cache);
      });

  if (!ok) {
    immediateTasks->clearAndFree();
  }

  return ok;
}
1396
// Fallback path used when PrepareWeakCacheTasks fails (OOM): sweep every
// weak cache synchronously on the main thread, clearing any incremental
// barrier tracers that were already installed.
static void SweepAllWeakCachesOnMainThread(JSRuntime* rt) {
  // If we ran out of memory, do all the work on the main thread.
  gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::SWEEP_WEAK_CACHES);
  SweepingTracer trc(rt);
  IterateWeakCaches(rt, [&](JS::detail::WeakCacheBase* cache, Zone* zone) {
    if (cache->needsIncrementalBarrier()) {
      cache->setIncrementalBarrierTracer(nullptr);
    }
    cache->traceWeak(&trc, &rt->gc.storeBuffer());
    return true;
  });
}
1409
// Run the embedding's finalize and weak-pointer callbacks for the current
// sweep group: GROUP_PREPARE, then zone and compartment weak pointer
// callbacks, then GROUP_START. Holds the store buffer lock throughout.
void GCRuntime::sweepEmbeddingWeakPointers(JSFreeOp* fop) {
  using namespace gcstats;

  AutoLockStoreBuffer lock(&storeBuffer());

  AutoPhase ap(stats(), PhaseKind::FINALIZE_START);
  callFinalizeCallbacks(fop, JSFINALIZE_GROUP_PREPARE);
  {
    AutoPhase ap2(stats(), PhaseKind::WEAK_ZONES_CALLBACK);
    callWeakPointerZonesCallbacks(&sweepingTracer);
  }
  {
    AutoPhase ap2(stats(), PhaseKind::WEAK_COMPARTMENT_CALLBACK);
    for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
      for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
        callWeakPointerCompartmentCallbacks(&sweepingTracer, comp);
      }
    }
  }
  callFinalizeCallbacks(fop, JSFINALIZE_GROUP_START);
}
1431
IncrementalProgress GCRuntime::beginSweepingSweepGroup(JSFreeOp* fop,
                                                       SliceBudget& budget) {
  /*
   * Begin sweeping the group of zones in currentSweepGroup, performing
   * actions that must be done before yielding to caller.
   *
   * The ordering here is load-bearing: debugger sweeping before weak maps,
   * finalization observers before realm globals, realm globals before
   * embedding weak pointers, and atom bitmap updates before any parallel
   * sweeping. See the comments on each step.
   */

  using namespace gcstats;

  AutoSCC scc(stats(), sweepGroupIndex);

  bool sweepingAtoms = false;
  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    /* Set the GC state to sweeping. */
    zone->changeGCState(Zone::MarkBlackAndGray, Zone::Sweep);

    /* Purge the ArenaLists before sweeping. */
    zone->arenas.checkSweepStateNotInUse();
    zone->arenas.unmarkPreMarkedFreeCells();
    zone->arenas.clearFreeLists();

    if (zone->isAtomsZone()) {
      sweepingAtoms = true;
    }
  }

#ifdef JS_GC_ZEAL
  validateIncrementalMarking();
#endif

#ifdef DEBUG
  // Check cells flagged during marking as must-not-be-gray, now that gray
  // marking for this group is complete.
  for (auto cell : cellsToAssertNotGray.ref()) {
    JS::AssertCellIsNotGray(cell);
  }
  cellsToAssertNotGray.ref().clearAndFree();
#endif

  // Updating the atom marking bitmaps. This marks atoms referenced by
  // uncollected zones so cannot be done in parallel with the other sweeping
  // work below.
  if (sweepingAtoms) {
    AutoPhase ap(stats(), PhaseKind::UPDATE_ATOMS_BITMAP);
    updateAtomsBitmap();
  }

  AutoSetThreadIsSweeping threadIsSweeping;

  // This must happen before sweeping realm globals.
  sweepDebuggerOnMainThread(fop);

  // FinalizationRegistry sweeping touches weak maps and so must not run in
  // parallel with that. This triggers a read barrier and can add marking work
  // for zones that are still marking. Must happen before sweeping realm
  // globals.
  sweepFinalizationObserversOnMainThread();

  // This must happen before updating embedding weak pointers.
  sweepRealmGlobals();

  sweepEmbeddingWeakPointers(fop);

  {
    AutoLockHelperThreadState lock;

    AutoPhase ap(stats(), PhaseKind::SWEEP_COMPARTMENTS);

    // Each AutoRunParallelTask starts its sweep on a helper thread (or runs
    // it on the main thread) and joins it when the scope exits.
    AutoRunParallelTask sweepCCWrappers(this, &GCRuntime::sweepCCWrappers,
                                        PhaseKind::SWEEP_CC_WRAPPER, lock);
    AutoRunParallelTask sweepMisc(this, &GCRuntime::sweepMisc,
                                  PhaseKind::SWEEP_MISC, lock);
    AutoRunParallelTask sweepCompTasks(this, &GCRuntime::sweepCompressionTasks,
                                       PhaseKind::SWEEP_COMPRESSION, lock);
    AutoRunParallelTask sweepWeakMaps(this, &GCRuntime::sweepWeakMaps,
                                      PhaseKind::SWEEP_WEAKMAPS, lock);
    AutoRunParallelTask sweepUniqueIds(this, &GCRuntime::sweepUniqueIds,
                                       PhaseKind::SWEEP_UNIQUEIDS, lock);

    WeakCacheTaskVector sweepCacheTasks;
    bool canSweepWeakCachesOffThread =
        PrepareWeakCacheTasks(rt, &sweepCacheTasks);
    if (canSweepWeakCachesOffThread) {
      weakCachesToSweep.ref().emplace(currentSweepGroup);
      for (auto& task : sweepCacheTasks) {
        startTask(task, lock);
      }
    }

    {
      AutoUnlockHelperThreadState unlock(lock);
      sweepJitDataOnMainThread(fop);

      // OOM fallback: PrepareWeakCacheTasks failed, so sweep everything
      // synchronously here instead.
      if (!canSweepWeakCachesOffThread) {
        MOZ_ASSERT(sweepCacheTasks.empty());
        SweepAllWeakCachesOnMainThread(rt);
      }
    }

    for (auto& task : sweepCacheTasks) {
      joinTask(task, lock);
    }
  }

  if (sweepingAtoms) {
    startSweepingAtomsTable();
  }

  // Queue all GC things in all zones for sweeping, either on the foreground
  // or on the background thread.

  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    for (const auto& phase : BackgroundFinalizePhases) {
      initBackgroundSweep(zone, fop, phase);
    }

    zone->arenas.queueForegroundThingsForSweep();
  }

  MOZ_ASSERT(!sweepZone);

  safeToYield = true;
  markOnBackgroundThreadDuringSweeping = CanUseExtraThreads();

  return Finished;
}
1556
1557 #ifdef JS_GC_ZEAL
shouldYieldForZeal(ZealMode mode)1558 bool GCRuntime::shouldYieldForZeal(ZealMode mode) {
1559 bool yield = useZeal && hasZealMode(mode);
1560
1561 // Only yield on the first sweep slice for this mode.
1562 bool firstSweepSlice = initialState != State::Sweep;
1563 if (mode == ZealMode::IncrementalMultipleSlices && !firstSweepSlice) {
1564 yield = false;
1565 }
1566
1567 return yield;
1568 }
1569 #endif
1570
// Finish sweeping the current group: join background marking, run GROUP_END
// finalize callbacks, move zones to the Finished state, and queue them for
// background sweeping (atoms zone last).
IncrementalProgress GCRuntime::endSweepingSweepGroup(JSFreeOp* fop,
                                                     SliceBudget& budget) {
  // This is to prevent a race between markTask checking the zone state and
  // us changing it below.
  if (joinBackgroundMarkTask() == NotFinished) {
    return NotFinished;
  }

  MOZ_ASSERT(marker.isDrained());

  // Disable background marking during sweeping until we start sweeping the next
  // zone group.
  markOnBackgroundThreadDuringSweeping = false;

  {
    gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::FINALIZE_END);
    AutoLockStoreBuffer lock(&storeBuffer());
    JSFreeOp fop(rt);
    callFinalizeCallbacks(&fop, JSFINALIZE_GROUP_END);
  }

  /* Free LIFO blocks on a background thread if possible. */
  startBackgroundFree();

  /* Update the GC state for zones we have swept. */
  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    if (jit::JitZone* jitZone = zone->jitZone()) {
      // Clear out any small pools that we're hanging on to.
      jitZone->execAlloc().purge();
    }
    AutoLockGC lock(this);
    zone->changeGCState(Zone::Sweep, Zone::Finished);
    zone->arenas.unmarkPreMarkedFreeCells();
    zone->arenas.checkNoArenasToUpdate();
    zone->pretenuring.clearCellCountsInNewlyCreatedArenas();
  }

  /*
   * Start background thread to sweep zones if required, sweeping the atoms
   * zone last if present.
   */
  bool sweepAtomsZone = false;
  ZoneList zones;
  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    if (zone->isAtomsZone()) {
      sweepAtomsZone = true;
    } else {
      zones.append(zone);
    }
  }
  if (sweepAtomsZone) {
    zones.append(atomsZone);
  }

  queueZonesAndStartBackgroundSweep(zones);

  return Finished;
}
1629
// Continue draining the mark stack during the sweep phase, either by handing
// the work to the background mark task (returning Finished so the mutator is
// not yielded to) or by marking here within the budget.
IncrementalProgress GCRuntime::markDuringSweeping(JSFreeOp* fop,
                                                  SliceBudget& budget) {
  MOZ_ASSERT(markTask.isIdle());

  if (marker.isDrained()) {
    return Finished;
  }

  if (markOnBackgroundThreadDuringSweeping) {
    AutoLockHelperThreadState lock;
    MOZ_ASSERT(markTask.isIdle(lock));
    markTask.setBudget(budget);
    markTask.startOrRunIfIdle(lock);
    return Finished;  // This means don't yield to the mutator here.
  }

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_MARK);
  return markUntilBudgetExhausted(budget);
}
1649
// One-time setup for the sweep phase: verify preconditions, drop string
// wrappers, and partition zones into sweep groups.
void GCRuntime::beginSweepPhase(JS::GCReason reason, AutoGCSession& session) {
  /*
   * Sweep phase.
   *
   * Finalize as we sweep, outside of lock but with RuntimeHeapIsBusy()
   * true so that any attempt to allocate a GC-thing from a finalizer will
   * fail, rather than nest badly and leave the unmarked newborn to be swept.
   */

  MOZ_ASSERT(!abortSweepAfterCurrentGroup);
  MOZ_ASSERT(!markOnBackgroundThreadDuringSweeping);

#ifdef DEBUG
  releaseHeldRelocatedArenas();
  verifyAllChunks();
#endif

#ifdef JS_GC_ZEAL
  computeNonIncrementalMarkingForValidation(session);
#endif

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP);

  AssertNoWrappersInGrayList(rt);
  dropStringWrappers();

  groupZonesForSweeping(reason);

  sweepActions->assertFinished();
}
1680
// Incrementally finalize arenas of |thingKind| in |zone| on the main thread.
// Returns false if the slice budget ran out before finalization completed,
// in which case partially swept arenas are published via
// setIncrementalSweptArenas so iteration can still find them.
bool GCRuntime::foregroundFinalize(JSFreeOp* fop, Zone* zone,
                                   AllocKind thingKind,
                                   SliceBudget& sliceBudget,
                                   SortedArenaList& sweepList) {
  ArenaLists& lists = zone->arenas;
  lists.checkNoArenasToUpdateForKind(thingKind);

  // Non-empty arenas are reused for use for new allocations as soon as the
  // finalizers for that allocation kind have run. Empty arenas are only
  // released when everything in the zone has been swept (see
  // GCRuntime::sweepBackgroundThings for more details).
  if (!FinalizeArenas(fop, lists.collectingArenaList(thingKind), sweepList,
                      thingKind, sliceBudget)) {
    // Copy the current contents of sweepList so that ArenaIter can find them.
    lists.setIncrementalSweptArenas(thingKind, sweepList);
    return false;
  }

  // Finalization finished: stash empty arenas, merge survivors back into the
  // zone's lists and clear the incremental-sweep snapshot.
  sweepList.extractEmpty(&lists.savedEmptyArenas.ref());
  lists.mergeFinalizedArenas(thingKind, sweepList);
  lists.clearIncrementalSweptArenas();

  return true;
}
1705
// Background marking task; created idle with an unlimited budget (the real
// budget is installed via setBudget before each start).
BackgroundMarkTask::BackgroundMarkTask(GCRuntime* gc)
    : GCParallelTask(gc, gcstats::PhaseKind::SWEEP_MARK),
      budget(SliceBudget::unlimited()) {}
1709
// Drain the mark stack on a helper thread, stashing the progress result in
// gc->sweepMarkResult for joinBackgroundMarkTask to pick up.
void js::gc::BackgroundMarkTask::run(AutoLockHelperThreadState& lock) {
  AutoUnlockHelperThreadState unlock(lock);

  // Time reporting is handled separately for parallel tasks.
  gc->sweepMarkResult =
      gc->markUntilBudgetExhausted(this->budget, GCMarker::DontReportMarkTime);
}
1717
// Wait for the background mark task (if running) and return the progress it
// recorded, resetting the stored result to Finished for the next run.
IncrementalProgress GCRuntime::joinBackgroundMarkTask() {
  AutoLockHelperThreadState lock;
  if (markTask.isIdle(lock)) {
    return Finished;
  }

  joinTask(markTask, lock);

  IncrementalProgress result = sweepMarkResult;
  sweepMarkResult = Finished;
  return result;
}
1730
1731 template <typename T>
SweepThing(JSFreeOp * fop,T * thing)1732 static void SweepThing(JSFreeOp* fop, T* thing) {
1733 if (!thing->isMarkedAny()) {
1734 thing->sweep(fop);
1735 }
1736 }
1737
// Sweep cells of type T arena by arena, advancing |*arenasToSweep| as each
// arena completes so the work can resume where it left off. Returns false
// when the slice budget is exhausted with arenas remaining.
template <typename T>
static bool SweepArenaList(JSFreeOp* fop, Arena** arenasToSweep,
                           SliceBudget& sliceBudget) {
  while (Arena* arena = *arenasToSweep) {
    MOZ_ASSERT(arena->zone->isGCSweeping());

    for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
      SweepThing(fop, cell.as<T>());
    }

    // Advance past this arena before checking the budget so we never repeat
    // a finished arena.
    Arena* next = arena->next;
    MOZ_ASSERT_IF(next, next->zone == arena->zone);
    *arenasToSweep = next;

    AllocKind kind = MapTypeToAllocKind<T>::kind;
    sliceBudget.step(Arena::thingsPerArena(kind));
    if (sliceBudget.isOverBudget()) {
      return false;
    }
  }

  return true;
}
1761
// Set up incremental sweeping of the atoms table, or sweep it all at once if
// the incremental setup fails (e.g. OOM allocating the secondary tables).
void GCRuntime::startSweepingAtomsTable() {
  auto& maybeAtoms = maybeAtomsToSweep.ref();
  MOZ_ASSERT(maybeAtoms.isNothing());

  AtomsTable* atomsTable = rt->atomsForSweeping();
  if (!atomsTable) {
    return;
  }

  // Create secondary tables to hold new atoms added while we're sweeping the
  // main tables incrementally.
  if (!atomsTable->startIncrementalSweep(maybeAtoms)) {
    SweepingTracer trc(rt);
    atomsTable->traceWeak(&trc);
  }
}
1778
// Incremental sweep action for the atoms table. A no-op (Finished) when the
// atoms zone is not being swept or the table was already fully swept.
IncrementalProgress GCRuntime::sweepAtomsTable(JSFreeOp* fop,
                                               SliceBudget& budget) {
  if (!atomsZone->isGCSweeping()) {
    return Finished;
  }

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_ATOMS_TABLE);

  auto& maybeAtoms = maybeAtomsToSweep.ref();
  if (!maybeAtoms) {
    return Finished;
  }

  if (!rt->atomsForSweeping()->sweepIncrementally(maybeAtoms.ref(), budget)) {
    return NotFinished;
  }

  maybeAtoms.reset();

  return Finished;
}
1800
// Parallel-work callback: fully sweep one weak cache and clear its
// incremental barrier tracer. Returns the work-step count reported by
// traceWeak, used by the parallel-work driver for budget accounting.
static size_t IncrementalSweepWeakCache(GCRuntime* gc,
                                        const WeakCacheToSweep& item) {
  AutoSetThreadIsSweeping threadIsSweeping(item.zone);

  JS::detail::WeakCacheBase* cache = item.cache;
  MOZ_ASSERT(cache->needsIncrementalBarrier());

  SweepingTracer trc(gc->rt);
  size_t steps = cache->traceWeak(&trc, &gc->storeBuffer());
  cache->setIncrementalBarrierTracer(nullptr);

  return steps;
}
1814
// Iterate the weak caches still needing an incremental sweep across the
// zones of |sweepGroup|, starting from the group's first zone and cache.
WeakCacheSweepIterator::WeakCacheSweepIterator(JS::Zone* sweepGroup)
    : sweepZone(sweepGroup), sweepCache(sweepZone->weakCaches().getFirst()) {
  settle();
}
1819
done() const1820 bool WeakCacheSweepIterator::done() const { return !sweepZone; }
1821
// Return the current (cache, zone) work item; only valid while !done().
WeakCacheToSweep WeakCacheSweepIterator::get() const {
  MOZ_ASSERT(!done());

  return {sweepCache, sweepZone};
}
1827
// Advance to the next cache needing a sweep, moving to later zones in the
// group as necessary (via settle()).
void WeakCacheSweepIterator::next() {
  MOZ_ASSERT(!done());

  sweepCache = sweepCache->getNext();
  settle();
}
1834
// Move forward until positioned on a cache that still needs its incremental
// barrier swept, or until all zones in the group are exhausted (done()).
void WeakCacheSweepIterator::settle() {
  while (sweepZone) {
    // Skip caches in this zone that don't need sweeping.
    while (sweepCache && !sweepCache->needsIncrementalBarrier()) {
      sweepCache = sweepCache->getNext();
    }

    if (sweepCache) {
      break;
    }

    // Exhausted this zone's caches; move to the next zone in the group.
    sweepZone = sweepZone->nextNodeInGroup();
    if (sweepZone) {
      sweepCache = sweepZone->weakCaches().getFirst();
    }
  }

  // Invariant: either fully done, or positioned on a sweepable cache.
  MOZ_ASSERT((!sweepZone && !sweepCache) ||
             (sweepCache && sweepCache->needsIncrementalBarrier()));
}
1854
// Sweep the runtime's weak caches, distributing the work across helper
// threads. Returns NotFinished if the slice budget ran out first.
IncrementalProgress GCRuntime::sweepWeakCaches(JSFreeOp* fop,
                                               SliceBudget& budget) {
  // No pending iteration state means this phase already completed.
  if (weakCachesToSweep.ref().isNothing()) {
    return Finished;
  }

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_COMPARTMENTS);

  WeakCacheSweepIterator& work = weakCachesToSweep.ref().ref();

  AutoLockHelperThreadState lock;

  {
    // NOTE(review): AutoRunParallelWork is constructed with the helper thread
    // lock held and the lock is released for the duration of this inner scope
    // — presumably the parallel tasks run while it is unlocked; confirm
    // against AutoRunParallelWork's implementation before relying on this.
    AutoRunParallelWork runWork(this, IncrementalSweepWeakCache,
                                gcstats::PhaseKind::SWEEP_WEAK_CACHES, work,
                                budget, lock);
    AutoUnlockHelperThreadState unlock(lock);
  }

  // If the iterator reached the end, clear the persistent state so the next
  // call (and the next GC) starts fresh.
  if (work.done()) {
    weakCachesToSweep.ref().reset();
    return Finished;
  }

  return NotFinished;
}
1881
finalizeAllocKind(JSFreeOp * fop,SliceBudget & budget)1882 IncrementalProgress GCRuntime::finalizeAllocKind(JSFreeOp* fop,
1883 SliceBudget& budget) {
1884 MOZ_ASSERT(sweepZone->isGCSweeping());
1885
1886 // Set the number of things per arena for this AllocKind.
1887 size_t thingsPerArena = Arena::thingsPerArena(sweepAllocKind);
1888 auto& sweepList = incrementalSweepList.ref();
1889 sweepList.setThingsPerArena(thingsPerArena);
1890
1891 AutoSetThreadIsSweeping threadIsSweeping(sweepZone);
1892
1893 if (!foregroundFinalize(fop, sweepZone, sweepAllocKind, budget, sweepList)) {
1894 return NotFinished;
1895 }
1896
1897 // Reset the slots of the sweep list that we used.
1898 sweepList.reset(thingsPerArena);
1899
1900 return Finished;
1901 }
1902
sweepPropMapTree(JSFreeOp * fop,SliceBudget & budget)1903 IncrementalProgress GCRuntime::sweepPropMapTree(JSFreeOp* fop,
1904 SliceBudget& budget) {
1905 // Remove dead SharedPropMaps from the tree. This happens incrementally on the
1906 // main thread. PropMaps are finalized later on the a background thread.
1907
1908 gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_PROP_MAP);
1909
1910 ArenaLists& al = sweepZone->arenas;
1911
1912 if (!SweepArenaList<CompactPropMap>(
1913 fop, &al.gcCompactPropMapArenasToUpdate.ref(), budget)) {
1914 return NotFinished;
1915 }
1916 if (!SweepArenaList<NormalPropMap>(
1917 fop, &al.gcNormalPropMapArenasToUpdate.ref(), budget)) {
1918 return NotFinished;
1919 }
1920
1921 return Finished;
1922 }
1923
// Adapts a standard container with an STL-style begin()/end() interface to
// the done()/get()/next() protocol used by the incremental sweep machinery.
template <typename Container>
class ContainerIter {
  using Iter = decltype(std::declval<const Container>().begin());
  using Elem = decltype(*std::declval<Iter>());

  Iter pos;
  const Iter stop;

 public:
  explicit ContainerIter(const Container& container)
      : pos(container.begin()), stop(container.end()) {}

  bool done() const { return pos == stop; }

  Elem get() const { return *pos; }

  void next() {
    MOZ_ASSERT(!done());
    ++pos;
  }
};
1947
1948 // IncrementalIter is a template class that makes a normal iterator into one
1949 // that can be used to perform incremental work by using external state that
1950 // persists between instantiations. The state is only initialised on the first
1951 // use and subsequent uses carry on from the previous state.
1952 template <typename Iter>
1953 struct IncrementalIter {
1954 using State = mozilla::Maybe<Iter>;
1955 using Elem = decltype(std::declval<Iter>().get());
1956
1957 private:
1958 State& maybeIter;
1959
1960 public:
1961 template <typename... Args>
IncrementalIterIncrementalIter1962 explicit IncrementalIter(State& maybeIter, Args&&... args)
1963 : maybeIter(maybeIter) {
1964 if (maybeIter.isNothing()) {
1965 maybeIter.emplace(std::forward<Args>(args)...);
1966 }
1967 }
1968
~IncrementalIterIncrementalIter1969 ~IncrementalIter() {
1970 if (done()) {
1971 maybeIter.reset();
1972 }
1973 }
1974
doneIncrementalIter1975 bool done() const { return maybeIter.ref().done(); }
1976
getIncrementalIter1977 Elem get() const { return maybeIter.ref().get(); }
1978
nextIncrementalIter1979 void next() { maybeIter.ref().next(); }
1980 };
1981
1982 // Iterate through the sweep groups created by
1983 // GCRuntime::groupZonesForSweeping().
1984 class js::gc::SweepGroupsIter {
1985 GCRuntime* gc;
1986
1987 public:
SweepGroupsIter(JSRuntime * rt)1988 explicit SweepGroupsIter(JSRuntime* rt) : gc(&rt->gc) {
1989 MOZ_ASSERT(gc->currentSweepGroup);
1990 }
1991
done() const1992 bool done() const { return !gc->currentSweepGroup; }
1993
get() const1994 Zone* get() const { return gc->currentSweepGroup; }
1995
next()1996 void next() {
1997 MOZ_ASSERT(!done());
1998 gc->getNextSweepGroup();
1999 }
2000 };
2001
2002 namespace sweepaction {
2003
2004 // Implementation of the SweepAction interface that calls a method on GCRuntime.
2005 class SweepActionCall final : public SweepAction {
2006 using Method = IncrementalProgress (GCRuntime::*)(JSFreeOp* fop,
2007 SliceBudget& budget);
2008
2009 Method method;
2010
2011 public:
SweepActionCall(Method m)2012 explicit SweepActionCall(Method m) : method(m) {}
run(Args & args)2013 IncrementalProgress run(Args& args) override {
2014 return (args.gc->*method)(args.fop, args.budget);
2015 }
assertFinished() const2016 void assertFinished() const override {}
2017 };
2018
2019 // Implementation of the SweepAction interface that yields in a specified zeal
2020 // mode.
2021 class SweepActionMaybeYield final : public SweepAction {
2022 #ifdef JS_GC_ZEAL
2023 ZealMode mode;
2024 bool isYielding;
2025 #endif
2026
2027 public:
SweepActionMaybeYield(ZealMode mode)2028 explicit SweepActionMaybeYield(ZealMode mode)
2029 #ifdef JS_GC_ZEAL
2030 : mode(mode),
2031 isYielding(false)
2032 #endif
2033 {
2034 }
2035
run(Args & args)2036 IncrementalProgress run(Args& args) override {
2037 #ifdef JS_GC_ZEAL
2038 if (!isYielding && args.gc->shouldYieldForZeal(mode)) {
2039 isYielding = true;
2040 return NotFinished;
2041 }
2042
2043 isYielding = false;
2044 #endif
2045 return Finished;
2046 }
2047
assertFinished() const2048 void assertFinished() const override { MOZ_ASSERT(!isYielding); }
2049
2050 // These actions should be skipped if GC zeal is not configured.
2051 #ifndef JS_GC_ZEAL
shouldSkip()2052 bool shouldSkip() override { return true; }
2053 #endif
2054 };
2055
2056 // Implementation of the SweepAction interface that calls a list of actions in
2057 // sequence.
2058 class SweepActionSequence final : public SweepAction {
2059 using ActionVector = Vector<UniquePtr<SweepAction>, 0, SystemAllocPolicy>;
2060 using Iter = IncrementalIter<ContainerIter<ActionVector>>;
2061
2062 ActionVector actions;
2063 typename Iter::State iterState;
2064
2065 public:
init(UniquePtr<SweepAction> * acts,size_t count)2066 bool init(UniquePtr<SweepAction>* acts, size_t count) {
2067 for (size_t i = 0; i < count; i++) {
2068 auto& action = acts[i];
2069 if (!action) {
2070 return false;
2071 }
2072 if (action->shouldSkip()) {
2073 continue;
2074 }
2075 if (!actions.emplaceBack(std::move(action))) {
2076 return false;
2077 }
2078 }
2079 return true;
2080 }
2081
run(Args & args)2082 IncrementalProgress run(Args& args) override {
2083 for (Iter iter(iterState, actions); !iter.done(); iter.next()) {
2084 if (iter.get()->run(args) == NotFinished) {
2085 return NotFinished;
2086 }
2087 }
2088 return Finished;
2089 }
2090
assertFinished() const2091 void assertFinished() const override {
2092 MOZ_ASSERT(iterState.isNothing());
2093 for (const auto& action : actions) {
2094 action->assertFinished();
2095 }
2096 }
2097 };
2098
2099 template <typename Iter, typename Init>
2100 class SweepActionForEach final : public SweepAction {
2101 using Elem = decltype(std::declval<Iter>().get());
2102 using IncrIter = IncrementalIter<Iter>;
2103
2104 Init iterInit;
2105 Elem* elemOut;
2106 UniquePtr<SweepAction> action;
2107 typename IncrIter::State iterState;
2108
2109 public:
SweepActionForEach(const Init & init,Elem * maybeElemOut,UniquePtr<SweepAction> action)2110 SweepActionForEach(const Init& init, Elem* maybeElemOut,
2111 UniquePtr<SweepAction> action)
2112 : iterInit(init), elemOut(maybeElemOut), action(std::move(action)) {}
2113
run(Args & args)2114 IncrementalProgress run(Args& args) override {
2115 MOZ_ASSERT_IF(elemOut, *elemOut == Elem());
2116 auto clearElem = mozilla::MakeScopeExit([&] { setElem(Elem()); });
2117 for (IncrIter iter(iterState, iterInit); !iter.done(); iter.next()) {
2118 setElem(iter.get());
2119 if (action->run(args) == NotFinished) {
2120 return NotFinished;
2121 }
2122 }
2123 return Finished;
2124 }
2125
assertFinished() const2126 void assertFinished() const override {
2127 MOZ_ASSERT(iterState.isNothing());
2128 MOZ_ASSERT_IF(elemOut, *elemOut == Elem());
2129 action->assertFinished();
2130 }
2131
2132 private:
setElem(const Elem & value)2133 void setElem(const Elem& value) {
2134 if (elemOut) {
2135 *elemOut = value;
2136 }
2137 }
2138 };
2139
Call(IncrementalProgress (GCRuntime::* method)(JSFreeOp * fop,SliceBudget & budget))2140 static UniquePtr<SweepAction> Call(IncrementalProgress (GCRuntime::*method)(
2141 JSFreeOp* fop, SliceBudget& budget)) {
2142 return MakeUnique<SweepActionCall>(method);
2143 }
2144
MaybeYield(ZealMode zealMode)2145 static UniquePtr<SweepAction> MaybeYield(ZealMode zealMode) {
2146 return MakeUnique<SweepActionMaybeYield>(zealMode);
2147 }
2148
2149 template <typename... Rest>
Sequence(UniquePtr<SweepAction> first,Rest...rest)2150 static UniquePtr<SweepAction> Sequence(UniquePtr<SweepAction> first,
2151 Rest... rest) {
2152 UniquePtr<SweepAction> actions[] = {std::move(first), std::move(rest)...};
2153 auto seq = MakeUnique<SweepActionSequence>();
2154 if (!seq || !seq->init(actions, std::size(actions))) {
2155 return nullptr;
2156 }
2157
2158 return UniquePtr<SweepAction>(std::move(seq));
2159 }
2160
RepeatForSweepGroup(JSRuntime * rt,UniquePtr<SweepAction> action)2161 static UniquePtr<SweepAction> RepeatForSweepGroup(
2162 JSRuntime* rt, UniquePtr<SweepAction> action) {
2163 if (!action) {
2164 return nullptr;
2165 }
2166
2167 using Action = SweepActionForEach<SweepGroupsIter, JSRuntime*>;
2168 return js::MakeUnique<Action>(rt, nullptr, std::move(action));
2169 }
2170
ForEachZoneInSweepGroup(JSRuntime * rt,Zone ** zoneOut,UniquePtr<SweepAction> action)2171 static UniquePtr<SweepAction> ForEachZoneInSweepGroup(
2172 JSRuntime* rt, Zone** zoneOut, UniquePtr<SweepAction> action) {
2173 if (!action) {
2174 return nullptr;
2175 }
2176
2177 using Action = SweepActionForEach<SweepGroupZonesIter, JSRuntime*>;
2178 return js::MakeUnique<Action>(rt, zoneOut, std::move(action));
2179 }
2180
ForEachAllocKind(AllocKinds kinds,AllocKind * kindOut,UniquePtr<SweepAction> action)2181 static UniquePtr<SweepAction> ForEachAllocKind(AllocKinds kinds,
2182 AllocKind* kindOut,
2183 UniquePtr<SweepAction> action) {
2184 if (!action) {
2185 return nullptr;
2186 }
2187
2188 using Action = SweepActionForEach<ContainerIter<AllocKinds>, AllocKinds>;
2189 return js::MakeUnique<Action>(kinds, kindOut, std::move(action));
2190 }
2191
2192 } // namespace sweepaction
2193
// Build the tree of incremental sweep actions executed by
// performSweepActions. The order of actions below defines the sweeping order
// for each sweep group; MaybeYield nodes are yield points used by GC zeal
// testing and are skipped entirely in non-zeal builds. Returns false on
// allocation failure (each combinator propagates null).
bool GCRuntime::initSweepActions() {
  using namespace sweepaction;
  using sweepaction::Call;

  sweepActions.ref() = RepeatForSweepGroup(
      rt,
      Sequence(
          Call(&GCRuntime::beginMarkingSweepGroup),
          Call(&GCRuntime::markGrayRootsInCurrentGroup),
          MaybeYield(ZealMode::YieldWhileGrayMarking),
          Call(&GCRuntime::markGray), Call(&GCRuntime::endMarkingSweepGroup),
          Call(&GCRuntime::beginSweepingSweepGroup),
          MaybeYield(ZealMode::IncrementalMultipleSlices),
          MaybeYield(ZealMode::YieldBeforeSweepingAtoms),
          Call(&GCRuntime::sweepAtomsTable),
          MaybeYield(ZealMode::YieldBeforeSweepingCaches),
          Call(&GCRuntime::sweepWeakCaches),
          // Per zone: finalize objects first, then non-objects, then sweep
          // the property map tree. sweepZone/sweepAllocKind are the
          // persistent slots the inner actions read their context from.
          ForEachZoneInSweepGroup(
              rt, &sweepZone.ref(),
              Sequence(MaybeYield(ZealMode::YieldBeforeSweepingObjects),
                       ForEachAllocKind(ForegroundObjectFinalizePhase.kinds,
                                        &sweepAllocKind.ref(),
                                        Call(&GCRuntime::finalizeAllocKind)),
                       MaybeYield(ZealMode::YieldBeforeSweepingNonObjects),
                       ForEachAllocKind(ForegroundNonObjectFinalizePhase.kinds,
                                        &sweepAllocKind.ref(),
                                        Call(&GCRuntime::finalizeAllocKind)),
                       MaybeYield(ZealMode::YieldBeforeSweepingPropMapTrees),
                       Call(&GCRuntime::sweepPropMapTree))),
          Call(&GCRuntime::endSweepingSweepGroup)));

  return sweepActions != nullptr;
}
2227
// Run one slice of the sweep phase: drain the mark stack if needed, then
// execute the sweep action tree until it finishes or the budget runs out.
IncrementalProgress GCRuntime::performSweepActions(SliceBudget& budget) {
  AutoMajorGCProfilerEntry s(this);
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP);
  JSFreeOp fop(rt);

  // Don't trigger pre-barriers when finalizing.
  AutoDisableBarriers disableBarriers(this);

  // Drain the mark stack, possibly in a parallel task if we're in a part of
  // sweeping that allows it.
  //
  // In the first sweep slice we must not yield to the mutator until we've
  // started sweeping a sweep group, but in that case the stack must be empty
  // already.

  MOZ_ASSERT(initialState <= State::Sweep);
  MOZ_ASSERT_IF(initialState != State::Sweep, marker.isDrained());
  if (initialState == State::Sweep &&
      markDuringSweeping(&fop, budget) == NotFinished) {
    return NotFinished;
  }

  // Then continue running sweep actions.

  SweepAction::Args args{this, &fop, budget};
  IncrementalProgress sweepProgress = sweepActions->run(args);
  IncrementalProgress markProgress = joinBackgroundMarkTask();

  // The slice is only complete when both the sweep actions and any background
  // marking have finished.
  if (sweepProgress == Finished && markProgress == Finished) {
    return Finished;
  }

  MOZ_ASSERT(isIncremental);
  return NotFinished;
}
2263
allCCVisibleZonesWereCollected()2264 bool GCRuntime::allCCVisibleZonesWereCollected() {
2265 // Calculate whether the gray marking state is now valid.
2266 //
2267 // The gray bits change from invalid to valid if we finished a full GC from
2268 // the point of view of the cycle collector. We ignore the following:
2269 //
2270 // - Helper thread zones, as these are not reachable from the main heap.
2271 // - The atoms zone, since strings and symbols are never marked gray.
2272 // - Empty zones.
2273 //
2274 // These exceptions ensure that when the CC requests a full GC the gray mark
2275 // state ends up valid even it we don't collect all of the zones.
2276
2277 for (ZonesIter zone(this, SkipAtoms); !zone.done(); zone.next()) {
2278 if (!zone->isCollecting() && !zone->arenas.arenaListsAreEmpty()) {
2279 return false;
2280 }
2281 }
2282
2283 return true;
2284 }
2285
// Finish the sweep phase: run end-of-sweep cleanup and callbacks, update the
// gray-bits-valid state, and verify (in DEBUG) that foreground finalization
// completed.
void GCRuntime::endSweepPhase(bool destroyingRuntime) {
  // Background marking during sweeping must have completed by now.
  MOZ_ASSERT(!markOnBackgroundThreadDuringSweeping);

  // All incremental sweep actions must have run to completion.
  sweepActions->assertFinished();

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP);
  JSFreeOp fop(rt);

  MOZ_ASSERT_IF(destroyingRuntime, !sweepOnBackgroundThread);

  {
    gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::DESTROY);

    /*
     * Sweep script filenames after sweeping functions in the generic loop
     * above. In this way when a scripted function's finalizer destroys the
     * script and calls rt->destroyScriptHook, the hook can still access the
     * script's filename. See bug 323267.
     */
    SweepScriptData(rt);
  }

  {
    gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::FINALIZE_END);
    AutoLockStoreBuffer lock(&storeBuffer());
    callFinalizeCallbacks(&fop, JSFINALIZE_COLLECTION_END);

    // The gray bits become valid if everything the cycle collector can see
    // was collected in this GC.
    if (allCCVisibleZonesWereCollected()) {
      grayBitsValid = true;
    }
  }

  // NOTE(review): presumably this detects compartments that died during an
  // incremental collection — confirm against findDeadCompartments.
  if (isIncremental) {
    findDeadCompartments();
  }

#ifdef JS_GC_ZEAL
  finishMarkingValidation();
#endif

#ifdef DEBUG
  // Foreground-finalized kinds (and all kinds, when background sweeping is
  // disabled) must have empty collecting arena lists by now.
  for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
    for (auto i : AllAllocKinds()) {
      MOZ_ASSERT_IF(!IsBackgroundFinalized(i) || !sweepOnBackgroundThread,
                    zone->arenas.collectingArenaList(i).isEmpty());
    }
  }
#endif

  AssertNoWrappersInGrayList(rt);
}
2337