/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

/*
 * Implementation of compacting GC.
 */

#include "mozilla/Maybe.h"

#include "debugger/DebugAPI.h"
#include "gc/ArenaList.h"
#include "gc/GCInternals.h"
#include "gc/GCLock.h"
#include "gc/ParallelWork.h"
#include "gc/Zone.h"
#include "jit/JitCode.h"
#include "jit/JitRuntime.h"
#include "jit/JitZone.h"
#include "js/GCAPI.h"
#include "vm/HelperThreads.h"
#include "vm/Realm.h"
#include "wasm/TypedObject.h"

#include "gc/Heap-inl.h"
#include "gc/Marking-inl.h"
#include "gc/PrivateIterators-inl.h"
#include "gc/Zone-inl.h"
#include "vm/GeckoProfiler-inl.h"
#include "vm/JSContext-inl.h"

using namespace js;
using namespace js::gc;

using mozilla::Maybe;

bool GCRuntime::canRelocateZone(Zone* zone) const {
  return !zone->isAtomsZone();
}

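// Prepare for the compact phase: build the list of zones that can be
// compacted and reset the per-collection compaction state.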
void GCRuntime::beginCompactPhase() {
  MOZ_ASSERT(!isBackgroundSweeping());
  assertBackgroundSweepingFinished();

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT);

  MOZ_ASSERT(zonesToMaybeCompact.ref().isEmpty());
  for (GCZonesIter zone(this); !zone.done(); zone.next()) {
    if (canRelocateZone(zone)) {
      zonesToMaybeCompact.ref().append(zone);
    }
  }

  startedCompacting = true;
  zonesCompacted = 0;

#ifdef DEBUG
  AutoLockGC lock(this);
  MOZ_ASSERT(!relocatedArenasToRelease);
#endif
}

IncrementalProgress GCRuntime::compactPhase(JS::GCReason reason,
                                            SliceBudget& sliceBudget,
                                            AutoGCSession& session) {
  assertBackgroundSweepingFinished();
  MOZ_ASSERT(startedCompacting);

  AutoMajorGCProfilerEntry s(this);
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT);

  // TODO: JSScripts can move. If the sampler interrupts the GC in the
  // middle of relocating an arena, invalid JSScript pointers may be
  // accessed. Suppress all sampling until a finer-grained solution can be
  // found. See bug 1295775.
  AutoSuppressProfilerSampling suppressSampling(rt->mainContextFromOwnThread());

  ZoneList relocatedZones;
  Arena* relocatedArenas = nullptr;
  while (!zonesToMaybeCompact.ref().isEmpty()) {
    Zone* zone = zonesToMaybeCompact.ref().front();
    zonesToMaybeCompact.ref().removeFront();

    MOZ_ASSERT(nursery().isEmpty());
    zone->changeGCState(Zone::Finished, Zone::Compact);

    if (relocateArenas(zone, reason, relocatedArenas, sliceBudget)) {
      updateZonePointersToRelocatedCells(zone);
      relocatedZones.append(zone);
      zonesCompacted++;
    } else {
      zone->changeGCState(Zone::Compact, Zone::Finished);
    }

    if (sliceBudget.isOverBudget()) {
      break;
    }
  }

  if (!relocatedZones.isEmpty()) {
    updateRuntimePointersToRelocatedCells(session);

    do {
      Zone* zone = relocatedZones.front();
      relocatedZones.removeFront();
      zone->changeGCState(Zone::Compact, Zone::Finished);
    } while (!relocatedZones.isEmpty());
  }

  clearRelocatedArenas(relocatedArenas, reason);

#ifdef DEBUG
  protectOrReleaseRelocatedArenas(relocatedArenas, reason);
#else
  releaseRelocatedArenas(relocatedArenas);
#endif

  // Clear caches that can contain cell pointers.
  rt->caches().purgeForCompaction();

#ifdef DEBUG
  checkHashTablesAfterMovingGC();
#endif

  return zonesToMaybeCompact.ref().isEmpty() ? Finished : NotFinished;
}

void GCRuntime::endCompactPhase() { startedCompacting = false; }

static bool ShouldRelocateAllArenas(JS::GCReason reason) {
  return reason == JS::GCReason::DEBUG_GC;
}

/*
 * Choose which arenas to relocate all cells from. Return an arena cursor that
 * can be passed to removeRemainingArenas().
 */
Arena** ArenaList::pickArenasToRelocate(size_t& arenaTotalOut,
                                        size_t& relocTotalOut) {
  // Relocate the greatest number of arenas such that the number of used cells
  // in relocated arenas is less than or equal to the number of free cells in
  // unrelocated arenas. In other words we only relocate cells we can move
  // into existing arenas, and we choose the least full arenas to relocate.
  //
  // This is made easier by the fact that the arena list has been sorted in
  // descending order of number of used cells, so we will always relocate a
  // tail of the arena list. All we need to do is find the point at which to
  // start relocating.
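  //
  // For example (illustrative numbers only): with arenas holding
  // [10, 10, 8, 5, 2, 1] used cells out of a capacity of 10, the tail [2, 1]
  // contributes 3 used cells, which fit into the 2 + 5 = 7 free cells of the
  // preceding non-full arenas, so the last two arenas are chosen for
  // relocation.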

  check();

  if (isCursorAtEnd()) {
    return nullptr;
  }

  Arena** arenap = cursorp_;      // Next arena to consider for relocation.
  size_t previousFreeCells = 0;   // Count of free cells before arenap.
  size_t followingUsedCells = 0;  // Count of used cells after arenap.
  size_t fullArenaCount = 0;      // Number of full arenas (not relocated).
  size_t nonFullArenaCount =
      0;  // Number of non-full arenas (considered for relocation).
  size_t arenaIndex = 0;  // Index of the next arena to consider.

  for (Arena* arena = head_; arena != *cursorp_; arena = arena->next) {
    fullArenaCount++;
  }

  for (Arena* arena = *cursorp_; arena; arena = arena->next) {
    followingUsedCells += arena->countUsedCells();
    nonFullArenaCount++;
  }

  mozilla::DebugOnly<size_t> lastFreeCells(0);
  size_t cellsPerArena = Arena::thingsPerArena((*arenap)->getAllocKind());

  while (*arenap) {
    Arena* arena = *arenap;
    if (followingUsedCells <= previousFreeCells) {
      break;
    }

    size_t freeCells = arena->countFreeCells();
    size_t usedCells = cellsPerArena - freeCells;
    followingUsedCells -= usedCells;
#ifdef DEBUG
    MOZ_ASSERT(freeCells >= lastFreeCells);
    lastFreeCells = freeCells;
#endif
    previousFreeCells += freeCells;
    arenap = &arena->next;
    arenaIndex++;
  }

  size_t relocCount = nonFullArenaCount - arenaIndex;
  MOZ_ASSERT(relocCount < nonFullArenaCount);
  MOZ_ASSERT((relocCount == 0) == (!*arenap));
  arenaTotalOut += fullArenaCount + nonFullArenaCount;
  relocTotalOut += relocCount;

  return arenap;
}

#ifdef DEBUG
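// Check whether |ptr| lies within [start, start + length). The unsigned
// subtraction wraps around for pointers below |start|, so a single comparison
// suffices.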
inline bool PtrIsInRange(const void* ptr, const void* start, size_t length) {
  return uintptr_t(ptr) - uintptr_t(start) < length;
}
#endif

static void RelocateCell(Zone* zone, TenuredCell* src, AllocKind thingKind,
                         size_t thingSize) {
  JS::AutoSuppressGCAnalysis nogc(TlsContext.get());

  // Allocate a new cell.
  MOZ_ASSERT(zone == src->zone());
  TenuredCell* dst = AllocateCellInGC(zone, thingKind);

  // Copy source cell contents to destination.
  memcpy(dst, src, thingSize);

  // Move any uid attached to the object.
  src->zone()->transferUniqueId(dst, src);

  if (IsObjectAllocKind(thingKind)) {
    auto* srcObj = static_cast<JSObject*>(static_cast<Cell*>(src));
    auto* dstObj = static_cast<JSObject*>(static_cast<Cell*>(dst));

    if (srcObj->is<NativeObject>()) {
      NativeObject* srcNative = &srcObj->as<NativeObject>();
      NativeObject* dstNative = &dstObj->as<NativeObject>();

      // Fixup the pointer to inline object elements if necessary.
      if (srcNative->hasFixedElements()) {
        uint32_t numShifted =
            srcNative->getElementsHeader()->numShiftedElements();
        dstNative->setFixedElements(numShifted);
      }
    } else if (srcObj->is<ProxyObject>()) {
      if (srcObj->as<ProxyObject>().usingInlineValueArray()) {
        dstObj->as<ProxyObject>().setInlineValueArray();
      }
    }

    // Call object moved hook if present.
    if (JSObjectMovedOp op = srcObj->getClass()->extObjectMovedOp()) {
      op(dstObj, srcObj);
    }

    MOZ_ASSERT_IF(
        dstObj->is<NativeObject>(),
        !PtrIsInRange(
            (const Value*)dstObj->as<NativeObject>().getDenseElements(), src,
            thingSize));
  }

  // Copy the mark bits.
  dst->copyMarkBitsFrom(src);

  // Poison the source cell contents except for the forwarding flag and pointer
  // which will be stored in the first word. We can't do this for native
  // objects with fixed elements because this would overwrite the element flags
  // and these are needed when updating COW elements referred to by other
  // objects.
#ifdef DEBUG
  JSObject* srcObj = IsObjectAllocKind(thingKind)
                         ? static_cast<JSObject*>(static_cast<Cell*>(src))
                         : nullptr;
  if (!srcObj || !srcObj->is<NativeObject>() ||
      !srcObj->as<NativeObject>().hasFixedElements()) {
    AlwaysPoison(reinterpret_cast<uint8_t*>(src) + sizeof(uintptr_t),
                 JS_MOVED_TENURED_PATTERN, thingSize - sizeof(uintptr_t),
                 MemCheckKind::MakeNoAccess);
  }
#endif

  // Mark source cell as forwarded and leave a pointer to the destination.
  RelocationOverlay::forwardCell(src, dst);
}

static void RelocateArena(Arena* arena, SliceBudget& sliceBudget) {
  MOZ_ASSERT(arena->allocated());
  MOZ_ASSERT(!arena->onDelayedMarkingList());
  MOZ_ASSERT(arena->bufferedCells()->isEmpty());

  Zone* zone = arena->zone;

  AllocKind thingKind = arena->getAllocKind();
  size_t thingSize = arena->getThingSize();

  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    RelocateCell(zone, cell, thingKind, thingSize);
    sliceBudget.step();
  }

#ifdef DEBUG
  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    TenuredCell* src = cell;
    MOZ_ASSERT(src->isForwarded());
    TenuredCell* dest = Forwarded(src);
    MOZ_ASSERT(src->isMarkedBlack() == dest->isMarkedBlack());
    MOZ_ASSERT(src->isMarkedGray() == dest->isMarkedGray());
  }
#endif
}

/*
 * Relocate all arenas identified by pickArenasToRelocate: for each arena,
 * relocate each cell within it, then add it to a list of relocated arenas.
 */
Arena* ArenaList::relocateArenas(Arena* toRelocate, Arena* relocated,
                                 SliceBudget& sliceBudget,
                                 gcstats::Statistics& stats) {
  check();

  while (Arena* arena = toRelocate) {
    toRelocate = arena->next;
    RelocateArena(arena, sliceBudget);
    // Prepend to list of relocated arenas
    arena->next = relocated;
    relocated = arena;
    stats.count(gcstats::COUNT_ARENA_RELOCATED);
  }

  check();

  return relocated;
}

// Skip compacting zones unless we can free a certain proportion of their GC
// heap memory.
static const float MIN_ZONE_RECLAIM_PERCENT = 2.0;
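// For example, a zone with 1000 arenas is only compacted if at least 20 of
// them can be relocated, unless the collection was triggered by OOM.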

static bool ShouldRelocateZone(size_t arenaCount, size_t relocCount,
                               JS::GCReason reason) {
  if (relocCount == 0) {
    return false;
  }

  if (IsOOMReason(reason)) {
    return true;
  }

  return (relocCount * 100.0f) / arenaCount >= MIN_ZONE_RECLAIM_PERCENT;
}

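// Compute the set of alloc kinds that can be moved by a compacting GC.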
static AllocKinds CompactingAllocKinds() {
  AllocKinds result;
  for (AllocKind kind : AllAllocKinds()) {
    if (IsCompactingKind(kind)) {
      result += kind;
    }
  }
  return result;
}

bool ArenaLists::relocateArenas(Arena*& relocatedListOut, JS::GCReason reason,
                                SliceBudget& sliceBudget,
                                gcstats::Statistics& stats) {
  // This is only called from the main thread while we are doing a GC, so
  // there is no need to lock.
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime()));
  MOZ_ASSERT(runtime()->gc.isHeapCompacting());
  MOZ_ASSERT(!runtime()->gc.isBackgroundSweeping());

  // Relocate all compatible kinds
  AllocKinds allocKindsToRelocate = CompactingAllocKinds();

  // Clear all the free lists.
  clearFreeLists();

  if (ShouldRelocateAllArenas(reason)) {
    zone_->prepareForCompacting();
    for (auto kind : allocKindsToRelocate) {
      ArenaList& al = arenaList(kind);
      Arena* allArenas = al.head();
      al.clear();
      relocatedListOut =
          al.relocateArenas(allArenas, relocatedListOut, sliceBudget, stats);
    }
  } else {
    size_t arenaCount = 0;
    size_t relocCount = 0;
    AllAllocKindArray<Arena**> toRelocate;

    for (auto kind : allocKindsToRelocate) {
      toRelocate[kind] =
          arenaList(kind).pickArenasToRelocate(arenaCount, relocCount);
    }

    if (!ShouldRelocateZone(arenaCount, relocCount, reason)) {
      return false;
    }

    zone_->prepareForCompacting();
    for (auto kind : allocKindsToRelocate) {
      if (toRelocate[kind]) {
        ArenaList& al = arenaList(kind);
        Arena* arenas = al.removeRemainingArenas(toRelocate[kind]);
        relocatedListOut =
            al.relocateArenas(arenas, relocatedListOut, sliceBudget, stats);
      }
    }
  }

  return true;
}

bool GCRuntime::relocateArenas(Zone* zone, JS::GCReason reason,
                               Arena*& relocatedListOut,
                               SliceBudget& sliceBudget) {
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT_MOVE);

  MOZ_ASSERT(!zone->isPreservingCode());
  MOZ_ASSERT(canRelocateZone(zone));

  js::CancelOffThreadIonCompile(rt, JS::Zone::Compact);

  if (!zone->arenas.relocateArenas(relocatedListOut, reason, sliceBudget,
                                   stats())) {
    return false;
  }

#ifdef DEBUG
  // Check that we did as much compaction as we should have. There
  // should always be less than one arena's worth of free cells.
  for (auto kind : CompactingAllocKinds()) {
    ArenaList& al = zone->arenas.arenaList(kind);
    size_t freeCells = 0;
    for (Arena* arena = al.arenaAfterCursor(); arena; arena = arena->next) {
      freeCells += arena->countFreeCells();
    }
    MOZ_ASSERT(freeCells < Arena::thingsPerArena(kind));
  }
#endif

  return true;
}

MovingTracer::MovingTracer(JSRuntime* rt)
    : GenericTracerImpl(rt, JS::TracerKind::Moving,
                        JS::WeakMapTraceAction::TraceKeysAndValues) {}

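// Update an edge to a cell that may have been relocated: if the target has
// been forwarded, return its new location. Cells in other runtimes are never
// moved by this GC and are left alone.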
template <typename T>
inline T* MovingTracer::onEdge(T* thing) {
  if (thing->runtimeFromAnyThread() == runtime() && IsForwarded(thing)) {
    thing = Forwarded(thing);
  }

  return thing;
}

void Zone::prepareForCompacting() {
  JSFreeOp* fop = runtimeFromMainThread()->defaultFreeOp();
  discardJitCode(fop);
}

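// Trace the weak edges in |zone| with the moving tracer so that weak
// references to relocated cells are updated to their new locations.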
void GCRuntime::sweepZoneAfterCompacting(MovingTracer* trc, Zone* zone) {
  MOZ_ASSERT(zone->isCollecting());
  traceWeakFinalizationObserverEdges(trc, zone);

  zone->traceWeakMaps(trc);

  for (auto* cache : zone->weakCaches()) {
    cache->traceWeak(trc, nullptr);
  }

  if (jit::JitZone* jitZone = zone->jitZone()) {
    jitZone->traceWeak(trc);
  }

  for (RealmsInZoneIter r(zone); !r.done(); r.next()) {
    r->traceWeakRegExps(trc);
    r->traceWeakSavedStacks(trc);
    r->traceWeakGlobalEdge(trc);
    r->traceWeakDebugEnvironmentEdges(trc);
    r->traceWeakEdgesInJitRealm(trc);
    r->traceWeakObjectRealm(trc);
  }
}

template <typename T>
static inline void UpdateCellPointers(MovingTracer* trc, T* cell) {
  // We only update unmoved GC things or the new copy of moved GC things, never
  // the old copy. If this happened it could clear the forwarded flag which
  // could lead to pointers to the old copy not being updated.
  MOZ_ASSERT(!cell->isForwarded());

  cell->fixupAfterMovingGC();
  cell->traceChildren(trc);
}

template <typename T>
static void UpdateArenaPointersTyped(MovingTracer* trc, Arena* arena) {
  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    UpdateCellPointers(trc, cell.as<T>());
  }
}

static bool CanUpdateKindInBackground(AllocKind kind) {
  // We try to update as many GC things in parallel as we can, but there are
  // kinds for which this might not be safe:
  //  - we assume JSObjects that are foreground finalized are not safe to
  //    update in parallel
  //  - updating a SharedPropMap touches child maps in
  //    SharedPropMap::fixupAfterMovingGC
  return js::gc::IsBackgroundFinalized(kind) && !IsShapeAllocKind(kind) &&
         kind != AllocKind::BASE_SHAPE;
}

/*
 * Update the internal pointers for all cells in an arena.
 */
static void UpdateArenaPointers(MovingTracer* trc, Arena* arena) {
  AllocKind kind = arena->getAllocKind();

  MOZ_ASSERT_IF(!CanUpdateKindInBackground(kind),
                CurrentThreadCanAccessRuntime(trc->runtime()));

  switch (kind) {
#define EXPAND_CASE(allocKind, traceKind, type, sizedType, bgFinal, nursery, \
                    compact)                                                 \
  case AllocKind::allocKind:                                                 \
    UpdateArenaPointersTyped<type>(trc, arena);                              \
    return;
    FOR_EACH_ALLOCKIND(EXPAND_CASE)
#undef EXPAND_CASE

    default:
      MOZ_CRASH("Invalid alloc kind for UpdateArenaPointers");
  }
}

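// A contiguous half-open range [begin, end) of arenas within an arena list.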
struct ArenaListSegment {
  Arena* begin;
  Arena* end;
};

/*
 * Update the internal pointers for all arenas in a segment of an arena list.
 *
 * Returns the number of steps to count against the slice budget.
 */
static size_t UpdateArenaListSegmentPointers(GCRuntime* gc,
                                             const ArenaListSegment& arenas) {
  MOZ_ASSERT(arenas.begin);
  MovingTracer trc(gc->rt);
  size_t count = 0;
  for (Arena* arena = arenas.begin; arena != arenas.end; arena = arena->next) {
    UpdateArenaPointers(&trc, arena);
    count++;
  }
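  // Each arena counts as 256 units of work against the slice budget, roughly
  // the maximum number of cells an arena can hold.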
  return count * 256;
}

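// Iterates over a zone's arenas, optionally filtered by alloc kind, yielding
// them in fixed-size segments so that pointer-update work can be divided
// between parallel tasks.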
class ArenasToUpdate {
  // Maximum number of arenas to update in one block.
#ifdef DEBUG
  static const unsigned MaxArenasToProcess = 16;
#else
  static const unsigned MaxArenasToProcess = 256;
#endif

 public:
  explicit ArenasToUpdate(Zone* zone);
  ArenasToUpdate(Zone* zone, const AllocKinds& kinds);

  bool done() const { return !segmentBegin; }

  ArenaListSegment get() const {
    MOZ_ASSERT(!done());
    return {segmentBegin, segmentEnd};
  }

  void next();

 private:
  Maybe<AllocKinds> kinds;            // Selects which thing kinds to update.
  Zone* zone;                         // Zone to process.
  AllocKind kind = AllocKind::FIRST;  // Current alloc kind to process.
  Arena* segmentBegin = nullptr;
  Arena* segmentEnd = nullptr;

  static AllocKind nextAllocKind(AllocKind i) {
    return AllocKind(uint8_t(i) + 1);
  }

  void settle();
  void findSegmentEnd();
};

ArenasToUpdate::ArenasToUpdate(Zone* zone) : zone(zone) { settle(); }

ArenasToUpdate::ArenasToUpdate(Zone* zone, const AllocKinds& kinds)
    : kinds(Some(kinds)), zone(zone) {
  settle();
}

void ArenasToUpdate::settle() {
  // Called when we have set |kind| to a new kind. Sets |segmentBegin| to the
  // next arena to process, or to null if there are no more arenas to update.

  MOZ_ASSERT(!segmentBegin);

  for (; kind < AllocKind::LIMIT; kind = nextAllocKind(kind)) {
    if (kinds && !kinds.ref().contains(kind)) {
      continue;
    }

    Arena* arena = zone->arenas.getFirstArena(kind);
    if (arena) {
      segmentBegin = arena;
      findSegmentEnd();
      break;
    }
  }
}

void ArenasToUpdate::findSegmentEnd() {
  // Take up to MaxArenasToProcess arenas from the list starting at
  // |segmentBegin| and set |segmentEnd|.
  Arena* arena = segmentBegin;
  for (size_t i = 0; arena && i < MaxArenasToProcess; i++) {
    arena = arena->next;
  }
  segmentEnd = arena;
}

void ArenasToUpdate::next() {
  MOZ_ASSERT(!done());

  segmentBegin = segmentEnd;
  if (segmentBegin) {
    findSegmentEnd();
    return;
  }

  kind = nextAllocKind(kind);
  settle();
}

static AllocKinds ForegroundUpdateKinds(AllocKinds kinds) {
  AllocKinds result;
  for (AllocKind kind : kinds) {
    if (!CanUpdateKindInBackground(kind)) {
      result += kind;
    }
  }
  return result;
}

void GCRuntime::updateRttValueObjects(MovingTracer* trc, Zone* zone) {
  // We need to update each type descriptor object and any objects stored in
  // its reserved slots, since some of these contain array objects that also
  // need to be updated. Do not update any non-reserved slots, since they might
  // point back to unprocessed descriptor objects.

  zone->rttValueObjects().traceWeak(trc, nullptr);

  for (auto r = zone->rttValueObjects().all(); !r.empty(); r.popFront()) {
    RttValue* obj = &MaybeForwardedObjectAs<RttValue>(r.front());
    UpdateCellPointers(trc, obj);
    for (size_t i = 0; i < RttValue::SlotCount; i++) {
      Value value = obj->getSlot(i);
      if (value.isObject()) {
        UpdateCellPointers(trc, &value.toObject());
      }
    }
  }
}

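// Update cell pointers for the given alloc kinds in |zone|. Kinds that are
// safe to process off-thread are handed to parallel helper thread tasks; the
// remaining kinds are updated here on the main thread while those tasks run.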
void GCRuntime::updateCellPointers(Zone* zone, AllocKinds kinds) {
  AllocKinds fgKinds = ForegroundUpdateKinds(kinds);
  AllocKinds bgKinds = kinds - fgKinds;

  ArenasToUpdate fgArenas(zone, fgKinds);
  ArenasToUpdate bgArenas(zone, bgKinds);

  AutoLockHelperThreadState lock;

  AutoRunParallelWork bgTasks(this, UpdateArenaListSegmentPointers,
                              gcstats::PhaseKind::COMPACT_UPDATE_CELLS,
                              bgArenas, SliceBudget::unlimited(), lock);

  AutoUnlockHelperThreadState unlock(lock);

  for (; !fgArenas.done(); fgArenas.next()) {
    UpdateArenaListSegmentPointers(this, fgArenas.get());
  }
}

// After cells have been relocated any pointers to a cell's old locations must
// be updated to point to the new location.  This happens by iterating through
// all cells in the heap and tracing their children (non-recursively) to update
// them.
//
// This is complicated by the fact that updating a GC thing sometimes depends on
// making use of other GC things.  After a moving GC these things may not be in
// a valid state since they may contain pointers which have not been updated
// yet.
//
// The main dependencies are:
//
//   - Updating a JSObject makes use of its shape
//   - Updating a typed object makes use of its type descriptor object
//
// This means we require at least three phases for update:
//
//  1) shapes
//  2) typed object type descriptor objects
//  3) all other objects
//
// Also, there can be data races calling IsForwarded() on the new location of a
// cell whose first word is being updated in parallel on another thread. The
// easiest way to avoid this is to not store a GC pointer in the first word of a
// cell. Otherwise this can be avoided by updating different kinds of cell in
// different phases.
//
// Since we want to minimize the number of phases, we arrange kinds into three
// arbitrary phases.

static constexpr AllocKinds UpdatePhaseOne{AllocKind::SCRIPT,
                                           AllocKind::BASE_SHAPE,
                                           AllocKind::SHAPE,
                                           AllocKind::STRING,
                                           AllocKind::JITCODE,
                                           AllocKind::REGEXP_SHARED,
                                           AllocKind::SCOPE,
                                           AllocKind::GETTER_SETTER,
                                           AllocKind::COMPACT_PROP_MAP,
                                           AllocKind::NORMAL_PROP_MAP,
                                           AllocKind::DICT_PROP_MAP};

// UpdatePhaseTwo is typed object descriptor objects.

static constexpr AllocKinds UpdatePhaseThree{AllocKind::FUNCTION,
                                             AllocKind::FUNCTION_EXTENDED,
                                             AllocKind::OBJECT0,
                                             AllocKind::OBJECT0_BACKGROUND,
                                             AllocKind::OBJECT2,
                                             AllocKind::OBJECT2_BACKGROUND,
                                             AllocKind::ARRAYBUFFER4,
                                             AllocKind::OBJECT4,
                                             AllocKind::OBJECT4_BACKGROUND,
                                             AllocKind::ARRAYBUFFER8,
                                             AllocKind::OBJECT8,
                                             AllocKind::OBJECT8_BACKGROUND,
                                             AllocKind::ARRAYBUFFER12,
                                             AllocKind::OBJECT12,
                                             AllocKind::OBJECT12_BACKGROUND,
                                             AllocKind::ARRAYBUFFER16,
                                             AllocKind::OBJECT16,
                                             AllocKind::OBJECT16_BACKGROUND};

void GCRuntime::updateAllCellPointers(MovingTracer* trc, Zone* zone) {
  updateCellPointers(zone, UpdatePhaseOne);

  // UpdatePhaseTwo: Update RttValues before all other objects as typed
  // objects access these objects when we trace them.
  updateRttValueObjects(trc, zone);

  updateCellPointers(zone, UpdatePhaseThree);
}

/*
 * Update pointers to relocated cells in a single zone by doing a traversal of
 * that zone's arenas and calling per-zone sweep hooks.
 *
 * The latter is necessary to update weak references which are not marked as
 * part of the traversal.
 */
void GCRuntime::updateZonePointersToRelocatedCells(Zone* zone) {
  MOZ_ASSERT(!rt->isBeingDestroyed());
  MOZ_ASSERT(zone->isGCCompacting());

  AutoTouchingGrayThings tgt;

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT_UPDATE);
  MovingTracer trc(rt);

  zone->fixupAfterMovingGC();
  zone->fixupScriptMapsAfterMovingGC(&trc);

  // Fixup compartment global pointers as these get accessed during marking.
  for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
    comp->fixupAfterMovingGC(&trc);
  }

  zone->externalStringCache().purge();
  zone->functionToStringCache().purge();
  zone->shapeZone().purgeShapeCaches(rt->defaultFreeOp());
  rt->caches().stringToAtomCache.purge();

  // Iterate through all cells that can contain relocatable pointers to update
  // them. Since updating each cell is independent we try to parallelize this
  // as much as possible.
  updateAllCellPointers(&trc, zone);

  // Sweep everything to fix up weak pointers.
  sweepZoneAfterCompacting(&trc, zone);

  // Call callbacks to get the rest of the system to fixup other untraced
  // pointers.
  for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
    callWeakPointerCompartmentCallbacks(&trc, comp);
  }
}

/*
 * Update runtime-wide pointers to relocated cells.
 */
void GCRuntime::updateRuntimePointersToRelocatedCells(AutoGCSession& session) {
  MOZ_ASSERT(!rt->isBeingDestroyed());

  gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::COMPACT_UPDATE);
  MovingTracer trc(rt);

  Zone::fixupAllCrossCompartmentWrappersAfterMovingGC(&trc);

  rt->geckoProfiler().fixupStringsMapAfterMovingGC();

  // Mark roots to update them.

  traceRuntimeForMajorGC(&trc, session);

  {
    gcstats::AutoPhase ap2(stats(), gcstats::PhaseKind::MARK_ROOTS);
    DebugAPI::traceAllForMovingGC(&trc);
    DebugAPI::traceCrossCompartmentEdges(&trc);

    // Mark all gray roots.
    traceEmbeddingGrayRoots(&trc);
    Compartment::traceIncomingCrossCompartmentEdgesForZoneGC(
        &trc, Compartment::GrayEdges);
  }

  // Sweep everything to fix up weak pointers.
  jit::JitRuntime::TraceWeakJitcodeGlobalTable(rt, &trc);
  for (JS::detail::WeakCacheBase* cache : rt->weakCaches()) {
    cache->traceWeak(&trc, nullptr);
  }

  // Type inference may put more blocks here to free.
  {
    AutoLockHelperThreadState lock;
    lifoBlocksToFree.ref().freeAll();
  }

  // Call callbacks to get the rest of the system to fixup other untraced
  // pointers.
  callWeakPointerZonesCallbacks(&trc);
}

void GCRuntime::clearRelocatedArenas(Arena* arenaList, JS::GCReason reason) {
  AutoLockGC lock(this);
  clearRelocatedArenasWithoutUnlocking(arenaList, reason, lock);
}

void GCRuntime::clearRelocatedArenasWithoutUnlocking(Arena* arenaList,
                                                     JS::GCReason reason,
                                                     const AutoLockGC& lock) {
  // Clear the relocated arenas, now containing only forwarding pointers
  while (arenaList) {
    Arena* arena = arenaList;
    arenaList = arenaList->next;

    // Clear the mark bits
    arena->unmarkAll();

    // Mark arena as empty
    arena->setAsFullyUnused();

#ifdef DEBUG
    // The cell contents have been partially marked no access in RelocateCell,
    // so we need to mark the region as undefined again so we can poison it.
    SetMemCheckKind(reinterpret_cast<void*>(arena->thingsStart()),
                    arena->getThingsSpan(), MemCheckKind::MakeUndefined);
#endif

    AlwaysPoison(reinterpret_cast<void*>(arena->thingsStart()),
                 JS_MOVED_TENURED_PATTERN, arena->getThingsSpan(),
                 MemCheckKind::MakeNoAccess);

    // Don't count arenas as being freed by the GC if we purposely moved
    // everything to new arenas, as that will already have allocated a similar
    // number of arenas. This only happens for collections triggered by GC zeal.
    bool allArenasRelocated = ShouldRelocateAllArenas(reason);
    arena->zone->gcHeapSize.removeBytes(ArenaSize, !allArenasRelocated);

    // Release the arena but don't return it to the chunk yet.
    arena->release(lock);
  }
}

#ifdef DEBUG

// In debug mode we don't always release relocated arenas straight away.
// Sometimes we protect them instead and hold onto them until the next GC sweep
// phase to catch any pointers to them that didn't get forwarded.

static inline bool CanProtectArenas() {
  // On some systems the page size is larger than the size of an arena so we
  // can't change the mapping permissions per arena.
  return SystemPageSize() <= ArenaSize;
}

static inline bool ShouldProtectRelocatedArenas(JS::GCReason reason) {
  // For zeal mode collections we don't release the relocated arenas
  // immediately. Instead we protect them and keep them around until the next
  // collection so we can catch any stray accesses to them.
  return reason == JS::GCReason::DEBUG_GC && CanProtectArenas();
}

void GCRuntime::protectOrReleaseRelocatedArenas(Arena* arenaList,
                                                JS::GCReason reason) {
  if (ShouldProtectRelocatedArenas(reason)) {
    protectAndHoldArenas(arenaList);
    return;
  }

  releaseRelocatedArenas(arenaList);
}

void GCRuntime::protectAndHoldArenas(Arena* arenaList) {
  for (Arena* arena = arenaList; arena;) {
    MOZ_ASSERT(!arena->allocated());
    Arena* next = arena->next;
    if (!next) {
      // Prepend to hold list before we protect the memory.
      AutoLockGC lock(this);
      arena->next = relocatedArenasToRelease;
      relocatedArenasToRelease = arenaList;
    }
    ProtectPages(arena, ArenaSize);
    arena = next;
  }
}

void GCRuntime::unprotectHeldRelocatedArenas(const AutoLockGC& lock) {
  for (Arena* arena = relocatedArenasToRelease; arena; arena = arena->next) {
    UnprotectPages(arena, ArenaSize);
    MOZ_ASSERT(!arena->allocated());
  }
}

void GCRuntime::releaseHeldRelocatedArenas() {
  AutoLockGC lock(this);
  unprotectHeldRelocatedArenas(lock);
  Arena* arenas = relocatedArenasToRelease;
  relocatedArenasToRelease = nullptr;
  releaseRelocatedArenasWithoutUnlocking(arenas, lock);
}

void GCRuntime::releaseHeldRelocatedArenasWithoutUnlocking(
    const AutoLockGC& lock) {
  unprotectHeldRelocatedArenas(lock);
  releaseRelocatedArenasWithoutUnlocking(relocatedArenasToRelease, lock);
  relocatedArenasToRelease = nullptr;
}

#endif

void GCRuntime::releaseRelocatedArenas(Arena* arenaList) {
  AutoLockGC lock(this);
  releaseRelocatedArenasWithoutUnlocking(arenaList, lock);
}

void GCRuntime::releaseRelocatedArenasWithoutUnlocking(Arena* arenaList,
                                                       const AutoLockGC& lock) {
  // Release relocated arenas previously cleared with clearRelocatedArenas().
  while (arenaList) {
    Arena* arena = arenaList;
    arenaList = arenaList->next;

    // We already updated the memory accounting so just call
    // Chunk::releaseArena.
    arena->chunk()->releaseArena(this, arena, lock);
  }
}