1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2  * vim: set ts=8 sts=2 et sw=2 tw=80:
3  * This Source Code Form is subject to the terms of the Mozilla Public
4  * License, v. 2.0. If a copy of the MPL was not distributed with this
5  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 
7 #include "mozilla/DebugOnly.h"
8 #include "mozilla/IntegerPrintfMacros.h"
9 #include "mozilla/Sprintf.h"
10 
11 #include <algorithm>
12 #include <utility>
13 
14 #ifdef MOZ_VALGRIND
15 #  include <valgrind/memcheck.h>
16 #endif
17 
18 #include "gc/GCInternals.h"
19 #include "gc/GCLock.h"
20 #include "gc/PublicIterators.h"
21 #include "gc/WeakMap.h"
22 #include "gc/Zone.h"
23 #include "js/friend/DumpFunctions.h"  // js::DumpObject
24 #include "js/HashTable.h"
25 #include "vm/JSContext.h"
26 
27 #include "gc/ArenaList-inl.h"
28 #include "gc/GC-inl.h"
29 #include "gc/Marking-inl.h"
30 #include "gc/PrivateIterators-inl.h"
31 #include "vm/JSContext-inl.h"
32 
33 using namespace js;
34 using namespace js::gc;
35 
36 using mozilla::DebugOnly;
37 
38 #ifdef DEBUG
// Returns whether the pre-barrier verifier is currently active for |runtime|.
bool js::RuntimeIsVerifyingPreBarriers(JSRuntime* runtime) {
#  ifdef JS_GC_ZEAL
  return runtime->gc.isVerifyPreBarriersEnabled();
#  else
  // Without GC zeal support the verifier can never be running.
  return false;
#  endif
}
46 #endif
47 
48 #ifdef JS_GC_ZEAL
49 
50 /*
51  * Write barrier verification
52  *
53  * The next few functions are for write barrier verification.
54  *
55  * The VerifyBarriers function is a shorthand. It checks if a verification phase
56  * is currently running. If not, it starts one. Otherwise, it ends the current
57  * phase and starts a new one.
58  *
59  * The user can adjust the frequency of verifications, which causes
60  * VerifyBarriers to be a no-op all but one out of N calls. However, if the
61  * |always| parameter is true, it starts a new phase no matter what.
62  *
63  * Pre-Barrier Verifier:
64  *   When StartVerifyBarriers is called, a snapshot is taken of all objects in
65  *   the GC heap and saved in an explicit graph data structure. Later,
66  *   EndVerifyBarriers traverses the heap again. Any pointer values that were in
67  *   the snapshot and are no longer found must be marked; otherwise an assertion
68  *   triggers. Note that we must not GC in between starting and finishing a
69  *   verification phase.
70  */
71 
// A single outgoing edge in the heap snapshot: the target GC thing plus the
// tracer-provided edge name used in diagnostic output.
struct EdgeValue {
  JS::GCCellPtr thing;
  const char* label;
};
76 
// A node in the heap snapshot: a GC thing and its outgoing edges.
struct VerifyNode {
  JS::GCCellPtr thing;
  uint32_t count;
  // Variable-length trailing array: |count| EdgeValues are laid out
  // immediately after this struct in the snapshot buffer (see MakeNode and
  // NextNode for the matching size arithmetic).
  EdgeValue edges[1];
};
82 
83 typedef HashMap<Cell*, VerifyNode*, DefaultHasher<Cell*>, SystemAllocPolicy>
84     NodeMap;
85 
86 /*
87  * The verifier data structures are simple. The entire graph is stored in a
88  * single block of memory. At the beginning is a VerifyNode for the root
89  * node. It is followed by a sequence of EdgeValues--the exact number is given
90  * in the node. After the edges come more nodes and their edges.
91  *
92  * The edgeptr and term fields are used to allocate out of the block of memory
93  * for the graph. If we run out of memory (i.e., if edgeptr goes beyond term),
94  * we just abandon the verification.
95  *
96  * The nodemap field is a hashtable that maps from the address of the GC thing
97  * to the VerifyNode that represents it.
98  */
// Tracer that records the heap snapshot used by the pre-barrier verifier.
class js::VerifyPreTracer final : public JS::CallbackTracer {
  // Generational GC stays disabled for the lifetime of the tracer: the
  // snapshot only covers tenured cells (see the assertion in onChild).
  JS::AutoDisableGenerationalGC noggc;

  void onChild(const JS::GCCellPtr& thing) override;

 public:
  /* The gcNumber when the verification began. */
  uint64_t number;

  /* This counts up to gcZealFrequency to decide whether to verify. */
  int count;

  /* This graph represents the initial GC "snapshot". */
  VerifyNode* curnode;
  VerifyNode* root;
  char* edgeptr;  // Bump-allocation cursor into the snapshot buffer.
  char* term;     // End of the buffer; edgeptr == term signals exhaustion.
  NodeMap nodemap;

  explicit VerifyPreTracer(JSRuntime* rt)
      : JS::CallbackTracer(rt, JS::TracerKind::Callback,
                           JS::WeakEdgeTraceAction::Skip),
        noggc(rt->mainContextFromOwnThread()),
        number(rt->gc.gcNumber()),
        count(0),
        curnode(nullptr),
        root(nullptr),
        edgeptr(nullptr),
        term(nullptr) {
    // We don't care about weak edges here. Since they are not marked they
    // cannot cause the problem that the pre-write barrier protects against.
  }

  ~VerifyPreTracer() { js_free(root); }
};
134 
135 /*
136  * This function builds up the heap snapshot by adding edges to the current
137  * node.
138  */
void VerifyPreTracer::onChild(const JS::GCCellPtr& thing) {
  // Generational GC is disabled while verifying, so every reachable thing
  // must be tenured.
  MOZ_ASSERT(!IsInsideNursery(thing.asCell()));

  // Skip things in other runtimes.
  if (thing.asCell()->asTenured().runtimeFromAnyThread() != runtime()) {
    return;
  }

  // Reserve space for one more edge. On overflow, pin edgeptr to term so
  // callers can detect exhaustion and abandon the verification.
  edgeptr += sizeof(EdgeValue);
  if (edgeptr >= term) {
    edgeptr = term;
    return;
  }

  // Append the edge to the node currently being traced.
  VerifyNode* node = curnode;
  uint32_t i = node->count;

  node->edges[i].thing = thing;
  node->edges[i].label = context().name();
  node->count++;
}
160 
/*
 * Allocate a VerifyNode for |thing| out of the snapshot buffer and register
 * it in the node map. Returns nullptr if a node for |thing| already exists
 * or on exhaustion/OOM (in which case edgeptr is pinned to term).
 */
static VerifyNode* MakeNode(VerifyPreTracer* trc, JS::GCCellPtr thing) {
  NodeMap::AddPtr p = trc->nodemap.lookupForAdd(thing.asCell());
  if (!p) {
    VerifyNode* node = (VerifyNode*)trc->edgeptr;
    // A fresh node has no edges yet, so only consume sizeof(VerifyNode)
    // minus the inline edges[1] placeholder; edges are appended by onChild.
    trc->edgeptr += sizeof(VerifyNode) - sizeof(EdgeValue);
    if (trc->edgeptr >= trc->term) {
      trc->edgeptr = trc->term;
      return nullptr;
    }

    node->thing = thing;
    node->count = 0;
    if (!trc->nodemap.add(p, thing.asCell(), node)) {
      // Hash table OOM: treat it like buffer exhaustion.
      trc->edgeptr = trc->term;
      return nullptr;
    }

    return node;
  }
  return nullptr;
}
182 
NextNode(VerifyNode * node)183 static VerifyNode* NextNode(VerifyNode* node) {
184   if (node->count == 0) {
185     return (VerifyNode*)((char*)node + sizeof(VerifyNode) - sizeof(EdgeValue));
186   } else {
187     return (VerifyNode*)((char*)node + sizeof(VerifyNode) +
188                          sizeof(EdgeValue) * (node->count - 1));
189   }
190 }
191 
/*
 * Begin a verification phase: take the heap snapshot that
 * endVerifyPreBarriers will later check against. On success this enables
 * incremental barriers for all zones and leaves the GC in State::Mark; on
 * OOM the verification is silently abandoned.
 */
void gc::GCRuntime::startVerifyPreBarriers() {
  // Only one phase may run at a time, and it cannot overlap an incremental
  // GC.
  if (verifyPreData || isIncrementalGCInProgress()) {
    return;
  }

  JSContext* cx = rt->mainContextFromOwnThread();

  if (IsIncrementalGCUnsafe(rt) != GCAbortReason::None ||
      rt->hasHelperThreadZones()) {
    return;
  }

  number++;

  VerifyPreTracer* trc = js_new<VerifyPreTracer>(rt);
  if (!trc) {
    return;
  }

  AutoPrepareForTracing prep(cx);

  // Clear all mark bits: after the snapshot, a set mark bit means the cell
  // was marked after the snapshot was taken.
  {
    AutoLockGC lock(this);
    for (auto chunk = allNonEmptyChunks(lock); !chunk.done(); chunk.next()) {
      chunk->markBits.clear();
    }
  }

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::TRACE_HEAP);

  // Fixed-size buffer for the snapshot graph; overflowing it aborts the
  // verification (see the edgeptr/term protocol).
  const size_t size = 64 * 1024 * 1024;
  trc->root = (VerifyNode*)js_malloc(size);
  if (!trc->root) {
    goto oom;
  }
  trc->edgeptr = (char*)trc->root;
  trc->term = trc->edgeptr + size;

  /* Create the root node. */
  trc->curnode = MakeNode(trc, JS::GCCellPtr());

  MOZ_ASSERT(incrementalState == State::NotActive);
  incrementalState = State::MarkRoots;

  /* Make all the roots be edges emanating from the root node. */
  traceRuntime(trc, prep);

  VerifyNode* node;
  node = trc->curnode;
  // edgeptr == term means the snapshot buffer overflowed while tracing.
  if (trc->edgeptr == trc->term) {
    goto oom;
  }

  /* For each edge, make a node for it if one doesn't already exist. */
  while ((char*)node < trc->edgeptr) {
    for (uint32_t i = 0; i < node->count; i++) {
      EdgeValue& e = node->edges[i];
      VerifyNode* child = MakeNode(trc, e.thing);
      if (child) {
        trc->curnode = child;
        JS::TraceChildren(trc, e.thing);
      }
      if (trc->edgeptr == trc->term) {
        goto oom;
      }
    }

    node = NextNode(node);
  }

  verifyPreData = trc;
  incrementalState = State::Mark;
  marker.start();

  // Enable barriers and clear the free lists, mirroring what starting an
  // incremental mark does.
  for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
    MOZ_ASSERT(!zone->usedByHelperThread());
    zone->setNeedsIncrementalBarrier(true);
    zone->arenas.clearFreeLists();
  }

  return;

oom:
  incrementalState = State::NotActive;
  js_delete(trc);
  verifyPreData = nullptr;
}
279 
// Mark bits were cleared when the snapshot was taken, so any mark on the
// cell means it was marked (or, presumably, allocated marked) afterwards.
static bool IsMarkedOrAllocated(TenuredCell* cell) {
  return cell->isMarkedAny();
}
283 
// Tracer used by endVerifyPreBarriers to re-trace each snapshot node and
// cancel out edges that still exist in the heap (see onChild below).
struct CheckEdgeTracer final : public JS::CallbackTracer {
  // The snapshot node whose outgoing edges are currently being checked.
  VerifyNode* node;
  explicit CheckEdgeTracer(JSRuntime* rt)
      : JS::CallbackTracer(rt), node(nullptr) {}
  void onChild(const JS::GCCellPtr& thing) override;
};
290 
// Nodes with more edges than this are skipped, both when cancelling edges
// and when checking them, to avoid quadratic behavior.
static const uint32_t MAX_VERIFIER_EDGES = 1000;
292 
293 /*
294  * This function is called by EndVerifyBarriers for every heap edge. If the edge
295  * already existed in the original snapshot, we "cancel it out" by overwriting
296  * it with nullptr. EndVerifyBarriers later asserts that the remaining
297  * non-nullptr edges (i.e., the ones from the original snapshot that must have
298  * been modified) must point to marked objects.
299  */
onChild(const JS::GCCellPtr & thing)300 void CheckEdgeTracer::onChild(const JS::GCCellPtr& thing) {
301   // Skip things in other runtimes.
302   if (thing.asCell()->asTenured().runtimeFromAnyThread() != runtime()) {
303     return;
304   }
305 
306   /* Avoid n^2 behavior. */
307   if (node->count > MAX_VERIFIER_EDGES) {
308     return;
309   }
310 
311   for (uint32_t i = 0; i < node->count; i++) {
312     if (node->edges[i].thing == thing) {
313       node->edges[i].thing = JS::GCCellPtr();
314       return;
315     }
316   }
317 }
318 
IsMarkedOrAllocated(const EdgeValue & edge)319 static bool IsMarkedOrAllocated(const EdgeValue& edge) {
320   if (!edge.thing || IsMarkedOrAllocated(&edge.thing.asCell()->asTenured())) {
321     return true;
322   }
323 
324   // Permanent atoms and well-known symbols aren't marked during graph
325   // traversal.
326   if (edge.thing.is<JSString>() &&
327       edge.thing.as<JSString>().isPermanentAtom()) {
328     return true;
329   }
330   if (edge.thing.is<JS::Symbol>() &&
331       edge.thing.as<JS::Symbol>().isWellKnownSymbol()) {
332     return true;
333   }
334 
335   return false;
336 }
337 
/*
 * Finish a verification phase: re-trace each snapshot node, cancel out edges
 * that still exist in the heap, and assert that every remaining (removed)
 * edge points at a cell that was marked, i.e. that the pre-barrier fired.
 */
void gc::GCRuntime::endVerifyPreBarriers() {
  VerifyPreTracer* trc = verifyPreData;

  if (!trc) {
    return;
  }

  MOZ_ASSERT(!JS::IsGenerationalGCEnabled(rt));

  // Flush the barrier tracer's buffer to ensure the pre-barrier has marked
  // everything it's going to. This usually happens as part of GC.
  SliceBudget budget = SliceBudget::unlimited();
  marker.traceBarrieredCells(budget);

  // Now that barrier marking has finished, prepare the heap to allow this
  // method to trace cells and discover their outgoing edges.
  AutoPrepareForTracing prep(rt->mainContextFromOwnThread());

  bool compartmentCreated = false;

  /* We need to disable barriers before tracing, which may invoke barriers. */
  for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
    // A zone without barriers must have appeared after the snapshot was
    // taken, making the snapshot incomplete; skip the check in that case.
    if (!zone->needsIncrementalBarrier()) {
      compartmentCreated = true;
    }
    zone->setNeedsIncrementalBarrier(false);
  }

  verifyPreData = nullptr;
  MOZ_ASSERT(incrementalState == State::Mark);
  incrementalState = State::NotActive;

  if (!compartmentCreated && IsIncrementalGCUnsafe(rt) == GCAbortReason::None &&
      !rt->hasHelperThreadZones()) {
    CheckEdgeTracer cetrc(rt);

    /* Start after the roots. */
    VerifyNode* node = NextNode(trc->root);
    while ((char*)node < trc->edgeptr) {
      cetrc.node = node;
      // onChild nulls out any snapshot edge that still exists in the heap.
      JS::TraceChildren(&cetrc, node->thing);

      if (node->count <= MAX_VERIFIER_EDGES) {
        for (uint32_t i = 0; i < node->count; i++) {
          EdgeValue& edge = node->edges[i];
          if (!IsMarkedOrAllocated(edge)) {
            char msgbuf[1024];
            SprintfLiteral(
                msgbuf,
                "[barrier verifier] Unmarked edge: %s %p '%s' edge to %s %p",
                JS::GCTraceKindToAscii(node->thing.kind()),
                node->thing.asCell(), edge.label,
                JS::GCTraceKindToAscii(edge.thing.kind()), edge.thing.asCell());
            MOZ_ReportAssertionFailure(msgbuf, __FILE__, __LINE__);
            MOZ_CRASH();
          }
        }
      }

      node = NextNode(node);
    }
  }

  marker.reset();
  marker.stop();

  js_delete(trc);
}
406 
407 /*** Barrier Verifier Scheduling ***/
408 
verifyPreBarriers()409 void gc::GCRuntime::verifyPreBarriers() {
410   if (verifyPreData) {
411     endVerifyPreBarriers();
412   } else {
413     startVerifyPreBarriers();
414   }
415 }
416 
VerifyBarriers(JSRuntime * rt,VerifierType type)417 void gc::VerifyBarriers(JSRuntime* rt, VerifierType type) {
418   if (type == PreBarrierVerifier) {
419     rt->gc.verifyPreBarriers();
420   }
421 }
422 
/*
 * Called when the VerifierPre zeal mode is active. Starts a new verification
 * phase, first finishing any phase that has been running for zealFrequency
 * calls (or immediately, if |always| is set).
 */
void gc::GCRuntime::maybeVerifyPreBarriers(bool always) {
  if (!hasZealMode(ZealMode::VerifierPre)) {
    return;
  }

  // GC (and hence verification) is suppressed on this thread right now.
  if (rt->mainContextFromOwnThread()->suppressGC) {
    return;
  }

  if (verifyPreData) {
    // Let the current phase accumulate zealFrequency calls before ending it.
    if (++verifyPreData->count < zealFrequency && !always) {
      return;
    }

    endVerifyPreBarriers();
  }

  startVerifyPreBarriers();
}
442 
MaybeVerifyBarriers(JSContext * cx,bool always)443 void js::gc::MaybeVerifyBarriers(JSContext* cx, bool always) {
444   GCRuntime* gc = &cx->runtime()->gc;
445   gc->maybeVerifyPreBarriers(always);
446 }
447 
finishVerifier()448 void js::gc::GCRuntime::finishVerifier() {
449   if (verifyPreData) {
450     js_delete(verifyPreData.ref());
451     verifyPreData = nullptr;
452   }
453 }
454 
455 struct GCChunkHasher {
456   typedef gc::TenuredChunk* Lookup;
457 
458   /*
459    * Strip zeros for better distribution after multiplying by the golden
460    * ratio.
461    */
hashGCChunkHasher462   static HashNumber hash(gc::TenuredChunk* chunk) {
463     MOZ_ASSERT(!(uintptr_t(chunk) & gc::ChunkMask));
464     return HashNumber(uintptr_t(chunk) >> gc::ChunkShift);
465   }
466 
matchGCChunkHasher467   static bool match(gc::TenuredChunk* k, gc::TenuredChunk* l) {
468     MOZ_ASSERT(!(uintptr_t(k) & gc::ChunkMask));
469     MOZ_ASSERT(!(uintptr_t(l) & gc::ChunkMask));
470     return k == l;
471   }
472 };
473 
/*
 * Validates incremental marking by redoing the mark phase non-incrementally
 * and comparing the resulting mark bitmaps to the incremental ones.
 */
class js::gc::MarkingValidator {
 public:
  explicit MarkingValidator(GCRuntime* gc);
  // Perform the non-incremental mark and save its per-chunk mark bitmaps.
  void nonIncrementalMark(AutoGCSession& session);
  // Compare saved bitmaps against the current (incremental) mark state.
  void validate();

 private:
  GCRuntime* gc;
  bool initialized;  // Set once nonIncrementalMark has captured its state.

  // Per-chunk copies of the mark bitmaps from the non-incremental mark.
  using BitmapMap = HashMap<TenuredChunk*, UniquePtr<MarkBitmap>, GCChunkHasher,
                            SystemAllocPolicy>;
  BitmapMap map;
};
488 
// Starts uninitialized; nonIncrementalMark() sets |initialized| once it has
// captured enough state for validate() to run.
js::gc::MarkingValidator::MarkingValidator(GCRuntime* gc)
    : gc(gc), initialized(false) {}
491 
void js::gc::MarkingValidator::nonIncrementalMark(AutoGCSession& session) {
  /*
   * Perform a non-incremental mark for all collecting zones and record
   * the results for later comparison.
   *
   * Currently this does not validate gray marking.
   */

  JSRuntime* runtime = gc->rt;
  GCMarker* gcmarker = &gc->marker;

  MOZ_ASSERT(!gcmarker->isWeakMarking());

  /* Wait for off-thread parsing which can allocate. */
  WaitForAllHelperThreads();

  gc->waitBackgroundAllocEnd();
  gc->waitBackgroundSweepEnd();

  /* Save existing mark bits. */
  // Any early return before |initialized| is set below leaves validate() a
  // no-op, which is the intended OOM behavior.
  {
    AutoLockGC lock(gc);
    for (auto chunk = gc->allNonEmptyChunks(lock); !chunk.done();
         chunk.next()) {
      MarkBitmap* bitmap = &chunk->markBits;
      auto entry = MakeUnique<MarkBitmap>();
      if (!entry) {
        return;
      }

      memcpy((void*)entry->bitmap, (void*)bitmap->bitmap,
             sizeof(bitmap->bitmap));

      if (!map.putNew(chunk, std::move(entry))) {
        return;
      }
    }
  }

  /*
   * Temporarily clear the weakmaps' mark flags for the compartments we are
   * collecting.
   */

  WeakMapColors markedWeakMaps;

  /*
   * For saving, smush all of the keys into one big table and split them back
   * up into per-zone tables when restoring.
   */
  gc::EphemeronEdgeTable savedEphemeronEdges(
      SystemAllocPolicy(), runtime->randomHashCodeScrambler());
  if (!savedEphemeronEdges.init()) {
    return;
  }

  for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
    if (!WeakMapBase::saveZoneMarkedWeakMaps(zone, markedWeakMaps)) {
      return;
    }

    // Once entries have been moved out of a zone's table, failing to finish
    // would lose them, so OOM is fatal from here on.
    AutoEnterOOMUnsafeRegion oomUnsafe;
    for (gc::EphemeronEdgeTable::Range r = zone->gcEphemeronEdges().all();
         !r.empty(); r.popFront()) {
      MOZ_ASSERT(r.front().key->asTenured().zone() == zone);
      if (!savedEphemeronEdges.put(r.front().key, std::move(r.front().value))) {
        oomUnsafe.crash("saving weak keys table for validator");
      }
    }

    if (!zone->gcEphemeronEdges().clear()) {
      oomUnsafe.crash("clearing weak keys table for validator");
    }
  }

  /*
   * After this point, the function should run to completion, so we shouldn't
   * do anything fallible.
   */
  initialized = true;

  /* Re-do all the marking, but non-incrementally. */
  js::gc::State state = gc->incrementalState;
  gc->incrementalState = State::MarkRoots;

  {
    gcstats::AutoPhase ap(gc->stats(), gcstats::PhaseKind::PREPARE);

    {
      gcstats::AutoPhase ap(gc->stats(), gcstats::PhaseKind::UNMARK);

      for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
        WeakMapBase::unmarkZone(zone);
      }

      MOZ_ASSERT(gcmarker->isDrained());
      gcmarker->reset();

      // Clear all mark bits so the non-incremental mark starts from scratch.
      AutoLockGC lock(gc);
      for (auto chunk = gc->allNonEmptyChunks(lock); !chunk.done();
           chunk.next()) {
        chunk->markBits.clear();
      }
    }
  }

  {
    gcstats::AutoPhase ap(gc->stats(), gcstats::PhaseKind::MARK);

    gc->traceRuntimeForMajorGC(gcmarker, session);

    gc->incrementalState = State::Mark;
    gc->drainMarkStack();
  }

  gc->incrementalState = State::Sweep;
  {
    gcstats::AutoPhase ap1(gc->stats(), gcstats::PhaseKind::SWEEP);
    gcstats::AutoPhase ap2(gc->stats(), gcstats::PhaseKind::SWEEP_MARK);

    gc->markAllWeakReferences();

    /* Update zone state for gray marking. */
    for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
      zone->changeGCState(Zone::MarkBlackOnly, Zone::MarkBlackAndGray);
    }

    AutoSetMarkColor setColorGray(*gcmarker, MarkColor::Gray);
    gcmarker->setMainStackColor(MarkColor::Gray);

    gc->markAllGrayReferences(gcstats::PhaseKind::SWEEP_MARK_GRAY);
    gc->markAllWeakReferences();
    gc->marker.setMainStackColor(MarkColor::Black);

    /* Restore zone state. */
    for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
      zone->changeGCState(Zone::MarkBlackAndGray, Zone::MarkBlackOnly);
    }
    MOZ_ASSERT(gc->marker.isDrained());
  }

  /* Take a copy of the non-incremental mark state and restore the original. */
  {
    AutoLockGC lock(gc);
    for (auto chunk = gc->allNonEmptyChunks(lock); !chunk.done();
         chunk.next()) {
      MarkBitmap* bitmap = &chunk->markBits;
      auto ptr = map.lookup(chunk);
      MOZ_RELEASE_ASSERT(ptr, "Chunk not found in map");
      MarkBitmap* entry = ptr->value().get();
      // Swap word-by-word: the saved map ends up holding the
      // non-incremental results, the chunk gets its original bits back.
      for (size_t i = 0; i < MarkBitmap::WordCount; i++) {
        uintptr_t v = entry->bitmap[i];
        entry->bitmap[i] = uintptr_t(bitmap->bitmap[i]);
        bitmap->bitmap[i] = v;
      }
    }
  }

  // Undo the weak-map/ephemeron changes made for the non-incremental mark.
  for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
    WeakMapBase::unmarkZone(zone);
    AutoEnterOOMUnsafeRegion oomUnsafe;
    if (!zone->gcEphemeronEdges().clear()) {
      oomUnsafe.crash("clearing weak keys table for validator");
    }
  }

  WeakMapBase::restoreMarkedWeakMaps(markedWeakMaps);

  // Split the saved ephemeron edges back into their per-zone tables.
  for (gc::EphemeronEdgeTable::Range r = savedEphemeronEdges.all(); !r.empty();
       r.popFront()) {
    AutoEnterOOMUnsafeRegion oomUnsafe;
    Zone* zone = r.front().key->asTenured().zone();
    if (!zone->gcEphemeronEdges().put(r.front().key,
                                      std::move(r.front().value))) {
      oomUnsafe.crash("restoring weak keys table for validator");
    }
  }

  gc->incrementalState = state;
}
672 
void js::gc::MarkingValidator::validate() {
  /*
   * Validates the incremental marking for a single compartment by comparing
   * the mark bits to those previously recorded for a non-incremental mark.
   */

  if (!initialized) {
    // nonIncrementalMark bailed out early (e.g. OOM); nothing to compare.
    return;
  }

  MOZ_ASSERT(!gc->marker.isWeakMarking());

  gc->waitBackgroundSweepEnd();

  AutoLockGC lock(gc->rt);
  for (auto chunk = gc->allNonEmptyChunks(lock); !chunk.done(); chunk.next()) {
    BitmapMap::Ptr ptr = map.lookup(chunk);
    if (!ptr) {
      continue; /* Allocated after we did the non-incremental mark. */
    }

    MarkBitmap* bitmap = ptr->value().get();  // non-incremental results
    MarkBitmap* incBitmap = &chunk->markBits;  // incremental results

    for (size_t i = 0; i < ArenasPerChunk; i++) {
      // Skip arenas whose backing pages are decommitted.
      if (chunk->decommittedPages[chunk->pageIndex(i)]) {
        continue;
      }
      Arena* arena = &chunk->arenas[i];
      if (!arena->allocated()) {
        continue;
      }
      if (!arena->zone->isGCSweeping()) {
        continue;
      }

      // Walk every cell slot in the arena.
      AllocKind kind = arena->getAllocKind();
      uintptr_t thing = arena->thingsStart();
      uintptr_t end = arena->thingsEnd();
      while (thing < end) {
        auto* cell = reinterpret_cast<TenuredCell*>(thing);

        /*
         * If a non-incremental GC wouldn't have collected a cell, then
         * an incremental GC won't collect it.
         */
        if (bitmap->isMarkedAny(cell)) {
          MOZ_RELEASE_ASSERT(incBitmap->isMarkedAny(cell));
        }

        /*
         * If the cycle collector isn't allowed to collect an object
         * after a non-incremental GC has run, then it isn't allowed to
         * collected it after an incremental GC.
         */
        if (!bitmap->isMarkedGray(cell)) {
          MOZ_RELEASE_ASSERT(!incBitmap->isMarkedGray(cell));
        }

        thing += Arena::thingSize(kind);
      }
    }
  }
}
737 
// If the IncrementalMarkingValidator zeal mode is active for an incremental
// collection, create a validator and perform the non-incremental mark now so
// validateIncrementalMarking() can compare against it later.
void GCRuntime::computeNonIncrementalMarkingForValidation(
    AutoGCSession& session) {
  MOZ_ASSERT(!markingValidator);
  if (isIncremental && hasZealMode(ZealMode::IncrementalMarkingValidator)) {
    markingValidator = js_new<MarkingValidator>(this);
  }
  // js_new can fail; on OOM validation is silently skipped.
  if (markingValidator) {
    markingValidator->nonIncrementalMark(session);
  }
}
748 
validateIncrementalMarking()749 void GCRuntime::validateIncrementalMarking() {
750   if (markingValidator) {
751     markingValidator->validate();
752   }
753 }
754 
// Free the marking validator, if any. js_delete of nullptr is a no-op, so
// this is safe when no validator was created.
void GCRuntime::finishMarkingValidation() {
  js_delete(markingValidator.ref());
  markingValidator = nullptr;
}
759 
760 #endif /* JS_GC_ZEAL */
761 
762 #if defined(JS_GC_ZEAL) || defined(DEBUG)
763 
/*
 * Base class for whole-heap tracers that run a per-cell check on everything
 * reachable and can report the path from a root to any failing cell.
 */
class HeapCheckTracerBase : public JS::CallbackTracer {
 public:
  explicit HeapCheckTracerBase(JSRuntime* rt, JS::TraceOptions options);
  // Trace the whole runtime, invoking checkCell on every cell encountered.
  // Returns false if the traversal hit OOM.
  bool traceHeap(AutoTraceSession& session);
  // Per-cell check implemented by subclasses.
  virtual void checkCell(Cell* cell) = 0;

 protected:
  void dumpCellInfo(Cell* cell);
  void dumpCellPath();

  // The cell currently being traced from, or nullptr at a root.
  Cell* parentCell() {
    return parentIndex == -1 ? nullptr : stack[parentIndex].thing.asCell();
  }

  size_t failures;

 private:
  void onChild(const JS::GCCellPtr& thing) override;

  // A cell discovered during tracing, linked to the item it was reached
  // from so failure paths can be reconstructed by dumpCellPath.
  struct WorkItem {
    WorkItem(JS::GCCellPtr thing, const char* name, int parentIndex)
        : thing(thing),
          name(name),
          parentIndex(parentIndex),
          processed(false) {}

    JS::GCCellPtr thing;
    const char* name;
    int parentIndex;
    bool processed;
  };

  JSRuntime* rt;
  bool oom;
  HashSet<Cell*, DefaultHasher<Cell*>, SystemAllocPolicy> visited;
  Vector<WorkItem, 0, SystemAllocPolicy> stack;
  int parentIndex;  // Index into |stack| of the item being traced, or -1.
};
802 
// parentIndex starts at -1: cells reached before any work item is processed
// are root edges (see parentCell).
HeapCheckTracerBase::HeapCheckTracerBase(JSRuntime* rt,
                                         JS::TraceOptions options)
    : CallbackTracer(rt, JS::TracerKind::Callback, options),
      failures(0),
      rt(rt),
      oom(false),
      parentIndex(-1) {}
810 
void HeapCheckTracerBase::onChild(const JS::GCCellPtr& thing) {
  Cell* cell = thing.asCell();
  // Check every edge target, even ones we decide not to trace into below.
  checkCell(cell);

  if (visited.lookup(cell)) {
    return;
  }

  if (!visited.put(cell)) {
    oom = true;
    return;
  }

  // Don't trace into GC things owned by another runtime.
  if (cell->runtimeFromAnyThread() != rt) {
    return;
  }

  // Don't trace into GC in zones being used by helper threads.
  Zone* zone = thing.asCell()->zone();
  if (zone->usedByHelperThread()) {
    return;
  }

  // Queue the cell for later traversal, remembering how we reached it.
  WorkItem item(thing, context().name(), parentIndex);
  if (!stack.append(item)) {
    oom = true;
  }
}
840 
bool HeapCheckTracerBase::traceHeap(AutoTraceSession& session) {
  // The analysis thinks that traceRuntime might GC by calling a GC callback.
  JS::AutoSuppressGCAnalysis nogc;
  if (!rt->isBeingDestroyed()) {
    rt->gc.traceRuntime(this, session);
  }

  // Iterative depth-first traversal: each item is visited twice, once to
  // trace its children (which may push new items) and once to pop it.
  while (!stack.empty() && !oom) {
    // Copy the item by value: TraceChildren calls onChild, which may append
    // to |stack| and invalidate references into it.
    WorkItem item = stack.back();
    if (item.processed) {
      stack.popBack();
    } else {
      parentIndex = stack.length() - 1;
      stack.back().processed = true;
      TraceChildren(this, item.thing);
    }
  }

  return !oom;
}
861 
dumpCellInfo(Cell * cell)862 void HeapCheckTracerBase::dumpCellInfo(Cell* cell) {
863   auto kind = cell->getTraceKind();
864   JSObject* obj =
865       kind == JS::TraceKind::Object ? static_cast<JSObject*>(cell) : nullptr;
866 
867   fprintf(stderr, "%s %s", cell->color().name(), GCTraceKindToAscii(kind));
868   if (obj) {
869     fprintf(stderr, " %s", obj->getClass()->name);
870   }
871   fprintf(stderr, " %p", cell);
872   if (obj) {
873     fprintf(stderr, " (compartment %p)", obj->compartment());
874   }
875 }
876 
dumpCellPath()877 void HeapCheckTracerBase::dumpCellPath() {
878   const char* name = context().name();
879   for (int index = parentIndex; index != -1; index = stack[index].parentIndex) {
880     const WorkItem& parent = stack[index];
881     Cell* cell = parent.thing.asCell();
882     fprintf(stderr, "  from ");
883     dumpCellInfo(cell);
884     fprintf(stderr, " %s edge\n", name);
885     name = parent.name;
886   }
887   fprintf(stderr, "  from root %s\n", name);
888 }
889 
/*
 * Heap tracer that checks every reachable cell pointer is valid: after a
 * moving GC nothing may point at a relocated cell; after a non-moving GC no
 * cell may be forwarded at all.
 */
class CheckHeapTracer final : public HeapCheckTracerBase {
 public:
  enum GCType { Moving, NonMoving };

  explicit CheckHeapTracer(JSRuntime* rt, GCType type);
  void check(AutoTraceSession& session);

 private:
  void checkCell(Cell* cell) override;
  GCType gcType;
};
901 
// Weak map keys and values are traced too, so the pointer check covers them.
CheckHeapTracer::CheckHeapTracer(JSRuntime* rt, GCType type)
    : HeapCheckTracerBase(rt, JS::WeakMapTraceAction::TraceKeysAndValues),
      gcType(type) {}
905 
IsValidGCThingPointer(Cell * cell)906 inline static bool IsValidGCThingPointer(Cell* cell) {
907   return (uintptr_t(cell) & CellAlignMask) == 0;
908 }
909 
checkCell(Cell * cell)910 void CheckHeapTracer::checkCell(Cell* cell) {
911   // Moving
912   if (!IsValidGCThingPointer(cell) ||
913       ((gcType == GCType::Moving) && !IsGCThingValidAfterMovingGC(cell)) ||
914       ((gcType == GCType::NonMoving) && cell->isForwarded())) {
915     failures++;
916     fprintf(stderr, "Bad pointer %p\n", cell);
917     dumpCellPath();
918   }
919 }
920 
check(AutoTraceSession & session)921 void CheckHeapTracer::check(AutoTraceSession& session) {
922   if (!traceHeap(session)) {
923     return;
924   }
925 
926   if (failures) {
927     fprintf(stderr, "Heap check: %zu failure(s)\n", failures);
928   }
929   MOZ_RELEASE_ASSERT(failures == 0);
930 }
931 
CheckHeapAfterGC(JSRuntime * rt)932 void js::gc::CheckHeapAfterGC(JSRuntime* rt) {
933   AutoTraceSession session(rt);
934   CheckHeapTracer::GCType gcType;
935 
936   if (rt->gc.nursery().isEmpty()) {
937     gcType = CheckHeapTracer::GCType::Moving;
938   } else {
939     gcType = CheckHeapTracer::GCType::NonMoving;
940   }
941 
942   CheckHeapTracer tracer(rt, gcType);
943   tracer.check(session);
944 }
945 
// Heap tracer that checks the gray marking invariant: a black cell must not
// reach a gray cell through a strong edge.
class CheckGrayMarkingTracer final : public HeapCheckTracerBase {
 public:
  explicit CheckGrayMarkingTracer(JSRuntime* rt);
  bool check(AutoTraceSession& session);

 private:
  void checkCell(Cell* cell) override;
};
954 
// Skip weak maps and weak edges entirely: the invariant only applies to
// strong edges.
CheckGrayMarkingTracer::CheckGrayMarkingTracer(JSRuntime* rt)
    : HeapCheckTracerBase(rt, JS::TraceOptions(JS::WeakMapTraceAction::Skip,
                                               JS::WeakEdgeTraceAction::Skip)) {
  // Weak gray->black edges are allowed.
}
960 
void CheckGrayMarkingTracer::checkCell(Cell* cell) {
  Cell* parent = parentCell();
  if (!parent) {
    // No parent means this is a root edge; there is no source cell to check.
    return;
  }

  // Report any strong edge from a black cell to a gray cell.
  if (parent->isMarkedBlack() && cell->isMarkedGray()) {
    failures++;

    fprintf(stderr, "Found black to gray edge to ");
    dumpCellInfo(cell);
    fprintf(stderr, "\n");
    dumpCellPath();

#  ifdef DEBUG
    // In debug builds, dump both endpoint objects for easier diagnosis.
    if (parent->is<JSObject>()) {
      fprintf(stderr, "\nSource: ");
      DumpObject(parent->as<JSObject>(), stderr);
    }
    if (cell->is<JSObject>()) {
      fprintf(stderr, "\nTarget: ");
      DumpObject(cell->as<JSObject>(), stderr);
    }
#  endif
  }
}
987 
// Returns true if no black->gray edges were found. OOM during the traversal
// is deliberately not reported as a failure.
bool CheckGrayMarkingTracer::check(AutoTraceSession& session) {
  if (!traceHeap(session)) {
    return true;  // Ignore failure.
  }

  return failures == 0;
}
995 
// Public entry point: verify the black/gray marking invariant across the
// whole runtime. Must not be called during collection.
JS_PUBLIC_API bool js::CheckGrayMarkingState(JSRuntime* rt) {
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
  MOZ_ASSERT(!rt->gc.isIncrementalGCInProgress());
  // If the gray bits are not currently valid there is nothing meaningful to
  // check, so report success.
  if (!rt->gc.areGrayBitsValid()) {
    return true;
  }

  gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::TRACE_HEAP);
  AutoTraceSession session(rt);
  CheckGrayMarkingTracer tracer(rt);

  return tracer.check(session);
}
1009 
MaybeGetDelegate(Cell * cell)1010 static JSObject* MaybeGetDelegate(Cell* cell) {
1011   if (!cell->is<JSObject>()) {
1012     return nullptr;
1013   }
1014 
1015   JSObject* object = cell->as<JSObject>();
1016   return js::UncheckedUnwrapWithoutExpose(object);
1017 }
1018 
// Check the marking invariants for a single weak map entry while its zone is
// being marked.
//
// Verified here:
//  - (asserts) the map's zone is accessible and marking, and the key/value
//    live in permitted zones;
//  - the object owning the map (if any) is marked the same color as the map;
//  - the value is marked at least as much as min(map color, key color);
//  - the key is marked at least as much as min(map color, delegate color),
//    when the key has a delegate.
//
// Returns true if all checks pass; otherwise prints diagnostics to stderr
// and returns false.
bool js::gc::CheckWeakMapEntryMarking(const WeakMapBase* map, Cell* key,
                                      Cell* value) {
  bool ok = true;

  Zone* zone = map->zone();
  MOZ_ASSERT(CurrentThreadCanAccessZone(zone));
  MOZ_ASSERT(zone->isGCMarking());

  JSObject* object = map->memberOf;
  MOZ_ASSERT_IF(object, object->zone() == zone);

  // Debugger weak maps can have keys in different zones.
  Zone* keyZone = key->zoneFromAnyThread();
  MOZ_ASSERT_IF(!map->allowKeysInOtherZones(),
                keyZone == zone || keyZone->isAtomsZone());

  Zone* valueZone = value->zoneFromAnyThread();
  MOZ_ASSERT(valueZone == zone || valueZone->isAtomsZone());

  // The object owning the map must be marked the same color as the map.
  if (object && object->color() != map->mapColor) {
    fprintf(stderr, "WeakMap object is marked differently to the map\n");
    fprintf(stderr, "(map %p is %s, object %p is %s)\n", map,
            map->mapColor.name(), object, object->color().name());
    ok = false;
  }

  // Values belonging to other runtimes or in uncollected zones are treated as
  // black.
  JSRuntime* mapRuntime = zone->runtimeFromAnyThread();
  auto effectiveColor = [=](Cell* cell, Zone* cellZone) -> CellColor {
    if (cell->runtimeFromAnyThread() != mapRuntime) {
      return CellColor::Black;
    }
    if (cellZone->isGCMarkingOrSweeping()) {
      return cell->color();
    }
    return CellColor::Black;
  };

  CellColor valueColor = effectiveColor(value, valueZone);
  CellColor keyColor = effectiveColor(key, keyZone);

  // The value must be marked at least as much as the lesser of the map's and
  // key's colors (CellColor orders colors by how marked they are).
  if (valueColor < std::min(map->mapColor, keyColor)) {
    fprintf(stderr, "WeakMap value is less marked than map and key\n");
    fprintf(stderr, "(map %p is %s, key %p is %s, value %p is %s)\n", map,
            map->mapColor.name(), key, keyColor.name(), value,
            valueColor.name());
#  ifdef DEBUG
    fprintf(stderr, "Key:\n");
    key->dump();
    if (auto delegate = MaybeGetDelegate(key); delegate) {
      fprintf(stderr, "Delegate:\n");
      delegate->dump();
    }
    fprintf(stderr, "Value:\n");
    value->dump();
#  endif

    ok = false;
  }

  // Keys without delegates have no further constraints.
  JSObject* delegate = MaybeGetDelegate(key);
  if (!delegate) {
    return ok;
  }

  // The key must be marked at least as much as the lesser of the map's and
  // delegate's colors.
  CellColor delegateColor = effectiveColor(delegate, delegate->zone());
  if (keyColor < std::min(map->mapColor, delegateColor)) {
    fprintf(stderr, "WeakMap key is less marked than map or delegate\n");
    fprintf(stderr, "(map %p is %s, delegate %p is %s, key %p is %s)\n", map,
            map->mapColor.name(), delegate, delegateColor.name(), key,
            keyColor.name());
    ok = false;
  }

  return ok;
}
1096 
1097 #endif  // defined(JS_GC_ZEAL) || defined(DEBUG)
1098 
1099 #ifdef DEBUG
1100 // Return whether an arbitrary pointer is within a cell with the given
1101 // traceKind. Only for assertions.
isPointerWithinTenuredCell(void * ptr,JS::TraceKind traceKind)1102 bool GCRuntime::isPointerWithinTenuredCell(void* ptr, JS::TraceKind traceKind) {
1103   AutoLockGC lock(this);
1104   for (auto chunk = allNonEmptyChunks(lock); !chunk.done(); chunk.next()) {
1105     MOZ_ASSERT(!chunk->isNurseryChunk());
1106     if (ptr >= &chunk->arenas[0] && ptr < &chunk->arenas[ArenasPerChunk]) {
1107       auto* arena = reinterpret_cast<Arena*>(uintptr_t(ptr) & ~ArenaMask);
1108       if (!arena->allocated()) {
1109         return false;
1110       }
1111 
1112       return MapAllocToTraceKind(arena->getAllocKind()) == traceKind;
1113     }
1114   }
1115 
1116   return false;
1117 }
1118 #endif  // DEBUG
1119