1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2  * vim: set ts=8 sts=4 et sw=4 tw=99:
3  * This Source Code Form is subject to the terms of the Mozilla Public
4  * License, v. 2.0. If a copy of the MPL was not distributed with this
5  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 
7 #include "gc/Marking.h"
8 
9 #include "mozilla/DebugOnly.h"
10 #include "mozilla/IntegerRange.h"
11 #include "mozilla/ReentrancyGuard.h"
12 #include "mozilla/ScopeExit.h"
13 #include "mozilla/TypeTraits.h"
14 
15 #include "jsgc.h"
16 #include "jsprf.h"
17 
18 #include "builtin/ModuleObject.h"
19 #include "gc/GCInternals.h"
20 #include "jit/IonCode.h"
21 #include "js/SliceBudget.h"
22 #include "vm/ArgumentsObject.h"
23 #include "vm/ArrayObject.h"
24 #include "vm/ScopeObject.h"
25 #include "vm/Shape.h"
26 #include "vm/Symbol.h"
27 #include "vm/TypedArrayObject.h"
28 #include "vm/UnboxedObject.h"
29 
30 #include "jscompartmentinlines.h"
31 #include "jsgcinlines.h"
32 #include "jsobjinlines.h"
33 
34 #include "gc/Nursery-inl.h"
35 #include "vm/String-inl.h"
36 #include "vm/UnboxedObject-inl.h"
37 
38 using namespace js;
39 using namespace js::gc;
40 
41 using JS::MapTypeToTraceKind;
42 
43 using mozilla::ArrayLength;
44 using mozilla::DebugOnly;
45 using mozilla::IsBaseOf;
46 using mozilla::IsSame;
47 using mozilla::MakeRange;
48 using mozilla::PodCopy;
49 
50 // Tracing Overview
51 // ================
52 //
53 // Tracing, in this context, refers to an abstract visitation of some or all of
54 // the GC-controlled heap. The effect of tracing an edge of the graph depends
55 // on the subclass of the JSTracer on whose behalf we are tracing.
56 //
57 // Marking
58 // -------
59 //
60 // The primary JSTracer is the GCMarker. The marking tracer causes the target
61 // of each traversed edge to be marked black and the target's children to
62 // be marked either gray (in the gc algorithm sense) or immediately black.
63 //
64 // Callback
65 // --------
66 //
67 // The secondary JSTracer is the CallbackTracer. This simply invokes a callback
68 // on each edge in a child.
69 //
70 // The following is a rough outline of the general structure of the tracing
71 // internals.
72 //
73 //                                                                                              //
74 //   .---------.    .---------.    .--------------------------.       .----------.              //
75 //   |TraceEdge|    |TraceRoot|    |TraceManuallyBarrieredEdge|  ...  |TraceRange|   ... etc.   //
76 //   '---------'    '---------'    '--------------------------'       '----------'              //
77 //        \              \                        /                        /                    //
78 //         \              \  .----------------.  /                        /                     //
79 //          o------------->o-|DispatchToTracer|-o<-----------------------o                      //
80 //                           '----------------'                                                 //
81 //                              /          \                                                    //
82 //                             /            \                                                   //
83 //                       .---------.   .----------.         .-----------------.                 //
84 //                       |DoMarking|   |DoCallback|-------> |<JSTraceCallback>|----------->     //
85 //                       '---------'   '----------'         '-----------------'                 //
86 //                            |                                                                 //
87 //                            |                                                                 //
88 //                        .--------.                                                            //
89 //      o---------------->|traverse| .                                                          //
90 //     /_\                '--------'   ' .                                                      //
91 //      |                     .     .      ' .                                                  //
92 //      |                     .       .        ' .                                              //
93 //      |                     .         .          ' .                                          //
94 //      |             .-----------.    .-----------.   ' .     .--------------------.           //
95 //      |             |markAndScan|    |markAndPush|       ' - |markAndTraceChildren|---->      //
96 //      |             '-----------'    '-----------'           '--------------------'           //
97 //      |                   |                  \                                                //
98 //      |                   |                   \                                               //
99 //      |       .----------------------.     .----------------.                                 //
100 //      |       |T::eagerlyMarkChildren|     |pushMarkStackTop|<===Oo                           //
101 //      |       '----------------------'     '----------------'    ||                           //
102 //      |                  |                         ||            ||                           //
103 //      |                  |                         ||            ||                           //
104 //      |                  |                         ||            ||                           //
105 //      o<-----------------o<========================OO============Oo                           //
106 //                                                                                              //
107 //                                                                                              //
108 //   Legend:                                                                                    //
109 //     ------  Direct calls                                                                     //
110 //     . . .   Static dispatch                                                                  //
111 //     ======  Dispatch through a manual stack.                                                 //
112 //                                                                                              //
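//
// Example (an illustrative sketch only, not used by this file): a minimal
// CallbackTracer subclass that exercises the DoCallback path in the diagram
// above. It assumes only the public JS::CallbackTracer::onChild interface;
// the EdgeCounter name is hypothetical.
//
//     struct EdgeCounter : public JS::CallbackTracer {
//         size_t count;
//         explicit EdgeCounter(JSRuntime* rt) : JS::CallbackTracer(rt), count(0) {}
//         // Called once per traced edge; just count it.
//         void onChild(const JS::GCCellPtr& thing) override { count++; }
//     };
//
// Passing such a tracer to any of the Trace* entry points at the top of the
// diagram routes every visited edge through DoCallback and then onChild.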
113 
114 
115 /*** Tracing Invariants **************************************************************************/
116 
117 #if defined(DEBUG)
118 template<typename T>
119 static inline bool
120 IsThingPoisoned(T* thing)
121 {
122     const uint8_t poisonBytes[] = {
123         JS_FRESH_NURSERY_PATTERN,
124         JS_SWEPT_NURSERY_PATTERN,
125         JS_ALLOCATED_NURSERY_PATTERN,
126         JS_FRESH_TENURED_PATTERN,
127         JS_MOVED_TENURED_PATTERN,
128         JS_SWEPT_TENURED_PATTERN,
129         JS_ALLOCATED_TENURED_PATTERN,
130         JS_SWEPT_CODE_PATTERN
131     };
132     const int numPoisonBytes = sizeof(poisonBytes) / sizeof(poisonBytes[0]);
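    // The first words of a free thing can hold FreeSpan data rather than the
    // poison pattern, so examine the word just past a FreeSpan-sized prefix.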
133     uint32_t* p = reinterpret_cast<uint32_t*>(reinterpret_cast<FreeSpan*>(thing) + 1);
134     // Note: all free patterns are odd to make the common, not-poisoned case a single test.
135     if ((*p & 1) == 0)
136         return false;
137     for (int i = 0; i < numPoisonBytes; ++i) {
138         const uint8_t pb = poisonBytes[i];
139         const uint32_t pw = pb | (pb << 8) | (pb << 16) | (pb << 24);
140         if (*p == pw)
141             return true;
142     }
143     return false;
144 }
145 
146 static bool
147 IsMovingTracer(JSTracer *trc)
148 {
149     return trc->isCallbackTracer() &&
150            trc->asCallbackTracer()->getTracerKind() == JS::CallbackTracer::TracerKind::Moving;
151 }
152 #endif
153 
154 template <typename T> bool ThingIsPermanentAtomOrWellKnownSymbol(T* thing) { return false; }
155 template <> bool ThingIsPermanentAtomOrWellKnownSymbol<JSString>(JSString* str) {
156     return str->isPermanentAtom();
157 }
158 template <> bool ThingIsPermanentAtomOrWellKnownSymbol<JSFlatString>(JSFlatString* str) {
159     return str->isPermanentAtom();
160 }
161 template <> bool ThingIsPermanentAtomOrWellKnownSymbol<JSLinearString>(JSLinearString* str) {
162     return str->isPermanentAtom();
163 }
164 template <> bool ThingIsPermanentAtomOrWellKnownSymbol<JSAtom>(JSAtom* atom) {
165     return atom->isPermanent();
166 }
167 template <> bool ThingIsPermanentAtomOrWellKnownSymbol<PropertyName>(PropertyName* name) {
168     return name->isPermanent();
169 }
170 template <> bool ThingIsPermanentAtomOrWellKnownSymbol<JS::Symbol>(JS::Symbol* sym) {
171     return sym->isWellKnownSymbol();
172 }
173 
174 template <typename T>
175 static inline bool
176 IsOwnedByOtherRuntime(JSRuntime* rt, T thing)
177 {
178     bool other = thing->runtimeFromAnyThread() != rt;
179     MOZ_ASSERT_IF(other,
180                   ThingIsPermanentAtomOrWellKnownSymbol(thing) ||
181                   thing->zoneFromAnyThread()->isSelfHostingZone());
182     return other;
183 }
184 
185 template<typename T>
186 void
187 js::CheckTracedThing(JSTracer* trc, T* thing)
188 {
189 #ifdef DEBUG
190     MOZ_ASSERT(trc);
191     MOZ_ASSERT(thing);
192 
193     thing = MaybeForwarded(thing);
194 
195     /* This function uses data that's not available in the nursery. */
196     if (IsInsideNursery(thing))
197         return;
198 
199     MOZ_ASSERT_IF(!IsMovingTracer(trc) && !trc->isTenuringTracer(), !IsForwarded(thing));
200 
201     /*
202      * Permanent atoms and things in the self-hosting zone are not associated
203      * with this runtime, but will be ignored during marking.
204      */
205     if (IsOwnedByOtherRuntime(trc->runtime(), thing))
206         return;
207 
208     Zone* zone = thing->zoneFromAnyThread();
209     JSRuntime* rt = trc->runtime();
210 
211     MOZ_ASSERT_IF(!IsMovingTracer(trc), CurrentThreadCanAccessZone(zone));
212     MOZ_ASSERT_IF(!IsMovingTracer(trc), CurrentThreadCanAccessRuntime(rt));
213 
214     MOZ_ASSERT(zone->runtimeFromAnyThread() == trc->runtime());
215 
216     MOZ_ASSERT(thing->isAligned());
217     MOZ_ASSERT(MapTypeToTraceKind<typename mozilla::RemovePointer<T>::Type>::kind ==
218                thing->getTraceKind());
219 
220     /*
221      * Do not check IsMarkingTracer directly -- it should only be used in paths
222      * where we cannot be the gray buffering tracer.
223      */
224     bool isGcMarkingTracer = trc->isMarkingTracer();
225 
226     MOZ_ASSERT_IF(zone->requireGCTracer(), isGcMarkingTracer || IsBufferGrayRootsTracer(trc));
227 
228     if (isGcMarkingTracer) {
229         GCMarker* gcMarker = static_cast<GCMarker*>(trc);
230         MOZ_ASSERT_IF(gcMarker->shouldCheckCompartments(),
231                       zone->isCollecting() || zone->isAtomsZone());
232 
233         MOZ_ASSERT_IF(gcMarker->markColor() == GRAY,
234                       !zone->isGCMarkingBlack() || zone->isAtomsZone());
235 
236         MOZ_ASSERT(!(zone->isGCSweeping() || zone->isGCFinished() || zone->isGCCompacting()));
237     }
238 
239     /*
240      * Try to assert that the thing is allocated.  This is complicated by the
241      * fact that allocated things may still contain the poison pattern if that
242      * part has not been overwritten, and that the free span list head in the
243      * ArenaHeader may not be synced with the real one in ArenaLists.  Also,
244      * background sweeping may be running and concurrently modifying the free
245      * list.
246      */
247     MOZ_ASSERT_IF(IsThingPoisoned(thing) && rt->isHeapBusy() && !rt->gc.isBackgroundSweeping(),
248                   !InFreeList(thing->asTenured().arenaHeader(), thing));
249 #endif
250 }
251 
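// For tagged types (Value, jsid, TaggedProto), DispatchTyped extracts the GC
// thing, if any, and forwards it to the functor; VoidDefaultAdaptor supplies a
// no-op default when the tagged value holds nothing traceable.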
252 template <typename S>
253 struct CheckTracedFunctor : public VoidDefaultAdaptor<S> {
254     template <typename T> void operator()(T* t, JSTracer* trc) { CheckTracedThing(trc, t); }
255 };
256 
257 template<typename T>
258 void
259 js::CheckTracedThing(JSTracer* trc, T thing)
260 {
261     DispatchTyped(CheckTracedFunctor<T>(), thing, trc);
262 }
263 
264 namespace js {
265 #define IMPL_CHECK_TRACED_THING(_, type, __) \
266     template void CheckTracedThing<type>(JSTracer*, type*);
267 JS_FOR_EACH_TRACEKIND(IMPL_CHECK_TRACED_THING);
268 #undef IMPL_CHECK_TRACED_THING
269 } // namespace js
270 
271 static bool
272 ShouldMarkCrossCompartment(JSTracer* trc, JSObject* src, Cell* cell)
273 {
274     if (!trc->isMarkingTracer())
275         return true;
276 
277     uint32_t color = static_cast<GCMarker*>(trc)->markColor();
278     MOZ_ASSERT(color == BLACK || color == GRAY);
279 
280     if (!cell->isTenured()) {
281         MOZ_ASSERT(color == BLACK);
282         return false;
283     }
284     TenuredCell& tenured = cell->asTenured();
285 
286     JS::Zone* zone = tenured.zone();
287     if (color == BLACK) {
288         /*
289          * Having black->gray edges violates our promise to the cycle
290          * collector. This can happen if we're collecting a compartment and it
291          * has an edge to an uncollected compartment: it's possible that the
292          * source and destination of the cross-compartment edge should be gray,
293          * but the source was marked black by the conservative scanner.
294          */
295         if (tenured.isMarked(GRAY)) {
296             MOZ_ASSERT(!zone->isCollecting());
297             trc->runtime()->gc.setFoundBlackGrayEdges();
298         }
299         return zone->isGCMarking();
300     } else {
301         if (zone->isGCMarkingBlack()) {
302             /*
303              * The destination compartment is not being marked gray now,
304              * but it will be later, so record the cell so it can be marked gray
305              * at the appropriate time.
306              */
307             if (!tenured.isMarked())
308                 DelayCrossCompartmentGrayMarking(src);
309             return false;
310         }
311         return zone->isGCMarkingGray();
312     }
313 }
314 
315 static bool
316 ShouldMarkCrossCompartment(JSTracer* trc, JSObject* src, Value val)
317 {
318     return val.isMarkable() && ShouldMarkCrossCompartment(trc, src, (Cell*)val.toGCThing());
319 }
320 
321 static void
322 AssertZoneIsMarking(Cell* thing)
323 {
324     MOZ_ASSERT(TenuredCell::fromPointer(thing)->zone()->isGCMarking());
325 }
326 
327 static void
328 AssertZoneIsMarking(JSString* str)
329 {
330 #ifdef DEBUG
331     Zone* zone = TenuredCell::fromPointer(str)->zone();
332     MOZ_ASSERT(zone->isGCMarking() || zone->isAtomsZone());
333 #endif
334 }
335 
336 static void
337 AssertZoneIsMarking(JS::Symbol* sym)
338 {
339 #ifdef DEBUG
340     Zone* zone = TenuredCell::fromPointer(sym)->zone();
341     MOZ_ASSERT(zone->isGCMarking() || zone->isAtomsZone());
342 #endif
343 }
344 
345 static void
346 AssertRootMarkingPhase(JSTracer* trc)
347 {
348     MOZ_ASSERT_IF(trc->isMarkingTracer(),
349                   trc->runtime()->gc.state() == NO_INCREMENTAL ||
350                   trc->runtime()->gc.state() == MARK_ROOTS);
351 }
352 
353 
354 /*** Tracing Interface ***************************************************************************/
355 
356 // The second parameter to BaseGCType is derived automatically based on T. The
357 // relation here is that for any T, the TraceKind will automatically,
358 // statically select the correct Cell layout for marking. Below, we instantiate
359 // each override with a declaration of the most derived layout type.
360 //
361 // Usage:
362 //   BaseGCType<T>::type
363 //
364 // Examples:
365 //   BaseGCType<JSFunction>::type => JSObject
366 //   BaseGCType<UnownedBaseShape>::type => BaseShape
367 //   etc.
368 template <typename T,
369           JS::TraceKind = IsBaseOf<JSObject, T>::value     ? JS::TraceKind::Object
370                         : IsBaseOf<JSString, T>::value     ? JS::TraceKind::String
371                         : IsBaseOf<JS::Symbol, T>::value   ? JS::TraceKind::Symbol
372                         : IsBaseOf<JSScript, T>::value     ? JS::TraceKind::Script
373                         : IsBaseOf<Shape, T>::value        ? JS::TraceKind::Shape
374                         : IsBaseOf<BaseShape, T>::value    ? JS::TraceKind::BaseShape
375                         : IsBaseOf<jit::JitCode, T>::value ? JS::TraceKind::JitCode
376                         : IsBaseOf<LazyScript, T>::value   ? JS::TraceKind::LazyScript
377                         :                                    JS::TraceKind::ObjectGroup>
378 struct BaseGCType;
379 #define IMPL_BASE_GC_TYPE(name, type_, _) \
380     template <typename T> struct BaseGCType<T, JS::TraceKind:: name> { typedef type_ type; };
381 JS_FOR_EACH_TRACEKIND(IMPL_BASE_GC_TYPE);
382 #undef IMPL_BASE_GC_TYPE
383 
384 // Our barrier templates are parameterized on the pointer types so that we can
385 // share the definitions with Value and jsid. Thus, we need to strip the
386 // pointer before sending the type to BaseGCType and re-add it on the other
387 // side. As such:
388 template <typename T> struct PtrBaseGCType { typedef T type; };
389 template <typename T> struct PtrBaseGCType<T*> { typedef typename BaseGCType<T>::type* type; };
390 
391 template <typename T>
392 typename PtrBaseGCType<T>::type*
393 ConvertToBase(T* thingp)
394 {
395     return reinterpret_cast<typename PtrBaseGCType<T>::type*>(thingp);
396 }
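// For illustration, the mapping can be spot-checked against the examples in
// the BaseGCType comment above (a sketch; these asserts are not compiled here):
//
//     static_assert(mozilla::IsSame<BaseGCType<JSFunction>::type, JSObject>::value,
//                   "derived object types collapse to the JSObject layout");
//     static_assert(mozilla::IsSame<PtrBaseGCType<JSFunction*>::type, JSObject*>::value,
//                   "pointer types are stripped and re-added around BaseGCType");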
397 
398 template <typename T> void DispatchToTracer(JSTracer* trc, T* thingp, const char* name);
399 template <typename T> T DoCallback(JS::CallbackTracer* trc, T* thingp, const char* name);
400 template <typename T> void DoMarking(GCMarker* gcmarker, T* thing);
401 template <typename T> void DoMarking(GCMarker* gcmarker, T thing);
402 template <typename T> void NoteWeakEdge(GCMarker* gcmarker, T** thingp);
403 template <typename T> void NoteWeakEdge(GCMarker* gcmarker, T* thingp);
404 
405 template <typename T>
406 void
407 js::TraceEdge(JSTracer* trc, WriteBarrieredBase<T>* thingp, const char* name)
408 {
409     DispatchToTracer(trc, ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
410 }
411 
412 template <typename T>
413 JS_PUBLIC_API(void)
414 JS::TraceEdge(JSTracer* trc, JS::Heap<T>* thingp, const char* name)
415 {
416     DispatchToTracer(trc, ConvertToBase(thingp->unsafeGet()), name);
417 }
418 
419 template <typename T>
420 void
421 js::TraceManuallyBarrieredEdge(JSTracer* trc, T* thingp, const char* name)
422 {
423     DispatchToTracer(trc, ConvertToBase(thingp), name);
424 }
425 
426 template <typename T>
427 void
428 js::TraceWeakEdge(JSTracer* trc, WeakRef<T>* thingp, const char* name)
429 {
430     // Non-marking tracers treat the edge strongly.
431     if (!trc->isMarkingTracer())
432         return DispatchToTracer(trc, ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
433 
434     NoteWeakEdge(static_cast<GCMarker*>(trc),
435                  ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
436 }
437 
438 template <typename T>
439 void
440 js::TraceRoot(JSTracer* trc, T* thingp, const char* name)
441 {
442     AssertRootMarkingPhase(trc);
443     DispatchToTracer(trc, ConvertToBase(thingp), name);
444 }
445 
446 template <typename T>
447 void
448 js::TraceRoot(JSTracer* trc, ReadBarriered<T>* thingp, const char* name)
449 {
450     TraceRoot(trc, thingp->unsafeGet(), name);
451 }
452 
453 template <typename T>
454 void
455 js::TraceNullableRoot(JSTracer* trc, T* thingp, const char* name)
456 {
457     AssertRootMarkingPhase(trc);
458     if (InternalGCMethods<T>::isMarkableTaggedPointer(*thingp))
459         DispatchToTracer(trc, ConvertToBase(thingp), name);
460 }
461 
462 template <typename T>
463 void
464 js::TraceNullableRoot(JSTracer* trc, ReadBarriered<T>* thingp, const char* name)
465 {
466     TraceNullableRoot(trc, thingp->unsafeGet(), name);
467 }
468 
469 template <typename T>
470 void
471 js::TraceRange(JSTracer* trc, size_t len, WriteBarrieredBase<T>* vec, const char* name)
472 {
473     JS::AutoTracingIndex index(trc);
474     for (auto i : MakeRange(len)) {
475         if (InternalGCMethods<T>::isMarkable(vec[i].get()))
476             DispatchToTracer(trc, ConvertToBase(vec[i].unsafeUnbarrieredForTracing()), name);
477         ++index;
478     }
479 }
480 
481 template <typename T>
482 void
483 js::TraceRootRange(JSTracer* trc, size_t len, T* vec, const char* name)
484 {
485     AssertRootMarkingPhase(trc);
486     JS::AutoTracingIndex index(trc);
487     for (auto i : MakeRange(len)) {
488         if (InternalGCMethods<T>::isMarkable(vec[i]))
489             DispatchToTracer(trc, ConvertToBase(&vec[i]), name);
490         ++index;
491     }
492 }
493 
494 // Instantiate a copy of the Tracing templates for each derived type.
495 #define INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS(type) \
496     template void js::TraceEdge<type>(JSTracer*, WriteBarrieredBase<type>*, const char*); \
497     template JS_PUBLIC_API(void) JS::TraceEdge<type>(JSTracer*, JS::Heap<type>*, const char*); \
498     template void js::TraceManuallyBarrieredEdge<type>(JSTracer*, type*, const char*); \
499     template void js::TraceWeakEdge<type>(JSTracer*, WeakRef<type>*, const char*); \
500     template void js::TraceRoot<type>(JSTracer*, type*, const char*); \
501     template void js::TraceRoot<type>(JSTracer*, ReadBarriered<type>*, const char*); \
502     template void js::TraceNullableRoot<type>(JSTracer*, type*, const char*); \
503     template void js::TraceNullableRoot<type>(JSTracer*, ReadBarriered<type>*, const char*); \
504     template void js::TraceRange<type>(JSTracer*, size_t, WriteBarrieredBase<type>*, const char*); \
505     template void js::TraceRootRange<type>(JSTracer*, size_t, type*, const char*);
506 FOR_EACH_GC_POINTER_TYPE(INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS)
507 #undef INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS
508 
509 template <typename T>
510 void
511 js::TraceManuallyBarrieredCrossCompartmentEdge(JSTracer* trc, JSObject* src, T* dst,
512                                                const char* name)
513 {
514     if (ShouldMarkCrossCompartment(trc, src, *dst))
515         DispatchToTracer(trc, dst, name);
516 }
517 template void js::TraceManuallyBarrieredCrossCompartmentEdge<JSObject*>(JSTracer*, JSObject*,
518                                                                         JSObject**, const char*);
519 template void js::TraceManuallyBarrieredCrossCompartmentEdge<JSScript*>(JSTracer*, JSObject*,
520                                                                         JSScript**, const char*);
521 
522 template <typename T>
523 void
524 js::TraceCrossCompartmentEdge(JSTracer* trc, JSObject* src, WriteBarrieredBase<T>* dst,
525                               const char* name)
526 {
527     if (ShouldMarkCrossCompartment(trc, src, dst->get()))
528         DispatchToTracer(trc, dst->unsafeUnbarrieredForTracing(), name);
529 }
530 template void js::TraceCrossCompartmentEdge<Value>(JSTracer*, JSObject*,
531                                                    WriteBarrieredBase<Value>*, const char*);
532 
533 template <typename T>
534 void
535 js::TraceProcessGlobalRoot(JSTracer* trc, T* thing, const char* name)
536 {
537     AssertRootMarkingPhase(trc);
538     MOZ_ASSERT(ThingIsPermanentAtomOrWellKnownSymbol(thing));
539 
540     // We have to mark permanent atoms and well-known symbols through a special
541     // method because the default DoMarking implementation automatically skips
542     // them. Fortunately, atoms (permanent and non) cannot refer to other GC
543     // things so they do not need to go through the mark stack and may simply
544     // be marked directly.  Moreover, well-known symbols can refer only to
545     // permanent atoms, so likewise require no subsequent marking.
546     CheckTracedThing(trc, *ConvertToBase(&thing));
547     if (trc->isMarkingTracer())
548         thing->markIfUnmarked(gc::BLACK);
549     else
550         DoCallback(trc->asCallbackTracer(), ConvertToBase(&thing), name);
551 }
552 template void js::TraceProcessGlobalRoot<JSAtom>(JSTracer*, JSAtom*, const char*);
553 template void js::TraceProcessGlobalRoot<JS::Symbol>(JSTracer*, JS::Symbol*, const char*);
554 
555 // A typed functor adaptor for TraceRoot.
556 struct TraceRootFunctor {
557     template <typename T>
558     void operator()(JSTracer* trc, Cell** thingp, const char* name) {
559         TraceRoot(trc, reinterpret_cast<T**>(thingp), name);
560     }
561 };
562 
563 void
564 js::TraceGenericPointerRoot(JSTracer* trc, Cell** thingp, const char* name)
565 {
566     MOZ_ASSERT(thingp);
567     if (!*thingp)
568         return;
569     TraceRootFunctor f;
570     DispatchTraceKindTyped(f, (*thingp)->getTraceKind(), trc, thingp, name);
571 }
572 
573 // A typed functor adaptor for TraceManuallyBarrieredEdge.
574 struct TraceManuallyBarrieredEdgeFunctor {
575     template <typename T>
576     void operator()(JSTracer* trc, Cell** thingp, const char* name) {
577         TraceManuallyBarrieredEdge(trc, reinterpret_cast<T**>(thingp), name);
578     }
579 };
580 
581 void
582 js::TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, Cell** thingp, const char* name)
583 {
584     MOZ_ASSERT(thingp);
585     if (!*thingp)
586         return;
587     TraceManuallyBarrieredEdgeFunctor f;
588     DispatchTraceKindTyped(f, (*thingp)->getTraceKind(), trc, thingp, name);
589 }
590 
591 // This method is responsible for dynamic dispatch to the real tracer
592 // implementation. Consider replacing this choke point with virtual dispatch:
593 // a sufficiently smart C++ compiler may be able to devirtualize some paths.
594 template <typename T>
595 void
596 DispatchToTracer(JSTracer* trc, T* thingp, const char* name)
597 {
598 #define IS_SAME_TYPE_OR(name, type, _) mozilla::IsSame<type*, T>::value ||
599     static_assert(
600             JS_FOR_EACH_TRACEKIND(IS_SAME_TYPE_OR)
601             mozilla::IsSame<T, JS::Value>::value ||
602             mozilla::IsSame<T, jsid>::value ||
603             mozilla::IsSame<T, TaggedProto>::value,
604             "Only the base cell layout types are allowed into marking/tracing internals");
605 #undef IS_SAME_TYPE_OR
606     if (trc->isMarkingTracer())
607         return DoMarking(static_cast<GCMarker*>(trc), *thingp);
608     if (trc->isTenuringTracer())
609         return static_cast<TenuringTracer*>(trc)->traverse(thingp);
610     MOZ_ASSERT(trc->isCallbackTracer());
611     DoCallback(trc->asCallbackTracer(), thingp, name);
612 }
613 
614 
615 /*** GC Marking Interface *************************************************************************/
616 
617 namespace js {
618 
619 typedef bool HasNoImplicitEdgesType;
620 
621 template <typename T>
622 struct ImplicitEdgeHolderType {
623     typedef HasNoImplicitEdgesType Type;
624 };
625 
626 // For now, we only handle JSObject* and JSScript* keys, but the linear time
627 // algorithm can be easily extended by adding in more types here, then making
628 // GCMarker::traverse<T> call markPotentialEphemeronKey.
629 template <>
630 struct ImplicitEdgeHolderType<JSObject*> {
631     typedef JSObject* Type;
632 };
633 
634 template <>
635 struct ImplicitEdgeHolderType<JSScript*> {
636     typedef JSScript* Type;
637 };
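// For illustration, handling a further key type (a hypothetical NewKey*) would
// follow the same pattern as the two specializations above:
//
//     template <>
//     struct ImplicitEdgeHolderType<NewKey*> {
//         typedef NewKey* Type;
//     };
//
// together with the traverse<NewKey> change described in the comment above.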
638 
639 void
640 GCMarker::markEphemeronValues(gc::Cell* markedCell, WeakEntryVector& values)
641 {
642     size_t initialLen = values.length();
643     for (size_t i = 0; i < initialLen; i++)
644         values[i].weakmap->traceEntry(this, markedCell, values[i].key);
645 
646     // The vector should not be appended to during iteration because the key is
647     // already marked, and even in cases where we have a multipart key, we
648     // should only be inserting entries for the unmarked portions.
649     MOZ_ASSERT(values.length() == initialLen);
650 }
651 
652 template <typename T>
653 void
654 GCMarker::markImplicitEdgesHelper(T markedThing)
655 {
656     if (!isWeakMarkingTracer())
657         return;
658 
659     Zone* zone = gc::TenuredCell::fromPointer(markedThing)->zone();
660     MOZ_ASSERT(zone->isGCMarking());
661     MOZ_ASSERT(!zone->isGCSweeping());
662 
663     auto p = zone->gcWeakKeys.get(JS::GCCellPtr(markedThing));
664     if (!p)
665         return;
666     WeakEntryVector& markables = p->value;
667 
668     markEphemeronValues(markedThing, markables);
669     markables.clear(); // If key address is reused, it should do nothing
670 }
671 
672 template <>
673 void
674 GCMarker::markImplicitEdgesHelper(HasNoImplicitEdgesType)
675 {
676 }
677 
678 template <typename T>
679 void
680 GCMarker::markImplicitEdges(T* thing)
681 {
682     markImplicitEdgesHelper<typename ImplicitEdgeHolderType<T*>::Type>(thing);
683 }
684 
685 } // namespace js
686 
687 template <typename T>
688 static inline bool
689 MustSkipMarking(GCMarker* gcmarker, T thing)
690 {
691     // Don't trace things that are owned by another runtime.
692     if (IsOwnedByOtherRuntime(gcmarker->runtime(), thing))
693         return true;
694 
695     // Don't mark things outside a zone if we are in a per-zone GC.
696     return !thing->zone()->isGCMarking();
697 }
698 
699 template <>
700 bool
701 MustSkipMarking<JSObject*>(GCMarker* gcmarker, JSObject* obj)
702 {
703     // Don't trace things that are owned by another runtime.
704     if (IsOwnedByOtherRuntime(gcmarker->runtime(), obj))
705         return true;
706 
707     // We may mark a Nursery thing outside the context of the
708     // MinorCollectionTracer because of a pre-barrier. The pre-barrier is not
709     // needed in this case because we perform a minor collection before each
710     // incremental slice.
711     if (IsInsideNursery(obj))
712         return true;
713 
714     // Don't mark things outside a zone if we are in a per-zone GC. It is
715     // faster to check our own arena header, which we can do since we know that
716     // the object is tenured.
717     return !TenuredCell::fromPointer(obj)->zone()->isGCMarking();
718 }
719 
720 template <typename T>
721 void
722 DoMarking(GCMarker* gcmarker, T* thing)
723 {
724     // Do per-type marking precondition checks.
725     if (MustSkipMarking(gcmarker, thing))
726         return;
727 
728     CheckTracedThing(gcmarker, thing);
729     gcmarker->traverse(thing);
730 
731     // Mark the compartment as live.
732     SetMaybeAliveFlag(thing);
733 }
734 
735 template <typename S>
736 struct DoMarkingFunctor : public VoidDefaultAdaptor<S> {
737     template <typename T> void operator()(T* t, GCMarker* gcmarker) { DoMarking(gcmarker, t); }
738 };
739 
740 template <typename T>
741 void
742 DoMarking(GCMarker* gcmarker, T thing)
743 {
744     DispatchTyped(DoMarkingFunctor<T>(), thing, gcmarker);
745 }
746 
747 template <typename T>
748 void
749 NoteWeakEdge(GCMarker* gcmarker, T** thingp)
750 {
751     // Do per-type marking precondition checks.
752     if (MustSkipMarking(gcmarker, *thingp))
753         return;
754 
755     CheckTracedThing(gcmarker, *thingp);
756 
757     // If the target is already marked, there's no need to store the edge.
758     if (IsMarkedUnbarriered(gcmarker->runtime(), thingp))
759         return;
760 
761     gcmarker->noteWeakEdge(thingp);
762 }
763 
764 template <typename T>
765 void
766 NoteWeakEdge(GCMarker* gcmarker, T* thingp)
767 {
768     MOZ_CRASH("the gc does not support tagged pointers as weak edges");
769 }
770 
771 template <typename T>
772 void
773 js::GCMarker::noteWeakEdge(T* edge)
774 {
775     static_assert(IsBaseOf<Cell, typename mozilla::RemovePointer<T>::Type>::value,
776                   "edge must point to a GC pointer");
777     MOZ_ASSERT((*edge)->isTenured());
778 
779     // Note: we really want the *source* Zone here. The edge may start in a
780     // non-gc heap location, however, so we use the fact that cross-zone weak
781     // references are not allowed and use the *target's* zone.
782     JS::Zone::WeakEdges &weakRefs = (*edge)->asTenured().zone()->gcWeakRefs;
783     AutoEnterOOMUnsafeRegion oomUnsafe;
784     if (!weakRefs.append(reinterpret_cast<TenuredCell**>(edge)))
785         oomUnsafe.crash("Failed to record a weak edge for sweeping.");
786 }
787 
788 // The simplest traversal calls out to the fully generic traceChildren function
789 // to visit the child edges. In the absence of other traversal mechanisms, this
790 // function will rapidly grow the stack past its bounds and crash the process.
791 // Thus, this generic tracing should only be used in cases where subsequent
792 // tracing will not recurse.
793 template <typename T>
794 void
795 js::GCMarker::markAndTraceChildren(T* thing)
796 {
797     if (ThingIsPermanentAtomOrWellKnownSymbol(thing))
798         return;
799     if (mark(thing))
800         thing->traceChildren(this);
801 }
802 namespace js {
803 template <> void GCMarker::traverse(BaseShape* thing) { markAndTraceChildren(thing); }
804 template <> void GCMarker::traverse(JS::Symbol* thing) { markAndTraceChildren(thing); }
805 } // namespace js
806 
807 // Shape, BaseShape, String, and Symbol are extremely common, but have simple
808 // patterns of recursion. We traverse trees of these edges immediately, with
809 // aggressive, manual inlining, implemented by eagerlyTraceChildren.
810 template <typename T>
811 void
812 js::GCMarker::markAndScan(T* thing)
813 {
814     if (ThingIsPermanentAtomOrWellKnownSymbol(thing))
815         return;
816     if (mark(thing))
817         eagerlyMarkChildren(thing);
818 }
819 namespace js {
820 template <> void GCMarker::traverse(JSString* thing) { markAndScan(thing); }
821 template <> void GCMarker::traverse(LazyScript* thing) { markAndScan(thing); }
822 template <> void GCMarker::traverse(Shape* thing) { markAndScan(thing); }
823 } // namespace js
824 
825 // Object and ObjectGroup are extremely common and can contain arbitrarily
826 // nested graphs, so are not trivially inlined. In this case we use a mark
827 // stack to control recursion. JitCode shares none of these properties, but is
828 // included for historical reasons. JSScript normally cannot recurse, but may
829 // be used as a weakmap key and thereby recurse into weakmapped values.
830 template <typename T>
831 void
832 js::GCMarker::markAndPush(StackTag tag, T* thing)
833 {
834     if (!mark(thing))
835         return;
836     pushTaggedPtr(tag, thing);
837     markImplicitEdges(thing);
838 }
839 namespace js {
840 template <> void GCMarker::traverse(JSObject* thing) { markAndPush(ObjectTag, thing); }
841 template <> void GCMarker::traverse(ObjectGroup* thing) { markAndPush(GroupTag, thing); }
842 template <> void GCMarker::traverse(jit::JitCode* thing) { markAndPush(JitCodeTag, thing); }
843 template <> void GCMarker::traverse(JSScript* thing) { markAndPush(ScriptTag, thing); }
844 } // namespace js
845 
846 namespace js {
847 template <>
848 void
849 GCMarker::traverse(AccessorShape* thing) {
850     MOZ_CRASH("AccessorShape must be marked as a Shape");
851 }
852 } // namespace js
853 
854 template <typename S, typename T>
855 void
856 js::GCMarker::traverseEdge(S source, T* target)
857 {
858     // Atoms do not have internal pointers, and Symbols do not mark theirs.
859     MOZ_ASSERT(!ThingIsPermanentAtomOrWellKnownSymbol(source));
860 
861     // The Zones must match, unless the target is an atom.
862     MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(target),
863                   target->zone()->isAtomsZone() || target->zone() == source->zone());
864 
865     // Atoms and Symbols do not have access to a compartment pointer, or we'd need
866     // to adjust the subsequent check to catch that case.
867     MOZ_ASSERT_IF(ThingIsPermanentAtomOrWellKnownSymbol(target), !target->maybeCompartment());
868     MOZ_ASSERT_IF(target->zoneFromAnyThread()->isAtomsZone(), !target->maybeCompartment());
869     // If we have access to a compartment pointer for both things, they must match.
870     MOZ_ASSERT_IF(source->maybeCompartment() && target->maybeCompartment(),
871                   source->maybeCompartment() == target->maybeCompartment());
872 
873     traverse(target);
874 }
875 
876 template <typename V, typename S> struct TraverseEdgeFunctor : public VoidDefaultAdaptor<V> {
877     template <typename T> void operator()(T t, GCMarker* gcmarker, S s) {
878         return gcmarker->traverseEdge(s, t);
879     }
880 };
881 
882 template <typename S, typename T>
883 void
884 js::GCMarker::traverseEdge(S source, T thing)
885 {
886     DispatchTyped(TraverseEdgeFunctor<T, S>(), thing, this, source);
887 }
888 
889 template <typename T>
890 bool
891 js::GCMarker::mark(T* thing)
892 {
893     AssertZoneIsMarking(thing);
894     MOZ_ASSERT(!IsInsideNursery(gc::TenuredCell::fromPointer(thing)));
895     return gc::ParticipatesInCC<T>::value
896            ? gc::TenuredCell::fromPointer(thing)->markIfUnmarked(markColor())
897            : gc::TenuredCell::fromPointer(thing)->markIfUnmarked(gc::BLACK);
898 }
899 
900 
901 /*** Inline, Eager GC Marking *********************************************************************/
902 
903 // Each of the eager, inline marking paths is directly preceded by the
904 // out-of-line, generic tracing code for comparison. Both paths must end up
905 // traversing equivalent subgraphs.
906 
907 void
908 LazyScript::traceChildren(JSTracer* trc)
909 {
910     if (script_)
911         TraceWeakEdge(trc, &script_, "script");
912 
913     if (function_)
914         TraceEdge(trc, &function_, "function");
915 
916     if (sourceObject_)
917         TraceEdge(trc, &sourceObject_, "sourceObject");
918 
919     if (enclosingScope_)
920         TraceEdge(trc, &enclosingScope_, "enclosingScope");
921 
922     // We rely on the fact that atoms are always tenured.
923     FreeVariable* freeVariables = this->freeVariables();
924     for (auto i : MakeRange(numFreeVariables())) {
925         JSAtom* atom = freeVariables[i].atom();
926         TraceManuallyBarrieredEdge(trc, &atom, "lazyScriptFreeVariable");
927     }
928 
929     HeapPtrFunction* innerFunctions = this->innerFunctions();
930     for (auto i : MakeRange(numInnerFunctions()))
931         TraceEdge(trc, &innerFunctions[i], "lazyScriptInnerFunction");
932 }
933 inline void
934 js::GCMarker::eagerlyMarkChildren(LazyScript *thing)
935 {
936     if (thing->script_)
937         noteWeakEdge(thing->script_.unsafeUnbarrieredForTracing());
938 
939     if (thing->function_)
940         traverseEdge(thing, static_cast<JSObject*>(thing->function_));
941 
942     if (thing->sourceObject_)
943         traverseEdge(thing, static_cast<JSObject*>(thing->sourceObject_));
944 
945     if (thing->enclosingScope_)
946         traverseEdge(thing, static_cast<JSObject*>(thing->enclosingScope_));
947 
948     // We rely on the fact that atoms are always tenured.
949     LazyScript::FreeVariable* freeVariables = thing->freeVariables();
950     for (auto i : MakeRange(thing->numFreeVariables()))
951         traverseEdge(thing, static_cast<JSString*>(freeVariables[i].atom()));
952 
953     HeapPtrFunction* innerFunctions = thing->innerFunctions();
954     for (auto i : MakeRange(thing->numInnerFunctions()))
955         traverseEdge(thing, static_cast<JSObject*>(innerFunctions[i]));
956 }
957 
958 void
959 Shape::traceChildren(JSTracer* trc)
960 {
961     TraceEdge(trc, &base_, "base");
962     TraceEdge(trc, &propidRef(), "propid");
963     if (parent)
964         TraceEdge(trc, &parent, "parent");
965 
966     if (hasGetterObject())
967         TraceManuallyBarrieredEdge(trc, &asAccessorShape().getterObj, "getter");
968     if (hasSetterObject())
969         TraceManuallyBarrieredEdge(trc, &asAccessorShape().setterObj, "setter");
970 }
971 inline void
972 js::GCMarker::eagerlyMarkChildren(Shape* shape)
973 {
974     MOZ_ASSERT(shape->isMarked(this->markColor()));
975     do {
976         traverseEdge(shape, shape->base());
977         traverseEdge(shape, shape->propidRef().get());
978 
979         // When triggered between slices on behalf of a barrier, these
980         // objects may reside in the nursery, so require an extra check.
981         // FIXME: Bug 1157967 - remove the isTenured checks.
982         if (shape->hasGetterObject() && shape->getterObject()->isTenured())
983             traverseEdge(shape, shape->getterObject());
984         if (shape->hasSetterObject() && shape->setterObject()->isTenured())
985             traverseEdge(shape, shape->setterObject());
986 
987         shape = shape->previous();
988     } while (shape && mark(shape));
989 }
990 
991 void
992 JSString::traceChildren(JSTracer* trc)
993 {
994     if (hasBase())
995         traceBase(trc);
996     else if (isRope())
997         asRope().traceChildren(trc);
998 }
999 inline void
1000 GCMarker::eagerlyMarkChildren(JSString* str)
1001 {
1002     if (str->isLinear())
1003         eagerlyMarkChildren(&str->asLinear());
1004     else
1005         eagerlyMarkChildren(&str->asRope());
1006 }
1007 
1008 void
1009 JSString::traceBase(JSTracer* trc)
1010 {
1011     MOZ_ASSERT(hasBase());
1012     TraceManuallyBarrieredEdge(trc, &d.s.u3.base, "base");
1013 }
1014 inline void
1015 js::GCMarker::eagerlyMarkChildren(JSLinearString* linearStr)
1016 {
1017     AssertZoneIsMarking(linearStr);
1018     MOZ_ASSERT(linearStr->isMarked());
1019     MOZ_ASSERT(linearStr->JSString::isLinear());
1020 
1021     // Use iterative marking to avoid blowing out the stack.
1022     while (linearStr->hasBase()) {
1023         linearStr = linearStr->base();
1024         MOZ_ASSERT(linearStr->JSString::isLinear());
1025         if (linearStr->isPermanentAtom())
1026             break;
1027         AssertZoneIsMarking(linearStr);
1028         if (!mark(static_cast<JSString*>(linearStr)))
1029             break;
1030     }
1031 }
1032 
1033 void
1034 JSRope::traceChildren(JSTracer* trc) {
1035     js::TraceManuallyBarrieredEdge(trc, &d.s.u2.left, "left child");
1036     js::TraceManuallyBarrieredEdge(trc, &d.s.u3.right, "right child");
1037 }
1038 inline void
1039 js::GCMarker::eagerlyMarkChildren(JSRope* rope)
1040 {
1041     // This function tries to scan the whole rope tree using the marking stack
1042     // as temporary storage. If that becomes full, the unscanned ropes are
1043     // added to the delayed marking list. When the function returns, the
1044     // marking stack is at the same depth as it was on entry. This way we avoid
1045     // using tags when pushing ropes to the stack as ropes never leak to other
1046     // users of the stack. This also assumes that a rope can only point to
1047     // other ropes or linear strings; it cannot refer to GC things of other
1048     // types.
1049     ptrdiff_t savedPos = stack.position();
1050     JS_DIAGNOSTICS_ASSERT(rope->getTraceKind() == JS::TraceKind::String);
1051 #ifdef JS_DEBUG
1052     static const size_t DEEP_ROPE_THRESHOLD = 100000;
1053     static const size_t ROPE_CYCLE_HISTORY = 100;
1054     DebugOnly<size_t> ropeDepth = 0;
1055     JSRope* history[ROPE_CYCLE_HISTORY];
1056 #endif
1057     while (true) {
1058 #ifdef JS_DEBUG
1059         if (++ropeDepth >= DEEP_ROPE_THRESHOLD) {
1060             // Bug 1011786 comment 294 - detect cyclic ropes. There are some
1061             // legitimate deep ropes, at least in tests. So if we hit a deep
1062             // rope, start recording the nodes we visit and check whether we
1063             // repeat. But do it on a finite window size W so that we're not
1064             // scanning the full history for every node. And only check every
1065             // Wth push, to add only constant overhead per node. This will only
1066             // catch cycles of size up to W (but it seems most likely that any
1067             // cycles will be size 1 or maybe 2.)
1068             if ((ropeDepth > DEEP_ROPE_THRESHOLD + ROPE_CYCLE_HISTORY) &&
1069                 (ropeDepth % ROPE_CYCLE_HISTORY) == 0)
1070             {
1071                 for (size_t i = 0; i < ROPE_CYCLE_HISTORY; i++)
1072                     MOZ_ASSERT(history[i] != rope, "cycle detected in rope");
1073             }
1074             history[ropeDepth % ROPE_CYCLE_HISTORY] = rope;
1075         }
1076 #endif
1077 
1078         JS_DIAGNOSTICS_ASSERT(rope->getTraceKind() == JS::TraceKind::String);
1079         JS_DIAGNOSTICS_ASSERT(rope->JSString::isRope());
1080         AssertZoneIsMarking(rope);
1081         MOZ_ASSERT(rope->isMarked());
1082         JSRope* next = nullptr;
1083 
1084         JSString* right = rope->rightChild();
1085         if (!right->isPermanentAtom() &&
1086             mark(right))
1087         {
1088             if (right->isLinear())
1089                 eagerlyMarkChildren(&right->asLinear());
1090             else
1091                 next = &right->asRope();
1092         }
1093 
1094         JSString* left = rope->leftChild();
1095         if (!left->isPermanentAtom() &&
1096             mark(left))
1097         {
1098             if (left->isLinear()) {
1099                 eagerlyMarkChildren(&left->asLinear());
1100             } else {
1101                 // When both children are ropes, set aside the right one to
1102                 // scan it later.
1103                 if (next && !stack.push(reinterpret_cast<uintptr_t>(next)))
1104                     delayMarkingChildren(next);
1105                 next = &left->asRope();
1106             }
1107         }
1108         if (next) {
1109             rope = next;
1110         } else if (savedPos != stack.position()) {
1111             MOZ_ASSERT(savedPos < stack.position());
1112             rope = reinterpret_cast<JSRope*>(stack.pop());
1113         } else {
1114             break;
1115         }
1116     }
1117     MOZ_ASSERT(savedPos == stack.position());
1118 }
1119 
1120 void
1121 js::ObjectGroup::traceChildren(JSTracer* trc)
1122 {
1123     unsigned count = getPropertyCount();
1124     for (unsigned i = 0; i < count; i++) {
1125         if (ObjectGroup::Property* prop = getProperty(i))
1126             TraceEdge(trc, &prop->id, "group_property");
1127     }
1128 
1129     if (proto().isObject())
1130         TraceEdge(trc, &proto(), "group_proto");
1131 
1132     if (newScript())
1133         newScript()->trace(trc);
1134 
1135     if (maybePreliminaryObjects())
1136         maybePreliminaryObjects()->trace(trc);
1137 
1138     if (maybeUnboxedLayout())
1139         unboxedLayout().trace(trc);
1140 
1141     if (ObjectGroup* unboxedGroup = maybeOriginalUnboxedGroup()) {
1142         TraceManuallyBarrieredEdge(trc, &unboxedGroup, "group_original_unboxed_group");
1143         setOriginalUnboxedGroup(unboxedGroup);
1144     }
1145 
1146     if (JSObject* descr = maybeTypeDescr()) {
1147         TraceManuallyBarrieredEdge(trc, &descr, "group_type_descr");
1148         setTypeDescr(&descr->as<TypeDescr>());
1149     }
1150 
1151     if (JSObject* fun = maybeInterpretedFunction()) {
1152         TraceManuallyBarrieredEdge(trc, &fun, "group_function");
1153         setInterpretedFunction(&fun->as<JSFunction>());
1154     }
1155 }
1156 void
1157 js::GCMarker::lazilyMarkChildren(ObjectGroup* group)
1158 {
1159     unsigned count = group->getPropertyCount();
1160     for (unsigned i = 0; i < count; i++) {
1161         if (ObjectGroup::Property* prop = group->getProperty(i))
1162             traverseEdge(group, prop->id.get());
1163     }
1164 
1165     if (group->proto().isObject())
1166         traverseEdge(group, group->proto().toObject());
1167 
1168     group->compartment()->mark();
1169 
1170     if (GlobalObject* global = group->compartment()->unsafeUnbarrieredMaybeGlobal())
1171         traverseEdge(group, static_cast<JSObject*>(global));
1172 
1173     if (group->newScript())
1174         group->newScript()->trace(this);
1175 
1176     if (group->maybePreliminaryObjects())
1177         group->maybePreliminaryObjects()->trace(this);
1178 
1179     if (group->maybeUnboxedLayout())
1180         group->unboxedLayout().trace(this);
1181 
1182     if (ObjectGroup* unboxedGroup = group->maybeOriginalUnboxedGroup())
1183         traverseEdge(group, unboxedGroup);
1184 
1185     if (TypeDescr* descr = group->maybeTypeDescr())
1186         traverseEdge(group, static_cast<JSObject*>(descr));
1187 
1188     if (JSFunction* fun = group->maybeInterpretedFunction())
1189         traverseEdge(group, static_cast<JSObject*>(fun));
1190 }
1191 
1192 struct TraverseObjectFunctor
1193 {
1194     template <typename T>
1195     void operator()(T* thing, GCMarker* gcmarker, JSObject* src) {
1196         gcmarker->traverseEdge(src, *thing);
1197     }
1198 };
1199 
1200 // Call the trace hook set on the object, if present. If further tracing of
1201 // NativeObject fields is required, this will return the native object.
1202 enum class CheckGeneration { DoChecks, NoChecks};
1203 template <typename Functor, typename... Args>
1204 static inline NativeObject*
1205 CallTraceHook(Functor f, JSTracer* trc, JSObject* obj, CheckGeneration check, Args&&... args)
1206 {
1207     const Class* clasp = obj->getClass();
1208     MOZ_ASSERT(clasp);
1209     MOZ_ASSERT(obj->isNative() == clasp->isNative());
1210 
1211     if (!clasp->trace)
1212         return &obj->as<NativeObject>();
1213 
1214     if (clasp->trace == InlineTypedObject::obj_trace) {
1215         Shape** pshape = obj->as<InlineTypedObject>().addressOfShapeFromGC();
1216         f(pshape, mozilla::Forward<Args>(args)...);
1217 
1218         InlineTypedObject& tobj = obj->as<InlineTypedObject>();
1219         if (tobj.typeDescr().hasTraceList()) {
1220             VisitTraceList(f, tobj.typeDescr().traceList(), tobj.inlineTypedMem(),
1221                            mozilla::Forward<Args>(args)...);
1222         }
1223 
1224         return nullptr;
1225     }
1226 
1227     if (clasp == &UnboxedPlainObject::class_) {
1228         JSObject** pexpando = obj->as<UnboxedPlainObject>().addressOfExpando();
1229         if (*pexpando)
1230             f(pexpando, mozilla::Forward<Args>(args)...);
1231 
1232         UnboxedPlainObject& unboxed = obj->as<UnboxedPlainObject>();
1233         const UnboxedLayout& layout = check == CheckGeneration::DoChecks
1234                                       ? unboxed.layout()
1235                                       : unboxed.layoutDontCheckGeneration();
1236         if (layout.traceList()) {
1237             VisitTraceList(f, layout.traceList(), unboxed.data(),
1238                            mozilla::Forward<Args>(args)...);
1239         }
1240 
1241         return nullptr;
1242     }
1243 
1244     clasp->trace(trc, obj);
1245 
1246     if (!clasp->isNative())
1247         return nullptr;
1248     return &obj->as<NativeObject>();
1249 }
1250 
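// A trace list is a sequence of byte offsets into the object's memory, laid
// out as three runs each terminated by -1: string fields first, then object
// fields (which may be null and are skipped if so), then value fields.
// VisitTraceList applies the functor to the field at each offset.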
1251 template <typename F, typename... Args>
1252 static void
1253 VisitTraceList(F f, const int32_t* traceList, uint8_t* memory, Args&&... args)
1254 {
1255     while (*traceList != -1) {
1256         f(reinterpret_cast<JSString**>(memory + *traceList), mozilla::Forward<Args>(args)...);
1257         traceList++;
1258     }
1259     traceList++;
1260     while (*traceList != -1) {
1261         JSObject** objp = reinterpret_cast<JSObject**>(memory + *traceList);
1262         if (*objp)
1263             f(objp, mozilla::Forward<Args>(args)...);
1264         traceList++;
1265     }
1266     traceList++;
1267     while (*traceList != -1) {
1268         f(reinterpret_cast<Value*>(memory + *traceList), mozilla::Forward<Args>(args)...);
1269         traceList++;
1270     }
1271 }
1272 
1273 
1274 /*** Mark-stack Marking ***************************************************************************/
1275 
1276 bool
1277 GCMarker::drainMarkStack(SliceBudget& budget)
1278 {
1279 #ifdef DEBUG
1280     MOZ_ASSERT(!strictCompartmentChecking);
1281     strictCompartmentChecking = true;
1282     auto acc = mozilla::MakeScopeExit([&] {strictCompartmentChecking = false;});
1283 #endif
1284 
1285     if (budget.isOverBudget())
1286         return false;
1287 
1288     for (;;) {
1289         while (!stack.isEmpty()) {
1290             processMarkStackTop(budget);
1291             if (budget.isOverBudget()) {
1292                 saveValueRanges();
1293                 return false;
1294             }
1295         }
1296 
1297         if (!hasDelayedChildren())
1298             break;
1299 
1300         /*
1301          * Mark children of things that caused too deep recursion during the
1302          * above tracing. Don't do this until we're done with everything
1303          * else.
1304          */
1305         if (!markDelayedChildren(budget)) {
1306             saveValueRanges();
1307             return false;
1308         }
1309     }
1310 
1311     return true;
1312 }
1313 
1314 inline static bool
1315 ObjectDenseElementsMayBeMarkable(NativeObject* nobj)
1316 {
1317     /*
1318      * For arrays that are large enough it's worth checking the type information
1319      * to see if the object's elements contain any GC pointers.  If not, we
1320      * don't need to trace them.
1321      */
1322     const unsigned MinElementsLength = 32;
1323     if (nobj->getDenseInitializedLength() < MinElementsLength || nobj->isSingleton())
1324         return true;
1325 
1326     ObjectGroup* group = nobj->group();
1327     if (group->needsSweep() || group->unknownProperties())
1328         return true;
1329 
1330     HeapTypeSet* typeSet = group->maybeGetProperty(JSID_VOID);
1331     if (!typeSet)
1332         return true;
1333 
1334     static const uint32_t flagMask =
1335         TYPE_FLAG_STRING | TYPE_FLAG_SYMBOL | TYPE_FLAG_LAZYARGS | TYPE_FLAG_ANYOBJECT;
1336     bool mayBeMarkable = typeSet->hasAnyFlag(flagMask) || typeSet->getObjectCount() != 0;
1337 
1338 #ifdef DEBUG
1339     if (!mayBeMarkable) {
1340         const Value* elements = nobj->getDenseElementsAllowCopyOnWrite();
1341         for (unsigned i = 0; i < nobj->getDenseInitializedLength(); i++)
1342             MOZ_ASSERT(!elements[i].isMarkable());
1343     }
1344 #endif
1345 
1346     return mayBeMarkable;
1347 }
1348 
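// Process a single entry popped from the mark stack. The low bits of an entry
// encode its tag; ValueArrayTag entries occupy three stack words (object,
// start slot pointer, end slot pointer), while the other tags are a single
// tagged pointer. When the budget runs out, unfinished work is pushed back
// onto the stack.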
1349 inline void
1350 GCMarker::processMarkStackTop(SliceBudget& budget)
1351 {
1352     /*
1353      * This function uses explicit goto and scans the object directly. This
1354      * lets us eliminate the tail recursion and significantly improves
1355      * marking performance; see bug 641025.
1356      */
1357     HeapSlot* vp;
1358     HeapSlot* end;
1359     JSObject* obj;
1360 
1361     // Decode
1362     uintptr_t addr = stack.pop();
1363     uintptr_t tag = addr & StackTagMask;
1364     addr &= ~StackTagMask;
1365 
1366     // Dispatch
1367     switch (tag) {
1368       case ValueArrayTag: {
1369         JS_STATIC_ASSERT(ValueArrayTag == 0);
1370         MOZ_ASSERT(!(addr & CellMask));
1371         obj = reinterpret_cast<JSObject*>(addr);
1372         uintptr_t addr2 = stack.pop();
1373         uintptr_t addr3 = stack.pop();
1374         MOZ_ASSERT(addr2 <= addr3);
1375         MOZ_ASSERT((addr3 - addr2) % sizeof(Value) == 0);
1376         vp = reinterpret_cast<HeapSlot*>(addr2);
1377         end = reinterpret_cast<HeapSlot*>(addr3);
1378         goto scan_value_array;
1379       }
1380 
1381       case ObjectTag: {
1382         obj = reinterpret_cast<JSObject*>(addr);
1383         AssertZoneIsMarking(obj);
1384         goto scan_obj;
1385       }
1386 
1387       case GroupTag: {
1388         return lazilyMarkChildren(reinterpret_cast<ObjectGroup*>(addr));
1389       }
1390 
1391       case JitCodeTag: {
1392         return reinterpret_cast<jit::JitCode*>(addr)->traceChildren(this);
1393       }
1394 
1395       case ScriptTag: {
1396         return reinterpret_cast<JSScript*>(addr)->traceChildren(this);
1397       }
1398 
1399       case SavedValueArrayTag: {
1400         MOZ_ASSERT(!(addr & CellMask));
1401         JSObject* obj = reinterpret_cast<JSObject*>(addr);
1402         HeapSlot* vp;
1403         HeapSlot* end;
1404         if (restoreValueArray(obj, (void**)&vp, (void**)&end))
1405             pushValueArray(&obj->as<NativeObject>(), vp, end);
1406         else
1407             repush(obj);
1408         return;
1409       }
1410 
1411       default: MOZ_CRASH("Invalid tag in mark stack");
1412     }
1413     return;
1414 
1415   scan_value_array:
1416     MOZ_ASSERT(vp <= end);
1417     while (vp != end) {
1418         budget.step();
1419         if (budget.isOverBudget()) {
1420             pushValueArray(obj, vp, end);
1421             return;
1422         }
1423 
1424         const Value& v = *vp++;
1425         if (v.isString()) {
1426             traverseEdge(obj, v.toString());
1427         } else if (v.isObject()) {
1428             JSObject* obj2 = &v.toObject();
1429             MOZ_ASSERT(obj->compartment() == obj2->compartment());
1430             if (mark(obj2)) {
1431                 // Save the rest of this value array for later and start scanning obj2's children.
1432                 pushValueArray(obj, vp, end);
1433                 obj = obj2;
1434                 goto scan_obj;
1435             }
1436         } else if (v.isSymbol()) {
1437             traverseEdge(obj, v.toSymbol());
1438         }
1439     }
1440     return;
1441 
1442   scan_obj:
1443     {
1444         AssertZoneIsMarking(obj);
1445 
1446         budget.step();
1447         if (budget.isOverBudget()) {
1448             repush(obj);
1449             return;
1450         }
1451 
1452         markImplicitEdges(obj);
1453         ObjectGroup* group = obj->groupFromGC();
1454         traverseEdge(obj, group);
1455 
1456         NativeObject *nobj = CallTraceHook(TraverseObjectFunctor(), this, obj,
1457                                            CheckGeneration::DoChecks, this, obj);
1458         if (!nobj)
1459             return;
1460 
1461         Shape* shape = nobj->lastProperty();
1462         traverseEdge(obj, shape);
1463 
1464         unsigned nslots = nobj->slotSpan();
1465 
1466         do {
1467             if (nobj->hasEmptyElements())
1468                 break;
1469 
1470             if (nobj->denseElementsAreCopyOnWrite()) {
1471                 JSObject* owner = nobj->getElementsHeader()->ownerObject();
1472                 if (owner != nobj) {
1473                     traverseEdge(obj, owner);
1474                     break;
1475                 }
1476             }
1477 
1478             if (!ObjectDenseElementsMayBeMarkable(nobj))
1479                 break;
1480 
1481             vp = nobj->getDenseElementsAllowCopyOnWrite();
1482             end = vp + nobj->getDenseInitializedLength();
1483 
1484             if (!nslots)
1485                 goto scan_value_array;
1486             pushValueArray(nobj, vp, end);
1487         } while (false);
1488 
1489         vp = nobj->fixedSlots();
1490         if (nobj->slots_) {
1491             unsigned nfixed = nobj->numFixedSlots();
1492             if (nslots > nfixed) {
1493                 pushValueArray(nobj, vp, vp + nfixed);
1494                 vp = nobj->slots_;
1495                 end = vp + (nslots - nfixed);
1496                 goto scan_value_array;
1497             }
1498         }
1499         MOZ_ASSERT(nslots <= nobj->numFixedSlots());
1500         end = vp + nslots;
1501         goto scan_value_array;
1502     }
1503 }
1504 
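// SlotArrayLayout overlays the three words of a ValueArrayTag mark stack
// entry so that saveValueRanges() can rewrite the start/end pointers into a
// (kind, index) pair in place.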
1505 struct SlotArrayLayout
1506 {
1507     union {
1508         HeapSlot* end;
1509         uintptr_t kind;
1510     };
1511     union {
1512         HeapSlot* start;
1513         uintptr_t index;
1514     };
1515     NativeObject* obj;
1516 
1517     static void staticAsserts() {
1518         /* This should have the same layout as three mark stack items. */
1519         JS_STATIC_ASSERT(sizeof(SlotArrayLayout) == 3 * sizeof(uintptr_t));
1520     }
1521 };
1522 
1523 /*
1524  * During incremental GC, we return from drainMarkStack without having processed
1525  * the entire stack. At that point, JS code can run and reallocate slot arrays
1526  * that are stored on the stack. To guard against this, we replace all
1527  * ValueArrayTag stack items with SavedValueArrayTag. In the latter, slot
1528  * pointers are replaced with slot indexes, and slot array end pointers are
1529  * replaced with the kind of index (properties vs. elements).
1530  */
1531 void
1532 GCMarker::saveValueRanges()
1533 {
1534     for (uintptr_t* p = stack.tos_; p > stack.stack_; ) {
1535         uintptr_t tag = *--p & StackTagMask;
1536         if (tag == ValueArrayTag) {
1537             *p &= ~StackTagMask;
1538             p -= 2;
1539             SlotArrayLayout* arr = reinterpret_cast<SlotArrayLayout*>(p);
1540             NativeObject* obj = arr->obj;
1541             MOZ_ASSERT(obj->isNative());
1542 
1543             HeapSlot* vp = obj->getDenseElementsAllowCopyOnWrite();
1544             if (arr->end == vp + obj->getDenseInitializedLength()) {
1545                 MOZ_ASSERT(arr->start >= vp);
1546                 arr->index = arr->start - vp;
1547                 arr->kind = HeapSlot::Element;
1548             } else {
1549                 HeapSlot* vp = obj->fixedSlots();
1550                 unsigned nfixed = obj->numFixedSlots();
1551                 if (arr->start == arr->end) {
1552                     arr->index = obj->slotSpan();
1553                 } else if (arr->start >= vp && arr->start < vp + nfixed) {
1554                     MOZ_ASSERT(arr->end == vp + Min(nfixed, obj->slotSpan()));
1555                     arr->index = arr->start - vp;
1556                 } else {
1557                     MOZ_ASSERT(arr->start >= obj->slots_ &&
1558                                arr->end == obj->slots_ + obj->slotSpan() - nfixed);
1559                     arr->index = (arr->start - obj->slots_) + nfixed;
1560                 }
1561                 arr->kind = HeapSlot::Slot;
1562             }
1563             p[2] |= SavedValueArrayTag;
1564         } else if (tag == SavedValueArrayTag) {
1565             p -= 2;
1566         }
1567     }
1568 }
1569 
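// Inverse of saveValueRanges(): pop the saved index and kind for a
// SavedValueArrayTag entry and turn them back into a pointer range. Returns
// false if the object can no longer be scanned this way (e.g. it is not
// native, or an element range's object is no longer an ArrayObject), in which
// case the caller re-pushes the whole object instead.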
1570 bool
1571 GCMarker::restoreValueArray(JSObject* objArg, void** vpp, void** endp)
1572 {
1573     uintptr_t start = stack.pop();
1574     HeapSlot::Kind kind = (HeapSlot::Kind) stack.pop();
1575 
1576     if (!objArg->isNative())
1577         return false;
1578     NativeObject* obj = &objArg->as<NativeObject>();
1579 
1580     if (kind == HeapSlot::Element) {
1581         if (!obj->is<ArrayObject>())
1582             return false;
1583 
1584         uint32_t initlen = obj->getDenseInitializedLength();
1585         HeapSlot* vp = obj->getDenseElementsAllowCopyOnWrite();
1586         if (start < initlen) {
1587             *vpp = vp + start;
1588             *endp = vp + initlen;
1589         } else {
1590             /* The object shrank, so no scanning is needed. */
1591             *vpp = *endp = vp;
1592         }
1593     } else {
1594         MOZ_ASSERT(kind == HeapSlot::Slot);
1595         HeapSlot* vp = obj->fixedSlots();
1596         unsigned nfixed = obj->numFixedSlots();
1597         unsigned nslots = obj->slotSpan();
1598         if (start < nslots) {
1599             if (start < nfixed) {
1600                 *vpp = vp + start;
1601                 *endp = vp + Min(nfixed, nslots);
1602             } else {
1603                 *vpp = obj->slots_ + start - nfixed;
1604                 *endp = obj->slots_ + nslots - nfixed;
1605             }
1606         } else {
1607             /* The object shrank, so no scanning is needed. */
1608             *vpp = *endp = vp;
1609         }
1610     }
1611 
1612     MOZ_ASSERT(*vpp <= *endp);
1613     return true;
1614 }
1615 
1616 
1617 /*** Mark Stack ***********************************************************************************/
1618 
1619 bool
1620 MarkStack::init(JSGCMode gcMode)
1621 {
1622     setBaseCapacity(gcMode);
1623 
1624     MOZ_ASSERT(!stack_);
1625     uintptr_t* newStack = js_pod_malloc<uintptr_t>(baseCapacity_);
1626     if (!newStack)
1627         return false;
1628 
1629     setStack(newStack, 0, baseCapacity_);
1630     return true;
1631 }
1632 
1633 void
1634 MarkStack::setBaseCapacity(JSGCMode mode)
1635 {
1636     switch (mode) {
1637       case JSGC_MODE_GLOBAL:
1638       case JSGC_MODE_COMPARTMENT:
1639         baseCapacity_ = NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY;
1640         break;
1641       case JSGC_MODE_INCREMENTAL:
1642         baseCapacity_ = INCREMENTAL_MARK_STACK_BASE_CAPACITY;
1643         break;
1644       default:
1645         MOZ_CRASH("bad gc mode");
1646     }
1647 
1648     if (baseCapacity_ > maxCapacity_)
1649         baseCapacity_ = maxCapacity_;
1650 }
1651 
1652 void
1653 MarkStack::setMaxCapacity(size_t maxCapacity)
1654 {
1655     MOZ_ASSERT(isEmpty());
1656     maxCapacity_ = maxCapacity;
1657     if (baseCapacity_ > maxCapacity_)
1658         baseCapacity_ = maxCapacity_;
1659 
1660     reset();
1661 }
1662 
1663 void
1664 MarkStack::reset()
1665 {
1666     if (capacity() == baseCapacity_) {
1667         // No size change; keep the current stack.
1668         setStack(stack_, 0, baseCapacity_);
1669         return;
1670     }
1671 
1672     uintptr_t* newStack = (uintptr_t*)js_realloc(stack_, sizeof(uintptr_t) * baseCapacity_);
1673     if (!newStack) {
1674         // If the realloc fails, just keep using the existing stack; it's
1675         // not ideal but better than failing.
1676         newStack = stack_;
1677         baseCapacity_ = capacity();
1678     }
1679     setStack(newStack, 0, baseCapacity_);
1680 }
1681 
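// Grow the stack to make room for at least |count| more entries, doubling the
// capacity (up to maxCapacity_) and preserving the current contents.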
1682 bool
1683 MarkStack::enlarge(unsigned count)
1684 {
1685     size_t newCapacity = Min(maxCapacity_, capacity() * 2);
1686     if (newCapacity < capacity() + count)
1687         return false;
1688 
1689     size_t tosIndex = position();
1690 
1691     uintptr_t* newStack = (uintptr_t*)js_realloc(stack_, sizeof(uintptr_t) * newCapacity);
1692     if (!newStack)
1693         return false;
1694 
1695     setStack(newStack, tosIndex, newCapacity);
1696     return true;
1697 }
1698 
1699 void
1700 MarkStack::setGCMode(JSGCMode gcMode)
1701 {
1702     // The mark stack won't be resized until the next call to reset(), but
1703     // that will happen at the end of the next GC.
1704     setBaseCapacity(gcMode);
1705 }
1706 
1707 size_t
1708 MarkStack::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
1709 {
1710     return mallocSizeOf(stack_);
1711 }
1712 
1713 
1714 /*** GCMarker *************************************************************************************/
1715 
1716 /*
1717  * ExpandWeakMaps: the GC is recomputing the liveness of WeakMap entries by
1718  * expanding each live WeakMap into its constituent key->value edges, a table
1719  * of which will be consulted in a later phase whenever marking a potential
1720  * key.
1721  */
1722 GCMarker::GCMarker(JSRuntime* rt)
1723   : JSTracer(rt, JSTracer::TracerKindTag::Marking, ExpandWeakMaps),
1724     stack(size_t(-1)),
1725     color(BLACK),
1726     unmarkedArenaStackTop(nullptr),
1727     markLaterArenas(0),
1728     started(false),
1729     strictCompartmentChecking(false)
1730 {
1731 }
1732 
1733 bool
1734 GCMarker::init(JSGCMode gcMode)
1735 {
1736     return stack.init(gcMode);
1737 }
1738 
1739 void
1740 GCMarker::start()
1741 {
1742     MOZ_ASSERT(!started);
1743     started = true;
1744     color = BLACK;
1745     linearWeakMarkingDisabled_ = false;
1746 
1747     MOZ_ASSERT(!unmarkedArenaStackTop);
1748     MOZ_ASSERT(markLaterArenas == 0);
1749 }
1750 
1751 void
1752 GCMarker::stop()
1753 {
1754     MOZ_ASSERT(isDrained());
1755 
1756     MOZ_ASSERT(started);
1757     started = false;
1758 
1759     MOZ_ASSERT(!unmarkedArenaStackTop);
1760     MOZ_ASSERT(markLaterArenas == 0);
1761 
1762     /* Free non-ballast stack memory. */
1763     stack.reset();
1764     for (GCZonesIter zone(runtime()); !zone.done(); zone.next())
1765         zone->gcWeakKeys.clear();
1766 }
1767 
1768 void
1769 GCMarker::reset()
1770 {
1771     color = BLACK;
1772 
1773     stack.reset();
1774     MOZ_ASSERT(isMarkStackEmpty());
1775 
1776     while (unmarkedArenaStackTop) {
1777         ArenaHeader* aheader = unmarkedArenaStackTop;
1778         MOZ_ASSERT(aheader->hasDelayedMarking);
1779         MOZ_ASSERT(markLaterArenas);
1780         unmarkedArenaStackTop = aheader->getNextDelayedMarking();
1781         aheader->unsetDelayedMarking();
1782         aheader->markOverflow = 0;
1783         aheader->allocatedDuringIncremental = 0;
1784         markLaterArenas--;
1785     }
1786     MOZ_ASSERT(isDrained());
1787     MOZ_ASSERT(!markLaterArenas);
1788 }
1789 
1790 void
1791 GCMarker::enterWeakMarkingMode()
1792 {
1793     MOZ_ASSERT(tag_ == TracerKindTag::Marking);
1794     if (linearWeakMarkingDisabled_)
1795         return;
1796 
1797     // During weak marking mode, we maintain a table mapping weak keys to
1798     // entries in known-live weakmaps.
1799     if (weakMapAction() == ExpandWeakMaps) {
1800         tag_ = TracerKindTag::WeakMarking;
1801 
1802         for (GCZoneGroupIter zone(runtime()); !zone.done(); zone.next()) {
1803             for (WeakMapBase* m : zone->gcWeakMapList) {
1804                 if (m->marked)
1805                     (void) m->traceEntries(this);
1806             }
1807         }
1808     }
1809 }
1810 
1811 void
1812 GCMarker::leaveWeakMarkingMode()
1813 {
1814     MOZ_ASSERT_IF(weakMapAction() == ExpandWeakMaps && !linearWeakMarkingDisabled_,
1815                   tag_ == TracerKindTag::WeakMarking);
1816     tag_ = TracerKindTag::Marking;
1817 
1818     // The table is expensive to maintain when not in weak marking mode, so
1819     // we'll rebuild it on entry rather than allow it to contain stale data.
1820     for (GCZonesIter zone(runtime()); !zone.done(); zone.next())
1821         zone->gcWeakKeys.clear();
1822 }
1823 
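// Mark the children of cells in an arena whose marking was delayed. Arenas
// flagged with markOverflow re-trace their marked cells (or all cells if the
// arena was allocated during the incremental GC); arenas that were only
// allocated during an incremental slice are pushed wholesale via PushArena.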
1824 void
1825 GCMarker::markDelayedChildren(ArenaHeader* aheader)
1826 {
1827     if (aheader->markOverflow) {
1828         bool always = aheader->allocatedDuringIncremental;
1829         aheader->markOverflow = 0;
1830 
1831         for (ArenaCellIterUnderGC i(aheader); !i.done(); i.next()) {
1832             TenuredCell* t = i.getCell();
1833             if (always || t->isMarked()) {
1834                 t->markIfUnmarked();
1835                 js::TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind()));
1836             }
1837         }
1838     } else {
1839         MOZ_ASSERT(aheader->allocatedDuringIncremental);
1840         PushArena(this, aheader);
1841     }
1842     aheader->allocatedDuringIncremental = 0;
1843     /*
1844      * Note that during an incremental GC we may still be allocating into
1845      * aheader. However, prepareForIncrementalGC sets the
1846      * allocatedDuringIncremental flag if we continue marking.
1847      */
1848 }
1849 
1850 bool
1851 GCMarker::markDelayedChildren(SliceBudget& budget)
1852 {
1853     GCRuntime& gc = runtime()->gc;
1854     gcstats::AutoPhase ap(gc.stats, gc.state() == MARK, gcstats::PHASE_MARK_DELAYED);
1855 
1856     MOZ_ASSERT(unmarkedArenaStackTop);
1857     do {
1858         /*
1859          * If marking gets delayed at the same arena again, we must repeat
1860          * marking of its things. To allow this, we pop the arena from the
1861          * stack and clear its hasDelayedMarking flag before we begin marking.
1862          */
1863         ArenaHeader* aheader = unmarkedArenaStackTop;
1864         MOZ_ASSERT(aheader->hasDelayedMarking);
1865         MOZ_ASSERT(markLaterArenas);
1866         unmarkedArenaStackTop = aheader->getNextDelayedMarking();
1867         aheader->unsetDelayedMarking();
1868         markLaterArenas--;
1869         markDelayedChildren(aheader);
1870 
1871         budget.step(150);
1872         if (budget.isOverBudget())
1873             return false;
1874     } while (unmarkedArenaStackTop);
1875     MOZ_ASSERT(!markLaterArenas);
1876 
1877     return true;
1878 }
1879 
1880 template<typename T>
1881 static void
1882 PushArenaTyped(GCMarker* gcmarker, ArenaHeader* aheader)
1883 {
1884     for (ArenaCellIterUnderGC i(aheader); !i.done(); i.next())
1885         gcmarker->traverse(i.get<T>());
1886 }
1887 
1888 struct PushArenaFunctor {
1889     template <typename T> void operator()(GCMarker* gcmarker, ArenaHeader* aheader) {
1890         PushArenaTyped<T>(gcmarker, aheader);
1891     }
1892 };
1893 
1894 void
1895 gc::PushArena(GCMarker* gcmarker, ArenaHeader* aheader)
1896 {
1897     DispatchTraceKindTyped(PushArenaFunctor(), MapAllocToTraceKind(aheader->getAllocKind()), gcmarker, aheader);
1898 }
1899 
1900 #ifdef DEBUG
1901 void
1902 GCMarker::checkZone(void* p)
1903 {
1904     MOZ_ASSERT(started);
1905     DebugOnly<Cell*> cell = static_cast<Cell*>(p);
1906     MOZ_ASSERT_IF(cell->isTenured(), cell->asTenured().zone()->isCollecting());
1907 }
1908 #endif
1909 
1910 size_t
1911 GCMarker::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
1912 {
1913     size_t size = stack.sizeOfExcludingThis(mallocSizeOf);
1914     for (ZonesIter zone(runtime(), WithAtoms); !zone.done(); zone.next())
1915         size += zone->gcGrayRoots.sizeOfExcludingThis(mallocSizeOf);
1916     return size;
1917 }
1918 
1919 
1920 /*** Tenuring Tracer *****************************************************************************/
1921 
1922 namespace js {
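// As far as the tenuring tracer is concerned, only JSObjects are allocated in
// the nursery at this point, so tenuring traversal of any other pointer type
// is a no-op; the JSObject specialization below does the actual promotion.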
1923 template <typename T>
1924 void
1925 TenuringTracer::traverse(T** tp)
1926 {
1927 }
1928 
1929 template <>
1930 void
1931 TenuringTracer::traverse(JSObject** objp)
1932 {
1933     // We only ever visit the internals of objects after moving them to tenured.
1934     MOZ_ASSERT(!nursery().isInside(objp));
1935 
1936     if (IsInsideNursery(*objp) && !nursery().getForwardedPointer(objp))
1937         *objp = moveToTenured(*objp);
1938 }
1939 
1940 template <typename S>
1941 struct TenuringTraversalFunctor : public IdentityDefaultAdaptor<S> {
1942     template <typename T> S operator()(T* t, TenuringTracer* trc) {
1943         trc->traverse(&t);
1944         return js::gc::RewrapTaggedPointer<S, T*>::wrap(t);
1945     }
1946 };
1947 
1948 template <typename T>
1949 void
1950 TenuringTracer::traverse(T* thingp)
1951 {
1952     *thingp = DispatchTyped(TenuringTraversalFunctor<T>(), *thingp, this);
1953 }
1954 } // namespace js
1955 
1956 template <typename T>
1957 void
1958 js::gc::StoreBuffer::MonoTypeBuffer<T>::trace(StoreBuffer* owner, TenuringTracer& mover)
1959 {
1960     mozilla::ReentrancyGuard g(*owner);
1961     MOZ_ASSERT(owner->isEnabled());
1962     MOZ_ASSERT(stores_.initialized());
1963     sinkStore(owner);
1964     for (typename StoreSet::Range r = stores_.all(); !r.empty(); r.popFront())
1965         r.front().trace(mover);
1966 }
1967 
1968 namespace js {
1969 namespace gc {
1970 template void
1971 StoreBuffer::MonoTypeBuffer<StoreBuffer::WholeCellEdges>::trace(StoreBuffer*, TenuringTracer&);
1972 template void
1973 StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>::trace(StoreBuffer*, TenuringTracer&);
1974 template void
1975 StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotsEdge>::trace(StoreBuffer*, TenuringTracer&);
1976 template void
1977 StoreBuffer::MonoTypeBuffer<StoreBuffer::CellPtrEdge>::trace(StoreBuffer*, TenuringTracer&);
1978 } // namespace gc
1979 } // namespace js
1980 
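// The object may have shrunk since this edge was recorded, so clamp the
// recorded range to the current initialized length (for elements) or slot
// span (for slots) before tracing it.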
1981 void
1982 js::gc::StoreBuffer::SlotsEdge::trace(TenuringTracer& mover) const
1983 {
1984     NativeObject* obj = object();
1985 
1986     // Beware JSObject::swap exchanging a native object for a non-native one.
1987     if (!obj->isNative())
1988         return;
1989 
1990     if (IsInsideNursery(obj))
1991         return;
1992 
1993     if (kind() == ElementKind) {
1994         int32_t initLen = obj->getDenseInitializedLength();
1995         int32_t clampedStart = Min(start_, initLen);
1996         int32_t clampedEnd = Min(start_ + count_, initLen);
1997         mover.traceSlots(static_cast<HeapSlot*>(obj->getDenseElements() + clampedStart)
1998                             ->unsafeUnbarrieredForTracing(), clampedEnd - clampedStart);
1999     } else {
2000         int32_t start = Min(uint32_t(start_), obj->slotSpan());
2001         int32_t end = Min(uint32_t(start_) + count_, obj->slotSpan());
2002         MOZ_ASSERT(end >= start);
2003         mover.traceObjectSlots(obj, start, end - start);
2004     }
2005 }
2006 
2007 void
2008 js::gc::StoreBuffer::WholeCellEdges::trace(TenuringTracer& mover) const
2009 {
2010     MOZ_ASSERT(edge->isTenured());
2011     JS::TraceKind kind = edge->getTraceKind();
2012     if (kind == JS::TraceKind::Object) {
2013         JSObject *object = static_cast<JSObject*>(edge);
2014         mover.traceObject(object);
2015 
2016         // Additionally trace the expando object attached to any unboxed plain
2017         // objects. Baseline and Ion can write properties to the expando while
2018         // only adding a post barrier to the owning unboxed object. Note that
2019         // it isn't possible for a nursery unboxed object to have a tenured
2020         // expando, so adding a post barrier on the original object will also
2021         // capture any tenured->nursery edges in the expando.
2022         if (object->is<UnboxedPlainObject>()) {
2023             if (UnboxedExpandoObject* expando = object->as<UnboxedPlainObject>().maybeExpando())
2024                 expando->traceChildren(&mover);
2025         }
2026 
2027         return;
2028     }
2029     if (kind == JS::TraceKind::Script)
2030         static_cast<JSScript*>(edge)->traceChildren(&mover);
2031     else if (kind == JS::TraceKind::JitCode)
2032         static_cast<jit::JitCode*>(edge)->traceChildren(&mover);
2033     else
2034         MOZ_CRASH();
2035 }
2036 
2037 void
2038 js::gc::StoreBuffer::CellPtrEdge::trace(TenuringTracer& mover) const
2039 {
2040     if (!*edge)
2041         return;
2042 
2043     MOZ_ASSERT((*edge)->getTraceKind() == JS::TraceKind::Object);
2044     mover.traverse(reinterpret_cast<JSObject**>(edge));
2045 }
2046 
2047 void
2048 js::gc::StoreBuffer::ValueEdge::trace(TenuringTracer& mover) const
2049 {
2050     if (deref())
2051         mover.traverse(edge);
2052 }
2053 
2054 /* Insert the given relocation entry into the list of things to visit. */
2055 void
2056 js::TenuringTracer::insertIntoFixupList(RelocationOverlay* entry) {
2057     *tail = entry;
2058     tail = &entry->nextRef();
2059     *tail = nullptr;
2060 }
2061 
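// Copy |src| into a newly allocated tenured cell, leave a forwarding pointer
// behind in the nursery (via RelocationOverlay), and queue the new cell on
// the fixup list so its children are traced by collectToFixedPoint.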
2062 JSObject*
2063 js::TenuringTracer::moveToTenured(JSObject* src)
2064 {
2065     MOZ_ASSERT(IsInsideNursery(src));
2066 
2067     AllocKind dstKind = src->allocKindForTenure(nursery());
2068     Zone* zone = src->zone();
2069 
2070     TenuredCell* t = zone->arenas.allocateFromFreeList(dstKind, Arena::thingSize(dstKind));
2071     if (!t) {
2072         zone->arenas.checkEmptyFreeList(dstKind);
2073         AutoMaybeStartBackgroundAllocation maybeStartBackgroundAllocation;
2074         AutoEnterOOMUnsafeRegion oomUnsafe;
2075         t = zone->arenas.allocateFromArena(zone, dstKind, maybeStartBackgroundAllocation);
2076         if (!t)
2077             oomUnsafe.crash("Failed to allocate object while tenuring.");
2078     }
2079     JSObject* dst = reinterpret_cast<JSObject*>(t);
2080     tenuredSize += moveObjectToTenured(dst, src, dstKind);
2081 
2082     RelocationOverlay* overlay = RelocationOverlay::fromCell(src);
2083     overlay->forwardTo(dst);
2084     insertIntoFixupList(overlay);
2085 
2086     if (MOZ_UNLIKELY(zone->hasDebuggers())) {
2087         zone->enqueueForPromotionToTenuredLogging(*dst);
2088     }
2089 
2090     TracePromoteToTenured(src, dst);
2091     MemProfiler::MoveNurseryToTenured(src, dst);
2092     return dst;
2093 }
2094 
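// Trace the children of every object on the fixup list. Tracing may tenure
// further objects and append them to the list, so this loop runs until no new
// entries are added, i.e. to a fixed point.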
2095 void
2096 js::Nursery::collectToFixedPoint(TenuringTracer& mover, TenureCountCache& tenureCounts)
2097 {
2098     for (RelocationOverlay* p = mover.head; p; p = p->next()) {
2099         JSObject* obj = static_cast<JSObject*>(p->forwardingAddress());
2100         mover.traceObject(obj);
2101 
2102         TenureCount& entry = tenureCounts.findEntry(obj->groupRaw());
2103         if (entry.group == obj->groupRaw()) {
2104             entry.count++;
2105         } else if (!entry.group) {
2106             entry.group = obj->groupRaw();
2107             entry.count = 1;
2108         }
2109     }
2110 }
2111 
2112 struct TenuringFunctor
2113 {
2114     template <typename T>
2115     void operator()(T* thing, TenuringTracer& mover) {
2116         mover.traverse(thing);
2117     }
2118 };
2119 
2120 // Visit all object children of the object and trace them.
2121 void
2122 js::TenuringTracer::traceObject(JSObject* obj)
2123 {
2124     NativeObject *nobj = CallTraceHook(TenuringFunctor(), this, obj,
2125                                        CheckGeneration::NoChecks, *this);
2126     if (!nobj)
2127         return;
2128 
2129     // Note: copy-on-write elements are filled in during parsing and cannot
2130     // contain nursery pointers.
2131     if (!nobj->hasEmptyElements() &&
2132         !nobj->denseElementsAreCopyOnWrite() &&
2133         ObjectDenseElementsMayBeMarkable(nobj))
2134     {
2135         Value* elems = static_cast<HeapSlot*>(nobj->getDenseElements())->unsafeUnbarrieredForTracing();
2136         traceSlots(elems, elems + nobj->getDenseInitializedLength());
2137     }
2138 
2139     traceObjectSlots(nobj, 0, nobj->slotSpan());
2140 }
2141 
2142 void
2143 js::TenuringTracer::traceObjectSlots(NativeObject* nobj, uint32_t start, uint32_t length)
2144 {
2145     HeapSlot* fixedStart;
2146     HeapSlot* fixedEnd;
2147     HeapSlot* dynStart;
2148     HeapSlot* dynEnd;
2149     nobj->getSlotRange(start, length, &fixedStart, &fixedEnd, &dynStart, &dynEnd);
2150     if (fixedStart)
2151         traceSlots(fixedStart->unsafeUnbarrieredForTracing(), fixedEnd->unsafeUnbarrieredForTracing());
2152     if (dynStart)
2153         traceSlots(dynStart->unsafeUnbarrieredForTracing(), dynEnd->unsafeUnbarrieredForTracing());
2154 }
2155 
2156 void
2157 js::TenuringTracer::traceSlots(Value* vp, Value* end)
2158 {
2159     for (; vp != end; ++vp)
2160         traverse(vp);
2161 }
2162 
2163 size_t
2164 js::TenuringTracer::moveObjectToTenured(JSObject* dst, JSObject* src, AllocKind dstKind)
2165 {
2166     size_t srcSize = Arena::thingSize(dstKind);
2167     size_t tenuredSize = srcSize;
2168 
2169     /*
2170      * Arrays do not necessarily have the same AllocKind between src and dst.
2171      * We deal with this by copying elements manually, possibly re-inlining
2172      * them if there is adequate room inline in dst.
2173      *
2174      * For Arrays we're reducing tenuredSize to the smaller srcSize
2175      * because moveElementsToTenured() accounts for all Array elements,
2176      * even if they are inlined.
2177      */
2178     if (src->is<ArrayObject>())
2179         tenuredSize = srcSize = sizeof(NativeObject);
2180 
2181     // Copy the Cell contents.
2182     js_memcpy(dst, src, srcSize);
2183 
2184     // Move any hash code attached to the object.
2185     src->zone()->transferUniqueId(dst, src);
2186 
2187     // Move the slots and elements, if we need to.
2188     if (src->isNative()) {
2189         NativeObject* ndst = &dst->as<NativeObject>();
2190         NativeObject* nsrc = &src->as<NativeObject>();
2191         tenuredSize += moveSlotsToTenured(ndst, nsrc, dstKind);
2192         tenuredSize += moveElementsToTenured(ndst, nsrc, dstKind);
2193 
2194         // The shape's list head may point into the old object. This can only
2195         // happen for dictionaries, which are native objects.
2196         if (&nsrc->shape_ == ndst->shape_->listp) {
2197             MOZ_ASSERT(nsrc->shape_->inDictionary());
2198             ndst->shape_->listp = &ndst->shape_;
2199         }
2200     }
2201 
2202     if (src->getClass()->flags & JSCLASS_SKIP_NURSERY_FINALIZE) {
2203         if (src->is<InlineTypedObject>()) {
2204             InlineTypedObject::objectMovedDuringMinorGC(this, dst, src);
2205         } else if (src->is<UnboxedArrayObject>()) {
2206             tenuredSize += UnboxedArrayObject::objectMovedDuringMinorGC(this, dst, src, dstKind);
2207         } else if (src->is<ArgumentsObject>()) {
2208             tenuredSize += ArgumentsObject::objectMovedDuringMinorGC(this, dst, src);
2209         } else {
2210             // Objects with JSCLASS_SKIP_NURSERY_FINALIZE need to be handled above
2211             // to ensure any additional nursery buffers they hold are moved.
2212             MOZ_CRASH("Unhandled JSCLASS_SKIP_NURSERY_FINALIZE Class");
2213         }
2214     }
2215 
2216     return tenuredSize;
2217 }
2218 
2219 size_t
2220 js::TenuringTracer::moveSlotsToTenured(NativeObject* dst, NativeObject* src, AllocKind dstKind)
2221 {
2222     /* Fixed slots have already been copied over. */
2223     if (!src->hasDynamicSlots())
2224         return 0;
2225 
2226     if (!nursery().isInside(src->slots_)) {
2227         nursery().removeMallocedBuffer(src->slots_);
2228         return 0;
2229     }
2230 
2231     Zone* zone = src->zone();
2232     size_t count = src->numDynamicSlots();
2233 
2234     {
2235         AutoEnterOOMUnsafeRegion oomUnsafe;
2236         dst->slots_ = zone->pod_malloc<HeapSlot>(count);
2237         if (!dst->slots_)
2238             oomUnsafe.crash("Failed to allocate slots while tenuring.");
2239     }
2240 
2241     PodCopy(dst->slots_, src->slots_, count);
2242     nursery().setSlotsForwardingPointer(src->slots_, dst->slots_, count);
2243     return count * sizeof(HeapSlot);
2244 }
2245 
2246 size_t
2247 js::TenuringTracer::moveElementsToTenured(NativeObject* dst, NativeObject* src, AllocKind dstKind)
2248 {
2249     if (src->hasEmptyElements() || src->denseElementsAreCopyOnWrite())
2250         return 0;
2251 
2252     Zone* zone = src->zone();
2253     ObjectElements* srcHeader = src->getElementsHeader();
2254     ObjectElements* dstHeader;
2255 
2256     /* TODO Bug 874151: Prefer to put element data inline if we have space. */
2257     if (!nursery().isInside(srcHeader)) {
2258         MOZ_ASSERT(src->elements_ == dst->elements_);
2259         nursery().removeMallocedBuffer(srcHeader);
2260         return 0;
2261     }
2262 
2263     size_t nslots = ObjectElements::VALUES_PER_HEADER + srcHeader->capacity;
2264 
2265     /* Unlike other objects, Arrays can have fixed elements. */
2266     if (src->is<ArrayObject>() && nslots <= GetGCKindSlots(dstKind)) {
2267         dst->as<ArrayObject>().setFixedElements();
2268         dstHeader = dst->as<ArrayObject>().getElementsHeader();
2269         js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
2270         nursery().setElementsForwardingPointer(srcHeader, dstHeader, nslots);
2271         return nslots * sizeof(HeapSlot);
2272     }
2273 
2274     MOZ_ASSERT(nslots >= 2);
2275 
2276     {
2277         AutoEnterOOMUnsafeRegion oomUnsafe;
2278         dstHeader = reinterpret_cast<ObjectElements*>(zone->pod_malloc<HeapSlot>(nslots));
2279         if (!dstHeader)
2280             oomUnsafe.crash("Failed to allocate elements while tenuring.");
2281     }
2282 
2283     js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
2284     nursery().setElementsForwardingPointer(srcHeader, dstHeader, nslots);
2285     dst->elements_ = dstHeader->elements();
2286     return nslots * sizeof(HeapSlot);
2287 }
2288 
2289 
2290 /*** IsMarked / IsAboutToBeFinalized **************************************************************/
2291 
2292 template <typename T>
2293 static inline void
2294 CheckIsMarkedThing(T* thingp)
2295 {
2296 #define IS_SAME_TYPE_OR(name, type, _) mozilla::IsSame<type*, T>::value ||
2297     static_assert(
2298             JS_FOR_EACH_TRACEKIND(IS_SAME_TYPE_OR)
2299             false, "Only the base cell layout types are allowed into marking/tracing internals");
2300 #undef IS_SAME_TYPE_OR
2301 
2302 #ifdef DEBUG
2303     MOZ_ASSERT(thingp);
2304     MOZ_ASSERT(*thingp);
2305     JSRuntime* rt = (*thingp)->runtimeFromAnyThread();
2306     MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(*thingp),
2307                   CurrentThreadCanAccessRuntime(rt) ||
2308                   (rt->isHeapCollecting() && rt->gc.state() == SWEEP));
2309 #endif
2310 }
2311 
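// Note: these predicates may update *thingp in place when a compacting GC has
// moved the thing, so callers see the forwarded pointer afterwards.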
2312 template <typename T>
2313 static bool
2314 IsMarkedInternalCommon(T* thingp)
2315 {
2316     CheckIsMarkedThing(thingp);
2317     MOZ_ASSERT(!IsInsideNursery(*thingp));
2318 
2319     Zone* zone = (*thingp)->asTenured().zoneFromAnyThread();
2320     if (!zone->isCollectingFromAnyThread() || zone->isGCFinished())
2321         return true;
2322     if (zone->isGCCompacting() && IsForwarded(*thingp))
2323         *thingp = Forwarded(*thingp);
2324     return (*thingp)->asTenured().isMarked();
2325 }
2326 
2327 template <typename T>
2328 static bool
2329 IsMarkedInternal(JSRuntime* rt, T** thingp)
2330 {
2331     if (IsOwnedByOtherRuntime(rt, *thingp))
2332         return true;
2333 
2334     return IsMarkedInternalCommon(thingp);
2335 }
2336 
2337 template <>
2338 /* static */ bool
2339 IsMarkedInternal(JSRuntime* rt, JSObject** thingp)
2340 {
2341     if (IsOwnedByOtherRuntime(rt, *thingp))
2342         return true;
2343 
2344     if (IsInsideNursery(*thingp)) {
2345         MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
2346         return rt->gc.nursery.getForwardedPointer(thingp);
2347     }
2348     return IsMarkedInternalCommon(thingp);
2349 }
2350 
2351 template <typename S>
2352 struct IsMarkedFunctor : public IdentityDefaultAdaptor<S> {
2353     template <typename T> S operator()(T* t, JSRuntime* rt, bool* rv) {
2354         *rv = IsMarkedInternal(rt, &t);
2355         return js::gc::RewrapTaggedPointer<S, T*>::wrap(t);
2356     }
2357 };
2358 
2359 template <typename T>
2360 static bool
2361 IsMarkedInternal(JSRuntime* rt, T* thingp)
2362 {
2363     bool rv = true;
2364     *thingp = DispatchTyped(IsMarkedFunctor<T>(), *thingp, rt, &rv);
2365     return rv;
2366 }
2367 
2368 bool
2369 js::gc::IsAboutToBeFinalizedDuringSweep(TenuredCell& tenured)
2370 {
2371     MOZ_ASSERT(!IsInsideNursery(&tenured));
2372     MOZ_ASSERT(!tenured.runtimeFromAnyThread()->isHeapMinorCollecting());
2373     MOZ_ASSERT(tenured.zoneFromAnyThread()->isGCSweeping());
2374     if (tenured.arenaHeader()->allocatedDuringIncremental)
2375         return false;
2376     return !tenured.isMarked();
2377 }
2378 
2379 template <typename T>
2380 static bool
2381 IsAboutToBeFinalizedInternal(T** thingp)
2382 {
2383     CheckIsMarkedThing(thingp);
2384     T* thing = *thingp;
2385     JSRuntime* rt = thing->runtimeFromAnyThread();
2386 
2387     /* Permanent atoms are never finalized by non-owning runtimes. */
2388     if (ThingIsPermanentAtomOrWellKnownSymbol(thing) && !TlsPerThreadData.get()->associatedWith(rt))
2389         return false;
2390 
2391     Nursery& nursery = rt->gc.nursery;
2392     MOZ_ASSERT_IF(!rt->isHeapMinorCollecting(), !IsInsideNursery(thing));
2393     if (rt->isHeapMinorCollecting()) {
2394         if (IsInsideNursery(thing))
2395             return !nursery.getForwardedPointer(reinterpret_cast<JSObject**>(thingp));
2396         return false;
2397     }
2398 
2399     Zone* zone = thing->asTenured().zoneFromAnyThread();
2400     if (zone->isGCSweeping()) {
2401         return IsAboutToBeFinalizedDuringSweep(thing->asTenured());
2402     } else if (zone->isGCCompacting() && IsForwarded(thing)) {
2403         *thingp = Forwarded(thing);
2404         return false;
2405     }
2406 
2407     return false;
2408 }
2409 
2410 template <typename S>
2411 struct IsAboutToBeFinalizedFunctor : public IdentityDefaultAdaptor<S> {
2412     template <typename T> S operator()(T* t, bool* rv) {
2413         *rv = IsAboutToBeFinalizedInternal(&t);
2414         return js::gc::RewrapTaggedPointer<S, T*>::wrap(t);
2415     }
2416 };
2417 
2418 template <typename T>
2419 static bool
2420 IsAboutToBeFinalizedInternal(T* thingp)
2421 {
2422     bool rv = false;
2423     *thingp = DispatchTyped(IsAboutToBeFinalizedFunctor<T>(), *thingp, &rv);
2424     return rv;
2425 }
2426 
2427 namespace js {
2428 namespace gc {
2429 
2430 template <typename T>
2431 bool
2432 IsMarkedUnbarriered(JSRuntime* rt, T* thingp)
2433 {
2434     return IsMarkedInternal(rt, ConvertToBase(thingp));
2435 }
2436 
2437 template <typename T>
2438 bool
2439 IsMarked(JSRuntime* rt, WriteBarrieredBase<T>* thingp)
2440 {
2441     return IsMarkedInternal(rt, ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
2442 }
2443 
2444 template <typename T>
2445 bool
2446 IsAboutToBeFinalizedUnbarriered(T* thingp)
2447 {
2448     return IsAboutToBeFinalizedInternal(ConvertToBase(thingp));
2449 }
2450 
2451 template <typename T>
2452 bool
2453 IsAboutToBeFinalized(WriteBarrieredBase<T>* thingp)
2454 {
2455     return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
2456 }
2457 
2458 template <typename T>
2459 bool
2460 IsAboutToBeFinalized(ReadBarrieredBase<T>* thingp)
2461 {
2462     return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
2463 }
2464 
2465 template <typename T>
2466 JS_PUBLIC_API(bool)
2467 EdgeNeedsSweep(JS::Heap<T>* thingp)
2468 {
2469     return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeGet()));
2470 }
2471 
2472 // Instantiate a copy of the Tracing templates for each derived type.
2473 #define INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS(type) \
2474     template bool IsMarkedUnbarriered<type>(JSRuntime*, type*);                \
2475     template bool IsMarked<type>(JSRuntime*, WriteBarrieredBase<type>*); \
2476     template bool IsAboutToBeFinalizedUnbarriered<type>(type*); \
2477     template bool IsAboutToBeFinalized<type>(WriteBarrieredBase<type>*); \
2478     template bool IsAboutToBeFinalized<type>(ReadBarrieredBase<type>*); \
2479     template JS_PUBLIC_API(bool) EdgeNeedsSweep<type>(JS::Heap<type>*);
2480 FOR_EACH_GC_POINTER_TYPE(INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS)
2481 #undef INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS
2482 
2483 } /* namespace gc */
2484 } /* namespace js */
2485 
2486 
2487 /*** Type Marking *********************************************************************************/
2488 
2489 void
2490 TypeSet::MarkTypeRoot(JSTracer* trc, TypeSet::Type* v, const char* name)
2491 {
2492     AssertRootMarkingPhase(trc);
2493     MarkTypeUnbarriered(trc, v, name);
2494 }
2495 
2496 void
2497 TypeSet::MarkTypeUnbarriered(JSTracer* trc, TypeSet::Type* v, const char* name)
2498 {
2499     if (v->isSingletonUnchecked()) {
2500         JSObject* obj = v->singletonNoBarrier();
2501         DispatchToTracer(trc, &obj, name);
2502         *v = TypeSet::ObjectType(obj);
2503     } else if (v->isGroupUnchecked()) {
2504         ObjectGroup* group = v->groupNoBarrier();
2505         DispatchToTracer(trc, &group, name);
2506         *v = TypeSet::ObjectType(group);
2507     }
2508 }
2509 
2510 
2511 /*** Cycle Collector Barrier Implementation *******************************************************/
2512 
2513 #ifdef DEBUG
2514 struct AssertNonGrayTracer : public JS::CallbackTracer {
2515     explicit AssertNonGrayTracer(JSRuntime* rt) : JS::CallbackTracer(rt) {}
2516     void onChild(const JS::GCCellPtr& thing) override {
2517         MOZ_ASSERT_IF(thing.asCell()->isTenured(),
2518                       !thing.asCell()->asTenured().isMarked(js::gc::GRAY));
2519     }
2520 };
2521 #endif
2522 
2523 struct UnmarkGrayTracer : public JS::CallbackTracer
2524 {
2525     /*
2526      * We set weakMapAction to DoNotTraceWeakMaps because the cycle collector
2527      * will fix up any color mismatches involving weakmaps when it runs.
2528      */
2529     explicit UnmarkGrayTracer(JSRuntime* rt, bool tracingShape = false)
2530       : JS::CallbackTracer(rt, DoNotTraceWeakMaps)
2531       , tracingShape(tracingShape)
2532       , previousShape(nullptr)
2533       , unmarkedAny(false)
2534     {}
2535 
2536     void onChild(const JS::GCCellPtr& thing) override;
2537 
2538     /* True iff we are tracing the immediate children of a shape. */
2539     bool tracingShape;
2540 
2541     /* If tracingShape, the traced shape's child shape, or nullptr; otherwise always nullptr. */
2542     Shape* previousShape;
2543 
2544     /* Whether we unmarked anything. */
2545     bool unmarkedAny;
2546 };
2547 
2548 /*
2549  * The GC and CC are run independently. Consequently, the following sequence of
2550  * events can occur:
2551  * 1. GC runs and marks an object gray.
2552  * 2. The mutator runs (specifically, some C++ code with access to gray
2553  *    objects) and creates a pointer from a JS root or other black object to
2554  *    the gray object. If we re-ran a GC at this point, the object would now be
2555  *    black.
2556  * 3. Now we run the CC. It may think it can collect the gray object, even
2557  *    though it's reachable from the JS heap.
2558  *
2559  * To prevent this badness, we unmark the gray bit of an object when it is
2560  * accessed by callers outside XPConnect. This would cause the object to go
2561  * black in step 2 above. This must be done on everything reachable from the
2562  * object being returned. The following code takes care of the recursive
2563  * re-coloring.
2564  *
2565  * There is an additional complication for certain kinds of edges that are not
2566  * contained explicitly in the source object itself, such as from a weakmap key
2567  * to its value, and from an object being watched by a watchpoint to the
2568  * watchpoint's closure. These "implicit edges" are represented in some other
2569  * container object, such as the weakmap or the watchpoint itself. In these
2570  * cases, calling unmark gray on an object won't find all of its children.
2571  *
2572  * Handling these implicit edges has two parts:
2573  * - A special pass enumerating all of the containers that know about the
2574  *   implicit edges to fix any black-gray edges that have been created. This
2575  *   is implemented in nsXPConnect::FixWeakMappingGrayBits.
2576  * - To prevent any incorrectly gray objects from escaping to live JS outside
2577  *   of the containers, we must add unmark-graying read barriers to these
2578  *   containers.
2579  */
2580 void
2581 UnmarkGrayTracer::onChild(const JS::GCCellPtr& thing)
2582 {
2583     int stackDummy;
2584     if (!JS_CHECK_STACK_SIZE(runtime()->mainThread.nativeStackLimit[StackForSystemCode],
2585                              &stackDummy))
2586     {
2587         /*
2588          * If we run out of stack, we take a more drastic measure: require that
2589          * we GC again before the next CC.
2590          */
2591         runtime()->gc.setGrayBitsInvalid();
2592         return;
2593     }
2594 
2595     Cell* cell = thing.asCell();
2596 
2597     // Cells in the nursery cannot be gray, and therefore cannot contain
2598     // edges to gray cells.
2599     if (!cell->isTenured()) {
2600 #ifdef DEBUG
2601         AssertNonGrayTracer nongray(runtime());
2602         TraceChildren(&nongray, cell, thing.kind());
2603 #endif
2604         return;
2605     }
2606 
2607     TenuredCell& tenured = cell->asTenured();
2608     if (!tenured.isMarked(js::gc::GRAY))
2609         return;
2610     tenured.unmark(js::gc::GRAY);
2611 
2612     unmarkedAny = true;
2613 
2614     // Trace children of |tenured|. If |tenured| and its parent are both
2615     // shapes, |tenured| will get saved to |previousShape| without being traced.
2616     // The parent will later trace |tenured|. This is done to avoid increasing
2617     // the stack depth during shape tracing. It is safe to do because a shape
2618     // can only have one child that is a shape.
2619     UnmarkGrayTracer childTracer(runtime(), thing.kind() == JS::TraceKind::Shape);
2620 
2621     if (thing.kind() != JS::TraceKind::Shape) {
2622         TraceChildren(&childTracer, &tenured, thing.kind());
2623         MOZ_ASSERT(!childTracer.previousShape);
2624         unmarkedAny |= childTracer.unmarkedAny;
2625         return;
2626     }
2627 
2628     MOZ_ASSERT(thing.kind() == JS::TraceKind::Shape);
2629     Shape* shape = static_cast<Shape*>(&tenured);
2630     if (tracingShape) {
2631         MOZ_ASSERT(!previousShape);
2632         previousShape = shape;
2633         return;
2634     }
2635 
2636     do {
2637         MOZ_ASSERT(!shape->isMarked(js::gc::GRAY));
2638         shape->traceChildren(&childTracer);
2639         shape = childTracer.previousShape;
2640         childTracer.previousShape = nullptr;
2641     } while (shape);
2642     unmarkedAny |= childTracer.unmarkedAny;
2643 }
2644 
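// Unmark the cell itself (if it is tenured and gray), then run the
// UnmarkGrayTracer over its children. Returns true if any gray bits were
// cleared.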
2645 template <typename T>
2646 static bool
2647 TypedUnmarkGrayCellRecursively(T* t)
2648 {
2649     MOZ_ASSERT(t);
2650 
2651     JSRuntime* rt = t->runtimeFromMainThread();
2652     MOZ_ASSERT(!rt->isHeapCollecting());
2653 
2654     bool unmarkedArg = false;
2655     if (t->isTenured()) {
2656         if (!t->asTenured().isMarked(GRAY))
2657             return false;
2658 
2659         t->asTenured().unmark(GRAY);
2660         unmarkedArg = true;
2661     }
2662 
2663     UnmarkGrayTracer trc(rt);
2664     gcstats::AutoPhase outerPhase(rt->gc.stats, gcstats::PHASE_BARRIER);
2665     gcstats::AutoPhase innerPhase(rt->gc.stats, gcstats::PHASE_UNMARK_GRAY);
2666     t->traceChildren(&trc);
2667 
2668     return unmarkedArg || trc.unmarkedAny;
2669 }
2670 
2671 struct UnmarkGrayCellRecursivelyFunctor {
2672     template <typename T> bool operator()(T* t) { return TypedUnmarkGrayCellRecursively(t); }
2673 };
2674 
2675 bool
2676 js::UnmarkGrayCellRecursively(Cell* cell, JS::TraceKind kind)
2677 {
2678     return DispatchTraceKindTyped(UnmarkGrayCellRecursivelyFunctor(), cell, kind);
2679 }
2680 
2681 bool
2682 js::UnmarkGrayShapeRecursively(Shape* shape)
2683 {
2684     return TypedUnmarkGrayCellRecursively(shape);
2685 }
2686 
2687 JS_FRIEND_API(bool)
2688 JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr thing)
2689 {
2690     return js::UnmarkGrayCellRecursively(thing.asCell(), thing.kind());
2691 }
2692