/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef gc_GCMarker_h
#define gc_GCMarker_h

#include "mozilla/Maybe.h"

#include "ds/OrderedHashTable.h"
#include "gc/Barrier.h"
#include "js/SliceBudget.h"
#include "js/TracingAPI.h"
#include "js/TypeDecls.h"

class JSRope;

namespace js {

class AutoAccessAtomsZone;
class WeakMapBase;

static const size_t NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY = 4096;
static const size_t INCREMENTAL_MARK_STACK_BASE_CAPACITY = 32768;
static const size_t SMALL_MARK_STACK_BASE_CAPACITY = 256;

enum class SlotsOrElementsKind { Elements, FixedSlots, DynamicSlots };

namespace gc {

enum IncrementalProgress { NotFinished = 0, Finished };

class BarrierTracer;
struct Cell;

using BarrierBuffer = Vector<JS::GCCellPtr, 0, SystemAllocPolicy>;

struct EphemeronEdgeTableHashPolicy {
  using Lookup = Cell*;
  static HashNumber hash(const Lookup& v,
                         const mozilla::HashCodeScrambler& hcs) {
    return hcs.scramble(mozilla::HashGeneric(v));
  }
  static bool match(Cell* const& k, const Lookup& l) { return k == l; }
  static bool isEmpty(Cell* const& v) { return !v; }
  static void makeEmpty(Cell** vp) { *vp = nullptr; }
};

// Ephemeron edges have two source nodes and one target, and mark the target
// with the minimum (least-marked) color of the sources. Currently, one of
// those sources will always be a WeakMapBase, so this will refer to its color
// at the time the edge is traced through. The other source's color will be
// given by the current mark color of the GCMarker.
struct EphemeronEdge {
  CellColor color;
  Cell* target;

  EphemeronEdge(CellColor color_, Cell* cell) : color(color_), target(cell) {}
};

using EphemeronEdgeVector = Vector<EphemeronEdge, 2, js::SystemAllocPolicy>;

using EphemeronEdgeTable =
    OrderedHashMap<Cell*, EphemeronEdgeVector, EphemeronEdgeTableHashPolicy,
                   js::SystemAllocPolicy>;
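
// Illustrative sketch (not part of this header's API): the color eventually
// applied through an ephemeron edge is the least-marked of the two source
// colors. Assuming CellColor orders White < Gray < Black, a black WeakMapBase
// combined with a gray GCMarker yields a gray target:
//
//   CellColor mapColor = CellColor::Black;    // color of the WeakMapBase
//   CellColor markerColor = CellColor::Gray;  // GCMarker's current mark color
//   CellColor targetColor = std::min(mapColor, markerColor);  // Gray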

/*
 * When the mark stack is full, the GC does not call js::TraceChildren to mark
 * the reachable "children" of the thing. Rather, the thing is put aside and
 * js::TraceChildren is called later when the mark stack is empty.
 *
 * To implement such delayed marking of the children with minimal overhead for
 * the normal case of sufficient stack, we link arenas into a list using
 * Arena::setNextDelayedMarkingArena(). The head of the list is stored in
 * GCMarker::delayedMarkingList. GCMarker::delayMarkingChildren() adds arenas
 * to the list as necessary while markAllDelayedChildren() pops the arenas from
 * the list until it is empty.
 */
class MarkStack {
 public:
  /*
   * We use a common mark stack to mark GC things of different types and use
   * explicit tags to distinguish them when the type cannot be deduced from
   * the context of the push or pop operation.
   */
  enum Tag {
    SlotsOrElementsRangeTag,
    ObjectTag,
    JitCodeTag,
    ScriptTag,
    TempRopeTag,

    LastTag = TempRopeTag
  };

  static const uintptr_t TagMask = 7;
  static_assert(TagMask >= uintptr_t(LastTag),
                "The tag mask must subsume the tags.");
  static_assert(TagMask <= gc::CellAlignMask,
                "The tag mask must be embeddable in a Cell*.");
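
  // Illustrative sketch of the packing (the real logic lives in the TaggedPtr
  // methods declared below and defined out of line): the static_asserts above
  // guarantee the tag fits in the low alignment bits of a Cell*, so roughly:
  //
  //   uintptr_t bits = uintptr_t(tag) | uintptr_t(cell);      // pack
  //   Tag tag = Tag(bits & TagMask);                          // unpack tag
  //   Cell* cell = reinterpret_cast<Cell*>(bits & ~TagMask);  // unpack pointer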

  class TaggedPtr {
    uintptr_t bits;

    Cell* ptr() const;

   public:
    TaggedPtr() = default;
    TaggedPtr(Tag tag, Cell* ptr);
    Tag tag() const;
    template <typename T>
    T* as() const;

    JSObject* asRangeObject() const;
    JSRope* asTempRope() const;

    void assertValid() const;
  };

  struct SlotsOrElementsRange {
    SlotsOrElementsRange(SlotsOrElementsKind kind, JSObject* obj, size_t start);
    void assertValid() const;

    SlotsOrElementsKind kind() const;
    size_t start() const;
    TaggedPtr ptr() const;

    static constexpr size_t StartShift = 2;
    static constexpr size_t KindMask = (1 << StartShift) - 1;

   private:
    uintptr_t startAndKind_;
    TaggedPtr ptr_;
  };
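
  // Illustrative sketch of how startAndKind_ is expected to pack its two
  // fields (the accessors above are defined out of line):
  //
  //   startAndKind_ = (start << StartShift) | size_t(kind);
  //   kind()  == SlotsOrElementsKind(startAndKind_ & KindMask)
  //   start() == startAndKind_ >> StartShift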

  explicit MarkStack(size_t maxCapacity = DefaultCapacity);
  ~MarkStack();

  static const size_t DefaultCapacity = SIZE_MAX;

  // The unit for MarkStack::capacity() is mark stack entries.
  size_t capacity() { return stack().length(); }

  size_t position() const { return topIndex_; }

  enum StackType { MainStack, AuxiliaryStack };
  [[nodiscard]] bool init(StackType which, bool incrementalGCEnabled);

  [[nodiscard]] bool setStackCapacity(StackType which,
                                      bool incrementalGCEnabled);

  size_t maxCapacity() const { return maxCapacity_; }
  void setMaxCapacity(size_t maxCapacity);

  template <typename T>
  [[nodiscard]] bool push(T* ptr);

  [[nodiscard]] bool push(JSObject* obj, SlotsOrElementsKind kind,
                          size_t start);
  [[nodiscard]] bool push(const SlotsOrElementsRange& array);
  // GCMarker::eagerlyMarkChildren uses the unused portion of the mark stack
  // as temporary storage to hold rope pointers.
  [[nodiscard]] bool pushTempRope(JSRope* ptr);

  bool isEmpty() const { return topIndex_ == 0; }

  Tag peekTag() const;
  TaggedPtr popPtr();
  SlotsOrElementsRange popSlotsOrElementsRange();

  void clear() {
    // Fall back to the smaller initial capacity so we don't hold on to excess
    // memory between GCs.
    stack().clearAndFree();
    (void)stack().resize(NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY);
    topIndex_ = 0;
  }

  void poisonUnused();

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;

 private:
  using StackVector = Vector<TaggedPtr, 0, SystemAllocPolicy>;
  const StackVector& stack() const { return stack_.ref(); }
  StackVector& stack() { return stack_.ref(); }

  [[nodiscard]] bool ensureSpace(size_t count);

  /* Grow the stack, ensuring there is space for at least count elements. */
  [[nodiscard]] bool enlarge(size_t count);

  [[nodiscard]] bool resize(size_t newCapacity);

  TaggedPtr* topPtr();

  const TaggedPtr& peekPtr() const;
  [[nodiscard]] bool pushTaggedPtr(Tag tag, Cell* ptr);

  // Index of the top of the stack.
  MainThreadOrGCTaskData<size_t> topIndex_;

  // The maximum stack capacity to grow to.
  MainThreadOrGCTaskData<size_t> maxCapacity_;

  // Vector containing allocated stack memory. Unused beyond topIndex_.
  MainThreadOrGCTaskData<StackVector> stack_;

#ifdef DEBUG
  mutable size_t iteratorCount_;
#endif

  friend class MarkStackIter;
};
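
// Illustrative sketch (an assumption about typical use, not the actual marking
// loop): a consumer such as GCMarker drains the stack by popping either a
// whole slots/elements range or a single tagged pointer per iteration:
//
//   while (!stack.isEmpty()) {
//     if (stack.peekTag() == MarkStack::SlotsOrElementsRangeTag) {
//       auto range = stack.popSlotsOrElementsRange();
//       // ... mark range.ptr().asRangeObject() starting at range.start() ...
//     } else {
//       auto entry = stack.popPtr();
//       // ... dispatch on entry.tag() and trace the cell's children ...
//     }
//   }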

class MarkStackIter {
  MarkStack& stack_;
  size_t pos_;

 public:
  explicit MarkStackIter(MarkStack& stack);
  ~MarkStackIter();

  bool done() const;
  MarkStack::Tag peekTag() const;
  MarkStack::TaggedPtr peekPtr() const;
  MarkStack::SlotsOrElementsRange peekSlotsOrElementsRange() const;
  void next();
  void nextPtr();
  void nextArray();

 private:
  size_t position() const;
};

} /* namespace gc */

enum MarkingState : uint8_t {
  // Have not yet started marking.
  NotActive,

  // Main marking mode. Weakmap marking populates the gcEphemeronEdges tables
  // but does not consult them. The state transitions to WeakMarking until
  // that is done, then back to RegularMarking.
  RegularMarking,

  // Same as RegularMarking except that every marked obj/script is immediately
  // looked up in the gcEphemeronEdges table to find edges generated by weakmap
  // keys, and those edges are traversed to their values. Transitions back to
  // RegularMarking when done.
  WeakMarking,

  // Same as RegularMarking, but we OOMed (or obeyed a directive in the test
  // marking queue) and fell back to iterating until the next GC.
  IterativeMarking
};
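
// Rough sketch of the transitions described above (illustrative only; the
// exact triggers live in GCMarker below):
//
//   NotActive       -> RegularMarking      (GCMarker::start())
//   RegularMarking <-> WeakMarking         (enterWeakMarkingMode() /
//                                           leaveWeakMarkingMode())
//   RegularMarking/
//   WeakMarking     -> IterativeMarking    (abortLinearWeakMarking(), e.g. on
//                                           OOM)
//   any state       -> NotActive           (GCMarker::stop())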

class GCMarker final : public JSTracer {
 public:
  explicit GCMarker(JSRuntime* rt);
  [[nodiscard]] bool init();

  void setMaxCapacity(size_t maxCap) { stack.setMaxCapacity(maxCap); }
  size_t maxCapacity() const { return stack.maxCapacity(); }

  bool isActive() const { return state != MarkingState::NotActive; }

  void start();
  void stop();
  void reset();

  // If |thing| is unmarked, mark it and then traverse its children.
  template <typename T>
  void markAndTraverse(T* thing);

  // Traverse a GC thing's children, using a strategy depending on the type.
  // This can either process them immediately or push them onto the mark stack
  // for later processing.
  template <typename T>
  void traverse(T* thing);

  // Calls traverse on target after making additional assertions.
  template <typename S, typename T>
  void markAndTraverseEdge(S source, T* target);
  template <typename S, typename T>
  void markAndTraverseEdge(S source, const T& target);

  // Helper methods that coerce their second argument to the base pointer
  // type.
  template <typename S>
  void markAndTraverseObjectEdge(S source, JSObject* target) {
    markAndTraverseEdge(source, target);
  }
  template <typename S>
  void markAndTraverseStringEdge(S source, JSString* target) {
    markAndTraverseEdge(source, target);
  }

  template <typename S, typename T>
  void checkTraversedEdge(S source, T* target);

#ifdef DEBUG
  // We can't check atom marking if the helper thread lock is already held by
  // the current thread. This allows us to disable the check.
  void setCheckAtomMarking(bool check);
#endif

  /*
   * Care must be taken changing the mark color from gray to black. The cycle
   * collector depends on the invariant that there are no black to gray edges
   * in the GC heap. This invariant lets the CC not trace through black
   * objects. If this invariant is violated, the cycle collector may free
   * objects that are still reachable.
   */
  void setMarkColor(gc::MarkColor newColor);
  void setMarkColorUnchecked(gc::MarkColor newColor);
  gc::MarkColor markColor() const { return color; }

  // Declare which color the main mark stack will be used for. The whole stack
  // must be empty when this is called.
  void setMainStackColor(gc::MarkColor newColor);

  bool enterWeakMarkingMode();
  void leaveWeakMarkingMode();

  // Do not use linear-time weak marking for the rest of this collection.
  // Currently, this will only be triggered by an OOM when updating needed data
  // structures.
  void abortLinearWeakMarking() {
    if (state == MarkingState::WeakMarking) {
      leaveWeakMarkingMode();
    }
    state = MarkingState::IterativeMarking;
  }

  void delayMarkingChildrenOnOOM(gc::Cell* cell);
  void delayMarkingChildren(gc::Cell* cell);

  // 'delegate' is no longer the delegate of 'key'.
  void severWeakDelegate(JSObject* key, JSObject* delegate);

  // 'delegate' is now the delegate of 'key'. Update weakmap marking state.
  void restoreWeakDelegate(JSObject* key, JSObject* delegate);

  bool isDrained();

  // The mark queue is a testing-only feature for controlling mark ordering and
  // yield timing.
  enum MarkQueueProgress {
    QueueYielded,   // End this incremental GC slice, if possible
    QueueComplete,  // Done with the queue
    QueueSuspended  // Continue the GC without ending the slice
  };
  MarkQueueProgress processMarkQueue();

  enum ShouldReportMarkTime : bool {
    ReportMarkTime = true,
    DontReportMarkTime = false
  };
  [[nodiscard]] bool markUntilBudgetExhausted(
      SliceBudget& budget, ShouldReportMarkTime reportTime = ReportMarkTime);

  void setIncrementalGCEnabled(bool enabled) {
    // Ignore failure to resize the stack and keep using the existing stack.
    (void)stack.setStackCapacity(gc::MarkStack::MainStack, enabled);
  }

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;

#ifdef DEBUG
  bool shouldCheckCompartments() { return strictCompartmentChecking; }
#endif

  // Mark through edges whose target color depends on the colors of two source
  // entities (e.g. a WeakMap and one of its keys), and push the target onto
  // the mark stack.
  void markEphemeronEdges(gc::EphemeronEdgeVector& edges);

  size_t getMarkCount() const { return markCount; }
  void clearMarkCount() { markCount = 0; }

  static GCMarker* fromTracer(JSTracer* trc) {
    MOZ_ASSERT(trc->isMarkingTracer());
    return static_cast<GCMarker*>(trc);
  }

  template <typename T>
  void markImplicitEdges(T* oldThing);

  bool isWeakMarking() const { return state == MarkingState::WeakMarking; }

 private:
#ifdef DEBUG
  void checkZone(void* p);
#else
  void checkZone(void* p) {}
#endif

  // Push an object onto the stack for later tracing and assert that it has
  // already been marked.
  inline void repush(JSObject* obj);

  // Process a marked thing's children by calling T::traceChildren().
  template <typename T>
  void traceChildren(T* thing);

  // Process a marked thing's children using an iterative loop and manual
  // dispatch instead of recursion, for kinds where this is possible.
  template <typename T>
  void scanChildren(T* thing);

  // Push a marked thing onto the mark stack. Its children will be marked
  // later.
  template <typename T>
  void pushThing(T* thing);

  template <typename T>
  void markImplicitEdgesHelper(T oldThing);
  void eagerlyMarkChildren(JSLinearString* str);
  void eagerlyMarkChildren(JSRope* rope);
  void eagerlyMarkChildren(JSString* str);
  void eagerlyMarkChildren(Shape* shape);
  void eagerlyMarkChildren(PropMap* map);
  void eagerlyMarkChildren(Scope* scope);

  // We may not have concrete types yet, so this has to be outside the header.
  template <typename T>
  void dispatchToTraceChildren(T* thing);

  // Mark the given GC thing, but do not trace its children. Return true
  // if the thing became marked.
  template <typename T>
  [[nodiscard]] bool mark(T* thing);

  template <typename T>
  inline void pushTaggedPtr(T* ptr);

  inline void pushValueRange(JSObject* obj, SlotsOrElementsKind kind,
                             size_t start, size_t end);

  gc::MarkStack& getStack(gc::MarkColor which) {
    return which == mainStackColor ? stack : auxStack;
  }
  const gc::MarkStack& getStack(gc::MarkColor which) const {
    return which == mainStackColor ? stack : auxStack;
  }

  gc::MarkStack& currentStack() {
    MOZ_ASSERT(currentStackPtr);
    return *currentStackPtr;
  }

  bool isMarkStackEmpty() { return stack.isEmpty() && auxStack.isEmpty(); }

  bool hasBlackEntries() const {
    return !getStack(gc::MarkColor::Black).isEmpty();
  }

  bool hasGrayEntries() const {
    return !getStack(gc::MarkColor::Gray).isEmpty();
  }

  inline void processMarkStackTop(SliceBudget& budget);

  void markDelayedChildren(gc::Arena* arena, gc::MarkColor color);
  [[nodiscard]] bool markAllDelayedChildren(SliceBudget& budget,
                                            ShouldReportMarkTime reportTime);
  bool processDelayedMarkingList(gc::MarkColor color, SliceBudget& budget);
  bool hasDelayedChildren() const { return !!delayedMarkingList; }
  void rebuildDelayedMarkingList();
  void appendToDelayedMarkingList(gc::Arena** listTail, gc::Arena* arena);

  template <typename F>
  void forEachDelayedMarkingArena(F&& f);

  gc::BarrierBuffer& barrierBuffer() { return barrierBuffer_.ref(); }

  bool traceBarrieredCells(SliceBudget& budget);
  friend class gc::GCRuntime;

  void traceBarrieredCell(JS::GCCellPtr cell);

  /*
   * List of cells encountered by the pre-write barrier whose children have yet
   * to be marked. These cells have already been marked black. They are "gray"
   * in the GC sense.
   */
  MainThreadOrGCTaskData<gc::BarrierBuffer> barrierBuffer_;
  friend class gc::BarrierTracer;

  /*
   * The mark stack. Pointers in this stack are "gray" in the GC sense, but may
   * mark the contained items either black or gray (in the CC sense) depending
   * on mainStackColor.
   */
  gc::MarkStack stack;

  /*
   * A smaller, auxiliary stack, currently only used to accumulate the rare
   * objects that need to be marked black during gray marking.
   */
  gc::MarkStack auxStack;

  /* The color is only applied to objects and functions. */
  MainThreadOrGCTaskData<gc::MarkColor> color;

  MainThreadOrGCTaskData<gc::MarkColor> mainStackColor;

  MainThreadOrGCTaskData<gc::MarkStack*> currentStackPtr;

  /* Pointer to the top of the stack of arenas we are delaying marking on. */
  MainThreadOrGCTaskData<js::gc::Arena*> delayedMarkingList;

  /* Whether more work has been added to the delayed marking list. */
  MainThreadOrGCTaskData<bool> delayedMarkingWorkAdded;

  /* The count of marked objects during GC. */
  size_t markCount;

  /* Track the state of marking. */
  MainThreadOrGCTaskData<MarkingState> state;

 public:
  /*
   * Whether weakmaps can be marked incrementally.
   *
   * JSGC_INCREMENTAL_WEAKMAP_ENABLED
   * pref: javascript.options.mem.incremental_weakmap
   */
  MainThreadOrGCTaskData<bool> incrementalWeakMapMarkingEnabled;

#ifdef DEBUG
 private:
  /* Count of arenas that are currently in the stack. */
  MainThreadOrGCTaskData<size_t> markLaterArenas;

  /* Assert that start and stop are called with correct ordering. */
  MainThreadOrGCTaskData<bool> started;

  /*
   * Whether to check that atoms traversed are present in atom marking
   * bitmap.
   */
  MainThreadOrGCTaskData<bool> checkAtomMarking;

  /* The test marking queue might want to be marking a particular color. */
  mozilla::Maybe<js::gc::MarkColor> queueMarkColor;

  /*
   * If this is true, all marked objects must belong to a compartment being
   * GCed. This is used to look for compartment bugs.
   */
  MainThreadOrGCTaskData<bool> strictCompartmentChecking;

 public:
  /*
   * The compartment and zone of the object whose trace hook is currently being
   * called, if any. Used to catch cross-compartment edges traced without use of
   * TraceCrossCompartmentEdge.
   */
  MainThreadOrGCTaskData<Compartment*> tracingCompartment;
  MainThreadOrGCTaskData<Zone*> tracingZone;

  /*
   * List of objects to mark at the beginning of a GC. May also contain string
   * directives to change the mark color or to wait until different phases of
   * the GC.
   *
   * This is a WeakCache because not everything in this list is guaranteed to
   * end up marked (e.g. if you insert an object from an already-processed
   * sweep group in the middle of an incremental GC). Also, the mark queue is
   * not used during shutdown GCs. In either case, unmarked objects may need to
   * be discarded.
   */
  JS::WeakCache<GCVector<JS::Heap<JS::Value>, 0, SystemAllocPolicy>> markQueue;

  /* Position within the test mark queue. */
  size_t queuePos;
#endif  // DEBUG
};

namespace gc {

/*
 * Temporarily change the mark color while this class is on the stack.
 *
 * During incremental sweeping this also transitions zones in the
 * current sweep group into the Mark or MarkGray state as appropriate.
 */
class MOZ_RAII AutoSetMarkColor {
  GCMarker& marker_;
  MarkColor initialColor_;

 public:
  AutoSetMarkColor(GCMarker& marker, MarkColor newColor)
      : marker_(marker), initialColor_(marker.markColor()) {
    marker_.setMarkColor(newColor);
  }

  AutoSetMarkColor(GCMarker& marker, CellColor newColor)
      : AutoSetMarkColor(marker, newColor.asMarkColor()) {}

  ~AutoSetMarkColor() { marker_.setMarkColor(initialColor_); }
};
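
// Illustrative usage sketch (hypothetical call site, not from this file):
// temporarily mark black while tracing through |cell|, then restore the
// previous color when the scope ends.
//
//   {
//     gc::AutoSetMarkColor setColor(marker, gc::MarkColor::Black);
//     marker.markAndTraverse(cell);
//   }  // ~AutoSetMarkColor() restores the previous mark color here.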

} /* namespace gc */

} /* namespace js */

#endif /* gc_GCMarker_h */