/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef js_HeapAPI_h
#define js_HeapAPI_h

#include "mozilla/Atomics.h"
#include "mozilla/BitSet.h"

#include <limits.h>
#include <type_traits>

#include "js/GCAnnotations.h"
#include "js/shadow/String.h"  // JS::shadow::String
#include "js/shadow/Symbol.h"  // JS::shadow::Symbol
#include "js/shadow/Zone.h"    // JS::shadow::Zone
#include "js/TraceKind.h"
#include "js/TypeDecls.h"

/* These values are private to the JS engine. */
namespace js {

class NurseryDecommitTask;

JS_PUBLIC_API bool CurrentThreadCanAccessZone(JS::Zone* zone);

namespace gc {

class Arena;
struct Cell;
class TenuredChunk;
class StoreBuffer;
class TenuredCell;

const size_t ArenaShift = 12;
const size_t ArenaSize = size_t(1) << ArenaShift;
const size_t ArenaMask = ArenaSize - 1;

#if defined(XP_MACOSX) && defined(__aarch64__)
const size_t PageShift = 14;
#else
const size_t PageShift = 12;
#endif
// Expected page size, so that ArenasPerPage can be initialized at compile
// time. The actual system page size should be queried with SystemPageSize().
const size_t PageSize = size_t(1) << PageShift;
constexpr size_t ArenasPerPage = PageSize / ArenaSize;

#ifdef JS_GC_SMALL_CHUNK_SIZE
const size_t ChunkShift = 18;
#else
const size_t ChunkShift = 20;
#endif
const size_t ChunkSize = size_t(1) << ChunkShift;
const size_t ChunkMask = ChunkSize - 1;

const size_t CellAlignShift = 3;
const size_t CellAlignBytes = size_t(1) << CellAlignShift;
const size_t CellAlignMask = CellAlignBytes - 1;

const size_t CellBytesPerMarkBit = CellAlignBytes;
const size_t MarkBitsPerCell = 2;
/*
 * The mark bitmap has one bit for each possible cell start position. This
 * wastes some space for larger GC things but allows us to avoid division by
 * the cell's size when accessing the bitmap.
 */
const size_t ArenaBitmapBits = ArenaSize / CellBytesPerMarkBit;
const size_t ArenaBitmapBytes = HowMany(ArenaBitmapBits, 8);
const size_t ArenaBitmapWords = HowMany(ArenaBitmapBits, JS_BITS_PER_WORD);
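
// With the default 4 KiB arenas, 8-byte cell alignment and 64-bit words,
// these work out as follows (illustrative values):
//
//   ArenaBitmapBits  = 4096 / 8 = 512
//   ArenaBitmapBytes = 512 / 8  = 64
//   ArenaBitmapWords = 512 / 64 = 8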

// The base class for all GC chunks, either in the nursery or in the tenured
// heap memory. This structure is locatable from any GC pointer by aligning to
// the chunk size.
class alignas(CellAlignBytes) ChunkBase {
 protected:
  ChunkBase(JSRuntime* rt, StoreBuffer* sb) : storeBuffer(sb), runtime(rt) {
    MOZ_ASSERT((uintptr_t(this) & ChunkMask) == 0);
  }

 public:
  // The store buffer for pointers from tenured things to things in this
  // chunk. Will be non-null if and only if this is a nursery chunk.
  StoreBuffer* storeBuffer;

  // Provide quick access to the runtime from absolutely anywhere.
  JSRuntime* runtime;
};

// Information about tenured heap chunks.
struct TenuredChunkInfo {
 private:
  friend class ChunkPool;
  TenuredChunk* next = nullptr;
  TenuredChunk* prev = nullptr;

 public:
  /* List of free committed arenas, linked together with arena.next. */
  Arena* freeArenasHead;

  /*
   * Decommitted pages are tracked by a bitmap in the TenuredChunkBase. We use
   * this offset to start our search iteration close to a decommitted arena
   * that we can allocate.
   */
  uint32_t lastDecommittedPageOffset;

  /* Number of free arenas, either committed or decommitted. */
  uint32_t numArenasFree;

  /* Number of free, committed arenas. */
  uint32_t numArenasFreeCommitted;
};

/*
 * Calculating ArenasPerChunk:
 *
 * To figure out how many Arenas will fit in a chunk we need to know how much
 * extra space is available after we allocate the header data. This is a
 * problem because the header size depends on the number of arenas in the
 * chunk.
 *
 * The two dependent fields are bitmap and decommittedPages. bitmap needs
 * ArenaBitmapBytes bytes per arena and decommittedPages needs one bit per
 * page.
 *
 * We can calculate an approximate value by dividing the number of bits of
 * free space in the chunk by the number of bits needed per arena. This is an
 * approximation because it doesn't take into account the fact that the
 * variable sized fields must be rounded up to a whole number of words, or
 * any padding the compiler adds between fields.
 *
 * Fortunately, for the chunk and arena size parameters we use this
 * approximation turns out to be correct. If it were not we might need to
 * adjust the arena count down by one to allow more space for the padding.
 */
const size_t BitsPerPageWithHeaders =
    (ArenaSize + ArenaBitmapBytes) * ArenasPerPage * CHAR_BIT + 1;
const size_t ChunkBitsAvailable =
    (ChunkSize - sizeof(ChunkBase) - sizeof(TenuredChunkInfo)) * CHAR_BIT;
const size_t PagesPerChunk = ChunkBitsAvailable / BitsPerPageWithHeaders;
const size_t DecommitBits = PagesPerChunk;
const size_t ArenasPerChunk = PagesPerChunk * ArenasPerPage;
const size_t BitsPerArenaWithHeaders =
    (ArenaSize + ArenaBitmapBytes) * CHAR_BIT + (DecommitBits / ArenasPerChunk);
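
// A worked example (illustrative, assuming a 64-bit platform with 4 KiB
// pages, 1 MiB chunks, sizeof(ChunkBase) == 16 and
// sizeof(TenuredChunkInfo) == 40, so ArenasPerPage == 1):
//
//   BitsPerPageWithHeaders = (4096 + 64) * 1 * 8 + 1 = 33281
//   ChunkBitsAvailable     = (1048576 - 16 - 40) * 8 = 8388160
//   PagesPerChunk          = 8388160 / 33281         = 252
//   ArenasPerChunk         = 252 * 1                 = 252
//
// which matches the expected arena count asserted below.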

const size_t CalculatedChunkSizeRequired =
    sizeof(ChunkBase) + sizeof(TenuredChunkInfo) +
    RoundUp(ArenasPerChunk * ArenaBitmapBytes, sizeof(uintptr_t)) +
    RoundUp(DecommitBits, sizeof(uint32_t) * CHAR_BIT) / CHAR_BIT +
    ArenasPerChunk * ArenaSize;
static_assert(CalculatedChunkSizeRequired <= ChunkSize,
              "Calculated ArenasPerChunk is too large");

const size_t CalculatedChunkPadSize = ChunkSize - CalculatedChunkSizeRequired;
static_assert(CalculatedChunkPadSize * CHAR_BIT < BitsPerArenaWithHeaders,
              "Calculated ArenasPerChunk is too small");

// Define a macro for the expected number of arenas so its value appears in
// the error message if the assertion fails.
#ifdef JS_GC_SMALL_CHUNK_SIZE
#  define EXPECTED_ARENA_COUNT 63
#else
#  define EXPECTED_ARENA_COUNT 252
#endif
static_assert(ArenasPerChunk == EXPECTED_ARENA_COUNT,
              "Do not accidentally change our heap's density.");
#undef EXPECTED_ARENA_COUNT

// Mark bitmaps are atomic because they can be written by gray unmarking on
// the main thread while read by sweeping on a background thread. The former
// does not affect the result of the latter.
using MarkBitmapWord = mozilla::Atomic<uintptr_t, mozilla::Relaxed>;

/*
 * Live objects are marked black or gray. Everything reachable from a JS root
 * is marked black. Objects marked gray are eligible for cycle collection.
 *
 *    BlackBit:     GrayOrBlackBit:  Color:
 *       0               0           white
 *       0               1           gray
 *       1               0           black
 *       1               1           black
 */
enum class ColorBit : uint32_t { BlackBit = 0, GrayOrBlackBit = 1 };

// Mark colors. Order is important here: the greater the value, the "more
// marked" a cell is.
enum class MarkColor : uint8_t { Gray = 1, Black = 2 };
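
// A minimal sketch (illustrative, not part of this header) of how the two
// mark bits decode to a color, following the table above:
//
//   // Black wins regardless of GrayOrBlackBit; neither bit set means white.
//   inline const char* ColorName(bool blackBit, bool grayOrBlackBit) {
//     if (blackBit) {
//       return "black";
//     }
//     return grayOrBlackBit ? "gray" : "white";
//   }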

// Mark bitmap for a tenured heap chunk.
struct MarkBitmap {
  static constexpr size_t WordCount = ArenaBitmapWords * ArenasPerChunk;
  MarkBitmapWord bitmap[WordCount];

  inline void getMarkWordAndMask(const TenuredCell* cell, ColorBit colorBit,
                                 MarkBitmapWord** wordp, uintptr_t* maskp);

  // The following are not exported and are defined in gc/Heap.h:
  inline bool markBit(const TenuredCell* cell, ColorBit colorBit);
  inline bool isMarkedAny(const TenuredCell* cell);
  inline bool isMarkedBlack(const TenuredCell* cell);
  inline bool isMarkedGray(const TenuredCell* cell);
  inline bool markIfUnmarked(const TenuredCell* cell, MarkColor color);
  inline void markBlack(const TenuredCell* cell);
  inline void copyMarkBit(TenuredCell* dst, const TenuredCell* src,
                          ColorBit colorBit);
  inline void unmark(const TenuredCell* cell);
  inline void clear();
  inline MarkBitmapWord* arenaBits(Arena* arena);
};

static_assert(ArenaBitmapBytes * ArenasPerChunk == sizeof(MarkBitmap),
              "Ensure our MarkBitmap actually covers all arenas.");

// Decommit bitmap for a heap chunk.
using DecommitBitmap = mozilla::BitSet<PagesPerChunk, uint32_t>;

// Base class containing data members for a tenured heap chunk.
class TenuredChunkBase : public ChunkBase {
 public:
  TenuredChunkInfo info;
  MarkBitmap markBits;
  DecommitBitmap decommittedPages;

 protected:
  explicit TenuredChunkBase(JSRuntime* runtime) : ChunkBase(runtime, nullptr) {}
};

/*
 * We sometimes use an index to refer to a cell in an arena. The index for a
 * cell is found by dividing by the cell alignment so not all indices refer to
 * valid cells.
 */
const size_t ArenaCellIndexBytes = CellAlignBytes;
const size_t MaxArenaCellIndex = ArenaSize / CellAlignBytes;
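
// For example, with the default 8-byte cell alignment, a cell at byte offset
// 24 within its arena has cell index 24 / CellAlignBytes == 3; offsets that
// are not multiples of CellAlignBytes do not correspond to any valid cell.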

const size_t MarkBitmapWordBits = sizeof(MarkBitmapWord) * CHAR_BIT;

// The mark bitmap has no words for the chunk header area, which contains no
// cells: getMarkWordAndMask subtracts this adjustment when indexing into it.
constexpr size_t FirstArenaAdjustmentBits =
    RoundUp(sizeof(gc::TenuredChunkBase), ArenaSize) / gc::CellBytesPerMarkBit;

static_assert((FirstArenaAdjustmentBits % MarkBitmapWordBits) == 0);
constexpr size_t FirstArenaAdjustmentWords =
    FirstArenaAdjustmentBits / MarkBitmapWordBits;

const size_t ChunkRuntimeOffset = offsetof(ChunkBase, runtime);
const size_t ChunkStoreBufferOffset = offsetof(ChunkBase, storeBuffer);
const size_t ChunkMarkBitmapOffset = offsetof(TenuredChunkBase, markBits);

// Hardcoded offsets into Arena class.
const size_t ArenaZoneOffset = 2 * sizeof(uint32_t);
const size_t ArenaHeaderSize = ArenaZoneOffset + 2 * sizeof(uintptr_t) +
                               sizeof(size_t) + sizeof(uintptr_t);

// The first word of a GC thing has certain requirements from the GC and is
// used to store flags in the low bits.
const size_t CellFlagBitsReservedForGC = 3;

// The first word can be used to store JSClass pointers for some thing kinds,
// so these must be suitably aligned.
const size_t JSClassAlignBytes = size_t(1) << CellFlagBitsReservedForGC;

#ifdef JS_DEBUG
/* When downcasting, ensure we are actually the right type. */
extern JS_PUBLIC_API void AssertGCThingHasType(js::gc::Cell* cell,
                                               JS::TraceKind kind);
#else
inline void AssertGCThingHasType(js::gc::Cell* cell, JS::TraceKind kind) {}
#endif

MOZ_ALWAYS_INLINE bool IsInsideNursery(const js::gc::Cell* cell);
MOZ_ALWAYS_INLINE bool IsInsideNursery(const js::gc::TenuredCell* cell);

} /* namespace gc */
} /* namespace js */

namespace JS {

enum class HeapState {
  Idle,             // doing nothing with the GC heap
  Tracing,          // tracing the GC heap without collecting, e.g.
                    // IterateCompartments()
  MajorCollecting,  // doing a GC of the major heap
  MinorCollecting,  // doing a GC of the minor heap (nursery)
  CycleCollecting   // in the "Unlink" phase of cycle collection
};

JS_PUBLIC_API HeapState RuntimeHeapState();

static inline bool RuntimeHeapIsBusy() {
  return RuntimeHeapState() != HeapState::Idle;
}

static inline bool RuntimeHeapIsTracing() {
  return RuntimeHeapState() == HeapState::Tracing;
}

static inline bool RuntimeHeapIsMajorCollecting() {
  return RuntimeHeapState() == HeapState::MajorCollecting;
}

static inline bool RuntimeHeapIsMinorCollecting() {
  return RuntimeHeapState() == HeapState::MinorCollecting;
}

static inline bool RuntimeHeapIsCollecting(HeapState state) {
  return state == HeapState::MajorCollecting ||
         state == HeapState::MinorCollecting;
}

static inline bool RuntimeHeapIsCollecting() {
  return RuntimeHeapIsCollecting(RuntimeHeapState());
}

static inline bool RuntimeHeapIsCycleCollecting() {
  return RuntimeHeapState() == HeapState::CycleCollecting;
}

/*
 * This list enumerates the different types of conceptual stacks we have in
 * SpiderMonkey. In reality, they all share the C stack, but we allow
 * different stack limits depending on the type of code running.
 */
enum StackKind {
  StackForSystemCode,       // C++, such as the GC, running on behalf of the VM.
  StackForTrustedScript,    // Script running with trusted principals.
  StackForUntrustedScript,  // Script running with untrusted principals.
  StackKindCount
};

/*
 * Default maximum size for the generational nursery in bytes. This is the
 * initial value; in the browser it is configured by the
 * javascript.options.mem.nursery.max_kb pref.
 */
const uint32_t DefaultNurseryMaxBytes = 16 * js::gc::ChunkSize;

/* Default maximum heap size in bytes to pass to JS_NewContext(). */
const uint32_t DefaultHeapMaxBytes = 32 * 1024 * 1024;

/**
 * A GC pointer, tagged with the trace kind.
 *
 * In general, a GC pointer should be stored with an exact type. This class
 * is for use when that is not possible because a single pointer must point
 * to several kinds of GC thing.
 */
class JS_PUBLIC_API GCCellPtr {
 public:
  GCCellPtr() : GCCellPtr(nullptr) {}

  // Construction from a void* and trace kind.
  GCCellPtr(void* gcthing, JS::TraceKind traceKind)
      : ptr(checkedCast(gcthing, traceKind)) {}

  // Automatically construct a null GCCellPtr from nullptr.
  MOZ_IMPLICIT GCCellPtr(decltype(nullptr))
      : ptr(checkedCast(nullptr, JS::TraceKind::Null)) {}

  // Construction from an explicit type.
  template <typename T>
  explicit GCCellPtr(T* p)
      : ptr(checkedCast(p, JS::MapTypeToTraceKind<T>::kind)) {}
  explicit GCCellPtr(JSFunction* p)
      : ptr(checkedCast(p, JS::TraceKind::Object)) {}
  explicit GCCellPtr(JSScript* p)
      : ptr(checkedCast(p, JS::TraceKind::Script)) {}
  explicit GCCellPtr(const Value& v);

  JS::TraceKind kind() const {
    uintptr_t kindBits = ptr & OutOfLineTraceKindMask;
    if (kindBits != OutOfLineTraceKindMask) {
      return JS::TraceKind(kindBits);
    }
    return outOfLineKind();
  }

  // Allow GCCellPtr to be used in a boolean context.
  explicit operator bool() const {
    MOZ_ASSERT(bool(asCell()) == (kind() != JS::TraceKind::Null));
    return asCell();
  }

  // Convenience check for the kind.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  bool is() const {
    return kind() == JS::MapTypeToTraceKind<T>::kind;
  }

  // Conversions to more specific types must match the kind. Access to
  // further refined types is not allowed directly from a GCCellPtr.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  T& as() const {
    MOZ_ASSERT(kind() == JS::MapTypeToTraceKind<T>::kind);
    // We can't use static_cast here, because the fact that JSObject
    // inherits from js::gc::Cell is not part of the public API.
    return *reinterpret_cast<T*>(asCell());
  }

  // Return a pointer to the cell this |GCCellPtr| refers to, or |nullptr|.
  // (It would be more symmetrical with |to| for this to return a |Cell&|, but
  // the result can be |nullptr|, and null references are undefined behavior.)
  js::gc::Cell* asCell() const {
    return reinterpret_cast<js::gc::Cell*>(ptr & ~OutOfLineTraceKindMask);
  }

  // The CC's trace logger needs an identity that is XPIDL serializable.
  uint64_t unsafeAsInteger() const {
    return static_cast<uint64_t>(unsafeAsUIntPtr());
  }
  // Inline mark bitmap access requires direct pointer arithmetic.
  uintptr_t unsafeAsUIntPtr() const {
    MOZ_ASSERT(asCell());
    MOZ_ASSERT(!js::gc::IsInsideNursery(asCell()));
    return reinterpret_cast<uintptr_t>(asCell());
  }

  MOZ_ALWAYS_INLINE bool mayBeOwnedByOtherRuntime() const {
    if (!is<JSString>() && !is<JS::Symbol>()) {
      return false;
    }
    if (is<JSString>()) {
      return JS::shadow::String::isPermanentAtom(asCell());
    }
    MOZ_ASSERT(is<JS::Symbol>());
    return JS::shadow::Symbol::isWellKnownSymbol(asCell());
  }

 private:
  static uintptr_t checkedCast(void* p, JS::TraceKind traceKind) {
    auto* cell = static_cast<js::gc::Cell*>(p);
    MOZ_ASSERT((uintptr_t(p) & OutOfLineTraceKindMask) == 0);
    AssertGCThingHasType(cell, traceKind);
    // Store the trace kind in the bottom bits of the pointer for common
    // kinds.
    uintptr_t kindBits = uintptr_t(traceKind);
    if (kindBits >= OutOfLineTraceKindMask) {
      kindBits = OutOfLineTraceKindMask;
    }
    return uintptr_t(p) | kindBits;
  }

  JS::TraceKind outOfLineKind() const;

  uintptr_t ptr;
} JS_HAZ_GC_POINTER;
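
// Illustrative usage (a sketch, not a normative part of this API):
//
//   JSObject* obj = ...;
//   JS::GCCellPtr cell(obj);             // Tagged with TraceKind::Object.
//   if (cell.is<JSObject>()) {
//     JSObject& o = cell.as<JSObject>(); // Checked cast back to JSObject.
//   }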

// Unwraps the given GCCellPtr, calls the functor |f| with a template argument
// of the actual type of the pointer, and returns the result.
template <typename F>
auto MapGCThingTyped(GCCellPtr thing, F&& f) {
  switch (thing.kind()) {
#define JS_EXPAND_DEF(name, type, _, _1) \
  case JS::TraceKind::name:              \
    return f(&thing.as<type>());
    JS_FOR_EACH_TRACEKIND(JS_EXPAND_DEF);
#undef JS_EXPAND_DEF
    default:
      MOZ_CRASH("Invalid trace kind in MapGCThingTyped for GCCellPtr.");
  }
}

// Unwraps the given GCCellPtr and calls the functor |f| with a template
// argument of the actual type of the pointer. Doesn't return anything.
template <typename F>
void ApplyGCThingTyped(GCCellPtr thing, F&& f) {
  // This function doesn't do anything beyond calling MapGCThingTyped, but is
  // supplied for symmetry with other MapGCThingTyped/ApplyGCThingTyped
  // implementations that have to wrap the functor to return a dummy value
  // that is ignored.
  MapGCThingTyped(thing, f);
}
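
// For example, a generic lambda can dispatch over all trace kinds at once
// (an illustrative sketch; TraceConcreteThing is a hypothetical helper):
//
//   JS::ApplyGCThingTyped(thing, [](auto* t) {
//     // |t| has the concrete type for the thing's kind, e.g. JSObject* or
//     // JS::Symbol*, so overload resolution picks the right overload.
//     TraceConcreteThing(t);
//   });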

} /* namespace JS */

// These are defined in the toplevel namespace instead of within JS so that
// they won't shadow other operator== overloads (see bug 1456512.)

inline bool operator==(const JS::GCCellPtr& ptr1, const JS::GCCellPtr& ptr2) {
  return ptr1.asCell() == ptr2.asCell();
}

inline bool operator!=(const JS::GCCellPtr& ptr1, const JS::GCCellPtr& ptr2) {
  return !(ptr1 == ptr2);
}

namespace js {
namespace gc {

/* static */
MOZ_ALWAYS_INLINE void MarkBitmap::getMarkWordAndMask(const TenuredCell* cell,
                                                      ColorBit colorBit,
                                                      MarkBitmapWord** wordp,
                                                      uintptr_t* maskp) {
  // Note: the JIT pre-barrier trampolines inline this code. Update
  // MacroAssembler::emitPreBarrierFastPath code too when making changes here!

  MOZ_ASSERT(size_t(colorBit) < MarkBitsPerCell);

  size_t offset = uintptr_t(cell) & ChunkMask;
  const size_t bit = offset / CellBytesPerMarkBit + size_t(colorBit);
  size_t word = bit / MarkBitmapWordBits - FirstArenaAdjustmentWords;
  MOZ_ASSERT(word < WordCount);
  *wordp = &bitmap[word];
  *maskp = uintptr_t(1) << (bit % MarkBitmapWordBits);
}
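
// To illustrate the arithmetic above (assuming 8-byte cell alignment and
// 64-bit words): for a cell at chunk offset 0x10008, the BlackBit lookup
// computes
//
//   bit  = 0x10008 / 8 + 0  = 8193
//   word = 8193 / 64 - FirstArenaAdjustmentWords
//   mask = 1 << (8193 % 64) = 2
//
// where FirstArenaAdjustmentWords discards the bitmap words that would
// otherwise correspond to the chunk header, which contains no cells.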

namespace detail {

static MOZ_ALWAYS_INLINE ChunkBase* GetCellChunkBase(const Cell* cell) {
  MOZ_ASSERT(cell);
  return reinterpret_cast<ChunkBase*>(uintptr_t(cell) & ~ChunkMask);
}

static MOZ_ALWAYS_INLINE TenuredChunkBase* GetCellChunkBase(
    const TenuredCell* cell) {
  MOZ_ASSERT(cell);
  return reinterpret_cast<TenuredChunkBase*>(uintptr_t(cell) & ~ChunkMask);
}

static MOZ_ALWAYS_INLINE JS::Zone* GetTenuredGCThingZone(const uintptr_t addr) {
  MOZ_ASSERT(addr);
  const uintptr_t zone_addr = (addr & ~ArenaMask) | ArenaZoneOffset;
  return *reinterpret_cast<JS::Zone**>(zone_addr);
}
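
// For example (addresses illustrative, assuming 4 KiB arenas): for a tenured
// cell at 0x7f001234a058, the arena base is 0x7f001234a000 and the zone
// pointer is loaded from arena base + ArenaZoneOffset.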

static MOZ_ALWAYS_INLINE bool TenuredCellIsMarkedGray(const TenuredCell* cell) {
  // Return true if GrayOrBlackBit is set and BlackBit is not set.
  MOZ_ASSERT(cell);
  MOZ_ASSERT(!js::gc::IsInsideNursery(cell));

  MarkBitmapWord* grayWord;
  uintptr_t grayMask;
  TenuredChunkBase* chunk = GetCellChunkBase(cell);
  chunk->markBits.getMarkWordAndMask(cell, js::gc::ColorBit::GrayOrBlackBit,
                                     &grayWord, &grayMask);
  if (!(*grayWord & grayMask)) {
    return false;
  }

  MarkBitmapWord* blackWord;
  uintptr_t blackMask;
  chunk->markBits.getMarkWordAndMask(cell, js::gc::ColorBit::BlackBit,
                                     &blackWord, &blackMask);
  return !(*blackWord & blackMask);
}

static MOZ_ALWAYS_INLINE bool CellIsMarkedGray(const Cell* cell) {
  MOZ_ASSERT(cell);
  if (js::gc::IsInsideNursery(cell)) {
    return false;
  }
  return TenuredCellIsMarkedGray(reinterpret_cast<const TenuredCell*>(cell));
}

extern JS_PUBLIC_API bool CellIsMarkedGrayIfKnown(const Cell* cell);

#ifdef DEBUG
extern JS_PUBLIC_API void AssertCellIsNotGray(const Cell* cell);

extern JS_PUBLIC_API bool ObjectIsMarkedBlack(const JSObject* obj);
#endif

MOZ_ALWAYS_INLINE bool CellHasStoreBuffer(const Cell* cell) {
  return GetCellChunkBase(cell)->storeBuffer;
}

} /* namespace detail */

MOZ_ALWAYS_INLINE bool IsInsideNursery(const Cell* cell) {
  if (!cell) {
    return false;
  }
  return detail::CellHasStoreBuffer(cell);
}

MOZ_ALWAYS_INLINE bool IsInsideNursery(const TenuredCell* cell) {
  MOZ_ASSERT_IF(
      cell, !detail::CellHasStoreBuffer(reinterpret_cast<const Cell*>(cell)));
  return false;
}

// Allow use before the compiler knows the derivation of JSObject, JSString,
// and JS::BigInt.
MOZ_ALWAYS_INLINE bool IsInsideNursery(const JSObject* obj) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(obj));
}
MOZ_ALWAYS_INLINE bool IsInsideNursery(const JSString* str) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(str));
}
MOZ_ALWAYS_INLINE bool IsInsideNursery(const JS::BigInt* bi) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(bi));
}

MOZ_ALWAYS_INLINE bool IsCellPointerValid(const void* ptr) {
  auto addr = uintptr_t(ptr);
  if (addr < ChunkSize || addr % CellAlignBytes != 0) {
    return false;
  }

  auto* cell = reinterpret_cast<const Cell*>(ptr);
  if (!IsInsideNursery(cell)) {
    return detail::GetTenuredGCThingZone(addr) != nullptr;
  }

  return true;
}

MOZ_ALWAYS_INLINE bool IsCellPointerValidOrNull(const void* cell) {
  if (!cell) {
    return true;
  }
  return IsCellPointerValid(cell);
}

} /* namespace gc */
} /* namespace js */

namespace JS {

static MOZ_ALWAYS_INLINE Zone* GetTenuredGCThingZone(GCCellPtr thing) {
  MOZ_ASSERT(!js::gc::IsInsideNursery(thing.asCell()));
  return js::gc::detail::GetTenuredGCThingZone(thing.unsafeAsUIntPtr());
}

extern JS_PUBLIC_API Zone* GetNurseryCellZone(js::gc::Cell* cell);

static MOZ_ALWAYS_INLINE Zone* GetGCThingZone(GCCellPtr thing) {
  if (!js::gc::IsInsideNursery(thing.asCell())) {
    return js::gc::detail::GetTenuredGCThingZone(thing.unsafeAsUIntPtr());
  }

  return GetNurseryCellZone(thing.asCell());
}

static MOZ_ALWAYS_INLINE Zone* GetStringZone(JSString* str) {
  if (!js::gc::IsInsideNursery(str)) {
    return js::gc::detail::GetTenuredGCThingZone(
        reinterpret_cast<uintptr_t>(str));
  }
  return GetNurseryCellZone(reinterpret_cast<js::gc::Cell*>(str));
}

extern JS_PUBLIC_API Zone* GetObjectZone(JSObject* obj);

static MOZ_ALWAYS_INLINE bool GCThingIsMarkedGray(GCCellPtr thing) {
  if (thing.mayBeOwnedByOtherRuntime()) {
    return false;
  }
  return js::gc::detail::CellIsMarkedGrayIfKnown(thing.asCell());
}

extern JS_PUBLIC_API JS::TraceKind GCThingTraceKind(void* thing);

extern JS_PUBLIC_API void EnableNurseryStrings(JSContext* cx);

extern JS_PUBLIC_API void DisableNurseryStrings(JSContext* cx);

extern JS_PUBLIC_API void EnableNurseryBigInts(JSContext* cx);

extern JS_PUBLIC_API void DisableNurseryBigInts(JSContext* cx);

/*
 * Returns true when writes to GC thing pointers (and reads from weak
 * pointers) must call an incremental barrier. This is generally only true
 * when running mutator code in-between GC slices. At other times, the
 * barrier may be elided for performance.
 */
extern JS_PUBLIC_API bool IsIncrementalBarrierNeeded(JSContext* cx);

/*
 * Notify the GC that a reference to a JSObject is about to be overwritten.
 * This method must be called if IsIncrementalBarrierNeeded() returns true.
 */
extern JS_PUBLIC_API void IncrementalPreWriteBarrier(JSObject* obj);

/*
 * Notify the GC that a reference to a tenured GC cell is about to be
 * overwritten. This method must be called if IsIncrementalBarrierNeeded()
 * returns true.
 */
extern JS_PUBLIC_API void IncrementalPreWriteBarrier(GCCellPtr thing);

/**
 * Unsets the gray bit for anything reachable from |thing|. |thing| should be
 * non-null and its kind should not be JS::TraceKind::Shape. The return value
 * indicates whether anything was unmarked.
 */
extern JS_PUBLIC_API bool UnmarkGrayGCThingRecursively(GCCellPtr thing);

}  // namespace JS

namespace js {
namespace gc {

extern JS_PUBLIC_API void PerformIncrementalReadBarrier(JS::GCCellPtr thing);

static MOZ_ALWAYS_INLINE void ExposeGCThingToActiveJS(JS::GCCellPtr thing) {
  // TODO: I'd like to assert !RuntimeHeapIsBusy() here but this gets
  // called while we are tracing the heap, e.g. during memory reporting
  // (see bug 1313318).
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());

  // GC things residing in the nursery cannot be gray: they have no mark bits.
  // All live objects in the nursery are moved to the tenured heap at the
  // beginning of each GC slice, so the gray marker never sees nursery things.
  if (IsInsideNursery(thing.asCell())) {
    return;
  }

  auto* cell = reinterpret_cast<TenuredCell*>(thing.asCell());

  // There's nothing to do for permanent GC things that might be owned by
  // another runtime.
  if (thing.mayBeOwnedByOtherRuntime()) {
    return;
  }

  auto* zone = JS::shadow::Zone::from(JS::GetTenuredGCThingZone(thing));
  if (zone->needsIncrementalBarrier()) {
    PerformIncrementalReadBarrier(thing);
  } else if (!zone->isGCPreparing() && detail::TenuredCellIsMarkedGray(cell)) {
    MOZ_ALWAYS_TRUE(JS::UnmarkGrayGCThingRecursively(thing));
  }

  MOZ_ASSERT_IF(!zone->isGCPreparing(), !detail::TenuredCellIsMarkedGray(cell));
}

template <typename T>
extern JS_PUBLIC_API bool EdgeNeedsSweepUnbarrieredSlow(T* thingp);

static MOZ_ALWAYS_INLINE bool EdgeNeedsSweepUnbarriered(JSObject** objp) {
  // This function does not handle updating nursery pointers. Raw JSObject
  // pointers should be updated separately or replaced with
  // JS::Heap<JSObject*> which handles this automatically.
  MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
  if (IsInsideNursery(*objp)) {
    return false;
  }

  auto zone =
      JS::shadow::Zone::from(detail::GetTenuredGCThingZone(uintptr_t(*objp)));
  if (!zone->isGCSweepingOrCompacting()) {
    return false;
  }

  return EdgeNeedsSweepUnbarrieredSlow(objp);
}

}  // namespace gc
}  // namespace js

namespace JS {

/*
 * This should be called when an object that is marked gray is exposed to the
 * JS engine (by handing it to running JS code or writing it into live JS
 * data). During incremental GC, since the gray bits haven't been computed
 * yet, we conservatively mark the object black.
 */
static MOZ_ALWAYS_INLINE void ExposeObjectToActiveJS(JSObject* obj) {
  MOZ_ASSERT(obj);
  MOZ_ASSERT(!js::gc::EdgeNeedsSweepUnbarrieredSlow(&obj));
  js::gc::ExposeGCThingToActiveJS(GCCellPtr(obj));
}

} /* namespace JS */

#endif /* js_HeapAPI_h */