/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef js_HeapAPI_h
#define js_HeapAPI_h

#include <limits.h>
#include <type_traits>

#include "jspubtd.h"

#include "js/GCAnnotations.h"
#include "js/TraceKind.h"
#include "js/Utility.h"

#ifndef JS_BITS_PER_WORD
#  error \
      "JS_BITS_PER_WORD must be defined. Did you forget to include js-config.h?"
#endif

struct JSExternalStringCallbacks;

/* These values are private to the JS engine. */
namespace js {

JS_FRIEND_API bool CurrentThreadCanAccessZone(JS::Zone* zone);

namespace gc {

struct Cell;

const size_t ArenaShift = 12;
const size_t ArenaSize = size_t(1) << ArenaShift;
const size_t ArenaMask = ArenaSize - 1;

#ifdef JS_GC_SMALL_CHUNK_SIZE
const size_t ChunkShift = 18;
#else
const size_t ChunkShift = 20;
#endif
const size_t ChunkSize = size_t(1) << ChunkShift;
const size_t ChunkMask = ChunkSize - 1;

const size_t CellAlignShift = 3;
const size_t CellAlignBytes = size_t(1) << CellAlignShift;
const size_t CellAlignMask = CellAlignBytes - 1;

const size_t CellBytesPerMarkBit = CellAlignBytes;

/*
 * We sometimes use an index to refer to a cell in an arena. The index for a
 * cell is found by dividing the cell's offset within the arena by the cell
 * alignment, so not all indices refer to valid cells.
 */
const size_t ArenaCellIndexBytes = CellAlignBytes;
const size_t MaxArenaCellIndex = ArenaSize / CellAlignBytes;
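// For example, with the 4096-byte arenas and 8-byte cell alignment defined
// above, valid cell indices fall in the range [0, 512).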

/* These are magic constants derived from actual offsets in gc/Heap.h. */
#ifdef JS_GC_SMALL_CHUNK_SIZE
const size_t ChunkMarkBitmapOffset = 258104;
const size_t ChunkMarkBitmapBits = 31744;
#else
const size_t ChunkMarkBitmapOffset = 1032352;
const size_t ChunkMarkBitmapBits = 129024;
#endif
const size_t ChunkRuntimeOffset = ChunkSize - sizeof(void*);
const size_t ChunkTrailerSize = 2 * sizeof(uintptr_t) + sizeof(uint64_t);
const size_t ChunkLocationOffset = ChunkSize - ChunkTrailerSize;
const size_t ChunkStoreBufferOffset =
    ChunkSize - ChunkTrailerSize + sizeof(uint64_t);
const size_t ArenaZoneOffset = sizeof(size_t);
const size_t ArenaHeaderSize =
    sizeof(size_t) + 2 * sizeof(uintptr_t) + sizeof(size_t) + sizeof(uintptr_t);
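// As a rough guide (assuming a 64-bit build with 8-byte pointers): the chunk
// trailer is 2 * 8 + 8 = 24 bytes, so ChunkLocationOffset is ChunkSize - 24 and
// ChunkStoreBufferOffset is ChunkSize - 16, while ArenaHeaderSize works out to
// 8 + 16 + 8 + 8 = 40 bytes.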

// The first word of a GC thing has certain requirements from the GC and is used
// to store flags in the low bits.
const size_t CellFlagBitsReservedForGC = 3;

// The first word can be used to store JSClass pointers for some thing kinds, so
// these must be suitably aligned.
const size_t JSClassAlignBytes = size_t(1) << CellFlagBitsReservedForGC;
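// In other words, values stored in a cell's first word (such as JSClass
// pointers) must be at least 8-byte aligned so that the 3 reserved flag bits
// stay clear.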

/*
 * Live objects are marked black or gray. Everything reachable from a JS root is
 * marked black. Objects marked gray are eligible for cycle collection.
 *
 *    BlackBit:     GrayOrBlackBit:  Color:
 *       0               0           white
 *       0               1           gray
 *       1               0           black
 *       1               1           black
 */
enum class ColorBit : uint32_t { BlackBit = 0, GrayOrBlackBit = 1 };

/*
 * The "location" field in the Chunk trailer is an enum indicating the role of
 * the chunk.
 */
enum class ChunkLocation : uint32_t {
  Invalid = 0,
  Nursery = 1,
  TenuredHeap = 2
};

#ifdef JS_DEBUG
/* When downcasting, ensure we are actually the right type. */
extern JS_FRIEND_API void AssertGCThingHasType(js::gc::Cell* cell,
                                               JS::TraceKind kind);
#else
inline void AssertGCThingHasType(js::gc::Cell* cell, JS::TraceKind kind) {}
#endif

MOZ_ALWAYS_INLINE bool IsInsideNursery(const js::gc::Cell* cell);

} /* namespace gc */
} /* namespace js */

namespace JS {

enum class HeapState {
  Idle,             // doing nothing with the GC heap
  Tracing,          // tracing the GC heap without collecting, e.g.
                    // IterateCompartments()
  MajorCollecting,  // doing a GC of the major heap
  MinorCollecting,  // doing a GC of the minor heap (nursery)
  CycleCollecting   // in the "Unlink" phase of cycle collection
};

JS_PUBLIC_API HeapState RuntimeHeapState();

static inline bool RuntimeHeapIsBusy() {
  return RuntimeHeapState() != HeapState::Idle;
}

static inline bool RuntimeHeapIsTracing() {
  return RuntimeHeapState() == HeapState::Tracing;
}

static inline bool RuntimeHeapIsMajorCollecting() {
  return RuntimeHeapState() == HeapState::MajorCollecting;
}

static inline bool RuntimeHeapIsMinorCollecting() {
  return RuntimeHeapState() == HeapState::MinorCollecting;
}

static inline bool RuntimeHeapIsCollecting(HeapState state) {
  return state == HeapState::MajorCollecting ||
         state == HeapState::MinorCollecting;
}

static inline bool RuntimeHeapIsCollecting() {
  return RuntimeHeapIsCollecting(RuntimeHeapState());
}

static inline bool RuntimeHeapIsCycleCollecting() {
  return RuntimeHeapState() == HeapState::CycleCollecting;
}
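
// For instance, code that must not run while a collection is in progress can
// assert one of these predicates, e.g. MOZ_ASSERT(!RuntimeHeapIsCollecting()),
// as IsIncrementalBarrierNeededOnTenuredGCThing does later in this header.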

/*
 * This list enumerates the different types of conceptual stacks we have in
 * SpiderMonkey. In reality, they all share the C stack, but we allow different
 * stack limits depending on the type of code running.
 */
enum StackKind {
  StackForSystemCode,       // C++, such as the GC, running on behalf of the VM.
  StackForTrustedScript,    // Script running with trusted principals.
  StackForUntrustedScript,  // Script running with untrusted principals.
  StackKindCount
};

/*
 * Default maximum size for the generational nursery in bytes. This is the
 * initial value. In the browser this is configured by the
 * javascript.options.mem.nursery.max_kb pref.
 */
const uint32_t DefaultNurseryMaxBytes = 16 * js::gc::ChunkSize;
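// With the default 1 MiB chunks (ChunkShift of 20) this is 16 MiB; with
// JS_GC_SMALL_CHUNK_SIZE (ChunkShift of 18, 256 KiB chunks) it is 4 MiB.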

/* Default maximum heap size in bytes to pass to JS_NewContext(). */
const uint32_t DefaultHeapMaxBytes = 32 * 1024 * 1024;

namespace shadow {

struct Zone {
  enum GCState : uint8_t {
    NoGC,
    MarkBlackOnly,
    MarkBlackAndGray,
    Sweep,
    Finished,
    Compact
  };

 protected:
  JSRuntime* const runtime_;
  JSTracer* const barrierTracer_;  // A pointer to the JSRuntime's |gcMarker|.
  uint32_t needsIncrementalBarrier_;
  GCState gcState_;

  Zone(JSRuntime* runtime, JSTracer* barrierTracerArg)
      : runtime_(runtime),
        barrierTracer_(barrierTracerArg),
        needsIncrementalBarrier_(0),
        gcState_(NoGC) {}

 public:
  bool needsIncrementalBarrier() const { return needsIncrementalBarrier_; }

  JSTracer* barrierTracer() {
    MOZ_ASSERT(needsIncrementalBarrier_);
    MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(runtime_));
    return barrierTracer_;
  }

  JSRuntime* runtimeFromMainThread() const {
    MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(runtime_));
    return runtime_;
  }

  // Note: Unrestricted access to the zone's runtime from an arbitrary
  // thread can easily lead to races. Use this method very carefully.
  JSRuntime* runtimeFromAnyThread() const { return runtime_; }

  GCState gcState() const { return gcState_; }
  bool wasGCStarted() const { return gcState_ != NoGC; }
  bool isGCMarkingBlackOnly() const { return gcState_ == MarkBlackOnly; }
  bool isGCMarkingBlackAndGray() const { return gcState_ == MarkBlackAndGray; }
  bool isGCSweeping() const { return gcState_ == Sweep; }
  bool isGCFinished() const { return gcState_ == Finished; }
  bool isGCCompacting() const { return gcState_ == Compact; }
  bool isGCMarking() const {
    return isGCMarkingBlackOnly() || isGCMarkingBlackAndGray();
  }
  bool isGCSweepingOrCompacting() const {
    return gcState_ == Sweep || gcState_ == Compact;
  }

  static MOZ_ALWAYS_INLINE JS::shadow::Zone* from(JS::Zone* zone) {
    return reinterpret_cast<JS::shadow::Zone*>(zone);
  }
};
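
// The shadow definition above lets friend code inspect a zone's GC state
// without the full Zone definition. A minimal usage sketch (|zone| is a
// hypothetical JS::Zone*):
//
//   JS::shadow::Zone* shadowZone = JS::shadow::Zone::from(zone);
//   if (shadowZone->needsIncrementalBarrier()) {
//     // ... run a pre-write barrier, as the helpers later in this header do.
//   }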

struct String {
  static const uint32_t ATOM_BIT = js::Bit(3);
  static const uint32_t LINEAR_BIT = js::Bit(4);
  static const uint32_t INLINE_CHARS_BIT = js::Bit(6);
  static const uint32_t LATIN1_CHARS_BIT = js::Bit(9);
  static const uint32_t EXTERNAL_FLAGS = LINEAR_BIT | js::Bit(8);
  static const uint32_t TYPE_FLAGS_MASK = js::BitMask(9) - js::BitMask(3);
  static const uint32_t PERMANENT_ATOM_MASK = ATOM_BIT | js::Bit(8);
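  // A quick sanity check on the masks above (assuming js::Bit(n) is 1 << n and
  // js::BitMask(n) is (1 << n) - 1): TYPE_FLAGS_MASK is 0x1f8, covering flag
  // bits 3 through 8, and PERMANENT_ATOM_MASK is 0x108 (bits 3 and 8).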

  uintptr_t flags_;
#if JS_BITS_PER_WORD == 32
  uint32_t length_;
#endif

  union {
    const JS::Latin1Char* nonInlineCharsLatin1;
    const char16_t* nonInlineCharsTwoByte;
    JS::Latin1Char inlineStorageLatin1[1];
    char16_t inlineStorageTwoByte[1];
  };
  const JSExternalStringCallbacks* externalCallbacks;

  inline uint32_t flags() const { return uint32_t(flags_); }
  inline uint32_t length() const {
#if JS_BITS_PER_WORD == 32
    return length_;
#else
    return uint32_t(flags_ >> 32);
#endif
  }

  static bool isPermanentAtom(const js::gc::Cell* cell) {
    uint32_t flags = reinterpret_cast<const String*>(cell)->flags();
    return (flags & PERMANENT_ATOM_MASK) == PERMANENT_ATOM_MASK;
  }
};

struct Symbol {
  void* _1;
  uint32_t code_;
  static const uint32_t WellKnownAPILimit = 0x80000000;

  static bool isWellKnownSymbol(const js::gc::Cell* cell) {
    return reinterpret_cast<const Symbol*>(cell)->code_ < WellKnownAPILimit;
  }
};

} /* namespace shadow */

/**
 * A GC pointer, tagged with the trace kind.
 *
 * In general, a GC pointer should be stored with an exact type. This class
 * is for use when that is not possible because a single pointer must point
 * to several kinds of GC thing.
 */
class JS_FRIEND_API GCCellPtr {
 public:
  GCCellPtr() : GCCellPtr(nullptr) {}

  // Construction from a void* and trace kind.
  GCCellPtr(void* gcthing, JS::TraceKind traceKind)
      : ptr(checkedCast(gcthing, traceKind)) {}

  // Automatically construct a null GCCellPtr from nullptr.
  MOZ_IMPLICIT GCCellPtr(decltype(nullptr))
      : ptr(checkedCast(nullptr, JS::TraceKind::Null)) {}

  // Construction from an explicit type.
  template <typename T>
  explicit GCCellPtr(T* p)
      : ptr(checkedCast(p, JS::MapTypeToTraceKind<T>::kind)) {}
  explicit GCCellPtr(JSFunction* p)
      : ptr(checkedCast(p, JS::TraceKind::Object)) {}
  explicit GCCellPtr(JSScript* p)
      : ptr(checkedCast(p, JS::TraceKind::Script)) {}
  explicit GCCellPtr(const Value& v);

  JS::TraceKind kind() const {
    JS::TraceKind traceKind = JS::TraceKind(ptr & OutOfLineTraceKindMask);
    if (uintptr_t(traceKind) != OutOfLineTraceKindMask) {
      return traceKind;
    }
    return outOfLineKind();
  }

  // Allow GCCellPtr to be used in a boolean context.
  explicit operator bool() const {
    MOZ_ASSERT(bool(asCell()) == (kind() != JS::TraceKind::Null));
    return asCell();
  }

  // Simplify checks to the kind.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  bool is() const {
    return kind() == JS::MapTypeToTraceKind<T>::kind;
  }

  // Conversions to more specific types must match the kind. Access to
  // further refined types is not allowed directly from a GCCellPtr.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  T& as() const {
    MOZ_ASSERT(kind() == JS::MapTypeToTraceKind<T>::kind);
    // We can't use static_cast here, because the fact that JSObject
    // inherits from js::gc::Cell is not part of the public API.
    return *reinterpret_cast<T*>(asCell());
  }

  // Return a pointer to the cell this |GCCellPtr| refers to, or |nullptr|.
  // (It would be more symmetrical with |as| for this to return a |Cell&|, but
  // the result can be |nullptr|, and null references are undefined behavior.)
  js::gc::Cell* asCell() const {
    return reinterpret_cast<js::gc::Cell*>(ptr & ~OutOfLineTraceKindMask);
  }

  // The CC's trace logger needs an identity that is XPIDL serializable.
  uint64_t unsafeAsInteger() const {
    return static_cast<uint64_t>(unsafeAsUIntPtr());
  }
  // Inline mark bitmap access requires direct pointer arithmetic.
  uintptr_t unsafeAsUIntPtr() const {
    MOZ_ASSERT(asCell());
    MOZ_ASSERT(!js::gc::IsInsideNursery(asCell()));
    return reinterpret_cast<uintptr_t>(asCell());
  }

  MOZ_ALWAYS_INLINE bool mayBeOwnedByOtherRuntime() const {
    if (!is<JSString>() && !is<JS::Symbol>()) {
      return false;
    }
    if (is<JSString>()) {
      return JS::shadow::String::isPermanentAtom(asCell());
    }
    MOZ_ASSERT(is<JS::Symbol>());
    return JS::shadow::Symbol::isWellKnownSymbol(asCell());
  }

 private:
  static uintptr_t checkedCast(void* p, JS::TraceKind traceKind) {
    js::gc::Cell* cell = static_cast<js::gc::Cell*>(p);
    MOZ_ASSERT((uintptr_t(p) & OutOfLineTraceKindMask) == 0);
    AssertGCThingHasType(cell, traceKind);
    // Note: the OutOfLineTraceKindMask bits are set on all out-of-line kinds
    // so that we can mask instead of branching.
    MOZ_ASSERT_IF(uintptr_t(traceKind) >= OutOfLineTraceKindMask,
                  (uintptr_t(traceKind) & OutOfLineTraceKindMask) ==
                      OutOfLineTraceKindMask);
    return uintptr_t(p) | (uintptr_t(traceKind) & OutOfLineTraceKindMask);
  }

  JS::TraceKind outOfLineKind() const;

  uintptr_t ptr;
} JS_HAZ_GC_POINTER;
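
// A minimal usage sketch for GCCellPtr (|obj| and |fun| below are
// hypothetical):
//
//   JS::GCCellPtr cellPtr(obj);   // obj is a JSObject*
//   if (cellPtr.is<JSObject>()) {
//     JSObject& o = cellPtr.as<JSObject>();
//   }
//   JS::GCCellPtr funPtr(fun);    // a JSFunction* is stored with
//                                 // TraceKind::Object (see the constructors)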

// Unwraps the given GCCellPtr, calls the functor |f| with a template argument
// of the actual type of the pointer, and returns the result.
template <typename F>
auto MapGCThingTyped(GCCellPtr thing, F&& f) {
  switch (thing.kind()) {
#define JS_EXPAND_DEF(name, type, _, _1) \
  case JS::TraceKind::name:              \
    return f(&thing.as<type>());
    JS_FOR_EACH_TRACEKIND(JS_EXPAND_DEF);
#undef JS_EXPAND_DEF
    default:
      MOZ_CRASH("Invalid trace kind in MapGCThingTyped for GCCellPtr.");
  }
}

// Unwraps the given GCCellPtr and calls the functor |f| with a template
// argument of the actual type of the pointer. Doesn't return anything.
template <typename F>
void ApplyGCThingTyped(GCCellPtr thing, F&& f) {
  // This function simply forwards to MapGCThingTyped; it is supplied for
  // symmetry with other MapGCThingTyped/ApplyGCThingTyped implementations that
  // have to wrap the functor to return a dummy value that is ignored.
  MapGCThingTyped(thing, f);
}
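
// A sketch of how these helpers are typically used (the lambda is purely
// illustrative):
//
//   JS::ApplyGCThingTyped(thing, [](auto t) {
//     // |t| has the concrete pointer type for |thing|'s trace kind,
//     // e.g. JSObject* or JSString*.
//   });
//
// MapGCThingTyped works the same way but returns the functor's result.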

} /* namespace JS */

// These are defined in the toplevel namespace instead of within JS so that
// they won't shadow other operator== overloads (see bug 1456512.)

inline bool operator==(const JS::GCCellPtr& ptr1, const JS::GCCellPtr& ptr2) {
  return ptr1.asCell() == ptr2.asCell();
}

inline bool operator!=(const JS::GCCellPtr& ptr1, const JS::GCCellPtr& ptr2) {
  return !(ptr1 == ptr2);
}

namespace js {
namespace gc {
namespace detail {

static MOZ_ALWAYS_INLINE uintptr_t* GetGCThingMarkBitmap(const uintptr_t addr) {
  // Note: the JIT pre-barrier trampolines inline this code. Update that
  // code too when making changes here!
  MOZ_ASSERT(addr);
  const uintptr_t bmap_addr = (addr & ~ChunkMask) | ChunkMarkBitmapOffset;
  return reinterpret_cast<uintptr_t*>(bmap_addr);
}

static MOZ_ALWAYS_INLINE void GetGCThingMarkWordAndMask(const uintptr_t addr,
                                                        ColorBit colorBit,
                                                        uintptr_t** wordp,
                                                        uintptr_t* maskp) {
  // Note: the JIT pre-barrier trampolines inline this code. Update that
  // code too when making changes here!
  MOZ_ASSERT(addr);
  const size_t bit = (addr & js::gc::ChunkMask) / js::gc::CellBytesPerMarkBit +
                     static_cast<uint32_t>(colorBit);
  MOZ_ASSERT(bit < js::gc::ChunkMarkBitmapBits);
  uintptr_t* bitmap = GetGCThingMarkBitmap(addr);
  const uintptr_t nbits = sizeof(*bitmap) * CHAR_BIT;
  *maskp = uintptr_t(1) << (bit % nbits);
  *wordp = &bitmap[bit / nbits];
}
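
// Callers dereference the returned word and test it against the returned mask
// (see TenuredCellIsMarkedGray below). As a worked example of the arithmetic:
// a cell 0x40 bytes into its chunk maps to bit index 0x40 / CellBytesPerMarkBit
// = 8 for ColorBit::BlackBit, and to bit index 9 for ColorBit::GrayOrBlackBit.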

static MOZ_ALWAYS_INLINE JS::Zone* GetTenuredGCThingZone(const uintptr_t addr) {
  MOZ_ASSERT(addr);
  const uintptr_t zone_addr = (addr & ~ArenaMask) | ArenaZoneOffset;
  return *reinterpret_cast<JS::Zone**>(zone_addr);
}

static MOZ_ALWAYS_INLINE bool TenuredCellIsMarkedGray(const Cell* cell) {
  // Return true if GrayOrBlackBit is set and BlackBit is not set.
  MOZ_ASSERT(cell);
  MOZ_ASSERT(!js::gc::IsInsideNursery(cell));

  uintptr_t *grayWord, grayMask;
  js::gc::detail::GetGCThingMarkWordAndMask(
      uintptr_t(cell), js::gc::ColorBit::GrayOrBlackBit, &grayWord, &grayMask);
  if (!(*grayWord & grayMask)) {
    return false;
  }

  uintptr_t *blackWord, blackMask;
  js::gc::detail::GetGCThingMarkWordAndMask(
      uintptr_t(cell), js::gc::ColorBit::BlackBit, &blackWord, &blackMask);
  return !(*blackWord & blackMask);
}

static MOZ_ALWAYS_INLINE bool CellIsMarkedGray(const Cell* cell) {
  MOZ_ASSERT(cell);
  if (js::gc::IsInsideNursery(cell)) {
    return false;
  }
  return TenuredCellIsMarkedGray(cell);
}

extern JS_PUBLIC_API bool CellIsMarkedGrayIfKnown(const Cell* cell);

#ifdef DEBUG
extern JS_PUBLIC_API void AssertCellIsNotGray(const Cell* cell);

extern JS_PUBLIC_API bool ObjectIsMarkedBlack(const JSObject* obj);
#endif

MOZ_ALWAYS_INLINE ChunkLocation GetCellLocation(const void* cell) {
  uintptr_t addr = uintptr_t(cell);
  addr &= ~js::gc::ChunkMask;
  addr |= js::gc::ChunkLocationOffset;
  return *reinterpret_cast<ChunkLocation*>(addr);
}

MOZ_ALWAYS_INLINE bool NurseryCellHasStoreBuffer(const void* cell) {
  uintptr_t addr = uintptr_t(cell);
  addr &= ~js::gc::ChunkMask;
  addr |= js::gc::ChunkStoreBufferOffset;
  return *reinterpret_cast<void**>(addr) != nullptr;
}

} /* namespace detail */

MOZ_ALWAYS_INLINE bool IsInsideNursery(const Cell* cell) {
  if (!cell) {
    return false;
  }
  auto location = detail::GetCellLocation(cell);
  MOZ_ASSERT(location == ChunkLocation::Nursery ||
             location == ChunkLocation::TenuredHeap);
  return location == ChunkLocation::Nursery;
}

// Allow use before the compiler knows the derivation of JSObject, JSString, and
// JS::BigInt.
MOZ_ALWAYS_INLINE bool IsInsideNursery(const JSObject* obj) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(obj));
}
MOZ_ALWAYS_INLINE bool IsInsideNursery(const JSString* str) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(str));
}
MOZ_ALWAYS_INLINE bool IsInsideNursery(const JS::BigInt* bi) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(bi));
}

MOZ_ALWAYS_INLINE bool IsCellPointerValid(const void* cell) {
  auto addr = uintptr_t(cell);
  if (addr < ChunkSize || addr % CellAlignBytes != 0) {
    return false;
  }
  auto location = detail::GetCellLocation(cell);
  if (location == ChunkLocation::TenuredHeap) {
    return !!detail::GetTenuredGCThingZone(addr);
  }
  if (location == ChunkLocation::Nursery) {
    return detail::NurseryCellHasStoreBuffer(cell);
  }
  return false;
}

MOZ_ALWAYS_INLINE bool IsCellPointerValidOrNull(const void* cell) {
  if (!cell) {
    return true;
  }
  return IsCellPointerValid(cell);
}

} /* namespace gc */
} /* namespace js */

namespace JS {

static MOZ_ALWAYS_INLINE Zone* GetTenuredGCThingZone(GCCellPtr thing) {
  MOZ_ASSERT(!js::gc::IsInsideNursery(thing.asCell()));
  return js::gc::detail::GetTenuredGCThingZone(thing.unsafeAsUIntPtr());
}

extern JS_PUBLIC_API Zone* GetNurseryCellZone(js::gc::Cell* cell);

static MOZ_ALWAYS_INLINE Zone* GetGCThingZone(GCCellPtr thing) {
  if (!js::gc::IsInsideNursery(thing.asCell())) {
    return js::gc::detail::GetTenuredGCThingZone(thing.unsafeAsUIntPtr());
  }

  return GetNurseryCellZone(thing.asCell());
}

static MOZ_ALWAYS_INLINE Zone* GetStringZone(JSString* str) {
  if (!js::gc::IsInsideNursery(str)) {
    return js::gc::detail::GetTenuredGCThingZone(
        reinterpret_cast<uintptr_t>(str));
  }
  return GetNurseryCellZone(reinterpret_cast<js::gc::Cell*>(str));
}

extern JS_PUBLIC_API Zone* GetObjectZone(JSObject* obj);

static MOZ_ALWAYS_INLINE bool GCThingIsMarkedGray(GCCellPtr thing) {
  if (thing.mayBeOwnedByOtherRuntime()) {
    return false;
  }
  return js::gc::detail::CellIsMarkedGrayIfKnown(thing.asCell());
}

extern JS_PUBLIC_API JS::TraceKind GCThingTraceKind(void* thing);

extern JS_PUBLIC_API void EnableNurseryStrings(JSContext* cx);

extern JS_PUBLIC_API void DisableNurseryStrings(JSContext* cx);

extern JS_PUBLIC_API void EnableNurseryBigInts(JSContext* cx);

extern JS_PUBLIC_API void DisableNurseryBigInts(JSContext* cx);

/*
 * Returns true when writes to GC thing pointers (and reads from weak pointers)
 * must call an incremental barrier. This is generally only true when running
 * mutator code in-between GC slices. At other times, the barrier may be elided
 * for performance.
 */
extern JS_PUBLIC_API bool IsIncrementalBarrierNeeded(JSContext* cx);

/*
 * Notify the GC that a reference to a JSObject is about to be overwritten.
 * This method must be called if IsIncrementalBarrierNeeded.
 */
extern JS_PUBLIC_API void IncrementalPreWriteBarrier(JSObject* obj);

/*
 * Notify the GC that a reference to a tenured GC cell is about to be
 * overwritten. This method must be called if IsIncrementalBarrierNeeded.
 */
extern JS_PUBLIC_API void IncrementalPreWriteBarrier(GCCellPtr thing);

/**
 * Unsets the gray bit for anything reachable from |thing|. |thing| should be
 * non-null and its kind should not be JS::TraceKind::Shape. The return value
 * indicates whether anything was unmarked.
 */
extern JS_FRIEND_API bool UnmarkGrayGCThingRecursively(GCCellPtr thing);

}  // namespace JS

namespace js {
namespace gc {

extern JS_PUBLIC_API void PerformIncrementalReadBarrier(JS::GCCellPtr thing);

static MOZ_ALWAYS_INLINE bool IsIncrementalBarrierNeededOnTenuredGCThing(
    const JS::GCCellPtr thing) {
  MOZ_ASSERT(thing);
  MOZ_ASSERT(!js::gc::IsInsideNursery(thing.asCell()));

  // TODO: I'd like to assert !RuntimeHeapIsBusy() here but this gets
  // called while we are tracing the heap, e.g. during memory reporting
  // (see bug 1313318).
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());

  JS::Zone* zone = JS::GetTenuredGCThingZone(thing);
  return JS::shadow::Zone::from(zone)->needsIncrementalBarrier();
}

static MOZ_ALWAYS_INLINE void ExposeGCThingToActiveJS(JS::GCCellPtr thing) {
  // GC things residing in the nursery cannot be gray: they have no mark bits.
  // All live objects in the nursery are moved to the tenured heap at the
  // beginning of each GC slice, so the gray marker never sees nursery things.
  if (IsInsideNursery(thing.asCell())) {
    return;
  }

  // There's nothing to do for permanent GC things that might be owned by
  // another runtime.
  if (thing.mayBeOwnedByOtherRuntime()) {
    return;
  }

  if (IsIncrementalBarrierNeededOnTenuredGCThing(thing)) {
    PerformIncrementalReadBarrier(thing);
  } else if (detail::TenuredCellIsMarkedGray(thing.asCell())) {
    JS::UnmarkGrayGCThingRecursively(thing);
  }

  MOZ_ASSERT(!detail::TenuredCellIsMarkedGray(thing.asCell()));
}

template <typename T>
extern JS_PUBLIC_API bool EdgeNeedsSweepUnbarrieredSlow(T* thingp);

static MOZ_ALWAYS_INLINE bool EdgeNeedsSweepUnbarriered(JSObject** objp) {
  // This function does not handle updating nursery pointers. Raw JSObject
  // pointers should be updated separately or replaced with
  // JS::Heap<JSObject*> which handles this automatically.
  MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
  if (IsInsideNursery(*objp)) {
    return false;
  }

  auto zone =
      JS::shadow::Zone::from(detail::GetTenuredGCThingZone(uintptr_t(*objp)));
  if (!zone->isGCSweepingOrCompacting()) {
    return false;
  }

  return EdgeNeedsSweepUnbarrieredSlow(objp);
}

}  // namespace gc
}  // namespace js

namespace JS {

/*
 * This should be called when an object that is marked gray is exposed to the JS
 * engine (by handing it to running JS code or writing it into live JS
 * data). During incremental GC, since the gray bits haven't been computed yet,
 * we conservatively mark the object black.
 */
static MOZ_ALWAYS_INLINE void ExposeObjectToActiveJS(JSObject* obj) {
  MOZ_ASSERT(obj);
  MOZ_ASSERT(!js::gc::EdgeNeedsSweepUnbarrieredSlow(&obj));
  js::gc::ExposeGCThingToActiveJS(GCCellPtr(obj));
}

} /* namespace JS */

#endif /* js_HeapAPI_h */