1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2  * vim: set ts=8 sts=2 et sw=2 tw=80:
3  * This Source Code Form is subject to the terms of the Mozilla Public
4  * License, v. 2.0. If a copy of the MPL was not distributed with this
5  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 
7 #ifndef gc_Cell_h
8 #define gc_Cell_h
9 
10 #include "mozilla/Atomics.h"
11 #include "mozilla/EndianUtils.h"
12 
13 #include <type_traits>
14 
15 #include "gc/GCEnum.h"
16 #include "gc/Heap.h"
17 #include "js/GCAnnotations.h"
18 #include "js/shadow/Zone.h"  // JS::shadow::Zone
19 #include "js/TraceKind.h"
20 #include "js/TypeDecls.h"
21 
22 namespace JS {
23 enum class TraceKind;
24 } /* namespace JS */
25 
26 namespace js {
27 
28 class GenericPrinter;
29 
30 extern bool RuntimeFromMainThreadIsHeapMajorCollecting(
31     JS::shadow::Zone* shadowZone);
32 
33 #ifdef DEBUG
34 
35 // Barriers can't be triggered during backend Ion compilation, which may run on
36 // a helper thread.
37 extern bool CurrentThreadIsIonCompiling();
38 
39 extern bool CurrentThreadIsGCMarking();
40 extern bool CurrentThreadIsGCSweeping();
41 extern bool CurrentThreadIsGCFinalizing();
42 extern bool RuntimeIsVerifyingPreBarriers(JSRuntime* runtime);
43 
44 #endif
45 
46 extern void TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc,
47                                                      gc::Cell** thingp,
48                                                      const char* name);
49 
50 namespace gc {
51 
52 class Arena;
53 enum class AllocKind : uint8_t;
54 class StoreBuffer;
55 class TenuredCell;
56 
57 extern void PerformIncrementalBarrier(TenuredCell* cell);
58 extern void PerformIncrementalBarrierDuringFlattening(JSString* str);
59 extern void UnmarkGrayGCThingRecursively(TenuredCell* cell);
60 
61 // Like gc::MarkColor but allows the possibility of the cell being unmarked.
62 //
63 // This class mimics an enum class, but supports operator overloading.
class CellColor {
 public:
  enum Color { White = 0, Gray = 1, Black = 2 };

  // Default-construct as unmarked (white).
  CellColor() : color(White) {}

  // A MarkColor always denotes a marked cell, so it maps to gray or black.
  MOZ_IMPLICIT CellColor(MarkColor markColor)
      : color(markColor == MarkColor::Black ? Black : Gray) {}

  MOZ_IMPLICIT constexpr CellColor(Color c) : color(c) {}

  // Convert back to MarkColor. Only valid for marked (non-white) colors.
  MarkColor asMarkColor() const {
    MOZ_ASSERT(color != White);
    return color == Black ? MarkColor::Black : MarkColor::Gray;
  }

  // Implement a total ordering for CellColor, with white being 'least marked'
  // and black being 'most marked'.
  bool operator<(const CellColor other) const { return color < other.color; }
  bool operator>(const CellColor other) const { return color > other.color; }
  bool operator<=(const CellColor other) const { return color <= other.color; }
  bool operator>=(const CellColor other) const { return color >= other.color; }
  bool operator!=(const CellColor other) const { return color != other.color; }
  bool operator==(const CellColor other) const { return color == other.color; }

  // True if the cell is marked at all (gray or black).
  explicit operator bool() const { return color != White; }

#if defined(JS_GC_ZEAL) || defined(DEBUG)
  // Human-readable color name for debug output.
  const char* name() const {
    switch (color) {
      case CellColor::White:
        return "white";
      case CellColor::Black:
        return "black";
      case CellColor::Gray:
        return "gray";
      default:
        MOZ_CRASH("Unexpected cell color");
    }
  }
#endif

 private:
  Color color;
};
108 
109 // [SMDOC] GC Cell
110 //
111 // A GC cell is the ultimate base class for all GC things. All types allocated
112 // on the GC heap extend either gc::Cell or gc::TenuredCell. If a type is always
113 // tenured, prefer the TenuredCell class as base.
114 //
115 // The first word of Cell is a uintptr_t that reserves the low three bits for GC
116 // purposes. The remaining bits are available to sub-classes and can be used
117 // store a pointer to another gc::Cell. It can also be used for temporary
118 // storage (see setTemporaryGCUnsafeData). To make use of the remaining space,
119 // sub-classes derive from a helper class such as TenuredCellWithNonGCPointer.
120 //
121 // During moving GC operation a Cell may be marked as forwarded. This indicates
122 // that a gc::RelocationOverlay is currently stored in the Cell's memory and
123 // should be used to find the new location of the Cell.
struct Cell {
 protected:
  // Cell header word. Stores GC flags and derived class data.
  //
  // This is atomic since it can be read from and written to by different
  // threads during compacting GC, in a limited way. Specifically, writes that
  // update the derived class data can race with reads that check the forwarded
  // flag. The writes do not change the forwarded flag (which is always false in
  // this situation).
  mozilla::Atomic<uintptr_t, mozilla::MemoryOrdering::Relaxed> header_;

 public:
  static_assert(gc::CellFlagBitsReservedForGC >= 3,
                "Not enough flag bits reserved for GC");
  // Mask covering the low header bits that are reserved for GC use.
  static constexpr uintptr_t RESERVED_MASK =
      BitMask(gc::CellFlagBitsReservedForGC);

  // Indicates whether the cell has been forwarded (moved) by generational or
  // compacting GC and is now a RelocationOverlay.
  static constexpr uintptr_t FORWARD_BIT = Bit(0);

  // Bits 1 and 2 are reserved for future use by the GC.

  bool isForwarded() const { return header_ & FORWARD_BIT; }
  uintptr_t flags() const { return header_ & RESERVED_MASK; }

  MOZ_ALWAYS_INLINE bool isTenured() const { return !IsInsideNursery(this); }
  MOZ_ALWAYS_INLINE const TenuredCell& asTenured() const;
  MOZ_ALWAYS_INLINE TenuredCell& asTenured();

  // Mark state queries. Nursery cells count as marked black; see the inline
  // definitions below.
  MOZ_ALWAYS_INLINE bool isMarkedAny() const;
  MOZ_ALWAYS_INLINE bool isMarkedBlack() const;
  MOZ_ALWAYS_INLINE bool isMarkedGray() const;
  MOZ_ALWAYS_INLINE bool isMarked(gc::MarkColor color) const;
  MOZ_ALWAYS_INLINE bool isMarkedAtLeast(gc::MarkColor color) const;

  // The cell's mark state as a CellColor (white/gray/black).
  MOZ_ALWAYS_INLINE CellColor color() const {
    return isMarkedBlack()  ? CellColor::Black
           : isMarkedGray() ? CellColor::Gray
                            : CellColor::White;
  }

  inline JSRuntime* runtimeFromMainThread() const;

  // Note: Unrestricted access to the runtime of a GC thing from an arbitrary
  // thread can easily lead to races. Use this method very carefully.
  inline JSRuntime* runtimeFromAnyThread() const;

  // May be overridden by GC thing kinds that have a compartment pointer.
  inline JS::Compartment* maybeCompartment() const { return nullptr; }

  // The StoreBuffer used to record incoming pointers from the tenured heap.
  // This will return nullptr for a tenured cell.
  inline StoreBuffer* storeBuffer() const;

  inline JS::TraceKind getTraceKind() const;

  // Whether writes to cells in |zone| currently require a pre-write barrier.
  static MOZ_ALWAYS_INLINE bool needPreWriteBarrier(JS::Zone* zone);

  // Type query: does this cell's trace kind match T?
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline bool is() const {
    return getTraceKind() == JS::MapTypeToTraceKind<T>::kind;
  }

  // Checked downcast to a concrete GC thing type (debug-asserted).
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline T* as() {
    // |this|-qualify the |is| call below to avoid compile errors with even
    // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
    MOZ_ASSERT(this->is<T>());
    return static_cast<T*>(this);
  }

  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline const T* as() const {
    // |this|-qualify the |is| call below to avoid compile errors with even
    // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
    MOZ_ASSERT(this->is<T>());
    return static_cast<const T*>(this);
  }

  inline JS::Zone* zone() const;
  inline JS::Zone* zoneFromAnyThread() const;

  // Get the zone for a cell known to be in the nursery.
  inline JS::Zone* nurseryZone() const;
  inline JS::Zone* nurseryZoneFromAnyThread() const;

  // Default implementation for kinds that cannot be permanent. This may be
  // overridden by derived classes.
  MOZ_ALWAYS_INLINE bool isPermanentAndMayBeShared() const { return false; }

#ifdef DEBUG
  static inline void assertThingIsNotGray(Cell* cell);
  inline bool isAligned() const;
  void dump(GenericPrinter& out) const;
  void dump() const;
#endif

 protected:
  uintptr_t address() const;
  inline TenuredChunk* chunk() const;

 private:
  // Cells are destroyed by the GC. Do not delete them directly.
  void operator delete(void*) = delete;
} JS_HAZ_GC_THING;
230 
231 // A GC TenuredCell gets behaviors that are valid for things in the Tenured
232 // heap, such as access to the arena and mark bits.
class TenuredCell : public Cell {
 public:
  // A TenuredCell is never in the nursery by definition.
  MOZ_ALWAYS_INLINE bool isTenured() const {
    MOZ_ASSERT(!IsInsideNursery(this));
    return true;
  }

  // Mark bit management.
  MOZ_ALWAYS_INLINE bool isMarkedAny() const;
  MOZ_ALWAYS_INLINE bool isMarkedBlack() const;
  MOZ_ALWAYS_INLINE bool isMarkedGray() const;

  // Same as Cell::color, but skips nursery checks.
  MOZ_ALWAYS_INLINE CellColor color() const {
    return isMarkedBlack()  ? CellColor::Black
           : isMarkedGray() ? CellColor::Gray
                            : CellColor::White;
  }

  // The return value indicates if the cell went from unmarked to marked.
  MOZ_ALWAYS_INLINE bool markIfUnmarked(
      MarkColor color = MarkColor::Black) const;
  MOZ_ALWAYS_INLINE void markBlack() const;
  MOZ_ALWAYS_INLINE void copyMarkBitsFrom(const TenuredCell* src);
  MOZ_ALWAYS_INLINE void unmark();

  // Access to the arena, and properties derived from it.
  inline Arena* arena() const;
  inline AllocKind getAllocKind() const;
  inline JS::TraceKind getTraceKind() const;
  inline JS::Zone* zone() const;
  inline JS::Zone* zoneFromAnyThread() const;
  inline bool isInsideZone(JS::Zone* zone) const;

  MOZ_ALWAYS_INLINE JS::shadow::Zone* shadowZone() const {
    return JS::shadow::Zone::from(zone());
  }
  MOZ_ALWAYS_INLINE JS::shadow::Zone* shadowZoneFromAnyThread() const {
    return JS::shadow::Zone::from(zoneFromAnyThread());
  }

  // Type queries and checked casts, mirroring those on Cell.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline bool is() const {
    return getTraceKind() == JS::MapTypeToTraceKind<T>::kind;
  }

  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline T* as() {
    // |this|-qualify the |is| call below to avoid compile errors with even
    // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
    MOZ_ASSERT(this->is<T>());
    return static_cast<T*>(this);
  }

  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline const T* as() const {
    // |this|-qualify the |is| call below to avoid compile errors with even
    // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
    MOZ_ASSERT(this->is<T>());
    return static_cast<const T*>(this);
  }

  // Default implementation for kinds that don't require fixup.
  void fixupAfterMovingGC() {}

#ifdef DEBUG
  inline bool isAligned() const;
#endif
};
302 
// Downcast to TenuredCell; asserts the cell is not in the nursery.
MOZ_ALWAYS_INLINE const TenuredCell& Cell::asTenured() const {
  MOZ_ASSERT(isTenured());
  return *static_cast<const TenuredCell*>(this);
}
307 
// Downcast to TenuredCell; asserts the cell is not in the nursery.
MOZ_ALWAYS_INLINE TenuredCell& Cell::asTenured() {
  MOZ_ASSERT(isTenured());
  return *static_cast<TenuredCell*>(this);
}
312 
MOZ_ALWAYS_INLINE bool Cell::isMarkedAny() const {
  // Nursery cells are always treated as marked.
  return !isTenured() || asTenured().isMarkedAny();
}
316 
MOZ_ALWAYS_INLINE bool Cell::isMarkedBlack() const {
  // Nursery cells are always treated as marked black.
  return !isTenured() || asTenured().isMarkedBlack();
}
320 
MOZ_ALWAYS_INLINE bool Cell::isMarkedGray() const {
  // Only tenured cells can be gray; nursery cells count as black.
  return isTenured() && asTenured().isMarkedGray();
}
324 
isMarked(gc::MarkColor color)325 MOZ_ALWAYS_INLINE bool Cell::isMarked(gc::MarkColor color) const {
326   return color == MarkColor::Gray ? isMarkedGray() : isMarkedBlack();
327 }
328 
isMarkedAtLeast(gc::MarkColor color)329 MOZ_ALWAYS_INLINE bool Cell::isMarkedAtLeast(gc::MarkColor color) const {
330   return color == MarkColor::Gray ? isMarkedAny() : isMarkedBlack();
331 }
332 
// Return the owning runtime, asserting the current thread may access it.
inline JSRuntime* Cell::runtimeFromMainThread() const {
  JSRuntime* rt = chunk()->runtime;
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
  return rt;
}
338 
// Return the owning runtime without any thread-access assertion; see the
// caution on the declaration.
inline JSRuntime* Cell::runtimeFromAnyThread() const {
  return chunk()->runtime;
}
342 
// The cell's address as an integer, with alignment and range assertions.
inline uintptr_t Cell::address() const {
  uintptr_t addr = uintptr_t(this);
  MOZ_ASSERT(addr % CellAlignBytes == 0);
  MOZ_ASSERT(TenuredChunk::withinValidRange(addr));
  return addr;
}
349 
chunk()350 TenuredChunk* Cell::chunk() const {
351   uintptr_t addr = uintptr_t(this);
352   MOZ_ASSERT(addr % CellAlignBytes == 0);
353   addr &= ~ChunkMask;
354   return reinterpret_cast<TenuredChunk*>(addr);
355 }
356 
storeBuffer()357 inline StoreBuffer* Cell::storeBuffer() const { return chunk()->storeBuffer; }
358 
zone()359 JS::Zone* Cell::zone() const {
360   if (isTenured()) {
361     return asTenured().zone();
362   }
363 
364   return nurseryZone();
365 }
366 
zoneFromAnyThread()367 JS::Zone* Cell::zoneFromAnyThread() const {
368   if (isTenured()) {
369     return asTenured().zoneFromAnyThread();
370   }
371 
372   return nurseryZoneFromAnyThread();
373 }
374 
JS::Zone* Cell::nurseryZone() const {
  JS::Zone* zone = nurseryZoneFromAnyThread();
  // The caller must be the GC marker or otherwise allowed to touch this zone.
  MOZ_ASSERT(CurrentThreadIsGCMarking() || CurrentThreadCanAccessZone(zone));
  return zone;
}
380 
// Read the zone from the cell's NurseryCellHeader, with no thread assertions.
JS::Zone* Cell::nurseryZoneFromAnyThread() const {
  return NurseryCellHeader::from(this)->zone();
}
384 
385 #ifdef DEBUG
386 extern Cell* UninlinedForwarded(const Cell* cell);
387 #endif
388 
inline JS::TraceKind Cell::getTraceKind() const {
  if (isTenured()) {
    // A forwarded cell must report the same trace kind as its new location.
    MOZ_ASSERT_IF(isForwarded(), UninlinedForwarded(this)->getTraceKind() ==
                                     asTenured().getTraceKind());
    return asTenured().getTraceKind();
  }

  return NurseryCellHeader::from(this)->traceKind();
}
398 
// Pre-write barriers are only required while the zone's incremental barrier
// is active.
/* static */ MOZ_ALWAYS_INLINE bool Cell::needPreWriteBarrier(JS::Zone* zone) {
  return JS::shadow::Zone::from(zone)->needsIncrementalBarrier();
}
402 
// Mark bits live in the containing chunk's mark bitmap.
bool TenuredCell::isMarkedAny() const {
  MOZ_ASSERT(arena()->allocated());
  return chunk()->markBits.isMarkedAny(this);
}
407 
bool TenuredCell::isMarkedBlack() const {
  MOZ_ASSERT(arena()->allocated());
  return chunk()->markBits.isMarkedBlack(this);
}
412 
bool TenuredCell::isMarkedGray() const {
  MOZ_ASSERT(arena()->allocated());
  return chunk()->markBits.isMarkedGray(this);
}
417 
// Returns whether the cell went from unmarked to marked.
bool TenuredCell::markIfUnmarked(MarkColor color /* = Black */) const {
  return chunk()->markBits.markIfUnmarked(this, color);
}
421 
markBlack()422 void TenuredCell::markBlack() const { chunk()->markBits.markBlack(this); }
423 
void TenuredCell::copyMarkBitsFrom(const TenuredCell* src) {
  MarkBitmap& markBits = chunk()->markBits;
  // Copy both bits so the full color (white/gray/black) is preserved.
  markBits.copyMarkBit(this, src, ColorBit::BlackBit);
  markBits.copyMarkBit(this, src, ColorBit::GrayOrBlackBit);
}
429 
unmark()430 void TenuredCell::unmark() { chunk()->markBits.unmark(this); }
431 
arena()432 inline Arena* TenuredCell::arena() const {
433   MOZ_ASSERT(isTenured());
434   uintptr_t addr = address();
435   addr &= ~ArenaMask;
436   return reinterpret_cast<Arena*>(addr);
437 }
438 
getAllocKind()439 AllocKind TenuredCell::getAllocKind() const { return arena()->getAllocKind(); }
440 
// Derive the trace kind from the arena's alloc kind.
JS::TraceKind TenuredCell::getTraceKind() const {
  return MapAllocToTraceKind(getAllocKind());
}
444 
JS::Zone* TenuredCell::zone() const {
  JS::Zone* zone = arena()->zone;
  // The caller must be the GC marker or otherwise allowed to touch this zone.
  MOZ_ASSERT(CurrentThreadIsGCMarking() || CurrentThreadCanAccessZone(zone));
  return zone;
}
450 
zoneFromAnyThread()451 JS::Zone* TenuredCell::zoneFromAnyThread() const { return arena()->zone; }
452 
isInsideZone(JS::Zone * zone)453 bool TenuredCell::isInsideZone(JS::Zone* zone) const {
454   return zone == arena()->zone;
455 }
456 
457 // Read barrier and pre-write barrier implementation for GC cells.
458 
template <typename T>
MOZ_ALWAYS_INLINE void ReadBarrier(T* thing) {
  // Barriers are instantiated only for concrete GC thing types, never the
  // Cell/TenuredCell base classes themselves.
  static_assert(std::is_base_of_v<Cell, T>);
  static_assert(!std::is_same_v<Cell, T> && !std::is_same_v<TenuredCell, T>);

  // Null pointers and permanent shared things need no barrier.
  if (thing && !thing->isPermanentAndMayBeShared()) {
    ReadBarrierImpl(thing);
  }
}
468 
// Read barrier for tenured cells: marks the cell if an incremental barrier is
// active, and unmarks gray cells exposed to the mutator.
MOZ_ALWAYS_INLINE void ReadBarrierImpl(TenuredCell* thing) {
  MOZ_ASSERT(!CurrentThreadIsIonCompiling());
  MOZ_ASSERT(!CurrentThreadIsGCMarking());
  MOZ_ASSERT(thing);
  MOZ_ASSERT(CurrentThreadCanAccessZone(thing->zoneFromAnyThread()));

  // Barriers should not be triggered on main thread while collecting.
  mozilla::DebugOnly<JSRuntime*> runtime = thing->runtimeFromAnyThread();
  MOZ_ASSERT_IF(CurrentThreadCanAccessRuntime(runtime),
                !JS::RuntimeHeapIsCollecting());

  JS::shadow::Zone* shadowZone = thing->shadowZoneFromAnyThread();
  if (shadowZone->needsIncrementalBarrier()) {
    // We should only observe barriers being enabled on the main thread.
    MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime));
    PerformIncrementalBarrier(thing);
    return;
  }

  if (thing->isMarkedGray()) {
    // There shouldn't be anything marked gray unless we're on the main thread.
    MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime));
    UnmarkGrayGCThingRecursively(thing);
  }
}
494 
MOZ_ALWAYS_INLINE void ReadBarrierImpl(Cell* thing) {
  MOZ_ASSERT(!CurrentThreadIsGCMarking());
  // Only tenured cells require a read barrier.
  if (thing->isTenured()) {
    ReadBarrierImpl(&thing->asTenured());
  }
}
501 
// Pre-write barrier for tenured cells: if the zone's incremental barrier is
// active, mark the old referent before it is overwritten.
MOZ_ALWAYS_INLINE void PreWriteBarrierImpl(TenuredCell* thing) {
  MOZ_ASSERT(!CurrentThreadIsIonCompiling());
  MOZ_ASSERT(!CurrentThreadIsGCMarking());

  if (!thing) {
    return;
  }

  // Barriers can be triggered on the main thread while collecting, but are
  // disabled. For example, this happens when destroying HeapPtr wrappers.

  JS::shadow::Zone* zone = thing->shadowZoneFromAnyThread();
  if (!zone->needsIncrementalBarrier()) {
    return;
  }

  // Barriers can be triggered on off the main thread in two situations:
  //  - background finalization of HeapPtrs to the atoms zone
  //  - while we are verifying pre-barriers for a worker runtime
  // The barrier is not required in either case.
  bool checkThread = zone->isAtomsZone();
#ifdef JS_GC_ZEAL
  checkThread = checkThread || zone->isSelfHostingZone();
#endif
  JSRuntime* runtime = thing->runtimeFromAnyThread();
  if (checkThread && !CurrentThreadCanAccessRuntime(runtime)) {
    MOZ_ASSERT(CurrentThreadIsGCFinalizing() ||
               RuntimeIsVerifyingPreBarriers(runtime));
    return;
  }

  MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime));
  MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(zone));
  PerformIncrementalBarrier(thing);
}
537 
MOZ_ALWAYS_INLINE void PreWriteBarrierImpl(Cell* thing) {
  MOZ_ASSERT(!CurrentThreadIsGCMarking());
  // Only non-null tenured cells require a pre-write barrier.
  if (thing && thing->isTenured()) {
    PreWriteBarrierImpl(&thing->asTenured());
  }
}
544 
template <typename T>
MOZ_ALWAYS_INLINE void PreWriteBarrier(T* thing) {
  // Barriers are instantiated only for concrete GC thing types, never the
  // Cell/TenuredCell base classes themselves.
  static_assert(std::is_base_of_v<Cell, T>);
  static_assert(!std::is_same_v<Cell, T> && !std::is_same_v<TenuredCell, T>);

  // Null pointers and permanent shared things need no barrier.
  if (thing && !thing->isPermanentAndMayBeShared()) {
    PreWriteBarrierImpl(thing);
  }
}
554 
555 // Pre-write barrier implementation for structures containing GC cells, taking a
556 // functor to trace the structure.
template <typename T, typename F>
MOZ_ALWAYS_INLINE void PreWriteBarrier(JS::Zone* zone, T* data,
                                       const F& traceFn) {
  MOZ_ASSERT(!CurrentThreadIsIonCompiling());
  MOZ_ASSERT(!CurrentThreadIsGCMarking());

  // Nothing to do unless the zone's incremental barrier is active.
  auto* shadowZone = JS::shadow::Zone::from(zone);
  if (!shadowZone->needsIncrementalBarrier()) {
    return;
  }

  MOZ_ASSERT(CurrentThreadCanAccessRuntime(shadowZone->runtimeFromAnyThread()));
  MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));

  // Trace the structure's contents with the zone's barrier tracer.
  traceFn(shadowZone->barrierTracer(), data);
}
573 
574 // Pre-write barrier implementation for structures containing GC cells. T must
575 // support a |trace| method.
template <typename T>
MOZ_ALWAYS_INLINE void PreWriteBarrier(JS::Zone* zone, T* data) {
  // Delegate to the functor overload, tracing via T's |trace| method.
  PreWriteBarrier(zone, data, [](JSTracer* trc, T* data) { data->trace(trc); });
}
580 
581 #ifdef DEBUG
582 
// Debug-only check that a cell is not marked gray.
/* static */ void Cell::assertThingIsNotGray(Cell* cell) {
  JS::AssertCellIsNotGray(cell);
}
586 
isAligned()587 bool Cell::isAligned() const {
588   if (!isTenured()) {
589     return true;
590   }
591   return asTenured().isAligned();
592 }
593 
// Check the cell lies on a valid thing boundary within its arena.
bool TenuredCell::isAligned() const {
  return Arena::isAligned(address(), arena()->getThingSize());
}
597 
598 #endif
599 
// Base class for nursery-allocatable GC things that have 32-bit length and
601 // 32-bit flags (currently JSString and BigInt).
602 //
603 // This tries to store both in Cell::header_, but if that isn't large enough the
604 // length is stored separately.
605 //
606 //          32       0
607 //  ------------------
608 //  | Length | Flags |
609 //  ------------------
610 //
611 // The low bits of the flags word (see CellFlagBitsReservedForGC) are reserved
612 // for GC. Derived classes must ensure they don't use these flags for non-GC
613 // purposes.
class alignas(gc::CellAlignBytes) CellWithLengthAndFlags : public Cell {
#if JS_BITS_PER_WORD == 32
  // Additional storage for length if |header_| is too small to fit both.
  uint32_t length_;
#endif

 protected:
  // The 32-bit length: stored in the high half of header_ on 64-bit
  // platforms, or in the separate length_ field on 32-bit platforms.
  uint32_t headerLengthField() const {
#if JS_BITS_PER_WORD == 32
    return length_;
#else
    return uint32_t(header_ >> 32);
#endif
  }

  // The 32-bit flags word (low half of header_).
  uint32_t headerFlagsField() const { return uint32_t(header_); }

  // Flag bit mutators. None of these may touch the GC-reserved bits.
  void setHeaderFlagBit(uint32_t flag) {
    MOZ_ASSERT((flag & RESERVED_MASK) == 0);
    header_ |= uintptr_t(flag);
  }
  void clearHeaderFlagBit(uint32_t flag) {
    MOZ_ASSERT((flag & RESERVED_MASK) == 0);
    header_ &= ~uintptr_t(flag);
  }
  void toggleHeaderFlagBit(uint32_t flag) {
    MOZ_ASSERT((flag & RESERVED_MASK) == 0);
    header_ ^= uintptr_t(flag);
  }

  // Initialize both length and flags.
  void setHeaderLengthAndFlags(uint32_t len, uint32_t flags) {
    MOZ_ASSERT((flags & RESERVED_MASK) == 0);
#if JS_BITS_PER_WORD == 32
    header_ = flags;
    length_ = len;
#else
    header_ = (uint64_t(len) << 32) | uint64_t(flags);
#endif
  }

 public:
  // Returns the offset of header_. JIT code should use offsetOfFlags
  // below.
  static constexpr size_t offsetOfRawHeaderFlagsField() {
    return offsetof(CellWithLengthAndFlags, header_);
  }

  // Offsets for direct field access from jit code. A number of places directly
  // access 32-bit length and flags fields so do endian trickery here.
#if JS_BITS_PER_WORD == 32
  static constexpr size_t offsetOfHeaderFlags() {
    return offsetof(CellWithLengthAndFlags, header_);
  }
  static constexpr size_t offsetOfHeaderLength() {
    return offsetof(CellWithLengthAndFlags, length_);
  }
#elif MOZ_LITTLE_ENDIAN()
  static constexpr size_t offsetOfHeaderFlags() {
    return offsetof(CellWithLengthAndFlags, header_);
  }
  static constexpr size_t offsetOfHeaderLength() {
    return offsetof(CellWithLengthAndFlags, header_) + sizeof(uint32_t);
  }
#else
  static constexpr size_t offsetOfHeaderFlags() {
    return offsetof(CellWithLengthAndFlags, header_) + sizeof(uint32_t);
  }
  static constexpr size_t offsetOfHeaderLength() {
    return offsetof(CellWithLengthAndFlags, header_);
  }
#endif
};
686 
687 // Base class for non-nursery-allocatable GC things that allows storing a non-GC
688 // thing pointer in the first word.
689 //
690 // The low bits of the word (see CellFlagBitsReservedForGC) are reserved for GC.
template <class PtrT>
class alignas(gc::CellAlignBytes) TenuredCellWithNonGCPointer
    : public TenuredCell {
  static_assert(!std::is_pointer_v<PtrT>,
                "PtrT should be the type of the referent, not of the pointer");
  static_assert(
      !std::is_base_of_v<Cell, PtrT>,
      "Don't use TenuredCellWithNonGCPointer for pointers to GC things");

 protected:
  TenuredCellWithNonGCPointer() = default;
  explicit TenuredCellWithNonGCPointer(PtrT* initial) {
    // The pointer must be aligned such that the GC-reserved bits are zero.
    uintptr_t data = uintptr_t(initial);
    MOZ_ASSERT((data & RESERVED_MASK) == 0);
    header_ = data;
  }

  PtrT* headerPtr() const {
    // Only valid when no GC flag bits are set in the header.
    MOZ_ASSERT(flags() == 0);
    return reinterpret_cast<PtrT*>(uintptr_t(header_));
  }

  void setHeaderPtr(PtrT* newValue) {
    // As above, no flags are expected to be set here.
    uintptr_t data = uintptr_t(newValue);
    MOZ_ASSERT(flags() == 0);
    MOZ_ASSERT((data & RESERVED_MASK) == 0);
    header_ = data;
  }

 public:
  // Offset of the header pointer word.
  static constexpr size_t offsetOfHeaderPtr() {
    return offsetof(TenuredCellWithNonGCPointer, header_);
  }
};
726 
727 // Base class for non-nursery-allocatable GC things that allows storing flags
728 // in the first word.
729 //
730 // The low bits of the flags word (see CellFlagBitsReservedForGC) are reserved
731 // for GC.
class alignas(gc::CellAlignBytes) TenuredCellWithFlags : public TenuredCell {
 protected:
  TenuredCellWithFlags() = default;
  explicit TenuredCellWithFlags(uintptr_t initial) {
    // Derived-class flags must not overlap the GC-reserved bits.
    MOZ_ASSERT((initial & RESERVED_MASK) == 0);
    header_ = initial;
  }

  uintptr_t headerFlagsField() const {
    // Only valid when no GC flag bits are set in the header.
    MOZ_ASSERT(flags() == 0);
    return header_;
  }

  void setHeaderFlagBits(uintptr_t flags) {
    MOZ_ASSERT((flags & RESERVED_MASK) == 0);
    header_ |= flags;
  }
  void clearHeaderFlagBits(uintptr_t flags) {
    MOZ_ASSERT((flags & RESERVED_MASK) == 0);
    header_ &= ~flags;
  }
};
754 
755 // Base class for GC things that have a tenured GC pointer as their first word.
756 //
757 // The low bits of the first word (see CellFlagBitsReservedForGC) are reserved
758 // for GC.
759 //
// This includes a pre write barrier when the pointer is updated. No post
// barrier
761 // is necessary as the pointer is always tenured.
template <class BaseCell, class PtrT>
class alignas(gc::CellAlignBytes) CellWithTenuredGCPointer : public BaseCell {
  static void staticAsserts() {
    // These static asserts are not in class scope because the PtrT may not be
    // defined when this class template is instantiated.
    static_assert(
        std::is_same_v<BaseCell, Cell> || std::is_same_v<BaseCell, TenuredCell>,
        "BaseCell must be either Cell or TenuredCell");
    static_assert(
        !std::is_pointer_v<PtrT>,
        "PtrT should be the type of the referent, not of the pointer");
    static_assert(
        std::is_base_of_v<Cell, PtrT>,
        "Only use CellWithTenuredGCPointer for pointers to GC things");
  }

 protected:
  CellWithTenuredGCPointer() = default;
  explicit CellWithTenuredGCPointer(PtrT* initial) { initHeaderPtr(initial); }

  // Initialize the header pointer without triggering any barrier. The
  // referent must be tenured.
  void initHeaderPtr(PtrT* initial) {
    MOZ_ASSERT(!IsInsideNursery(initial));
    uintptr_t data = uintptr_t(initial);
    MOZ_ASSERT((data & Cell::RESERVED_MASK) == 0);
    this->header_ = data;
  }

  // Update the header pointer, applying a pre-write barrier to the old value.
  void setHeaderPtr(PtrT* newValue) {
    // As above, no flags are expected to be set here.
    MOZ_ASSERT(!IsInsideNursery(newValue));
    PreWriteBarrier(headerPtr());
    unbarrieredSetHeaderPtr(newValue);
  }

 public:
  PtrT* headerPtr() const {
    staticAsserts();
    MOZ_ASSERT(this->flags() == 0);
    return reinterpret_cast<PtrT*>(uintptr_t(this->header_));
  }

  // Update the header pointer with no pre-write barrier; callers are
  // responsible for barrier correctness.
  void unbarrieredSetHeaderPtr(PtrT* newValue) {
    uintptr_t data = uintptr_t(newValue);
    MOZ_ASSERT(this->flags() == 0);
    MOZ_ASSERT((data & Cell::RESERVED_MASK) == 0);
    this->header_ = data;
  }

  // Offset of the header pointer word.
  static constexpr size_t offsetOfHeaderPtr() {
    return offsetof(CellWithTenuredGCPointer, header_);
  }
};
814 
815 void CellHeaderPostWriteBarrier(JSObject** ptr, JSObject* prev, JSObject* next);
816 
// Base class for tenured GC things that store a possibly nursery-allocated GC
// pointer in the first word. A post-write barrier is applied when the stored
// pointer is in the nursery.
template <class PtrT>
class alignas(gc::CellAlignBytes) TenuredCellWithGCPointer
    : public TenuredCell {
  static void staticAsserts() {
    // These static asserts are not in class scope because the PtrT may not be
    // defined when this class template is instantiated.
    static_assert(
        !std::is_pointer_v<PtrT>,
        "PtrT should be the type of the referent, not of the pointer");
    static_assert(
        std::is_base_of_v<Cell, PtrT>,
        "Only use TenuredCellWithGCPointer for pointers to GC things");
    static_assert(
        !std::is_base_of_v<TenuredCell, PtrT>,
        "Don't use TenuredCellWithGCPointer for always-tenured GC things");
  }

 protected:
  TenuredCellWithGCPointer() = default;
  explicit TenuredCellWithGCPointer(PtrT* initial) { initHeaderPtr(initial); }

  void initHeaderPtr(PtrT* initial) {
    uintptr_t data = uintptr_t(initial);
    MOZ_ASSERT((data & Cell::RESERVED_MASK) == 0);
    this->header_ = data;
    // Record the tenured -> nursery edge with a post-write barrier.
    if (IsInsideNursery(initial)) {
      CellHeaderPostWriteBarrier(headerPtrAddress(), nullptr, initial);
    }
  }

  // Address of the header word, viewed as a PtrT* slot.
  PtrT** headerPtrAddress() {
    MOZ_ASSERT(this->flags() == 0);
    return reinterpret_cast<PtrT**>(&this->header_);
  }

 public:
  PtrT* headerPtr() const {
    MOZ_ASSERT(this->flags() == 0);
    return reinterpret_cast<PtrT*>(uintptr_t(this->header_));
  }

  // Update the header pointer with no barriers; callers are responsible for
  // barrier correctness.
  void unbarrieredSetHeaderPtr(PtrT* newValue) {
    uintptr_t data = uintptr_t(newValue);
    MOZ_ASSERT(this->flags() == 0);
    MOZ_ASSERT((data & Cell::RESERVED_MASK) == 0);
    this->header_ = data;
  }

  // Offset of the header pointer word.
  static constexpr size_t offsetOfHeaderPtr() {
    return offsetof(TenuredCellWithGCPointer, header_);
  }
};
869 
870 } /* namespace gc */
871 } /* namespace js */
872 
873 #endif /* gc_Cell_h */
874