/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef gc_Heap_h
#define gc_Heap_h

#include "mozilla/DebugOnly.h"

#include "ds/BitArray.h"
#include "gc/AllocKind.h"
#include "gc/GCEnum.h"
#include "gc/Memory.h"
#include "gc/Pretenuring.h"
#include "js/HeapAPI.h"
#include "js/TypeDecls.h"
#include "util/Poison.h"

namespace js {

class AutoLockGC;
class AutoLockGCBgAlloc;
class Nursery;
class NurseryDecommitTask;

namespace gc {

class Arena;
class ArenaCellSet;
class ArenaList;
class GCRuntime;
class MarkingValidator;
class SortedArenaList;
class StoreBuffer;
class TenuredCell;

// Cells are aligned to CellAlignShift, so the largest tagged null pointer is:
const uintptr_t LargestTaggedNullCellPointer = (1 << CellAlignShift) - 1;

/*
 * The minimum cell size ends up as twice the cell alignment because the mark
 * bitmap contains one bit per CellBytesPerMarkBit bytes (which is equal to
 * CellAlignBytes) and we need two mark bits per cell.
 */
const size_t MinCellSize = CellBytesPerMarkBit * MarkBitsPerCell;
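
// For example, with the typical values CellBytesPerMarkBit == CellAlignBytes
// == 8 and MarkBitsPerCell == 2 (illustrative numbers; see js/HeapAPI.h for
// the authoritative definitions), MinCellSize works out to 8 * 2 == 16 bytes,
// i.e. twice the cell alignment.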

static_assert(ArenaSize % CellAlignBytes == 0,
              "Arena size must be a multiple of cell alignment");

/*
 * A FreeSpan represents a contiguous sequence of free cells in an Arena. It
 * can take two forms.
 *
 * - In an empty span, |first| and |last| are both zero.
 *
 * - In a non-empty span, |first| is the offset (from the arena start) of the
 *   first free thing in the span, and |last| is the offset of the last free
 *   thing in the span. Furthermore, the memory at offset |last| holds a
 *   FreeSpan structure that points to the next span (which may be empty);
 *   this works because sizeof(FreeSpan) is less than the smallest thingSize.
 */
class FreeSpan {
  friend class Arena;
  friend class ArenaCellIter;
  friend class ArenaFreeCellIter;

  uint16_t first;
  uint16_t last;

 public:
  // This inits just |first| and |last|; if the span is non-empty it doesn't
  // do anything with the next span stored at |last|.
  void initBounds(uintptr_t firstArg, uintptr_t lastArg, const Arena* arena) {
    checkRange(firstArg, lastArg, arena);
    first = firstArg;
    last = lastArg;
  }

  void initAsEmpty() {
    first = 0;
    last = 0;
  }

  // This sets |first| and |last|, and also sets the next span stored at
  // |last| as empty. (As a result, |firstArg| and |lastArg| cannot represent
  // an empty span.)
  void initFinal(uintptr_t firstArg, uintptr_t lastArg, const Arena* arena) {
    initBounds(firstArg, lastArg, arena);
    FreeSpan* last = nextSpanUnchecked(arena);
    last->initAsEmpty();
    checkSpan(arena);
  }

  bool isEmpty() const { return !first; }

  Arena* getArenaUnchecked() { return reinterpret_cast<Arena*>(this); }
  inline Arena* getArena();

  static size_t offsetOfFirst() { return offsetof(FreeSpan, first); }

  static size_t offsetOfLast() { return offsetof(FreeSpan, last); }

  // Like nextSpan(), but no checking of the following span is done.
  FreeSpan* nextSpanUnchecked(const Arena* arena) const {
    MOZ_ASSERT(arena && !isEmpty());
    return reinterpret_cast<FreeSpan*>(uintptr_t(arena) + last);
  }

  const FreeSpan* nextSpan(const Arena* arena) const {
    checkSpan(arena);
    return nextSpanUnchecked(arena);
  }

  MOZ_ALWAYS_INLINE TenuredCell* allocate(size_t thingSize) {
    // Eschew the usual checks, because this might be the placeholder span.
    // If this is somehow an invalid, non-empty span, checkSpan() will catch it.
    Arena* arena = getArenaUnchecked();
    checkSpan(arena);
    uintptr_t thing = uintptr_t(arena) + first;
    if (first < last) {
      // We have space for at least two more things, so do a simple
      // bump-allocate.
      first += thingSize;
    } else if (MOZ_LIKELY(first)) {
      // The last space points to the next free span (which may be empty).
      const FreeSpan* next = nextSpan(arena);
      first = next->first;
      last = next->last;
    } else {
      return nullptr;  // The span is empty.
    }
    checkSpan(arena);
    DebugOnlyPoison(reinterpret_cast<void*>(thing),
                    JS_ALLOCATED_TENURED_PATTERN, thingSize,
                    MemCheckKind::MakeUndefined);
    return reinterpret_cast<TenuredCell*>(thing);
  }

  inline void checkSpan(const Arena* arena) const;
  inline void checkRange(uintptr_t first, uintptr_t last,
                         const Arena* arena) const;
};
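
// Illustrative sketch (not the engine's actual allocation path): bump
// allocation out of an arena's first free span uses the API above roughly
// like this:
//
//   Arena* arena = ...;  // an allocated arena of some AllocKind
//   size_t thingSize = arena->getThingSize();
//   FreeSpan* span = arena->getFirstFreeSpan();
//   if (TenuredCell* cell = span->allocate(thingSize)) {
//     // |cell| now points at thingSize bytes taken from the free span.
//   } else {
//     // The arena has no free cells left; the caller must find another one.
//   }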

/*
 * Arenas are the allocation units of the tenured heap in the GC. An arena
 * is 4kiB in size and 4kiB-aligned. It starts with several header fields
 * followed by some bytes of padding. The remainder of the arena is filled
 * with GC things of a particular AllocKind. The padding ensures that the
 * GC thing array ends exactly at the end of the arena:
 *
 * <----------------------------------------------> = ArenaSize bytes
 * +---------------+---------+----+----+-----+----+
 * | header fields | padding | T0 | T1 | ... | Tn |
 * +---------------+---------+----+----+-----+----+
 * <-------------------------> = first thing offset
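 *
 * For an AllocKind with thing size T and N things per arena, the first thing
 * offset is therefore ArenaSize - N * T: the header fields plus the padding
 * fill everything before the thing array. (Illustrative arithmetic; the real
 * offsets come from the FirstThingOffsets lookup table below.)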
 */
class alignas(ArenaSize) Arena {
  static JS_PUBLIC_DATA const uint8_t ThingSizes[];
  static JS_PUBLIC_DATA const uint8_t FirstThingOffsets[];
  static JS_PUBLIC_DATA const uint8_t ThingsPerArena[];
  /*
   * The first span of free things in the arena. Most of these spans are
   * stored as offsets in free regions of the data array, and most operations
   * on FreeSpans take an Arena pointer for safety. However, the FreeSpans
   * used for allocation are stored here, at the start of an Arena, and use
   * their own address to grab the next span within the same Arena.
   */
  FreeSpan firstFreeSpan;

 public:
  /*
   * One of the AllocKind constants or AllocKind::LIMIT when the arena does
   * not contain any GC things and is on the list of empty arenas in the GC
   * chunk.
   */
  AllocKind allocKind;

  /*
   * The zone that this Arena is contained within, when allocated. The offset
   * of this field must match the ArenaZoneOffset stored in js/HeapAPI.h,
   * as is statically asserted below.
   */
  JS::Zone* zone;

  /*
   * Arena::next has two purposes: when unallocated, it points to the next
   * available Arena. When allocated, it points to the next Arena in the same
   * zone and with the same alloc kind.
   */
  Arena* next;

 private:
  static const size_t ARENA_FLAG_BITS = 4;
  static const size_t DELAYED_MARKING_ARENA_BITS =
      JS_BITS_PER_WORD - ArenaShift;
  static_assert(
      ARENA_FLAG_BITS + DELAYED_MARKING_ARENA_BITS <= JS_BITS_PER_WORD,
      "Not enough space to pack flags and nextDelayedMarkingArena_ pointer "
      "into a single word.");

  /*
   * True until the arena is swept for the first time.
   */
  size_t isNewlyCreated : 1;

  /*
   * When recursive marking uses too much stack we delay marking of arenas and
   * link them into a list for later processing. This uses the following fields.
   */
  size_t onDelayedMarkingList_ : 1;
  size_t hasDelayedBlackMarking_ : 1;
  size_t hasDelayedGrayMarking_ : 1;
  size_t nextDelayedMarkingArena_ : DELAYED_MARKING_ARENA_BITS;

  union {
    /*
     * For arenas in zones other than the atoms zone, if non-null, points
     * to an ArenaCellSet that represents the set of cells in this arena
     * that are in the nursery's store buffer.
     */
    ArenaCellSet* bufferedCells_;

    /*
     * For arenas in the atoms zone, the starting index into zone atom
     * marking bitmaps (see AtomMarking.h) of the things in this zone.
     * Atoms never refer to nursery things, so no store buffer index is
     * needed.
     */
    size_t atomBitmapStart_;
  };

 public:
  /*
   * The size of data should be |ArenaSize - offsetof(data)|, but the offset
   * is not yet known to the compiler, so we do it by hand. |firstFreeSpan|
   * takes up 8 bytes on 64-bit due to alignment requirements; the rest are
   * obvious. This constant is stored in js/HeapAPI.h.
   */
  uint8_t data[ArenaSize - ArenaHeaderSize];

  void init(JS::Zone* zoneArg, AllocKind kind, const AutoLockGC& lock);

  // Sets |firstFreeSpan| to the Arena's entire valid range, and
  // also sets the next span stored at |firstFreeSpan.last| as empty.
  void setAsFullyUnused() {
    AllocKind kind = getAllocKind();
    firstFreeSpan.first = firstThingOffset(kind);
    firstFreeSpan.last = lastThingOffset(kind);
    FreeSpan* last = firstFreeSpan.nextSpanUnchecked(this);
    last->initAsEmpty();
  }

  // Initialize an arena to its unallocated state. For arenas that were
  // previously allocated for some zone, use release() instead.
  void setAsNotAllocated() {
    firstFreeSpan.initAsEmpty();

    // Poison zone pointer to highlight UAF on released arenas in crash data.
    AlwaysPoison(&zone, JS_FREED_ARENA_PATTERN, sizeof(zone),
                 MemCheckKind::MakeNoAccess);

    allocKind = AllocKind::LIMIT;
    onDelayedMarkingList_ = 0;
    hasDelayedBlackMarking_ = 0;
    hasDelayedGrayMarking_ = 0;
    nextDelayedMarkingArena_ = 0;
    bufferedCells_ = nullptr;

    MOZ_ASSERT(!allocated());
  }

  // Return an allocated arena to its unallocated state.
  inline void release(const AutoLockGC& lock);

  uintptr_t address() const {
    checkAddress();
    return uintptr_t(this);
  }

  inline void checkAddress() const;

  inline TenuredChunk* chunk() const;

  bool allocated() const {
    MOZ_ASSERT(IsAllocKind(AllocKind(allocKind)));
    return IsValidAllocKind(AllocKind(allocKind));
  }

  AllocKind getAllocKind() const {
    MOZ_ASSERT(allocated());
    return allocKind;
  }

  FreeSpan* getFirstFreeSpan() { return &firstFreeSpan; }

  static size_t thingSize(AllocKind kind) { return ThingSizes[size_t(kind)]; }
  static size_t thingsPerArena(AllocKind kind) {
    return ThingsPerArena[size_t(kind)];
  }
  static size_t thingsSpan(AllocKind kind) {
    return thingsPerArena(kind) * thingSize(kind);
  }

  static size_t firstThingOffset(AllocKind kind) {
    return FirstThingOffsets[size_t(kind)];
  }
  static size_t lastThingOffset(AllocKind kind) {
    return ArenaSize - thingSize(kind);
  }

  size_t getThingSize() const { return thingSize(getAllocKind()); }
  size_t getThingsPerArena() const { return thingsPerArena(getAllocKind()); }
  size_t getThingsSpan() const { return getThingsPerArena() * getThingSize(); }
  size_t getFirstThingOffset() const {
    return firstThingOffset(getAllocKind());
  }

  uintptr_t thingsStart() const { return address() + getFirstThingOffset(); }
  uintptr_t thingsEnd() const { return address() + ArenaSize; }

  bool isEmpty() const {
    // Arena is empty if its first span covers the whole arena.
    firstFreeSpan.checkSpan(this);
    AllocKind kind = getAllocKind();
    return firstFreeSpan.first == firstThingOffset(kind) &&
           firstFreeSpan.last == lastThingOffset(kind);
  }

  bool hasFreeThings() const { return !firstFreeSpan.isEmpty(); }

  size_t numFreeThings(size_t thingSize) const {
    firstFreeSpan.checkSpan(this);
    size_t numFree = 0;
    const FreeSpan* span = &firstFreeSpan;
    for (; !span->isEmpty(); span = span->nextSpan(this)) {
      numFree += (span->last - span->first) / thingSize + 1;
    }
    return numFree;
  }
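
  // Worked example (illustrative): a span with first == last holds exactly
  // one free thing, so the loop above counts (last - first) / thingSize + 1
  // == 1; a span covering k consecutive free things has last - first ==
  // (k - 1) * thingSize and is counted as k.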

  size_t countFreeCells() { return numFreeThings(getThingSize()); }
  size_t countUsedCells() { return getThingsPerArena() - countFreeCells(); }

  bool inFreeList(uintptr_t thing) {
    uintptr_t base = address();
    const FreeSpan* span = &firstFreeSpan;
    for (; !span->isEmpty(); span = span->nextSpan(this)) {
      /* If the thing comes before the current span, it's not free. */
      if (thing < base + span->first) {
        return false;
      }

      /* If we find it before the end of the span, it's free. */
      if (thing <= base + span->last) {
        return true;
      }
    }
    return false;
  }

  static bool isAligned(uintptr_t thing, size_t thingSize) {
    /* Things end at the arena end. */
    uintptr_t tailOffset = ArenaSize - (thing & ArenaMask);
    return tailOffset % thingSize == 0;
  }
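
  // Example: with ArenaSize == 4096 (4 KiB, per the Arena comment above) and
  // a hypothetical 32-byte thing, an address at arena offset 4064 gives
  // tailOffset == 32, a multiple of 32, so it is aligned; offset 4060 gives
  // tailOffset == 36 and is rejected. This works because the thing array ends
  // exactly at the end of the arena.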

  bool onDelayedMarkingList() const { return onDelayedMarkingList_; }

  Arena* getNextDelayedMarking() const {
    MOZ_ASSERT(onDelayedMarkingList_);
    return reinterpret_cast<Arena*>(nextDelayedMarkingArena_ << ArenaShift);
  }

  void setNextDelayedMarkingArena(Arena* arena) {
    MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
    MOZ_ASSERT(!onDelayedMarkingList_);
    MOZ_ASSERT(!hasDelayedBlackMarking_);
    MOZ_ASSERT(!hasDelayedGrayMarking_);
    MOZ_ASSERT(!nextDelayedMarkingArena_);
    onDelayedMarkingList_ = 1;
    if (arena) {
      nextDelayedMarkingArena_ = arena->address() >> ArenaShift;
    }
  }

  void updateNextDelayedMarkingArena(Arena* arena) {
    MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
    MOZ_ASSERT(onDelayedMarkingList_);
    nextDelayedMarkingArena_ = arena ? arena->address() >> ArenaShift : 0;
  }

  bool hasDelayedMarking(MarkColor color) const {
    MOZ_ASSERT(onDelayedMarkingList_);
    return color == MarkColor::Black ? hasDelayedBlackMarking_
                                     : hasDelayedGrayMarking_;
  }

  bool hasAnyDelayedMarking() const {
    MOZ_ASSERT(onDelayedMarkingList_);
    return hasDelayedBlackMarking_ || hasDelayedGrayMarking_;
  }

  void setHasDelayedMarking(MarkColor color, bool value) {
    MOZ_ASSERT(onDelayedMarkingList_);
    if (color == MarkColor::Black) {
      hasDelayedBlackMarking_ = value;
    } else {
      hasDelayedGrayMarking_ = value;
    }
  }

  void clearDelayedMarkingState() {
    MOZ_ASSERT(onDelayedMarkingList_);
    onDelayedMarkingList_ = 0;
    hasDelayedBlackMarking_ = 0;
    hasDelayedGrayMarking_ = 0;
    nextDelayedMarkingArena_ = 0;
  }
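
  // Sketch (not the marker's actual code): arenas are ArenaSize-aligned, so
  // an Arena* can be stored losslessly as address() >> ArenaShift in the
  // nextDelayedMarkingArena_ bitfield, and a delayed-marking list can be
  // walked roughly as:
  //
  //   for (Arena* a = head; a; a = a->getNextDelayedMarking()) { ... }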

  inline ArenaCellSet*& bufferedCells();
  inline size_t& atomBitmapStart();

  template <typename T>
  size_t finalize(JSFreeOp* fop, AllocKind thingKind, size_t thingSize);

  static void staticAsserts();
  static void checkLookupTables();

  void unmarkAll();
  void unmarkPreMarkedFreeCells();

  void arenaAllocatedDuringGC();

#ifdef DEBUG
  void checkNoMarkedFreeCells();
  void checkAllCellsMarkedBlack();
#endif

#if defined(DEBUG) || defined(JS_GC_ZEAL)
  void checkNoMarkedCells();
#endif
};

static_assert(ArenaZoneOffset == offsetof(Arena, zone),
              "The hardcoded API zone offset must match the actual offset.");

static_assert(sizeof(Arena) == ArenaSize,
              "ArenaSize must match the actual size of the Arena structure.");

static_assert(
    offsetof(Arena, data) == ArenaHeaderSize,
    "ArenaHeaderSize must match the actual size of the header fields.");

inline Arena* FreeSpan::getArena() {
  Arena* arena = getArenaUnchecked();
  arena->checkAddress();
  return arena;
}

inline void FreeSpan::checkSpan(const Arena* arena) const {
#ifdef DEBUG
  if (!first) {
    MOZ_ASSERT(!first && !last);
    return;
  }

  arena->checkAddress();
  checkRange(first, last, arena);

  // If there's a following span, it must have a higher address,
  // and the gap must be at least 2 * thingSize.
  const FreeSpan* next = nextSpanUnchecked(arena);
  if (next->first) {
    checkRange(next->first, next->last, arena);
    size_t thingSize = arena->getThingSize();
    MOZ_ASSERT(last + 2 * thingSize <= next->first);
  }
#endif
}

inline void FreeSpan::checkRange(uintptr_t first, uintptr_t last,
                                 const Arena* arena) const {
#ifdef DEBUG
  MOZ_ASSERT(arena);
  MOZ_ASSERT(first <= last);
  AllocKind thingKind = arena->getAllocKind();
  MOZ_ASSERT(first >= Arena::firstThingOffset(thingKind));
  MOZ_ASSERT(last <= Arena::lastThingOffset(thingKind));
  MOZ_ASSERT((last - first) % Arena::thingSize(thingKind) == 0);
#endif
}

// Mark bitmap API:

MOZ_ALWAYS_INLINE bool MarkBitmap::markBit(const TenuredCell* cell,
                                           ColorBit colorBit) {
  MarkBitmapWord* word;
  uintptr_t mask;
  getMarkWordAndMask(cell, colorBit, &word, &mask);
  return *word & mask;
}

MOZ_ALWAYS_INLINE bool MarkBitmap::isMarkedAny(const TenuredCell* cell) {
  return markBit(cell, ColorBit::BlackBit) ||
         markBit(cell, ColorBit::GrayOrBlackBit);
}

MOZ_ALWAYS_INLINE bool MarkBitmap::isMarkedBlack(const TenuredCell* cell) {
  return markBit(cell, ColorBit::BlackBit);
}

MOZ_ALWAYS_INLINE bool MarkBitmap::isMarkedGray(const TenuredCell* cell) {
  return !markBit(cell, ColorBit::BlackBit) &&
         markBit(cell, ColorBit::GrayOrBlackBit);
}
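
// Summary of the encoding implied by the predicates above (the BlackBit takes
// precedence, so a cell with both bits set still counts as black):
//
//   BlackBit  GrayOrBlackBit  state
//      0            0         unmarked
//      0            1         marked gray
//      1            0         marked black
//      1            1         marked black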

// The return value indicates if the cell went from unmarked to marked.
MOZ_ALWAYS_INLINE bool MarkBitmap::markIfUnmarked(const TenuredCell* cell,
                                                  MarkColor color) {
  MarkBitmapWord* word;
  uintptr_t mask;
  getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
  if (*word & mask) {
    return false;
  }
  if (color == MarkColor::Black) {
    *word |= mask;
  } else {
    /*
     * We use getMarkWordAndMask to recalculate both mask and word as
     * doing just mask << color may overflow the mask.
     */
    getMarkWordAndMask(cell, ColorBit::GrayOrBlackBit, &word, &mask);
    if (*word & mask) {
      return false;
    }
    *word |= mask;
  }
  return true;
}

MOZ_ALWAYS_INLINE void MarkBitmap::markBlack(const TenuredCell* cell) {
  MarkBitmapWord* word;
  uintptr_t mask;
  getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
  *word |= mask;
}

MOZ_ALWAYS_INLINE void MarkBitmap::copyMarkBit(TenuredCell* dst,
                                               const TenuredCell* src,
                                               ColorBit colorBit) {
  TenuredChunkBase* srcChunk = detail::GetCellChunkBase(src);
  MarkBitmapWord* srcWord;
  uintptr_t srcMask;
  srcChunk->markBits.getMarkWordAndMask(src, colorBit, &srcWord, &srcMask);

  MarkBitmapWord* dstWord;
  uintptr_t dstMask;
  getMarkWordAndMask(dst, colorBit, &dstWord, &dstMask);

  *dstWord &= ~dstMask;
  if (*srcWord & srcMask) {
    *dstWord |= dstMask;
  }
}

MOZ_ALWAYS_INLINE void MarkBitmap::unmark(const TenuredCell* cell) {
  MarkBitmapWord* word;
  uintptr_t mask;
  getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
  *word &= ~mask;
  getMarkWordAndMask(cell, ColorBit::GrayOrBlackBit, &word, &mask);
  *word &= ~mask;
}

inline MarkBitmapWord* MarkBitmap::arenaBits(Arena* arena) {
  static_assert(
      ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD,
      "We assume that the part of the bitmap corresponding to the arena "
      "has the exact number of words so we do not need to deal with a word "
      "that covers bits from two arenas.");

  MarkBitmapWord* word;
  uintptr_t unused;
  getMarkWordAndMask(reinterpret_cast<TenuredCell*>(arena->address()),
                     ColorBit::BlackBit, &word, &unused);
  return word;
}

/*
 * A chunk in the tenured heap. TenuredChunks contain arenas and associated data
 * structures (mark bitmap, delayed marking state).
 */
class TenuredChunk : public TenuredChunkBase {
  Arena arenas[ArenasPerChunk];

  friend class GCRuntime;
  friend class MarkingValidator;

 public:
  static TenuredChunk* fromAddress(uintptr_t addr) {
    addr &= ~ChunkMask;
    return reinterpret_cast<TenuredChunk*>(addr);
  }

  static bool withinValidRange(uintptr_t addr) {
    uintptr_t offset = addr & ChunkMask;
    if (TenuredChunk::fromAddress(addr)->isNurseryChunk()) {
      return offset >= sizeof(ChunkBase) && offset < ChunkSize;
    }
    return offset >= offsetof(TenuredChunk, arenas) && offset < ChunkSize;
  }

  static size_t arenaIndex(const Arena* arena) {
    uintptr_t addr = arena->address();
    MOZ_ASSERT(!TenuredChunk::fromAddress(addr)->isNurseryChunk());
    MOZ_ASSERT(withinValidRange(addr));
    uintptr_t offset = addr & ChunkMask;
    return (offset - offsetof(TenuredChunk, arenas)) >> ArenaShift;
  }

  explicit TenuredChunk(JSRuntime* runtime) : TenuredChunkBase(runtime) {}

  uintptr_t address() const {
    uintptr_t addr = reinterpret_cast<uintptr_t>(this);
    MOZ_ASSERT(!(addr & ChunkMask));
    return addr;
  }

  bool unused() const { return info.numArenasFree == ArenasPerChunk; }

  bool hasAvailableArenas() const { return info.numArenasFree != 0; }

  bool isNurseryChunk() const { return storeBuffer; }

  Arena* allocateArena(GCRuntime* gc, JS::Zone* zone, AllocKind kind,
                       const AutoLockGC& lock);

  void releaseArena(GCRuntime* gc, Arena* arena, const AutoLockGC& lock);
  void recycleArena(Arena* arena, SortedArenaList& dest, size_t thingsPerArena);

  void decommitFreeArenas(GCRuntime* gc, const bool& cancel, AutoLockGC& lock);
  [[nodiscard]] bool decommitOneFreePage(GCRuntime* gc, size_t pageIndex,
                                         AutoLockGC& lock);
  void decommitAllArenas();

  // Decommit each unused arena that has not already been decommitted. This
  // performs a system call per arena, but it is only used during OOM.
  void decommitFreeArenasWithoutUnlocking(const AutoLockGC& lock);

  static TenuredChunk* allocate(GCRuntime* gc);
  void init(GCRuntime* gc, bool allMemoryCommitted);

  /* Unlink and return the freeArenasHead. */
  Arena* fetchNextFreeArena(GCRuntime* gc);

#ifdef DEBUG
  void verify() const;
#else
  void verify() const {}
#endif

 private:
  void commitOnePage(GCRuntime* gc);

  void updateChunkListAfterAlloc(GCRuntime* gc, const AutoLockGC& lock);
  void updateChunkListAfterFree(GCRuntime* gc, size_t numArenasFree,
                                const AutoLockGC& lock);

  // Check if all arenas in a page are free.
  bool canDecommitPage(size_t pageIndex) const;

  // Check whether the arena from the freeArenasList is located in a free page.
  // Unlike the isPageFree(size_t) version, this isPageFree(Arena*) also checks
  // that the arenas following it from the freeArenasHead are located in the
  // same page, to avoid accessing arenas that were mprotect'ed during
  // compaction in debug builds.
  bool isPageFree(const Arena* arena) const;

  // Get the page index of the arena.
  size_t pageIndex(const Arena* arena) const {
    return pageIndex(arenaIndex(arena));
  }
  size_t pageIndex(size_t arenaIndex) const {
    return arenaIndex / ArenasPerPage;
  }

  Arena* pageAddress(size_t pageIndex) {
    return &arenas[pageIndex * ArenasPerPage];
  }
};

inline void Arena::checkAddress() const {
  mozilla::DebugOnly<uintptr_t> addr = uintptr_t(this);
  MOZ_ASSERT(addr);
  MOZ_ASSERT(!(addr & ArenaMask));
  MOZ_ASSERT(TenuredChunk::withinValidRange(addr));
}

inline TenuredChunk* Arena::chunk() const {
  return TenuredChunk::fromAddress(address());
}

inline bool InFreeList(Arena* arena, void* thing) {
  uintptr_t addr = reinterpret_cast<uintptr_t>(thing);
  MOZ_ASSERT(Arena::isAligned(addr, arena->getThingSize()));
  return arena->inFreeList(addr);
}

static const int32_t ChunkStoreBufferOffsetFromLastByte =
    int32_t(gc::ChunkStoreBufferOffset) - int32_t(gc::ChunkMask);
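
// Illustrative arithmetic: for any address |addr| inside a chunk,
// (addr | gc::ChunkMask) is the address of the chunk's last byte, i.e.
// chunkBase + ChunkMask, so adding ChunkStoreBufferOffsetFromLastByte to it
// yields chunkBase + ChunkStoreBufferOffset, the location of the chunk's
// store buffer pointer. (How callers use this constant is not shown here.)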

// Cell header stored before all nursery cells.
struct alignas(gc::CellAlignBytes) NurseryCellHeader {
  // Stores the AllocSite pointer with the trace kind in the low bits.
  const uintptr_t allocSiteAndTraceKind;

  // We only need to store a subset of trace kinds so this doesn't cover the
  // full range.
  static const uintptr_t TraceKindMask = 3;

  static uintptr_t MakeValue(AllocSite* const site, JS::TraceKind kind) {
    MOZ_ASSERT(uintptr_t(kind) < TraceKindMask);
    MOZ_ASSERT((uintptr_t(site) & TraceKindMask) == 0);
    return uintptr_t(site) | uintptr_t(kind);
  }

  inline NurseryCellHeader(AllocSite* site, JS::TraceKind kind);

  AllocSite* allocSite() const {
    return reinterpret_cast<AllocSite*>(allocSiteAndTraceKind & ~TraceKindMask);
  }

  JS::Zone* zone() const { return allocSite()->zone(); }

  JS::TraceKind traceKind() const {
    return JS::TraceKind(allocSiteAndTraceKind & TraceKindMask);
  }

  static const NurseryCellHeader* from(const Cell* cell) {
    MOZ_ASSERT(IsInsideNursery(cell));
    return reinterpret_cast<const NurseryCellHeader*>(
        uintptr_t(cell) - sizeof(NurseryCellHeader));
  }
};
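
// Illustrative sketch (not engine code): the header is stored immediately
// before the cell, so given a nursery cell its allocation site and trace kind
// can be recovered like this:
//
//   const Cell* cell = ...;  // some cell known to be in the nursery
//   const NurseryCellHeader* header = NurseryCellHeader::from(cell);
//   AllocSite* site = header->allocSite();
//   JS::TraceKind kind = header->traceKind();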

static_assert(uintptr_t(JS::TraceKind::Object) <=
              NurseryCellHeader::TraceKindMask);
static_assert(uintptr_t(JS::TraceKind::String) <=
              NurseryCellHeader::TraceKindMask);
static_assert(uintptr_t(JS::TraceKind::BigInt) <=
              NurseryCellHeader::TraceKindMask);

} /* namespace gc */

namespace debug {

// Utility functions meant to be called from an interactive debugger.
enum class MarkInfo : int {
  BLACK = 0,
  GRAY = 1,
  UNMARKED = -1,
  NURSERY = -2,
};

// Get the mark color for a cell, in a way easily usable from a debugger.
MOZ_NEVER_INLINE MarkInfo GetMarkInfo(js::gc::Cell* cell);

// Sample usage from gdb:
//
//   (gdb) p $word = js::debug::GetMarkWordAddress(obj)
//   $1 = (uintptr_t *) 0x7fa56d5fe360
//   (gdb) p/x $mask = js::debug::GetMarkMask(obj, js::gc::GRAY)
//   $2 = 0x200000000
//   (gdb) watch *$word
//   Hardware watchpoint 7: *$word
//   (gdb) cond 7 *$word & $mask
//   (gdb) cont
//
// Note that this is *not* a watchpoint on a single bit. It is a watchpoint on
// the whole word, which will trigger whenever the word changes and the
// selected bit is set after the change.
//
// So if the bit changing is the desired one, this is exactly what you want.
// But if a different bit changes (either set or cleared), you may still stop
// execution if the $mask bit happened to already be set. gdb does not expose
// enough information to restrict the watchpoint to just a single bit.

// Return the address of the word containing the mark bits for the given cell,
// or nullptr if the cell is in the nursery.
MOZ_NEVER_INLINE uintptr_t* GetMarkWordAddress(js::gc::Cell* cell);

// Return the mask for the given cell and color bit, or 0 if the cell is in the
// nursery.
MOZ_NEVER_INLINE uintptr_t GetMarkMask(js::gc::Cell* cell, uint32_t colorBit);

} /* namespace debug */
} /* namespace js */

#endif /* gc_Heap_h */