/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef gc_Heap_h
#define gc_Heap_h

#include "mozilla/DebugOnly.h"

#include "ds/BitArray.h"
#include "gc/AllocKind.h"
#include "gc/GCEnum.h"
#include "gc/Memory.h"
#include "gc/Pretenuring.h"
#include "js/HeapAPI.h"
#include "js/TypeDecls.h"
#include "util/Poison.h"

namespace js {

class AutoLockGC;
class AutoLockGCBgAlloc;
class Nursery;
class NurseryDecommitTask;

namespace gc {

class Arena;
class ArenaCellSet;
class ArenaList;
class GCRuntime;
class MarkingValidator;
class SortedArenaList;
class StoreBuffer;
class TenuredCell;

// Cells are aligned to CellAlignShift, so the largest tagged null pointer is:
const uintptr_t LargestTaggedNullCellPointer = (1 << CellAlignShift) - 1;

/*
 * The minimum cell size ends up as twice the cell alignment because the mark
 * bitmap contains one bit per CellBytesPerMarkBit bytes (which is equal to
 * CellAlignBytes) and we need two mark bits per cell.
 */
const size_t MinCellSize = CellBytesPerMarkBit * MarkBitsPerCell;

static_assert(ArenaSize % CellAlignBytes == 0,
              "Arena size must be a multiple of cell alignment");
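
// A sketch of the arithmetic above, assuming the usual values of
// CellAlignBytes == 8 and MarkBitsPerCell == 2 (one black bit plus one
// gray-or-black bit per cell):
//
//   one mark bit covers CellBytesPerMarkBit == 8 bytes of heap, and
//   MinCellSize = CellBytesPerMarkBit * MarkBitsPerCell = 8 * 2 = 16 bytes,
//
// so the smallest cell that owns two mark bits is twice the cell alignment.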

/*
 * A FreeSpan represents a contiguous sequence of free cells in an Arena. It
 * can take two forms.
 *
 * - In an empty span, |first| and |last| are both zero.
 *
 * - In a non-empty span, |first| is the offset of the first free thing in the
 *   span and |last| is the offset of the last free thing in the span, both
 *   relative to the arena's address. Furthermore, the memory pointed to by
 *   |last| holds a FreeSpan structure that points to the next span (which may
 *   be empty); this works because sizeof(FreeSpan) is less than the smallest
 *   thingSize.
 */
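// An illustrative example (the offsets are hypothetical): an arena with a
// thing size of 0x10 whose free cells sit at offsets 0x40..0x60 and
// 0xfe0..0xff0 is described by the chain
//
//   firstFreeSpan                  = { first: 0x40,  last: 0x60  }
//   FreeSpan stored at arena+0x60  = { first: 0xfe0, last: 0xff0 }
//   FreeSpan stored at arena+0xff0 = { first: 0,     last: 0     }  // empty
//
// Each non-empty span's |last| cell doubles as storage for the next span.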
class FreeSpan {
  friend class Arena;
  friend class ArenaCellIter;
  friend class ArenaFreeCellIter;

  uint16_t first;
  uint16_t last;

 public:
  // This inits just |first| and |last|; if the span is non-empty it doesn't
  // do anything with the next span stored at |last|.
  void initBounds(uintptr_t firstArg, uintptr_t lastArg, const Arena* arena) {
    checkRange(firstArg, lastArg, arena);
    first = firstArg;
    last = lastArg;
  }

  void initAsEmpty() {
    first = 0;
    last = 0;
  }

  // This sets |first| and |last|, and also sets the next span stored at
  // |last| as empty. (As a result, |firstArg| and |lastArg| cannot represent
  // an empty span.)
  void initFinal(uintptr_t firstArg, uintptr_t lastArg, const Arena* arena) {
    initBounds(firstArg, lastArg, arena);
    FreeSpan* last = nextSpanUnchecked(arena);
    last->initAsEmpty();
    checkSpan(arena);
  }

  bool isEmpty() const { return !first; }

  Arena* getArenaUnchecked() { return reinterpret_cast<Arena*>(this); }
  inline Arena* getArena();

  static size_t offsetOfFirst() { return offsetof(FreeSpan, first); }

  static size_t offsetOfLast() { return offsetof(FreeSpan, last); }

  // Like nextSpan(), but no checking of the following span is done.
  FreeSpan* nextSpanUnchecked(const Arena* arena) const {
    MOZ_ASSERT(arena && !isEmpty());
    return reinterpret_cast<FreeSpan*>(uintptr_t(arena) + last);
  }

  const FreeSpan* nextSpan(const Arena* arena) const {
    checkSpan(arena);
    return nextSpanUnchecked(arena);
  }

  MOZ_ALWAYS_INLINE TenuredCell* allocate(size_t thingSize) {
    // Eschew the usual checks, because this might be the placeholder span.
    // If this is somehow an invalid, non-empty span, checkSpan() will catch it.
    Arena* arena = getArenaUnchecked();
    checkSpan(arena);
    uintptr_t thing = uintptr_t(arena) + first;
    if (first < last) {
      // We have space for at least two more things, so do a simple
      // bump-allocate.
      first += thingSize;
    } else if (MOZ_LIKELY(first)) {
      // The last space points to the next free span (which may be empty).
      const FreeSpan* next = nextSpan(arena);
      first = next->first;
      last = next->last;
    } else {
      return nullptr;  // The span is empty.
    }
    checkSpan(arena);
    DebugOnlyPoison(reinterpret_cast<void*>(thing),
                    JS_ALLOCATED_TENURED_PATTERN, thingSize,
                    MemCheckKind::MakeUndefined);
    return reinterpret_cast<TenuredCell*>(thing);
  }

  inline void checkSpan(const Arena* arena) const;
  inline void checkRange(uintptr_t first, uintptr_t last,
                         const Arena* arena) const;
};
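
// An illustrative sketch of how a FreeSpan is consumed (a hypothetical
// caller, not the actual allocator code):
//
//   FreeSpan* span = arena->getFirstFreeSpan();
//   size_t thingSize = arena->getThingSize();
//   while (TenuredCell* cell = span->allocate(thingSize)) {
//     // ... use cell ...
//   }
//   // allocate() returned nullptr: the span chain is exhausted.
//
// Each allocate() call either bump-allocates within the current span or
// follows the chain to the next span stored in the last free cell.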

/*
 * Arenas are the allocation units of the tenured heap in the GC. An arena
 * is 4kiB in size and 4kiB-aligned. It starts with several header fields
 * followed by some bytes of padding. The remainder of the arena is filled
 * with GC things of a particular AllocKind. The padding ensures that the
 * GC thing array ends exactly at the end of the arena:
 *
 * <----------------------------------------------> = ArenaSize bytes
 * +---------------+---------+----+----+-----+----+
 * | header fields | padding | T0 | T1 | ... | Tn |
 * +---------------+---------+----+----+-----+----+
 * <-------------------------> = first thing offset
 */
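// A sketch of the offset arithmetic implied by the diagram, with illustrative
// numbers (not the real table values): if ArenaSize is 4096 and thingSize is
// 40, the arena holds (4096 - ArenaHeaderSize) / 40 things, and the first
// thing starts at 4096 - thingsPerArena * 40 so that the last thing ends
// exactly at the arena's end. The ThingSizes, FirstThingOffsets, and
// ThingsPerArena tables below precompute these values per AllocKind.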
class alignas(ArenaSize) Arena {
  static JS_PUBLIC_DATA const uint8_t ThingSizes[];
  static JS_PUBLIC_DATA const uint8_t FirstThingOffsets[];
  static JS_PUBLIC_DATA const uint8_t ThingsPerArena[];
  /*
   * The first span of free things in the arena. Most of these spans are
   * stored as offsets in free regions of the data array, and most operations
   * on FreeSpans take an Arena pointer for safety. However, the FreeSpans
   * used for allocation are stored here, at the start of an Arena, and use
   * their own address to grab the next span within the same Arena.
   */
  FreeSpan firstFreeSpan;

 public:
  /*
   * One of the AllocKind constants or AllocKind::LIMIT when the arena does
   * not contain any GC things and is on the list of empty arenas in the GC
   * chunk.
   */
  AllocKind allocKind;

  /*
   * The zone that this Arena is contained within, when allocated. The offset
   * of this field must match the ArenaZoneOffset stored in js/HeapAPI.h,
   * as is statically asserted below.
   */
  JS::Zone* zone;

  /*
   * Arena::next has two purposes: when unallocated, it points to the next
   * available Arena. When allocated, it points to the next Arena in the same
   * zone and with the same alloc kind.
   */
  Arena* next;

 private:
  static const size_t ARENA_FLAG_BITS = 4;
  static const size_t DELAYED_MARKING_ARENA_BITS =
      JS_BITS_PER_WORD - ArenaShift;
  static_assert(
      ARENA_FLAG_BITS + DELAYED_MARKING_ARENA_BITS <= JS_BITS_PER_WORD,
      "Not enough space to pack flags and nextDelayedMarkingArena_ pointer "
      "into a single word.");

  /*
   * True until the arena is swept for the first time.
   */
  size_t isNewlyCreated : 1;

  /*
   * When recursive marking uses too much stack we delay marking of arenas and
   * link them into a list for later processing. This uses the following fields.
   */
  size_t onDelayedMarkingList_ : 1;
  size_t hasDelayedBlackMarking_ : 1;
  size_t hasDelayedGrayMarking_ : 1;
  size_t nextDelayedMarkingArena_ : DELAYED_MARKING_ARENA_BITS;
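
  // A note on the packing above, under the usual 64-bit assumptions
  // (JS_BITS_PER_WORD == 64, ArenaShift == 12 for 4 kiB arenas): arena
  // addresses are ArenaSize-aligned, so their low ArenaShift bits are zero
  // and storing |address() >> ArenaShift| needs only 64 - 12 == 52 bits.
  // Together with the four flag bits this fits in a single word, which is
  // what the static_assert above verifies.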

  union {
    /*
     * For arenas in zones other than the atoms zone, if non-null, points
     * to an ArenaCellSet that represents the set of cells in this arena
     * that are in the nursery's store buffer.
     */
    ArenaCellSet* bufferedCells_;

    /*
     * For arenas in the atoms zone, the starting index into zone atom
     * marking bitmaps (see AtomMarking.h) of the things in this zone.
     * Atoms never refer to nursery things, so no store buffer index is
     * needed.
     */
    size_t atomBitmapStart_;
  };

 public:
  /*
   * The size of data should be |ArenaSize - offsetof(data)|, but the offset
   * is not yet known to the compiler, so we do it by hand. |firstFreeSpan|
   * takes up 8 bytes on 64-bit due to alignment requirements; the rest are
   * obvious. This constant is stored in js/HeapAPI.h.
   */
  uint8_t data[ArenaSize - ArenaHeaderSize];

  void init(JS::Zone* zoneArg, AllocKind kind, const AutoLockGC& lock);

  // Sets |firstFreeSpan| to the Arena's entire valid range, and
  // also sets the next span stored at |firstFreeSpan.last| as empty.
  void setAsFullyUnused() {
    AllocKind kind = getAllocKind();
    firstFreeSpan.first = firstThingOffset(kind);
    firstFreeSpan.last = lastThingOffset(kind);
    FreeSpan* last = firstFreeSpan.nextSpanUnchecked(this);
    last->initAsEmpty();
  }

  // Initialize an arena to its unallocated state. For arenas that were
  // previously allocated for some zone, use release() instead.
  void setAsNotAllocated() {
    firstFreeSpan.initAsEmpty();

    // Poison zone pointer to highlight UAF on released arenas in crash data.
    AlwaysPoison(&zone, JS_FREED_ARENA_PATTERN, sizeof(zone),
                 MemCheckKind::MakeNoAccess);

    allocKind = AllocKind::LIMIT;
    onDelayedMarkingList_ = 0;
    hasDelayedBlackMarking_ = 0;
    hasDelayedGrayMarking_ = 0;
    nextDelayedMarkingArena_ = 0;
    bufferedCells_ = nullptr;

    MOZ_ASSERT(!allocated());
  }

  // Return an allocated arena to its unallocated state.
  inline void release(const AutoLockGC& lock);

  uintptr_t address() const {
    checkAddress();
    return uintptr_t(this);
  }

  inline void checkAddress() const;

  inline TenuredChunk* chunk() const;

  bool allocated() const {
    MOZ_ASSERT(IsAllocKind(AllocKind(allocKind)));
    return IsValidAllocKind(AllocKind(allocKind));
  }

  AllocKind getAllocKind() const {
    MOZ_ASSERT(allocated());
    return allocKind;
  }

  FreeSpan* getFirstFreeSpan() { return &firstFreeSpan; }

  static size_t thingSize(AllocKind kind) { return ThingSizes[size_t(kind)]; }
  static size_t thingsPerArena(AllocKind kind) {
    return ThingsPerArena[size_t(kind)];
  }
  static size_t thingsSpan(AllocKind kind) {
    return thingsPerArena(kind) * thingSize(kind);
  }

  static size_t firstThingOffset(AllocKind kind) {
    return FirstThingOffsets[size_t(kind)];
  }
  static size_t lastThingOffset(AllocKind kind) {
    return ArenaSize - thingSize(kind);
  }

  size_t getThingSize() const { return thingSize(getAllocKind()); }
  size_t getThingsPerArena() const { return thingsPerArena(getAllocKind()); }
  size_t getThingsSpan() const { return getThingsPerArena() * getThingSize(); }
  size_t getFirstThingOffset() const {
    return firstThingOffset(getAllocKind());
  }

  uintptr_t thingsStart() const { return address() + getFirstThingOffset(); }
  uintptr_t thingsEnd() const { return address() + ArenaSize; }

  bool isEmpty() const {
    // Arena is empty if its first span covers the whole arena.
    firstFreeSpan.checkSpan(this);
    AllocKind kind = getAllocKind();
    return firstFreeSpan.first == firstThingOffset(kind) &&
           firstFreeSpan.last == lastThingOffset(kind);
  }

  bool hasFreeThings() const { return !firstFreeSpan.isEmpty(); }

  size_t numFreeThings(size_t thingSize) const {
    firstFreeSpan.checkSpan(this);
    size_t numFree = 0;
    const FreeSpan* span = &firstFreeSpan;
    for (; !span->isEmpty(); span = span->nextSpan(this)) {
      numFree += (span->last - span->first) / thingSize + 1;
    }
    return numFree;
  }

  size_t countFreeCells() { return numFreeThings(getThingSize()); }
  size_t countUsedCells() { return getThingsPerArena() - countFreeCells(); }

  bool inFreeList(uintptr_t thing) {
    uintptr_t base = address();
    const FreeSpan* span = &firstFreeSpan;
    for (; !span->isEmpty(); span = span->nextSpan(this)) {
      /* If the thing comes before the current span, it's not free. */
      if (thing < base + span->first) {
        return false;
      }

      /* If we find it before the end of the span, it's free. */
      if (thing <= base + span->last) {
        return true;
      }
    }
    return false;
  }

  static bool isAligned(uintptr_t thing, size_t thingSize) {
    /* Things are packed against the end of the arena, so check the offset
     * from the arena end rather than from thingsStart(). */
    uintptr_t tailOffset = ArenaSize - (thing & ArenaMask);
    return tailOffset % thingSize == 0;
  }

  bool onDelayedMarkingList() const { return onDelayedMarkingList_; }

  Arena* getNextDelayedMarking() const {
    MOZ_ASSERT(onDelayedMarkingList_);
    return reinterpret_cast<Arena*>(nextDelayedMarkingArena_ << ArenaShift);
  }

  void setNextDelayedMarkingArena(Arena* arena) {
    MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
    MOZ_ASSERT(!onDelayedMarkingList_);
    MOZ_ASSERT(!hasDelayedBlackMarking_);
    MOZ_ASSERT(!hasDelayedGrayMarking_);
    MOZ_ASSERT(!nextDelayedMarkingArena_);
    onDelayedMarkingList_ = 1;
    if (arena) {
      nextDelayedMarkingArena_ = arena->address() >> ArenaShift;
    }
  }

  void updateNextDelayedMarkingArena(Arena* arena) {
    MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
    MOZ_ASSERT(onDelayedMarkingList_);
    nextDelayedMarkingArena_ = arena ? arena->address() >> ArenaShift : 0;
  }

  bool hasDelayedMarking(MarkColor color) const {
    MOZ_ASSERT(onDelayedMarkingList_);
    return color == MarkColor::Black ? hasDelayedBlackMarking_
                                     : hasDelayedGrayMarking_;
  }

  bool hasAnyDelayedMarking() const {
    MOZ_ASSERT(onDelayedMarkingList_);
    return hasDelayedBlackMarking_ || hasDelayedGrayMarking_;
  }

  void setHasDelayedMarking(MarkColor color, bool value) {
    MOZ_ASSERT(onDelayedMarkingList_);
    if (color == MarkColor::Black) {
      hasDelayedBlackMarking_ = value;
    } else {
      hasDelayedGrayMarking_ = value;
    }
  }

  void clearDelayedMarkingState() {
    MOZ_ASSERT(onDelayedMarkingList_);
    onDelayedMarkingList_ = 0;
    hasDelayedBlackMarking_ = 0;
    hasDelayedGrayMarking_ = 0;
    nextDelayedMarkingArena_ = 0;
  }

  inline ArenaCellSet*& bufferedCells();
  inline size_t& atomBitmapStart();

  template <typename T>
  size_t finalize(JSFreeOp* fop, AllocKind thingKind, size_t thingSize);

  static void staticAsserts();
  static void checkLookupTables();

  void unmarkAll();
  void unmarkPreMarkedFreeCells();

  void arenaAllocatedDuringGC();

#ifdef DEBUG
  void checkNoMarkedFreeCells();
  void checkAllCellsMarkedBlack();
#endif

#if defined(DEBUG) || defined(JS_GC_ZEAL)
  void checkNoMarkedCells();
#endif
};

static_assert(ArenaZoneOffset == offsetof(Arena, zone),
              "The hardcoded API zone offset must match the actual offset.");

static_assert(sizeof(Arena) == ArenaSize,
              "ArenaSize must match the actual size of the Arena structure.");

static_assert(
    offsetof(Arena, data) == ArenaHeaderSize,
    "ArenaHeaderSize must match the actual size of the header fields.");

inline Arena* FreeSpan::getArena() {
  Arena* arena = getArenaUnchecked();
  arena->checkAddress();
  return arena;
}

inline void FreeSpan::checkSpan(const Arena* arena) const {
#ifdef DEBUG
  if (!first) {
    MOZ_ASSERT(!first && !last);
    return;
  }

  arena->checkAddress();
  checkRange(first, last, arena);

  // If there's a following span, it must have a higher address,
  // and the gap must be at least 2 * thingSize.
  const FreeSpan* next = nextSpanUnchecked(arena);
  if (next->first) {
    checkRange(next->first, next->last, arena);
    size_t thingSize = arena->getThingSize();
    MOZ_ASSERT(last + 2 * thingSize <= next->first);
  }
#endif
}

inline void FreeSpan::checkRange(uintptr_t first, uintptr_t last,
                                 const Arena* arena) const {
#ifdef DEBUG
  MOZ_ASSERT(arena);
  MOZ_ASSERT(first <= last);
  AllocKind thingKind = arena->getAllocKind();
  MOZ_ASSERT(first >= Arena::firstThingOffset(thingKind));
  MOZ_ASSERT(last <= Arena::lastThingOffset(thingKind));
  MOZ_ASSERT((last - first) % Arena::thingSize(thingKind) == 0);
#endif
}

// Mark bitmap API:

MOZ_ALWAYS_INLINE bool MarkBitmap::markBit(const TenuredCell* cell,
                                           ColorBit colorBit) {
  MarkBitmapWord* word;
  uintptr_t mask;
  getMarkWordAndMask(cell, colorBit, &word, &mask);
  return *word & mask;
}

MOZ_ALWAYS_INLINE bool MarkBitmap::isMarkedAny(const TenuredCell* cell) {
  return markBit(cell, ColorBit::BlackBit) ||
         markBit(cell, ColorBit::GrayOrBlackBit);
}

MOZ_ALWAYS_INLINE bool MarkBitmap::isMarkedBlack(const TenuredCell* cell) {
  return markBit(cell, ColorBit::BlackBit);
}

MOZ_ALWAYS_INLINE bool MarkBitmap::isMarkedGray(const TenuredCell* cell) {
  return !markBit(cell, ColorBit::BlackBit) &&
         markBit(cell, ColorBit::GrayOrBlackBit);
}

// The return value indicates if the cell went from unmarked to marked.
MOZ_ALWAYS_INLINE bool MarkBitmap::markIfUnmarked(const TenuredCell* cell,
                                                  MarkColor color) {
  MarkBitmapWord* word;
  uintptr_t mask;
  getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
  if (*word & mask) {
    return false;
  }
  if (color == MarkColor::Black) {
    *word |= mask;
  } else {
    /*
     * We use getMarkWordAndMask to recalculate both mask and word as
     * doing just mask << color may overflow the mask.
     */
    getMarkWordAndMask(cell, ColorBit::GrayOrBlackBit, &word, &mask);
    if (*word & mask) {
      return false;
    }
    *word |= mask;
  }
  return true;
}
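
// To summarize the encoding used by the predicates above, a cell's two mark
// bits combine as:
//
//   (BlackBit, GrayOrBlackBit) = (0, 0)  unmarked
//                                (0, 1)  gray
//                                (1, *)  black
//
// Note that markIfUnmarked() never downgrades a black cell to gray: if the
// black bit is already set it returns false without touching the
// gray-or-black bit, while a gray cell can still be upgraded to black.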

MOZ_ALWAYS_INLINE void MarkBitmap::markBlack(const TenuredCell* cell) {
  MarkBitmapWord* word;
  uintptr_t mask;
  getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
  *word |= mask;
}

MOZ_ALWAYS_INLINE void MarkBitmap::copyMarkBit(TenuredCell* dst,
                                               const TenuredCell* src,
                                               ColorBit colorBit) {
  TenuredChunkBase* srcChunk = detail::GetCellChunkBase(src);
  MarkBitmapWord* srcWord;
  uintptr_t srcMask;
  srcChunk->markBits.getMarkWordAndMask(src, colorBit, &srcWord, &srcMask);

  MarkBitmapWord* dstWord;
  uintptr_t dstMask;
  getMarkWordAndMask(dst, colorBit, &dstWord, &dstMask);

  *dstWord &= ~dstMask;
  if (*srcWord & srcMask) {
    *dstWord |= dstMask;
  }
}

MOZ_ALWAYS_INLINE void MarkBitmap::unmark(const TenuredCell* cell) {
  MarkBitmapWord* word;
  uintptr_t mask;
  getMarkWordAndMask(cell, ColorBit::BlackBit, &word, &mask);
  *word &= ~mask;
  getMarkWordAndMask(cell, ColorBit::GrayOrBlackBit, &word, &mask);
  *word &= ~mask;
}

inline void MarkBitmap::clear() {
  for (size_t i = 0; i < MarkBitmap::WordCount; i++) {
    bitmap[i] = 0;
  }
}

inline MarkBitmapWord* MarkBitmap::arenaBits(Arena* arena) {
  static_assert(
      ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD,
      "We assume that the part of the bitmap corresponding to the arena "
      "has the exact number of words so we do not need to deal with a word "
      "that covers bits from two arenas.");

  MarkBitmapWord* word;
  uintptr_t unused;
  getMarkWordAndMask(reinterpret_cast<TenuredCell*>(arena->address()),
                     ColorBit::BlackBit, &word, &unused);
  return word;
}

/*
 * A chunk in the tenured heap. TenuredChunks contain arenas and associated data
 * structures (mark bitmap, delayed marking state).
 */
class TenuredChunk : public TenuredChunkBase {
  Arena arenas[ArenasPerChunk];

  friend class GCRuntime;
  friend class MarkingValidator;

 public:
  static TenuredChunk* fromAddress(uintptr_t addr) {
    addr &= ~ChunkMask;
    return reinterpret_cast<TenuredChunk*>(addr);
  }

  static bool withinValidRange(uintptr_t addr) {
    uintptr_t offset = addr & ChunkMask;
    if (TenuredChunk::fromAddress(addr)->isNurseryChunk()) {
      return offset >= sizeof(ChunkBase) && offset < ChunkSize;
    }
    return offset >= offsetof(TenuredChunk, arenas) && offset < ChunkSize;
  }

  static size_t arenaIndex(uintptr_t addr) {
    MOZ_ASSERT(!TenuredChunk::fromAddress(addr)->isNurseryChunk());
    MOZ_ASSERT(withinValidRange(addr));
    uintptr_t offset = addr & ChunkMask;
    return (offset - offsetof(TenuredChunk, arenas)) >> ArenaShift;
  }

  explicit TenuredChunk(JSRuntime* runtime) : TenuredChunkBase(runtime) {}

  uintptr_t address() const {
    uintptr_t addr = reinterpret_cast<uintptr_t>(this);
    MOZ_ASSERT(!(addr & ChunkMask));
    return addr;
  }

  bool unused() const { return info.numArenasFree == ArenasPerChunk; }

  bool hasAvailableArenas() const { return info.numArenasFree != 0; }

  bool isNurseryChunk() const { return storeBuffer; }

  Arena* allocateArena(GCRuntime* gc, JS::Zone* zone, AllocKind kind,
                       const AutoLockGC& lock);

  void releaseArena(GCRuntime* gc, Arena* arena, const AutoLockGC& lock);
  void recycleArena(Arena* arena, SortedArenaList& dest, size_t thingsPerArena);

  void decommitFreeArenas(GCRuntime* gc, const bool& cancel, AutoLockGC& lock);
  [[nodiscard]] bool decommitOneFreePage(GCRuntime* gc, size_t pageIndex,
                                         AutoLockGC& lock);
  void decommitAllArenas();

  // Decommit each unused arena that is not already decommitted. This performs
  // a system call per arena, but it is only used during OOM.
  void decommitFreeArenasWithoutUnlocking(const AutoLockGC& lock);

  static TenuredChunk* allocate(GCRuntime* gc);
  void init(GCRuntime* gc);

  /* Unlink and return the freeArenasHead. */
  Arena* fetchNextFreeArena(GCRuntime* gc);

#ifdef DEBUG
  void verify() const;
#endif

 private:
  /* Search for a decommitted page to allocate. */
  unsigned findDecommittedPageOffset();
  void commitOnePage(GCRuntime* gc);

  void addArenaToFreeList(GCRuntime* gc, Arena* arena);

  // Add the arenas in the page at pageIndex to the free list.
  void addArenasInPageToFreeList(GCRuntime* gc, size_t pageIndex);
  // Mark the arenas in the page at pageIndex as decommitted.
  void markArenasInPageDecommitted(size_t pageIndex);

  void updateChunkListAfterAlloc(GCRuntime* gc, const AutoLockGC& lock);
  void updateChunkListAfterFree(GCRuntime* gc, size_t numArenasFree,
                                const AutoLockGC& lock);

  // Rebuild info.freeArenasHead in ascending order of arena address.
  void rebuildFreeArenasList();

  // Check whether the page at pageIndex is free.
  bool isPageFree(size_t pageIndex) const;
  // Check whether the arena from the free arenas list is located in a free
  // page. Unlike the isPageFree(size_t) version, this also checks that the
  // arenas following it from freeArenasHead lie in the same page, to avoid
  // touching arenas that have been mprotect'ed during compaction in debug
  // builds.
  bool isPageFree(const Arena* arena) const;

  // Get the page index of the arena.
  size_t pageIndex(const Arena* arena) const {
    return pageIndex(arenaIndex(arena->address()));
  }
  size_t pageIndex(size_t arenaIndex) const {
    return arenaIndex / ArenasPerPage;
  }

  Arena* pageAddress(size_t pageIndex) {
    return &arenas[pageIndex * ArenasPerPage];
  }
};
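
// A worked example of the address arithmetic above (illustrative values,
// assuming chunk-aligned chunks and 4 kiB arenas): for a cell at address A,
//
//   chunk = A & ~ChunkMask                                    (fromAddress)
//   arena = A & ~ArenaMask                                    (Arena header)
//   index = ((A & ChunkMask) - offsetof(TenuredChunk, arenas)) >> ArenaShift
//
// so any interior pointer recovers its chunk, arena, and arena index using
// masks and shifts alone, with no loads.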

inline void Arena::checkAddress() const {
  mozilla::DebugOnly<uintptr_t> addr = uintptr_t(this);
  MOZ_ASSERT(addr);
  MOZ_ASSERT(!(addr & ArenaMask));
  MOZ_ASSERT(TenuredChunk::withinValidRange(addr));
}

inline TenuredChunk* Arena::chunk() const {
  return TenuredChunk::fromAddress(address());
}

inline bool InFreeList(Arena* arena, void* thing) {
  uintptr_t addr = reinterpret_cast<uintptr_t>(thing);
  MOZ_ASSERT(Arena::isAligned(addr, arena->getThingSize()));
  return arena->inFreeList(addr);
}

static const int32_t ChunkStoreBufferOffsetFromLastByte =
    int32_t(gc::ChunkStoreBufferOffset) - int32_t(gc::ChunkMask);
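
// A sketch of how this constant is intended to be used (e.g. from JIT code):
// for any cell address |addr|, |addr | ChunkMask| is the address of the last
// byte of the cell's chunk, so the chunk's store buffer field lives at
//
//   (addr | ChunkMask) + ChunkStoreBufferOffsetFromLastByte
//     == chunkBase + ChunkStoreBufferOffset
//
// Loading a non-null StoreBuffer* from there indicates a nursery cell.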

// Cell header stored before all nursery cells.
struct alignas(gc::CellAlignBytes) NurseryCellHeader {
  // Store the allocation site pointer with the trace kind packed into the
  // low bits.
  const uintptr_t allocSiteAndTraceKind;

  // We only need to store a subset of trace kinds so this doesn't cover the
  // full range.
  static const uintptr_t TraceKindMask = 3;

  static uintptr_t MakeValue(AllocSite* const site, JS::TraceKind kind) {
    MOZ_ASSERT(uintptr_t(kind) < TraceKindMask);
    MOZ_ASSERT((uintptr_t(site) & TraceKindMask) == 0);
    return uintptr_t(site) | uintptr_t(kind);
  }

  inline NurseryCellHeader(AllocSite* site, JS::TraceKind kind);

  AllocSite* allocSite() const {
    return reinterpret_cast<AllocSite*>(allocSiteAndTraceKind & ~TraceKindMask);
  }

  JS::Zone* zone() const { return allocSite()->zone(); }

  JS::TraceKind traceKind() const {
    return JS::TraceKind(allocSiteAndTraceKind & TraceKindMask);
  }

  static const NurseryCellHeader* from(const Cell* cell) {
    MOZ_ASSERT(IsInsideNursery(cell));
    return reinterpret_cast<const NurseryCellHeader*>(
        uintptr_t(cell) - sizeof(NurseryCellHeader));
  }
};

static_assert(uintptr_t(JS::TraceKind::Object) <=
              NurseryCellHeader::TraceKindMask);
static_assert(uintptr_t(JS::TraceKind::String) <=
              NurseryCellHeader::TraceKindMask);
static_assert(uintptr_t(JS::TraceKind::BigInt) <=
              NurseryCellHeader::TraceKindMask);

} /* namespace gc */

namespace debug {

// Utility functions meant to be called from an interactive debugger.
enum class MarkInfo : int {
  BLACK = 0,
  GRAY = 1,
  UNMARKED = -1,
  NURSERY = -2,
};

// Get the mark color for a cell, in a way easily usable from a debugger.
MOZ_NEVER_INLINE MarkInfo GetMarkInfo(js::gc::Cell* cell);

// Sample usage from gdb:
//
//   (gdb) p $word = js::debug::GetMarkWordAddress(obj)
//   $1 = (uintptr_t *) 0x7fa56d5fe360
//   (gdb) p/x $mask = js::debug::GetMarkMask(obj, js::gc::GRAY)
//   $2 = 0x200000000
//   (gdb) watch *$word
//   Hardware watchpoint 7: *$word
//   (gdb) cond 7 *$word & $mask
//   (gdb) cont
//
// Note that this is *not* a watchpoint on a single bit. It is a watchpoint on
// the whole word, which will trigger whenever the word changes and the
// selected bit is set after the change.
//
// So if the bit changing is the desired one, this is exactly what you want.
// But if a different bit changes (either set or cleared), you may still stop
// execution if the $mask bit happened to already be set. gdb does not expose
// enough information to restrict the watchpoint to just a single bit.

// Return the address of the word containing the mark bits for the given cell,
// or nullptr if the cell is in the nursery.
MOZ_NEVER_INLINE uintptr_t* GetMarkWordAddress(js::gc::Cell* cell);

// Return the mask for the given cell and color bit, or 0 if the cell is in the
// nursery.
MOZ_NEVER_INLINE uintptr_t GetMarkMask(js::gc::Cell* cell, uint32_t colorBit);

} /* namespace debug */
} /* namespace js */

#endif /* gc_Heap_h */