/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=78:
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef gc_Nursery_h
#define gc_Nursery_h

#include "jsalloc.h"
#include "jspubtd.h"

#include "ds/BitArray.h"
#include "gc/Heap.h"
#include "gc/Memory.h"
#include "js/Class.h"
#include "js/GCAPI.h"
#include "js/HashTable.h"
#include "js/HeapAPI.h"
#include "js/Value.h"
#include "js/Vector.h"
#include "vm/SharedMem.h"

namespace JS {
struct Zone;
} // namespace JS

namespace js {

class ObjectElements;
class NativeObject;
class Nursery;
class HeapSlot;
class ObjectGroup;

void SetGCZeal(JSRuntime*, uint8_t, uint32_t);

namespace gc {
struct Cell;
class MinorCollectionTracer;
class RelocationOverlay;
struct TenureCountCache;
} /* namespace gc */

namespace jit {
class MacroAssembler;
} // namespace jit

class TenuringTracer : public JSTracer
{
    friend class Nursery;
    Nursery& nursery_;

    // Amount of data moved to the tenured generation during collection.
    size_t tenuredSize;

    // This list is threaded through the Nursery using the space from already
    // moved things. The list is used to fix up the moved things and to find
    // things held live by intra-Nursery pointers.
    gc::RelocationOverlay* head;
    gc::RelocationOverlay** tail;
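    // Illustrative sketch (the layout below is an assumption; see
    // gc::RelocationOverlay for the real definition): an overlay is written
    // over the old nursery copy of a moved thing, so this list costs no
    // extra memory. Roughly:
    //
    //   struct RelocationOverlay {
    //       uintptr_t magic_;          // marks the old cell as forwarded
    //       Cell* newLocation_;        // where the thing now lives
    //       RelocationOverlay* next_;  // threads the fixup list
    //   };
    //
    // insertIntoFixupList() appends entries via |tail|, and the collection
    // walks the list from |head| until no new entries appear.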

    TenuringTracer(JSRuntime* rt, Nursery* nursery);

  public:
    const Nursery& nursery() const { return nursery_; }

    // Traverse the given edge, tenuring the referent and updating the edge
    // if it points into the nursery.
    template <typename T> void traverse(T** thingp);
    template <typename T> void traverse(T* thingp);

    void insertIntoFixupList(gc::RelocationOverlay* entry);

    // The store buffers need to be able to call these directly.
    void traceObject(JSObject* src);
    void traceObjectSlots(NativeObject* nobj, uint32_t start, uint32_t length);
    void traceSlots(JS::Value* vp, uint32_t nslots) { traceSlots(vp, vp + nslots); }

  private:
    Nursery& nursery() { return nursery_; }

    JSObject* moveToTenured(JSObject* src);
    size_t moveObjectToTenured(JSObject* dst, JSObject* src, gc::AllocKind dstKind);
    size_t moveElementsToTenured(NativeObject* dst, NativeObject* src, gc::AllocKind dstKind);
    size_t moveSlotsToTenured(NativeObject* dst, NativeObject* src, gc::AllocKind dstKind);

    void traceSlots(JS::Value* vp, JS::Value* end);
};

class Nursery
{
  public:
    static const size_t Alignment = gc::ChunkSize;
    static const size_t ChunkShift = gc::ChunkShift;

    explicit Nursery(JSRuntime* rt)
      : runtime_(rt),
        position_(0),
        currentStart_(0),
        currentEnd_(0),
        heapStart_(0),
        heapEnd_(0),
        currentChunk_(0),
        numActiveChunks_(0),
        numNurseryChunks_(0),
        profileThreshold_(0),
        enableProfiling_(false),
        freeMallocedBuffersTask(nullptr),
        sweepActions_(nullptr)
    {}
    ~Nursery();

    bool init(uint32_t maxNurseryBytes);

    bool exists() const { return numNurseryChunks_ != 0; }
    size_t numChunks() const { return numNurseryChunks_; }
    size_t nurserySize() const { return numNurseryChunks_ << ChunkShift; }

    void enable();
    void disable();
    bool isEnabled() const { return numActiveChunks_ != 0; }

    /* Return true if no allocations have been made since the last collection. */
    bool isEmpty() const;

    /*
     * Check whether an arbitrary pointer is within the nursery. This is
     * slower than IsInsideNursery(Cell*), but works on all types of pointers.
     */
    MOZ_ALWAYS_INLINE bool isInside(gc::Cell* cellp) const = delete;
    MOZ_ALWAYS_INLINE bool isInside(const void* p) const {
        return uintptr_t(p) >= heapStart_ && uintptr_t(p) < heapEnd_;
    }
    template<typename T>
    bool isInside(const SharedMem<T>& p) const {
        return isInside(p.unwrap(/*safe - used for value in comparison above*/));
    }
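    /*
     * Illustrative usage (a sketch only): the void* overload also accepts
     * interior and buffer pointers, e.g.
     *
     *   if (nursery.isInside(elementsHeader))
     *       ... the elements buffer is nursery-allocated ...
     *
     * while the cheaper IsInsideNursery(Cell*) from js/HeapAPI.h is preferred
     * when a Cell pointer is available.
     */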

    /*
     * Allocate and return a pointer to a new GC object with its |slots|
     * pointer pre-filled. Returns nullptr if the Nursery is full.
     */
    JSObject* allocateObject(JSContext* cx, size_t size, size_t numDynamic, const js::Class* clasp);

    /* Allocate a buffer for a given zone, using the nursery if possible. */
    void* allocateBuffer(JS::Zone* zone, uint32_t nbytes);

    /*
     * Allocate a buffer for a given object, using the nursery if possible and
     * if |obj| itself is in the nursery.
     */
    void* allocateBuffer(JSObject* obj, uint32_t nbytes);

    /* Resize an existing object buffer. */
    void* reallocateBuffer(JSObject* obj, void* oldBuffer,
                           uint32_t oldBytes, uint32_t newBytes);

    /* Free an object buffer. */
    void freeBuffer(void* buffer);
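    /*
     * Illustrative lifecycle (a sketch; the real call sites live in the
     * object and elements allocation paths):
     *
     *   void* buf = nursery.allocateBuffer(obj, nbytes);
     *   ...
     *   buf = nursery.reallocateBuffer(obj, buf, nbytes, newBytes);
     *   ...
     *   nursery.freeBuffer(buf);
     */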

    typedef Vector<ObjectGroup*, 0, SystemAllocPolicy> ObjectGroupList;

    /*
     * Do a minor collection, optionally specifying a list in which to store
     * groups that should be pretenured afterwards.
     */
    void collect(JSRuntime* rt, JS::gcreason::Reason reason, ObjectGroupList* pretenureGroups);

    /*
     * Check if the thing at |*ref| in the Nursery has been forwarded. If so,
     * sets |*ref| to the new location of the object and returns true.
     * Otherwise returns false and leaves |*ref| unchanged.
     */
    MOZ_ALWAYS_INLINE bool getForwardedPointer(JSObject** ref) const;
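    /*
     * Illustrative usage (a sketch): code holding a possibly-stale pointer
     * during a minor GC typically does
     *
     *   if (IsInsideNursery(obj) && !nursery.getForwardedPointer(&obj))
     *       ... |obj| was not moved by this collection ...
     *
     * On success |obj| now points at the tenured copy.
     */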

    /* Forward a slots/elements pointer stored in an Ion frame. */
    void forwardBufferPointer(HeapSlot** pSlotsElems);

    void maybeSetForwardingPointer(JSTracer* trc, void* oldData, void* newData, bool direct) {
        if (trc->isTenuringTracer() && isInside(oldData))
            setForwardingPointer(oldData, newData, direct);
    }

    /* Mark a malloced buffer as no longer needing to be freed. */
    void removeMallocedBuffer(void* buffer) {
        mallocedBuffers.remove(buffer);
    }

    void waitBackgroundFreeEnd();

    bool addedUniqueIdToCell(gc::Cell* cell) {
        if (!IsInsideNursery(cell) || !isEnabled())
            return true;
        MOZ_ASSERT(cellsWithUid_.initialized());
        MOZ_ASSERT(!cellsWithUid_.has(cell));
        return cellsWithUid_.put(cell);
    }

    using SweepThunk = void (*)(void *data);
    void queueSweepAction(SweepThunk thunk, void* data);
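    /*
     * Illustrative usage (a sketch; FreeMyData and myData are hypothetical):
     *
     *   static void FreeMyData(void* data) { js_free(data); }
     *   ...
     *   nursery.queueSweepAction(FreeMyData, myData);
     *
     * Queued thunks run when the nursery is swept at the end of the minor
     * collection.
     */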

    size_t sizeOfHeapCommitted() const {
        return numActiveChunks_ * gc::ChunkSize;
    }
    size_t sizeOfHeapDecommitted() const {
        return (numNurseryChunks_ - numActiveChunks_) * gc::ChunkSize;
    }
    size_t sizeOfMallocedBuffers(mozilla::MallocSizeOf mallocSizeOf) const {
        size_t total = 0;
        for (MallocedBuffersSet::Range r = mallocedBuffers.all(); !r.empty(); r.popFront())
            total += mallocSizeOf(r.front());
        total += mallocedBuffers.sizeOfExcludingThis(mallocSizeOf);
        return total;
    }

    MOZ_ALWAYS_INLINE uintptr_t start() const {
        return heapStart_;
    }

    MOZ_ALWAYS_INLINE uintptr_t heapEnd() const {
        return heapEnd_;
    }

#ifdef JS_GC_ZEAL
    void enterZealMode();
    void leaveZealMode();
#endif

  private:
    /*
     * The start and end pointers are stored under the runtime so that we can
     * inline the isInsideNursery check into embedder code. Use the start()
     * and heapEnd() functions to access these values.
     */
    JSRuntime* runtime_;

    /* Pointer to the first unallocated byte in the nursery. */
    uintptr_t position_;

    /* Pointer to the logical start of the Nursery. */
    uintptr_t currentStart_;

    /* Pointer to the last byte of space in the current chunk. */
    uintptr_t currentEnd_;

    /* The first and last addresses of the total nursery allocation. */
    uintptr_t heapStart_;
    uintptr_t heapEnd_;

    /* The index of the chunk that is currently being allocated from. */
    int currentChunk_;

    /* The index after the last chunk that we will allocate from. */
    int numActiveChunks_;

    /* Number of chunks allocated for the nursery. */
    int numNurseryChunks_;

    /*
     * Report minor collections taking more than this many microseconds,
     * if enabled.
     */
    int64_t profileThreshold_;
    bool enableProfiling_;

    /*
     * The set of externally malloced buffers potentially kept live by objects
     * stored in the nursery. Any external buffers that do not belong to a
     * tenured thing at the end of a minor GC must be freed.
     */
    typedef HashSet<void*, PointerHasher<void*, 3>, SystemAllocPolicy> MallocedBuffersSet;
    MallocedBuffersSet mallocedBuffers;

    /* A task structure used to free the malloced buffers on a background thread. */
    struct FreeMallocedBuffersTask;
    FreeMallocedBuffersTask* freeMallocedBuffersTask;

    /*
     * During a collection most hoisted slot and element buffers indicate their
     * new location with a forwarding pointer at the base. This does not work
     * for buffers whose length is less than pointer width, or when different
     * buffers might overlap each other. For these, an entry in the following
     * table is used.
     */
    typedef HashMap<void*, void*, PointerHasher<void*, 1>, SystemAllocPolicy> ForwardedBufferMap;
    ForwardedBufferMap forwardedBuffers;
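    /*
     * Illustrative example (a sketch): on a 64-bit build a 4-byte buffer
     * cannot hold an 8-byte forwarding pointer in place, so its old address
     * is mapped to the new one instead, roughly:
     *
     *   forwardedBuffers.put(oldBuffer, newBuffer);
     *   ...
     *   if (ForwardedBufferMap::Ptr p = forwardedBuffers.lookup(oldBuffer))
     *       *pSlotsElems = reinterpret_cast<HeapSlot*>(p->value());
     */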

    /*
     * When we assign a unique id to a cell in the nursery, that almost always
     * means that the cell will be in a hash table, and thus held live,
     * automatically moving the uid from the nursery to its new home in the
     * tenured heap. It is possible, if rare, for an object that acquired a
     * uid to be dead before the next collection, in which case we need to
     * know to remove it when we sweep.
     *
     * Note: we store the pointers as Cell* here, resulting in an ugly cast in
     *       sweep. This is because this structure is used to help implement
     *       stable object hashing and we have to break the cycle somehow.
     */
    using CellsWithUniqueIdSet = HashSet<gc::Cell*, PointerHasher<gc::Cell*, 3>, SystemAllocPolicy>;
    CellsWithUniqueIdSet cellsWithUid_;
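    /*
     * Illustrative sweep step (a sketch; IsForwardedCell and DropUniqueId
     * are assumed helpers standing in for the real sweep() logic): entries
     * whose cell was not tenured must drop their unique id, roughly
     *
     *   for (auto r = cellsWithUid_.all(); !r.empty(); r.popFront()) {
     *       gc::Cell* cell = r.front();
     *       if (!IsForwardedCell(cell))
     *           DropUniqueId(cell);
     *   }
     *   cellsWithUid_.clear();
     */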

    struct SweepAction;
    SweepAction* sweepActions_;

    /* The maximum number of bytes allowed to reside in nursery buffers. */
    static const size_t MaxNurseryBufferSize = 1024;

    /* The amount of space in the mapped nursery available to allocations. */
    static const size_t NurseryChunkUsableSize = gc::ChunkSize - sizeof(gc::ChunkTrailer);

    struct NurseryChunkLayout {
        char data[NurseryChunkUsableSize];
        gc::ChunkTrailer trailer;
        uintptr_t start() { return uintptr_t(&data); }
        uintptr_t end() { return uintptr_t(&trailer); }
    };
    static_assert(sizeof(NurseryChunkLayout) == gc::ChunkSize,
                  "Nursery chunk size must match gc::Chunk size.");
    NurseryChunkLayout& chunk(int index) const {
        MOZ_ASSERT(index < numNurseryChunks_);
        MOZ_ASSERT(start());
        return reinterpret_cast<NurseryChunkLayout*>(start())[index];
    }

    MOZ_ALWAYS_INLINE void initChunk(int chunkno) {
        gc::StoreBuffer* sb = JS::shadow::Runtime::asShadowRuntime(runtime())->gcStoreBufferPtr();
        new (&chunk(chunkno).trailer) gc::ChunkTrailer(runtime(), sb);
    }

    MOZ_ALWAYS_INLINE void setCurrentChunk(int chunkno) {
        MOZ_ASSERT(chunkno < numNurseryChunks_);
        MOZ_ASSERT(chunkno < numActiveChunks_);
        currentChunk_ = chunkno;
        position_ = chunk(chunkno).start();
        currentEnd_ = chunk(chunkno).end();
        initChunk(chunkno);
    }

    void updateDecommittedRegion();

    MOZ_ALWAYS_INLINE uintptr_t allocationEnd() const {
        MOZ_ASSERT(numActiveChunks_ > 0);
        return chunk(numActiveChunks_ - 1).end();
    }

    MOZ_ALWAYS_INLINE uintptr_t currentEnd() const {
        MOZ_ASSERT(runtime_);
        MOZ_ASSERT(currentEnd_ == chunk(currentChunk_).end());
        return currentEnd_;
    }
    void* addressOfCurrentEnd() const {
        MOZ_ASSERT(runtime_);
        return (void*)&currentEnd_;
    }

    uintptr_t position() const { return position_; }
    void* addressOfPosition() const { return (void*)&position_; }

    JSRuntime* runtime() const { return runtime_; }

    /* Allocates a new GC thing from the tenured generation during minor GC. */
    gc::TenuredCell* allocateFromTenured(JS::Zone* zone, gc::AllocKind thingKind);

    /* Common internal allocator function. */
    void* allocate(size_t size);

    /*
     * Trace everything on the TenuringTracer's fixup list, tenuring anything
     * newly reached in the nursery, until no more things are moved.
     */
    void collectToFixedPoint(TenuringTracer& trc, gc::TenureCountCache& tenureCounts);

    /* Handle relocation of slots/elements pointers stored in Ion frames. */
    void setForwardingPointer(void* oldData, void* newData, bool direct);

    void setSlotsForwardingPointer(HeapSlot* oldSlots, HeapSlot* newSlots, uint32_t nslots);
    void setElementsForwardingPointer(ObjectElements* oldHeader, ObjectElements* newHeader,
                                      uint32_t nelems);

    /* Free malloced pointers owned by freed things in the nursery. */
    void freeMallocedBuffers();

    /*
     * Frees all non-live nursery-allocated things at the end of a minor
     * collection.
     */
    void sweep();

    void runSweepActions();

    /* Change the allocable space provided by the nursery. */
    void growAllocableSpace();
    void shrinkAllocableSpace();

    friend class TenuringTracer;
    friend class gc::MinorCollectionTracer;
    friend class jit::MacroAssembler;
};

} /* namespace js */

#endif /* gc_Nursery_h */