1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2  * vim: set ts=8 sts=2 et sw=2 tw=80:
3  * This Source Code Form is subject to the terms of the Mozilla Public
4  * License, v. 2.0. If a copy of the MPL was not distributed with this
5  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 
7 #ifndef gc_Barrier_h
8 #define gc_Barrier_h
9 
10 #include "mozilla/DebugOnly.h"
11 
12 #include <type_traits>  // std::true_type
13 
14 #include "NamespaceImports.h"
15 
16 #include "gc/Cell.h"
17 #include "gc/StoreBuffer.h"
18 #include "js/ComparisonOperators.h"     // JS::detail::DefineComparisonOps
19 #include "js/experimental/TypedData.h"  // js::EnableIfABOVType
20 #include "js/HeapAPI.h"
21 #include "js/Id.h"
22 #include "js/RootingAPI.h"
23 #include "js/Value.h"
24 #include "util/Poison.h"
25 
26 /*
27  * [SMDOC] GC Barriers
28  *
29  * Several kinds of barrier are necessary to allow the GC to function correctly.
30  * These are triggered by reading or writing to GC pointers in the heap and
31  * serve to tell the collector about changes to the graph of reachable GC
32  * things.
33  *
34  * Since it would be awkward to change every write to memory into a function
35  * call, this file contains a bunch of C++ classes and templates that use
36  * operator overloading to take care of barriers automatically. In most cases,
37  * all that's necessary is to replace:
38  *
39  *     Type* field;
40  *
41  * with:
42  *
43  *     HeapPtr<Type> field;
44  *
45  * All heap-based GC pointers and tagged pointers must use one of these classes,
46  * except in a couple of exceptional cases.
47  *
48  * These classes are designed to be used by the internals of the JS engine.
49  * Barriers designed to be used externally are provided in js/RootingAPI.h.
50  *
51  * Overview
52  * ========
53  *
54  * This file implements the following concrete classes:
55  *
56  * HeapPtr       General wrapper for heap-based pointers that provides pre- and
57  *               post-write barriers. Most clients should use this.
58  *
59  * GCPtr         An optimisation of HeapPtr for objects which are only destroyed
60  *               by GC finalization (this rules out use in Vector, for example).
61  *
62  * PreBarriered  Provides a pre-barrier but not a post-barrier. Necessary when
63  *               generational GC updates are handled manually, e.g. for hash
64  *               table keys that don't use MovableCellHasher.
65  *
66  * HeapSlot      Provides pre and post-barriers, optimised for use in JSObject
67  *               slots and elements.
68  *
69  * WeakHeapPtr   Provides read and post-write barriers, for use with weak
70  *               pointers.
71  *
72  * UnsafeBarePtr Provides no barriers. Don't add new uses of this, or only if
73  *               you really know what you are doing.
74  *
75  * The following classes are implemented in js/RootingAPI.h (in the JS
76  * namespace):
77  *
78  * Heap          General wrapper for external clients. Like HeapPtr but also
79  *               handles cycle collector concerns. Most external clients should
80  *               use this.
81  *
82  * TenuredHeap   Like Heap but doesn't allow nursery pointers. Allows storing
83  *               flags in unused lower bits of the pointer.
84  *
85  * Which class to use?
86  * -------------------
87  *
88  * Answer the following questions to decide which barrier class is right for
89  * your use case:
90  *
91  * Is your code part of the JS engine?
92  *   Yes, it's internal =>
93  *     Is your pointer weak or strong?
94  *       Strong =>
95  *         Do you want automatic handling of nursery pointers?
96  *           Yes, of course =>
97  *             Can your object be destroyed outside of a GC?
98  *               Yes => Use HeapPtr<T>
99  *               No => Use GCPtr<T> (optimization)
100  *           No, I'll do this myself =>
101  *             Do you want pre-barriers so incremental marking works?
102  *               Yes, of course => Use PreBarriered<T>
103  *               No, and I'll fix all the bugs myself => Use UnsafeBarePtr<T>
104  *       Weak => Use WeakHeapPtr<T>
105  *   No, it's external =>
106  *     Can your pointer refer to nursery objects?
107  *       Yes => Use JS::Heap<T>
108  *       Never => Use JS::TenuredHeap<T> (optimization)
109  *
110  * If in doubt, use HeapPtr<T>.
111  *
112  * Write barriers
113  * ==============
114  *
115  * A write barrier is a mechanism used by incremental or generational GCs to
116  * ensure that every value that needs to be marked is marked. In general, the
117  * write barrier should be invoked whenever a write can cause the set of things
118  * traced through by the GC to change. This includes:
119  *
120  *   - writes to object properties
121  *   - writes to array slots
122  *   - writes to fields like JSObject::shape_ that we trace through
123  *   - writes to fields in private data
124  *   - writes to non-markable fields like JSObject::private that point to
125  *     markable data
126  *
127  * The last category is the trickiest. Even though the private pointer does not
128  * point to a GC thing, changing the private pointer may change the set of
129  * objects that are traced by the GC. Therefore it needs a write barrier.
130  *
131  * Every barriered write should have the following form:
132  *
133  *   <pre-barrier>
134  *   obj->field = value; // do the actual write
135  *   <post-barrier>
136  *
137  * The pre-barrier is used for incremental GC and the post-barrier is for
138  * generational GC.
139  *
140  * Pre-write barrier
141  * -----------------
142  *
143  * To understand the pre-barrier, let's consider how incremental GC works. The
144  * GC itself is divided into "slices". Between each slice, JS code is allowed to
145  * run. Each slice should be short so that the user doesn't notice the
146  * interruptions. In our GC, the structure of the slices is as follows:
147  *
148  * 1. ... JS work, which leads to a request to do GC ...
149  * 2. [first GC slice, which performs all root marking and (maybe) more marking]
150  * 3. ... more JS work is allowed to run ...
151  * 4. [GC mark slice, which runs entirely in
152  *    GCRuntime::markUntilBudgetExhausted]
153  * 5. ... more JS work ...
154  * 6. [GC mark slice, which runs entirely in
155  *    GCRuntime::markUntilBudgetExhausted]
156  * 7. ... more JS work ...
157  * 8. [GC marking finishes; sweeping done non-incrementally; GC is done]
 * 9. ... JS continues uninterrupted now that GC is finished ...
159  *
160  * Of course, there may be a different number of slices depending on how much
161  * marking is to be done.
162  *
163  * The danger inherent in this scheme is that the JS code in steps 3, 5, and 7
164  * might change the heap in a way that causes the GC to collect an object that
165  * is actually reachable. The write barrier prevents this from happening. We use
166  * a variant of incremental GC called "snapshot at the beginning." This approach
167  * guarantees the invariant that if an object is reachable in step 2, then we
168  * will mark it eventually. The name comes from the idea that we take a
169  * theoretical "snapshot" of all reachable objects in step 2; all objects in
170  * that snapshot should eventually be marked. (Note that the write barrier
171  * verifier code takes an actual snapshot.)
172  *
173  * The basic correctness invariant of a snapshot-at-the-beginning collector is
174  * that any object reachable at the end of the GC (step 9) must either:
175  *   (1) have been reachable at the beginning (step 2) and thus in the snapshot
176  *   (2) or must have been newly allocated, in steps 3, 5, or 7.
177  * To deal with case (2), any objects allocated during an incremental GC are
178  * automatically marked black.
179  *
180  * This strategy is actually somewhat conservative: if an object becomes
181  * unreachable between steps 2 and 8, it would be safe to collect it. We won't,
182  * mainly for simplicity. (Also, note that the snapshot is entirely
183  * theoretical. We don't actually do anything special in step 2 that we wouldn't
 * do in a non-incremental GC.)
185  *
186  * It's the pre-barrier's job to maintain the snapshot invariant. Consider the
187  * write "obj->field = value". Let the prior value of obj->field be
188  * value0. Since it's possible that value0 may have been what obj->field
189  * contained in step 2, when the snapshot was taken, the barrier marks
190  * value0. Note that it only does this if we're in the middle of an incremental
191  * GC. Since this is rare, the cost of the write barrier is usually just an
192  * extra branch.
193  *
194  * In practice, we implement the pre-barrier differently based on the type of
195  * value0. E.g., see JSObject::preWriteBarrier, which is used if obj->field is
196  * a JSObject*. It takes value0 as a parameter.
197  *
198  * Post-write barrier
199  * ------------------
200  *
201  * For generational GC, we want to be able to quickly collect the nursery in a
202  * minor collection.  Part of the way this is achieved is to only mark the
203  * nursery itself; tenured things, which may form the majority of the heap, are
204  * not traced through or marked.  This leads to the problem of what to do about
205  * tenured objects that have pointers into the nursery: if such things are not
206  * marked, they may be discarded while there are still live objects which
207  * reference them. The solution is to maintain information about these pointers,
208  * and mark their targets when we start a minor collection.
209  *
210  * The pointers can be thought of as edges in an object graph, and the set of
211  * edges from the tenured generation into the nursery is known as the remembered
212  * set. Post barriers are used to track this remembered set.
213  *
214  * Whenever a slot which could contain such a pointer is written, we check
215  * whether the pointed-to thing is in the nursery (if storeBuffer() returns a
216  * buffer).  If so we add the cell into the store buffer, which is the
217  * collector's representation of the remembered set.  This means that when we
218  * come to do a minor collection we can examine the contents of the store buffer
219  * and mark any edge targets that are in the nursery.
220  *
221  * Read barriers
222  * =============
223  *
224  * Weak pointer read barrier
225  * -------------------------
226  *
227  * Weak pointers must have a read barrier to prevent the referent from being
228  * collected if it is read after the start of an incremental GC.
229  *
230  * The problem happens when, during an incremental GC, some code reads a weak
231  * pointer and writes it somewhere on the heap that has been marked black in a
232  * previous slice. Since the weak pointer will not otherwise be marked and will
233  * be swept and finalized in the last slice, this will leave the pointer just
234  * written dangling after the GC. To solve this, we immediately mark black all
235  * weak pointers that get read between slices so that it is safe to store them
236  * in an already marked part of the heap, e.g. in Rooted.
237  *
238  * Cycle collector read barrier
239  * ----------------------------
240  *
241  * Heap pointers external to the engine may be marked gray. The JS API has an
 * invariant that no gray pointers may be passed; this is maintained by a read
243  * barrier that calls ExposeGCThingToActiveJS on such pointers. This is
244  * implemented by JS::Heap<T> in js/RootingAPI.h.
245  *
246  * Implementation Details
247  * ======================
248  *
249  * One additional note: not all object writes need to be pre-barriered. Writes
250  * to newly allocated objects do not need a pre-barrier. In these cases, we use
251  * the "obj->field.init(value)" method instead of "obj->field = value". We use
252  * the init naming idiom in many places to signify that a field is being
253  * assigned for the first time.
254  *
255  * This file implements the following hierarchy of classes:
256  *
257  * BarrieredBase             base class of all barriers
258  *  |  |
259  *  | WriteBarriered         base class which provides common write operations
260  *  |  |  |  |  |
261  *  |  |  |  | PreBarriered  provides pre-barriers only
262  *  |  |  |  |
263  *  |  |  | GCPtr            provides pre- and post-barriers
264  *  |  |  |
265  *  |  | HeapPtr             provides pre- and post-barriers; is relocatable
266  *  |  |                     and deletable for use inside C++ managed memory
267  *  |  |
268  *  | HeapSlot               similar to GCPtr, but tailored to slots storage
269  *  |
270  * ReadBarriered             base class which provides common read operations
271  *  |
272  * WeakHeapPtr               provides read barriers only
273  *
274  *
275  * The implementation of the barrier logic is implemented in the
276  * Cell/TenuredCell base classes, which are called via:
277  *
278  * WriteBarriered<T>::pre
279  *  -> InternalBarrierMethods<T*>::preBarrier
280  *      -> Cell::preWriteBarrier
281  *  -> InternalBarrierMethods<Value>::preBarrier
282  *  -> InternalBarrierMethods<jsid>::preBarrier
283  *      -> InternalBarrierMethods<T*>::preBarrier
284  *          -> Cell::preWriteBarrier
285  *
286  * GCPtr<T>::post and HeapPtr<T>::post
287  *  -> InternalBarrierMethods<T*>::postBarrier
288  *      -> gc::PostWriteBarrierImpl
289  *  -> InternalBarrierMethods<Value>::postBarrier
290  *      -> StoreBuffer::put
291  *
292  * Barriers for use outside of the JS engine call into the same barrier
293  * implementations at InternalBarrierMethods<T>::post via an indirect call to
294  * Heap(.+)PostWriteBarrier.
295  *
 * These classes are designed to be used to wrap GC thing pointers or values
297  * act like them (i.e. JS::Value and jsid).  It is possible to use them for
298  * other types by supplying the necessary barrier implementations but this
299  * is not usually necessary and should be done with caution.
300  */
301 
302 namespace js {
303 
304 class NativeObject;
305 
namespace gc {

// Out-of-line barrier implementations for tagged values; defined in the GC.
// These are called by the InternalBarrierMethods specializations below.
void ValueReadBarrier(const Value& v);
void ValuePreWriteBarrier(const Value& v);
void IdPreWriteBarrier(jsid id);
void CellPtrPreWriteBarrier(JS::GCCellPtr thing);

}  // namespace gc
314 
#ifdef DEBUG

// Debug-only queries used by the gray-marking assertions below (see
// AssertTargetIsNotGray and AutoTouchingGrayThings).
bool CurrentThreadIsTouchingGrayThings();

bool IsMarkedBlack(JSObject* obj);

#endif
322 
// RAII scope marker indicating that the current thread may legitimately
// handle gray GC things; while one is live, AssertTargetIsNotGray is
// suppressed (see CurrentThreadIsTouchingGrayThings above). A no-op in
// release builds.
struct MOZ_RAII AutoTouchingGrayThings {
#ifdef DEBUG
  AutoTouchingGrayThings();
  ~AutoTouchingGrayThings();
#else
  AutoTouchingGrayThings() {}
#endif
};
331 
// Uniform interface to the pre-/post-/read-barrier operations for each
// supported wrapped type T. The primary template is intentionally empty:
// only the specializations below (T*, Value, jsid, ArrayBufferOrView
// subclasses) are usable, so unsupported types fail to compile.
template <typename T, typename Enable = void>
struct InternalBarrierMethods {};
334 
// Barrier methods for direct pointers to GC things. Dispatches to the
// Cell-based barrier implementations (see the call-graph overview in the
// SMDOC comment above).
template <typename T>
struct InternalBarrierMethods<T*> {
  static_assert(std::is_base_of_v<gc::Cell, T>, "Expected a GC thing type");

  // A pointer needs barriering/marking unless it is null.
  static bool isMarkable(const T* v) { return v != nullptr; }

  static void preBarrier(T* v) { gc::PreWriteBarrier(v); }

  static void postBarrier(T** vp, T* prev, T* next) {
    gc::PostWriteBarrier(vp, prev, next);
  }

  static void readBarrier(T* v) { gc::ReadBarrier(v); }

#ifdef DEBUG
  static void assertThingIsNotGray(T* v) { return T::assertThingIsNotGray(v); }
#endif
};
353 
// Barrier methods for JS::Value, which may or may not reference a GC thing;
// all operations are no-ops for non-GC-thing values.
template <>
struct InternalBarrierMethods<Value> {
  static bool isMarkable(const Value& v) { return v.isGCThing(); }

  static void preBarrier(const Value& v) {
    if (v.isGCThing()) {
      gc::ValuePreWriteBarrier(v);
    }
  }

  // Maintain the remembered set (store buffer) for the edge at |vp| when its
  // value changes from |prev| to |next|; see "Post-write barrier" in the
  // SMDOC comment above.
  static MOZ_ALWAYS_INLINE void postBarrier(Value* vp, const Value& prev,
                                            const Value& next) {
    MOZ_ASSERT(!CurrentThreadIsIonCompiling());
    MOZ_ASSERT(vp);

    // If the target needs an entry, add it.
    js::gc::StoreBuffer* sb;
    if (next.isNurseryAllocatableGCThing() &&
        (sb = next.toGCThing()->storeBuffer())) {
      // If we know that the prev has already inserted an entry, we can
      // skip doing the lookup to add the new entry. Note that we cannot
      // safely assert the presence of the entry because it may have been
      // added via a different store buffer.
      if (prev.isNurseryAllocatableGCThing() &&
          prev.toGCThing()->storeBuffer()) {
        return;
      }
      sb->putValue(vp);
      return;
    }
    // Remove the prev entry if the new value does not need it.
    if (prev.isNurseryAllocatableGCThing() &&
        (sb = prev.toGCThing()->storeBuffer())) {
      sb->unputValue(vp);
    }
  }

  static void readBarrier(const Value& v) {
    if (v.isGCThing()) {
      gc::ValueReadBarrier(v);
    }
  }

#ifdef DEBUG
  static void assertThingIsNotGray(const Value& v) {
    JS::AssertValueIsNotGray(v);
  }
#endif
};
403 
404 template <>
405 struct InternalBarrierMethods<jsid> {
406   static bool isMarkable(jsid id) { return id.isGCThing(); }
407   static void preBarrier(jsid id) {
408     if (id.isGCThing()) {
409       gc::IdPreWriteBarrier(id);
410     }
411   }
412   static void postBarrier(jsid* idp, jsid prev, jsid next) {}
413 #ifdef DEBUG
414   static void assertThingIsNotGray(jsid id) { JS::AssertIdIsNotGray(id); }
415 #endif
416 };
417 
// Specialization for JS::ArrayBufferOrView subclasses, which wrap a JSObject
// pointer; post/read barriers delegate to the public BarrierMethods for the
// wrapped type.
template <typename T>
struct InternalBarrierMethods<T, EnableIfABOVType<T>> {
  using BM = BarrierMethods<T>;

  // Markable when a wrapped object is actually present.
  static bool isMarkable(const T& thing) { return bool(thing); }
  static void preBarrier(const T& thing) {
    gc::PreWriteBarrier(thing.asObjectUnbarriered());
  }
  static void postBarrier(T* tp, const T& prev, const T& next) {
    BM::postWriteBarrier(tp, prev, next);
  }
  static void readBarrier(const T& thing) { BM::readBarrier(thing); }
#ifdef DEBUG
  static void assertThingIsNotGray(const T& thing) {
    // Assert on the underlying object, if any, without triggering barriers.
    JSObject* obj = thing.asObjectUnbarriered();
    if (obj) {
      JS::AssertValueIsNotGray(JS::ObjectValue(*obj));
    }
  }
#endif
};
440 
// Debug-only check that |v| is not marked gray, unless the current thread
// has opted in via AutoTouchingGrayThings. A no-op in release builds.
template <typename T>
static inline void AssertTargetIsNotGray(const T& v) {
#ifdef DEBUG
  if (CurrentThreadIsTouchingGrayThings()) {
    return;
  }
  InternalBarrierMethods<T>::assertThingIsNotGray(v);
#endif
}
449 
// Base class of all barrier types.
//
// This is marked non-memmovable since post barriers added by derived classes
// can add pointers to class instances to the store buffer.
template <typename T>
class MOZ_NON_MEMMOVABLE BarrieredBase {
 protected:
  // BarrieredBase is not directly instantiable.
  explicit BarrieredBase(const T& v) : value(v) {}

  // BarrieredBase subclasses cannot be copy constructed by default.
  // (The defaulted copy constructor is protected so only subclasses that
  // explicitly opt in — firing their own barriers — can use it.)
  BarrieredBase(const BarrieredBase<T>& other) = default;

  // Storage for all barrier classes. |value| must be a GC thing reference
  // type: either a direct pointer to a GC thing or a supported tagged
  // pointer that can reference GC things, such as JS::Value or jsid. Nested
  // barrier types are NOT supported. See assertTypeConstraints.
  T value;

 public:
  using ElementType = T;

  // Note: this is public because C++ cannot friend to a specific template
  // instantiation. Friending to the generic template leads to a number of
  // unintended consequences, including template resolution ambiguity and a
  // circular dependency with Tracing.h.
  T* unbarrieredAddress() const { return const_cast<T*>(&value); }
};
478 
// Base class for barriered pointer types that intercept only writes.
template <class T>
class WriteBarriered : public BarrieredBase<T>,
                       public WrappedPtrOperations<T, WriteBarriered<T>> {
 protected:
  using BarrieredBase<T>::value;

  // WriteBarriered is not directly instantiable.
  explicit WriteBarriered(const T& v) : BarrieredBase<T>(v) {}

 public:
  DECLARE_POINTER_CONSTREF_OPS(T);

  // Use this if the automatic coercion to T isn't working.
  const T& get() const { return this->value; }

  // Use this if you want to change the value without invoking barriers.
  // Obviously this is dangerous unless you know the barrier is not needed.
  void unbarrieredSet(const T& v) { this->value = v; }

  // For users who need to manually barrier the raw types.
  static void preWriteBarrier(const T& v) {
    InternalBarrierMethods<T>::preBarrier(v);
  }

 protected:
  // Fire the pre-write barrier for the current value (incremental marking;
  // see "Pre-write barrier" in the SMDOC comment above).
  void pre() { InternalBarrierMethods<T>::preBarrier(this->value); }

  // Fire the post-write barrier for a change from |prev| to |next|
  // (generational GC store-buffer maintenance).
  MOZ_ALWAYS_INLINE void post(const T& prev, const T& next) {
    InternalBarrierMethods<T>::postBarrier(&this->value, prev, next);
  }
};
510 
// Expands to the copy-assignment operators plus a move-assignment operator
// for barrier wrapper |Wrapper<T>|. Move assignment transfers the value out
// of |other| via release(), which leaves |other| safely initialized.
#define DECLARE_POINTER_ASSIGN_AND_MOVE_OPS(Wrapper, T) \
  DECLARE_POINTER_ASSIGN_OPS(Wrapper, T)                \
  Wrapper<T>& operator=(Wrapper<T>&& other) {           \
    setUnchecked(other.release());                      \
    return *this;                                       \
  }
517 
/*
 * PreBarriered only automatically handles pre-barriers. Post-barriers must be
 * manually implemented when using this class. GCPtr and HeapPtr should be used
 * in all cases that do not require explicit low-level control of moving
 * behavior.
 *
 * This class is useful for example for HashMap keys where automatically
 * updating a moved nursery pointer would break the hash table.
 */
template <class T>
class PreBarriered : public WriteBarriered<T> {
 public:
  PreBarriered() : WriteBarriered<T>(JS::SafelyInitialized<T>::create()) {}
  /*
   * Allow implicit construction for use in generic contexts, such as
   * DebuggerWeakMap::markKeys.
   */
  MOZ_IMPLICIT PreBarriered(const T& v) : WriteBarriered<T>(v) {}

  explicit PreBarriered(const PreBarriered<T>& other)
      : WriteBarriered<T>(other.value) {}

  PreBarriered(PreBarriered<T>&& other) : WriteBarriered<T>(other.release()) {}

  // Pre-barrier on destruction preserves the snapshot-at-the-beginning
  // invariant: the old referent may be part of the marking snapshot.
  ~PreBarriered() { this->pre(); }

  // Initialize a newly-created field; no pre-barrier is needed because there
  // is no previous value to protect.
  void init(const T& v) { this->value = v; }

  /* Use to set the pointer to nullptr. */
  void clear() { set(JS::SafelyInitialized<T>::create()); }

  DECLARE_POINTER_ASSIGN_AND_MOVE_OPS(PreBarriered, T);

  void set(const T& v) {
    AssertTargetIsNotGray(v);
    setUnchecked(v);
  }

 private:
  // Pre-barrier the old value, then store the new one. Deliberately no
  // post-barrier: clients of this class handle nursery pointers manually.
  void setUnchecked(const T& v) {
    this->pre();
    this->value = v;
  }

  // Move the value out, leaving this wrapper safely initialized. Used by the
  // move operations; fires no barriers.
  T release() {
    T tmp = this->value;
    this->value = JS::SafelyInitialized<T>::create();
    return tmp;
  }
};
568 
569 }  // namespace js
570 
571 namespace JS {
572 
573 namespace detail {
574 
// Allow js::PreBarriered<T> to participate in the shared comparison-operator
// machinery (compared against T and other wrappers) via its unwrapped value.
template <typename T>
struct DefineComparisonOps<js::PreBarriered<T>> : std::true_type {
  static const T& get(const js::PreBarriered<T>& v) { return v.get(); }
};
579 
580 }  // namespace detail
581 
582 }  // namespace JS
583 
584 namespace js {
585 
/*
 * A pre- and post-barriered heap pointer, for use inside the JS engine.
 *
 * It must only be stored in memory that has GC lifetime. GCPtr must not be
 * used in contexts where it may be implicitly moved or deleted, e.g. most
 * containers.
 *
 * The post-barriers implemented by this class are faster than those
 * implemented by js::HeapPtr<T> or JS::Heap<T> at the cost of not
 * automatically handling deletion or movement.
 */
template <class T>
class GCPtr : public WriteBarriered<T> {
 public:
  GCPtr() : WriteBarriered<T>(JS::SafelyInitialized<T>::create()) {}

  // Construction fires only the post-barrier: there is no previous value, so
  // no pre-barrier is required.
  explicit GCPtr(const T& v) : WriteBarriered<T>(v) {
    this->post(JS::SafelyInitialized<T>::create(), v);
  }

  explicit GCPtr(const GCPtr<T>& v) : WriteBarriered<T>(v) {
    this->post(JS::SafelyInitialized<T>::create(), v);
  }

#ifdef DEBUG
  ~GCPtr() {
    // No barriers are necessary as this only happens when the GC is sweeping.
    //
    // If this assertion fails you may need to make the containing object use a
    // HeapPtr instead, as this can be deleted from outside of GC.
    MOZ_ASSERT(CurrentThreadIsGCSweeping() || CurrentThreadIsGCFinalizing());

    // Poison the storage so stale uses of the freed pointer fail loudly.
    Poison(this, JS_FREED_HEAP_PTR_PATTERN, sizeof(*this),
           MemCheckKind::MakeNoAccess);
  }
#endif

  // Initialize a newly-created field; skips the pre-barrier since there is
  // no previous value to protect.
  void init(const T& v) {
    AssertTargetIsNotGray(v);
    this->value = v;
    this->post(JS::SafelyInitialized<T>::create(), v);
  }

  DECLARE_POINTER_ASSIGN_OPS(GCPtr, T);

  void set(const T& v) {
    AssertTargetIsNotGray(v);
    setUnchecked(v);
  }

 private:
  // Pre-barrier the old value, store the new one, then post-barrier the
  // transition (old -> new) for the store buffer.
  void setUnchecked(const T& v) {
    this->pre();
    T tmp = this->value;
    this->value = v;
    this->post(tmp, this->value);
  }

  /*
   * Unlike HeapPtr<T>, GCPtr<T> must be managed with GC lifetimes.
   * Specifically, the memory used by the pointer itself must be live until
   * at least the next minor GC. For that reason, move semantics are invalid
   * and are deleted here. Please note that not all containers support move
   * semantics, so this does not completely prevent invalid uses.
   */
  GCPtr(GCPtr<T>&&) = delete;
  GCPtr<T>& operator=(GCPtr<T>&&) = delete;
};
654 
655 }  // namespace js
656 
657 namespace JS {
658 
659 namespace detail {
660 
// Allow js::GCPtr<T> to participate in the shared comparison-operator
// machinery via its unwrapped value.
template <typename T>
struct DefineComparisonOps<js::GCPtr<T>> : std::true_type {
  static const T& get(const js::GCPtr<T>& v) { return v.get(); }
};
665 
666 }  // namespace detail
667 
668 }  // namespace JS
669 
670 namespace js {
671 
672 /*
673  * A pre- and post-barriered heap pointer, for use inside the JS engine. These
674  * heap pointers can be stored in C++ containers like GCVector and GCHashMap.
675  *
676  * The GC sometimes keeps pointers to pointers to GC things --- for example, to
677  * track references into the nursery. However, C++ containers like GCVector and
678  * GCHashMap usually reserve the right to relocate their elements any time
679  * they're modified, invalidating all pointers to the elements. HeapPtr
680  * has a move constructor which knows how to keep the GC up to date if it is
681  * moved to a new location.
682  *
683  * However, because of this additional communication with the GC, HeapPtr
684  * is somewhat slower, so it should only be used in contexts where this ability
685  * is necessary.
686  *
687  * Obviously, JSObjects, JSStrings, and the like get tenured and compacted, so
688  * whatever pointers they contain get relocated, in the sense used here.
689  * However, since the GC itself is moving those values, it takes care of its
690  * internal pointers to those pointers itself. HeapPtr is only necessary
691  * when the relocation would otherwise occur without the GC's knowledge.
692  */
template <class T>
class HeapPtr : public WriteBarriered<T> {
 public:
  HeapPtr() : WriteBarriered<T>(JS::SafelyInitialized<T>::create()) {}

  // Implicitly adding barriers is a reasonable default.
  MOZ_IMPLICIT HeapPtr(const T& v) : WriteBarriered<T>(v) {
    this->post(JS::SafelyInitialized<T>::create(), this->value);
  }

  MOZ_IMPLICIT HeapPtr(const HeapPtr<T>& other) : WriteBarriered<T>(other) {
    this->post(JS::SafelyInitialized<T>::create(), this->value);
  }

  // Move transfers the value and post-barriers both edges: release() resets
  // |other| to a safely-initialized value and updates the store buffer.
  HeapPtr(HeapPtr<T>&& other) : WriteBarriered<T>(other.release()) {
    this->post(JS::SafelyInitialized<T>::create(), this->value);
  }

  // Unlike GCPtr, a HeapPtr may be destroyed outside of GC (see the class
  // comment above), so both barriers fire here to keep marking state and the
  // store buffer consistent.
  ~HeapPtr() {
    this->pre();
    this->post(this->value, JS::SafelyInitialized<T>::create());
  }

  // Initialize a newly-created field; the value must still be in its
  // safely-initialized state. Skips the pre-barrier.
  void init(const T& v) {
    MOZ_ASSERT(this->value == JS::SafelyInitialized<T>::create());
    AssertTargetIsNotGray(v);
    this->value = v;
    this->post(JS::SafelyInitialized<T>::create(), this->value);
  }

  DECLARE_POINTER_ASSIGN_AND_MOVE_OPS(HeapPtr, T);

  void set(const T& v) {
    AssertTargetIsNotGray(v);
    setUnchecked(v);
  }

  /* Make this friend so it can access pre() and post(). */
  template <class T1, class T2>
  friend inline void BarrieredSetPair(Zone* zone, HeapPtr<T1*>& v1, T1* val1,
                                      HeapPtr<T2*>& v2, T2* val2);

 protected:
  // Full barriered write: pre-barrier the old value, then post-barriered
  // store of the new one.
  void setUnchecked(const T& v) {
    this->pre();
    postBarrieredSet(v);
  }

  // Store |v| and fire only the post-barrier for the transition.
  void postBarrieredSet(const T& v) {
    T tmp = this->value;
    this->value = v;
    this->post(tmp, this->value);
  }

  // Move the value out (post-barriering its removal), leaving this wrapper
  // safely initialized. Used by the move operations.
  T release() {
    T tmp = this->value;
    postBarrieredSet(JS::SafelyInitialized<T>::create());
    return tmp;
  }
};
753 
754 /*
755  * A pre-barriered heap pointer, for use inside the JS engine.
756  *
757  * Similar to GCPtr, but used for a pointer to a malloc-allocated structure
758  * containing GC thing pointers.
759  *
760  * It must only be stored in memory that has GC lifetime. It must not be used in
761  * contexts where it may be implicitly moved or deleted, e.g. most containers.
762  *
763  * A post-barrier is unnecessary since malloc-allocated structures cannot be in
764  * the nursery.
765  */
766 template <class T>
767 class GCStructPtr : public BarrieredBase<T> {
768  public:
769   // This is sometimes used to hold tagged pointers.
770   static constexpr uintptr_t MaxTaggedPointer = 0x2;
771 
772   GCStructPtr() : BarrieredBase<T>(JS::SafelyInitialized<T>::create()) {}
773 
774   // Implicitly adding barriers is a reasonable default.
775   MOZ_IMPLICIT GCStructPtr(const T& v) : BarrieredBase<T>(v) {}
776 
777   GCStructPtr(const GCStructPtr<T>& other) : BarrieredBase<T>(other) {}
778 
779   GCStructPtr(GCStructPtr<T>&& other) : BarrieredBase<T>(other.release()) {}
780 
781   ~GCStructPtr() {
782     // No barriers are necessary as this only happens when the GC is sweeping.
783     MOZ_ASSERT_IF(isTraceable(),
784                   CurrentThreadIsGCSweeping() || CurrentThreadIsGCFinalizing());
785   }
786 
787   void init(const T& v) {
788     MOZ_ASSERT(this->get() == JS::SafelyInitialized<T>());
789     AssertTargetIsNotGray(v);
790     this->value = v;
791   }
792 
793   void set(JS::Zone* zone, const T& v) {
794     pre(zone);
795     this->value = v;
796   }
797 
798   T get() const { return this->value; }
799   operator T() const { return get(); }
800   T operator->() const { return get(); }
801 
802  protected:
803   bool isTraceable() const { return uintptr_t(get()) > MaxTaggedPointer; }
804 
805   void pre(JS::Zone* zone) {
806     if (isTraceable()) {
807       PreWriteBarrier(zone, get());
808     }
809   }
810 };
811 
812 }  // namespace js
813 
814 namespace JS {
815 
816 namespace detail {
817 
// Allow js::HeapPtr<T> to participate in the shared comparison-operator
// machinery via its unwrapped value.
template <typename T>
struct DefineComparisonOps<js::HeapPtr<T>> : std::true_type {
  static const T& get(const js::HeapPtr<T>& v) { return v.get(); }
};
822 
823 }  // namespace detail
824 
825 }  // namespace JS
826 
827 namespace js {
828 
// Base class for barriered pointer types that intercept reads and writes.
template <typename T>
class ReadBarriered : public BarrieredBase<T> {
 protected:
  // ReadBarriered is not directly instantiable.
  explicit ReadBarriered(const T& v) : BarrieredBase<T>(v) {}

  // Fire the read barrier for the current value (see "Read barriers" in the
  // SMDOC comment above).
  void read() const { InternalBarrierMethods<T>::readBarrier(this->value); }

  // Fire the post-write barrier for a change from |prev| to |next|; derived
  // classes provide post-barriers so they are safe with nursery pointers.
  void post(const T& prev, const T& next) {
    InternalBarrierMethods<T>::postBarrier(&this->value, prev, next);
  }
};
841 
842 // Incremental GC requires that weak pointers have read barriers. See the block
843 // comment at the top of Barrier.h for a complete discussion of why.
844 //
845 // Note that this class also has post-barriers, so is safe to use with nursery
846 // pointers. However, when used as a hashtable key, care must still be taken to
847 // insert manual post-barriers on the table for rekeying if the key is based in
848 // any way on the address of the object.
template <typename T>
class WeakHeapPtr : public ReadBarriered<T>,
                    public WrappedPtrOperations<T, WeakHeapPtr<T>> {
 protected:
  using ReadBarriered<T>::value;

 public:
  WeakHeapPtr() : ReadBarriered<T>(JS::SafelyInitialized<T>::create()) {}

  // It is okay to add barriers implicitly.
  MOZ_IMPLICIT WeakHeapPtr(const T& v) : ReadBarriered<T>(v) {
    this->post(JS::SafelyInitialized<T>::create(), v);
  }

  // The copy constructor creates a new weak edge but the wrapped pointer does
  // not escape, so no read barrier is necessary.
  explicit WeakHeapPtr(const WeakHeapPtr& other) : ReadBarriered<T>(other) {
    this->post(JS::SafelyInitialized<T>::create(), value);
  }

  // Move retains the lifetime status of the source edge, so does not fire
  // the read barrier of the defunct edge.
  WeakHeapPtr(WeakHeapPtr&& other) : ReadBarriered<T>(other.release()) {
    this->post(JS::SafelyInitialized<T>::create(), value);
  }

  // The edge is going away: fire the post-barrier with a safely-initialized
  // "new" value so the store buffer stops tracking this location.
  ~WeakHeapPtr() {
    this->post(this->value, JS::SafelyInitialized<T>::create());
  }

  // Copy-assignment updates the store buffer for the old -> new transition.
  // No read barrier fires: the value came from another WeakHeapPtr.
  WeakHeapPtr& operator=(const WeakHeapPtr& v) {
    AssertTargetIsNotGray(v.value);
    T prior = this->value;
    this->value = v.value;
    this->post(prior, v.value);
    return *this;
  }

  // Barriered read: if the value is a markable GC thing, fire the read
  // barrier so an in-progress incremental GC keeps the observed target alive.
  const T& get() const {
    if (InternalBarrierMethods<T>::isMarkable(this->value)) {
      this->read();
    }
    return this->value;
  }

  // Read without the read barrier; callers must ensure the value does not
  // escape in a way that could resurrect a dying target (e.g. comparisons).
  const T& unbarrieredGet() const { return this->value; }

  // Null test; reads the value directly so no read barrier fires.
  explicit operator bool() const { return bool(this->value); }

  operator const T&() const { return get(); }

  const T& operator->() const { return get(); }

  // Barriered write with a debug check that the target is not gray.
  void set(const T& v) {
    AssertTargetIsNotGray(v);
    setUnchecked(v);
  }

  // Write without the post-write barrier. NOTE(review): callers must ensure
  // nursery tracking for this edge is handled elsewhere (e.g. hashtable
  // rekeying) — confirm per call site.
  void unbarrieredSet(const T& v) {
    AssertTargetIsNotGray(v);
    this->value = v;
  }

 private:
  // Store |v| and post-barrier the old -> new transition.
  void setUnchecked(const T& v) {
    T tmp = this->value;
    this->value = v;
    this->post(tmp, v);
  }

  // Take the value out, resetting this edge to its safely-initialized state
  // (set() fires the appropriate barriers for the reset).
  T release() {
    T tmp = value;
    set(JS::SafelyInitialized<T>::create());
    return tmp;
  }
};
925 
926 // A wrapper for a bare pointer, with no barriers.
927 //
928 // This should only be necessary in a limited number of cases. Please don't add
929 // more uses of this if at all possible.
template <typename T>
class UnsafeBarePtr : public BarrieredBase<T> {
 public:
  UnsafeBarePtr() : BarrieredBase<T>(JS::SafelyInitialized<T>::create()) {}
  MOZ_IMPLICIT UnsafeBarePtr(T v) : BarrieredBase<T>(v) {}
  // Reads and writes are completely unbarriered; the user is responsible for
  // making sure the GC can tolerate this edge.
  const T& get() const { return this->value; }
  void set(T newValue) { this->value = newValue; }
  DECLARE_POINTER_CONSTREF_OPS(T);
};
939 
940 }  // namespace js
941 
942 namespace JS {
943 
944 namespace detail {
945 
// Enable the generic comparison operators for WeakHeapPtr<T>. The value is
// read with unbarrieredGet(): a comparison does not let the pointer escape,
// so the weak target need not be marked live.
template <typename T>
struct DefineComparisonOps<js::WeakHeapPtr<T>> : std::true_type {
  static const T& get(const js::WeakHeapPtr<T>& v) {
    return v.unbarrieredGet();
  }
};
952 
953 }  // namespace detail
954 
955 }  // namespace JS
956 
957 namespace js {
958 
959 // A pre- and post-barriered Value that is specialized to be aware that it
960 // resides in a slots or elements vector. This allows it to be relocated in
961 // memory, but with substantially less overhead than a HeapPtr.
class HeapSlot : public WriteBarriered<Value> {
 public:
  enum Kind { Slot = 0, Element = 1 };

  // Initialize a freshly created slot: no pre-barrier (there is no previous
  // value an incremental mark could lose), but the post-barrier runs in case
  // |v| points into the nursery.
  void init(NativeObject* owner, Kind kind, uint32_t slot, const Value& v) {
    value = v;
    post(owner, kind, slot, v);
  }

  // Undefined is not a GC thing, so no barrier of any kind is required.
  void initAsUndefined() { value.setUndefined(); }

  // Fire the pre-barrier on the current value before the slot's storage is
  // freed or reused.
  void destroy() { pre(); }

#ifdef DEBUG
  // Debug-only checks (defined elsewhere) that |owner|/|kind|/|slot| really
  // describe this HeapSlot's location within the owner.
  bool preconditionForSet(NativeObject* owner, Kind kind, uint32_t slot) const;
  void assertPreconditionForPostWriteBarrier(NativeObject* obj, Kind kind,
                                             uint32_t slot,
                                             const Value& target) const;
#endif

  // Full barriered write: pre-barrier on the old value, store, then
  // post-barrier on the new value.
  MOZ_ALWAYS_INLINE void set(NativeObject* owner, Kind kind, uint32_t slot,
                             const Value& v) {
    MOZ_ASSERT(preconditionForSet(owner, kind, slot));
    pre();
    value = v;
    post(owner, kind, slot, v);
  }

 private:
  // Post-write barrier: if the stored value is a GC thing that can live in
  // the nursery and its cell is tracked by a store buffer, record this slot
  // so a minor GC can find the edge.
  void post(NativeObject* owner, Kind kind, uint32_t slot,
            const Value& target) {
#ifdef DEBUG
    assertPreconditionForPostWriteBarrier(owner, kind, slot, target);
#endif
    if (this->value.isNurseryAllocatableGCThing()) {
      gc::Cell* cell = this->value.toGCThing();
      if (cell->storeBuffer()) {
        cell->storeBuffer()->putSlot(owner, kind, slot, 1);
      }
    }
  }
};
1004 
1005 }  // namespace js
1006 
1007 namespace JS {
1008 
1009 namespace detail {
1010 
// Enable the generic comparison operators for HeapSlot; comparison reads the
// wrapped Value via get().
template <>
struct DefineComparisonOps<js::HeapSlot> : std::true_type {
  static const Value& get(const js::HeapSlot& v) { return v.get(); }
};
1015 
1016 }  // namespace detail
1017 
1018 }  // namespace JS
1019 
1020 namespace js {
1021 
1022 class HeapSlotArray {
1023   HeapSlot* array;
1024 
1025  public:
1026   explicit HeapSlotArray(HeapSlot* array) : array(array) {}
1027 
1028   HeapSlot* begin() const { return array; }
1029 
1030   operator const Value*() const {
1031     static_assert(sizeof(GCPtr<Value>) == sizeof(Value));
1032     static_assert(sizeof(HeapSlot) == sizeof(Value));
1033     return reinterpret_cast<const Value*>(array);
1034   }
1035   operator HeapSlot*() const { return begin(); }
1036 
1037   HeapSlotArray operator+(int offset) const {
1038     return HeapSlotArray(array + offset);
1039   }
1040   HeapSlotArray operator+(uint32_t offset) const {
1041     return HeapSlotArray(array + offset);
1042   }
1043 };
1044 
1045 /*
1046  * This is a hack for RegExpStatics::updateFromMatch. It allows us to do two
1047  * barriers with only one branch to check if we're in an incremental GC.
1048  */
template <class T1, class T2>
static inline void BarrieredSetPair(Zone* zone, HeapPtr<T1*>& v1, T1* val1,
                                    HeapPtr<T2*>& v2, T2* val2) {
  AssertTargetIsNotGray(val1);
  AssertTargetIsNotGray(val2);
  // A single branch decides both pre-barriers. NOTE(review): this assumes
  // T1's and T2's needPreWriteBarrier answers always agree for |zone| —
  // confirm both edges live in the same zone.
  if (T1::needPreWriteBarrier(zone)) {
    v1.pre();
    v2.pre();
  }
  // postBarrieredSet stores the new values and fires any post-barriers.
  v1.postBarrieredSet(val1);
  v2.postBarrieredSet(val2);
}
1061 
1062 /*
1063  * ImmutableTenuredPtr is designed for one very narrow case: replacing
1064  * immutable raw pointers to GC-managed things, implicitly converting to a
1065  * handle type for ease of use. Pointers encapsulated by this type must:
1066  *
1067  *   be immutable (no incremental write barriers),
1068  *   never point into the nursery (no generational write barriers), and
1069  *   be traced via MarkRuntime (we use fromMarkedLocation).
1070  *
1071  * In short: you *really* need to know what you're doing before you use this
1072  * class!
1073  */
template <typename T>
class MOZ_HEAP_CLASS ImmutableTenuredPtr {
  T value;

 public:
  // Unbarriered reads; the pointee is immutable and tenured, so no read or
  // write barriers are required (see the class comment above).
  operator T() const { return value; }
  T operator->() const { return value; }

  // `ImmutableTenuredPtr<T>` is implicitly convertible to `Handle<T>`.
  //
  // In case you need to convert to `Handle<U>` where `U` is base class of `T`,
  // convert this to `Handle<T>` by `toHandle()` and then use implicit
  // conversion from `Handle<T>` to `Handle<U>`.
  operator Handle<T>() const { return toHandle(); }
  Handle<T> toHandle() const { return Handle<T>::fromMarkedLocation(&value); }

  // One-time initialization; the pointer must be tenured and not gray.
  void init(T ptr) {
    MOZ_ASSERT(ptr->isTenured());
    AssertTargetIsNotGray(ptr);
    value = ptr;
  }

  T get() const { return value; }
  // Stable address of the stored pointer, suitable for marked-location use.
  const T* address() { return &value; }
};
1099 
#if MOZ_IS_GCC
// NOTE(review): explicit instantiation, presumably working around a GCC
// issue with exporting this specialization across the shared-library
// boundary — confirm the original motivation before removing.
template struct JS_PUBLIC_API MovableCellHasher<JSObject*>;
#endif
1103 
// MovableCellHasher policy for PreBarriered<T> keys: all operations delegate
// to MovableCellHasher<T>, with the unwrapped pointer type as the Lookup.
template <typename T>
struct MovableCellHasher<PreBarriered<T>> {
  using Key = PreBarriered<T>;
  using Lookup = T;

  static bool hasHash(const Lookup& l) {
    return MovableCellHasher<T>::hasHash(l);
  }
  static bool ensureHash(const Lookup& l) {
    return MovableCellHasher<T>::ensureHash(l);
  }
  static HashNumber hash(const Lookup& l) {
    return MovableCellHasher<T>::hash(l);
  }
  static bool match(const Key& k, const Lookup& l) {
    return MovableCellHasher<T>::match(k, l);
  }
};
1122 
// MovableCellHasher policy for HeapPtr<T> keys: all operations delegate to
// MovableCellHasher<T>, with the unwrapped pointer type as the Lookup.
template <typename T>
struct MovableCellHasher<HeapPtr<T>> {
  using Key = HeapPtr<T>;
  using Lookup = T;

  static bool hasHash(const Lookup& l) {
    return MovableCellHasher<T>::hasHash(l);
  }
  static bool ensureHash(const Lookup& l) {
    return MovableCellHasher<T>::ensureHash(l);
  }
  static HashNumber hash(const Lookup& l) {
    return MovableCellHasher<T>::hash(l);
  }
  static bool match(const Key& k, const Lookup& l) {
    return MovableCellHasher<T>::match(k, l);
  }
};
1141 
// MovableCellHasher policy for WeakHeapPtr<T> keys. match() reads the key
// with unbarrieredGet() so a table lookup does not fire the read barrier
// (and therefore does not mark the weak target live).
template <typename T>
struct MovableCellHasher<WeakHeapPtr<T>> {
  using Key = WeakHeapPtr<T>;
  using Lookup = T;

  static bool hasHash(const Lookup& l) {
    return MovableCellHasher<T>::hasHash(l);
  }
  static bool ensureHash(const Lookup& l) {
    return MovableCellHasher<T>::ensureHash(l);
  }
  static HashNumber hash(const Lookup& l) {
    return MovableCellHasher<T>::hash(l);
  }
  static bool match(const Key& k, const Lookup& l) {
    return MovableCellHasher<T>::match(k.unbarrieredGet(), l);
  }
};
1160 
1161 /* Useful for hashtables with a HeapPtr as key. */
template <class T>
struct HeapPtrHasher {
  using Key = HeapPtr<T>;
  using Lookup = T;

  static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
  static bool match(const Key& k, Lookup l) { return k.get() == l; }
  // Rekeying writes without a barrier; the table owner is responsible for
  // any post-barrier bookkeeping (see the WeakHeapPtr class comment).
  static void rekey(Key& k, const Key& newKey) { k.unbarrieredSet(newKey); }
};
1171 
// Hash policy for tables keyed on a PreBarriered pointer; hashing uses the
// default hasher for the underlying pointer type.
template <class T>
struct PreBarrieredHasher {
  using Key = PreBarriered<T>;
  using Lookup = T;

  static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
  static bool match(const Key& k, Lookup l) { return k.get() == l; }
  static void rekey(Key& k, const Key& newKey) { k.unbarrieredSet(newKey); }
};
1181 
1182 /* Useful for hashtables with a WeakHeapPtr as key. */
template <class T>
struct WeakHeapPtrHasher {
  using Key = WeakHeapPtr<T>;
  using Lookup = T;

  static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
  // Matching must not fire the read barrier, so read unbarriered.
  static bool match(const Key& k, Lookup l) { return k.unbarrieredGet() == l; }
  // Rekeying goes through set(), which fires the post-write barrier.
  static void rekey(Key& k, const Key& newKey) {
    k.set(newKey.unbarrieredGet());
  }
};
1194 
// Hash policy for tables keyed on an UnsafeBarePtr; reads and writes are
// unbarriered, matching the wrapper's semantics.
template <class T>
struct UnsafeBarePtrHasher {
  using Key = UnsafeBarePtr<T>;
  using Lookup = T;

  static HashNumber hash(const Lookup& l) { return DefaultHasher<T>::hash(l); }
  static bool match(const Key& k, Lookup l) { return k.get() == l; }
  static void rekey(Key& k, const Key& newKey) { k.set(newKey.get()); }
};
1204 
1205 }  // namespace js
1206 
1207 namespace mozilla {
1208 
// Register the hash policies above as mozilla::DefaultHasher for each
// barriered wrapper, so hash tables keyed on these types pick the right
// policy automatically.
template <class T>
struct DefaultHasher<js::HeapPtr<T>> : js::HeapPtrHasher<T> {};

template <class T>
struct DefaultHasher<js::GCPtr<T>> {
  // Not implemented. GCPtr can't be used as a hash table key because it has a
  // post barrier but doesn't support relocation.
};

template <class T>
struct DefaultHasher<js::PreBarriered<T>> : js::PreBarrieredHasher<T> {};

template <class T>
struct DefaultHasher<js::WeakHeapPtr<T>> : js::WeakHeapPtrHasher<T> {};

template <class T>
struct DefaultHasher<js::UnsafeBarePtr<T>> : js::UnsafeBarePtrHasher<T> {};
1226 
1227 }  // namespace mozilla
1228 
1229 namespace js {
1230 
// Forward declarations for the pointer type aliases below.
class ArrayObject;
class DebugEnvironmentProxy;
class GlobalObject;
class PropertyName;
class Scope;
class ScriptSourceObject;
class Shape;
class BaseShape;
class GetterSetter;
class PropMap;
class WasmInstanceObject;
class WasmTableObject;

namespace jit {
class JitCode;
}  // namespace jit
1247 
// Convenience aliases for common PreBarriered instantiations.
using PreBarrieredId = PreBarriered<jsid>;
using PreBarrieredObject = PreBarriered<JSObject*>;
using PreBarrieredValue = PreBarriered<Value>;

// Convenience aliases for common GCPtr instantiations.
using GCPtrNativeObject = GCPtr<NativeObject*>;
using GCPtrArrayObject = GCPtr<ArrayObject*>;
using GCPtrAtom = GCPtr<JSAtom*>;
using GCPtrBigInt = GCPtr<BigInt*>;
using GCPtrFunction = GCPtr<JSFunction*>;
using GCPtrLinearString = GCPtr<JSLinearString*>;
using GCPtrObject = GCPtr<JSObject*>;
using GCPtrScript = GCPtr<JSScript*>;
using GCPtrString = GCPtr<JSString*>;
using GCPtrShape = GCPtr<Shape*>;
using GCPtrGetterSetter = GCPtr<GetterSetter*>;
using GCPtrPropMap = GCPtr<PropMap*>;
using GCPtrValue = GCPtr<Value>;
using GCPtrId = GCPtr<jsid>;

// Common ImmutableTenuredPtr instantiations.
using ImmutablePropertyNamePtr = ImmutableTenuredPtr<PropertyName*>;
using ImmutableSymbolPtr = ImmutableTenuredPtr<JS::Symbol*>;

// Convenience aliases for common WeakHeapPtr (weak edge) instantiations.
using WeakHeapPtrAtom = WeakHeapPtr<JSAtom*>;
using WeakHeapPtrDebugEnvironmentProxy = WeakHeapPtr<DebugEnvironmentProxy*>;
using WeakHeapPtrGlobalObject = WeakHeapPtr<GlobalObject*>;
using WeakHeapPtrObject = WeakHeapPtr<JSObject*>;
using WeakHeapPtrScript = WeakHeapPtr<JSScript*>;
using WeakHeapPtrScriptSourceObject = WeakHeapPtr<ScriptSourceObject*>;
using WeakHeapPtrShape = WeakHeapPtr<Shape*>;
using WeakHeapPtrJitCode = WeakHeapPtr<jit::JitCode*>;
using WeakHeapPtrSymbol = WeakHeapPtr<JS::Symbol*>;
using WeakHeapPtrWasmInstanceObject = WeakHeapPtr<WasmInstanceObject*>;
using WeakHeapPtrWasmTableObject = WeakHeapPtr<WasmTableObject*>;

// Convenience aliases for common HeapPtr instantiations.
using HeapPtrJitCode = HeapPtr<jit::JitCode*>;
using HeapPtrNativeObject = HeapPtr<NativeObject*>;
using HeapPtrObject = HeapPtr<JSObject*>;
using HeapPtrRegExpShared = HeapPtr<RegExpShared*>;
using HeapPtrValue = HeapPtr<Value>;
1287 
1288 } /* namespace js */
1289 
1290 #endif /* gc_Barrier_h */
1291