// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_HEAP_OBJECT_H_
#define V8_OBJECTS_HEAP_OBJECT_H_

#include "src/common/globals.h"
#include "src/roots/roots.h"

#include "src/objects/objects.h"
#include "src/objects/tagged-field.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

class Heap;

// HeapObject is the superclass for all classes describing heap allocated
// objects.
class HeapObject : public Object {
 public:
  bool is_null() const {
    return static_cast<Tagged_t>(ptr()) == static_cast<Tagged_t>(kNullAddress);
  }

  // [map]: Contains a map which contains the object's reflective
  // information.
  DECL_GETTER(map, Map)
  inline void set_map(Map value);

  inline ObjectSlot map_slot() const;

  // The no-write-barrier version.  This is OK if the object is white and in
  // new space, or if the value is an immortal immutable object, like the maps
  // of primitive (non-JS) objects like strings, heap numbers etc.
  inline void set_map_no_write_barrier(Map value);

  // Access the map using acquire load and release store.
  DECL_GETTER(synchronized_map, Map)
  inline void synchronized_set_map(Map value);

  // Compare-and-swaps map word using release store, returns true if the map
  // word was actually swapped.
  inline bool synchronized_compare_and_swap_map_word(MapWord old_map_word,
                                                     MapWord new_map_word);
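  // Illustrative sketch, not part of the API (new_word stands for a map word
  // computed by the caller): a concurrent user typically re-reads the map
  // word and retries when the swap fails:
  //   MapWord old_word = object.synchronized_map_word();
  //   if (!object.synchronized_compare_and_swap_map_word(old_word,
  //                                                      new_word)) {
  //     // Another thread changed the map word first; re-read and retry.
  //   }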

  // Initialize the map immediately after the object is allocated.
  // Do not use this outside Heap.
  inline void set_map_after_allocation(
      Map value, WriteBarrierMode mode = UPDATE_WRITE_BARRIER);

  // During garbage collection, the map word of a heap object does not
  // necessarily contain a map pointer.
  DECL_GETTER(map_word, MapWord)
  inline void set_map_word(MapWord map_word);

  // Access the map word using acquire load and release store.
  DECL_GETTER(synchronized_map_word, MapWord)
  inline void synchronized_set_map_word(MapWord map_word);

  // TODO(v8:7464): Once RO_SPACE is shared between isolates, this method can be
  // removed as ReadOnlyRoots will be accessible from a global variable. For now
  // this method exists to help remove GetIsolate/GetHeap from HeapObject, in a
  // way that doesn't require passing Isolate/Heap down huge call chains or to
  // places where it might not be safe to access it.
  inline ReadOnlyRoots GetReadOnlyRoots() const;
  // This version is intended for the isolate values produced by the
  // i::GetIsolateForPtrCompr(HeapObject) function, which may return nullptr.
  inline ReadOnlyRoots GetReadOnlyRoots(const Isolate* isolate) const;

#define IS_TYPE_FUNCTION_DECL(Type) \
  V8_INLINE bool Is##Type() const;  \
  V8_INLINE bool Is##Type(const Isolate* isolate) const;
  HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
  IS_TYPE_FUNCTION_DECL(HashTableBase)
  IS_TYPE_FUNCTION_DECL(SmallOrderedHashTable)
#undef IS_TYPE_FUNCTION_DECL

  bool IsExternal(Isolate* isolate) const;

// Oddball checks are faster when they are raw pointer comparisons, so the
// isolate/read-only roots overloads should be preferred where possible.
#define IS_TYPE_FUNCTION_DECL(Type, Value)                  \
  V8_INLINE bool Is##Type(Isolate* isolate) const;          \
  V8_INLINE bool Is##Type(OffThreadIsolate* isolate) const; \
  V8_INLINE bool Is##Type(ReadOnlyRoots roots) const;       \
  V8_INLINE bool Is##Type() const;
  ODDBALL_LIST(IS_TYPE_FUNCTION_DECL)
  IS_TYPE_FUNCTION_DECL(NullOrUndefined, /* unused */)
#undef IS_TYPE_FUNCTION_DECL
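  // Illustrative usage, not part of this class: in hot code the read-only
  // roots can be obtained once and reused, so the checks reduce to raw
  // pointer comparisons:
  //   ReadOnlyRoots roots = object.GetReadOnlyRoots();
  //   if (object.IsNullOrUndefined(roots)) return;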

#define DECL_STRUCT_PREDICATE(NAME, Name, name) \
  V8_INLINE bool Is##Name() const;              \
  V8_INLINE bool Is##Name(const Isolate* isolate) const;
  STRUCT_LIST(DECL_STRUCT_PREDICATE)
#undef DECL_STRUCT_PREDICATE

  // Converts an address to a HeapObject pointer.
  static inline HeapObject FromAddress(Address address) {
    DCHECK_TAG_ALIGNED(address);
    return HeapObject(address + kHeapObjectTag);
  }

  // Returns the address of this HeapObject.
  inline Address address() const { return ptr() - kHeapObjectTag; }
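  // Illustrative invariant, not part of the API: FromAddress() and address()
  // are inverses and differ from ptr() only by kHeapObjectTag:
  //   HeapObject object = HeapObject::FromAddress(addr);
  //   DCHECK_EQ(object.address(), addr);
  //   DCHECK_EQ(object.ptr(), addr + kHeapObjectTag);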

  // Iterates over pointers contained in the object (including the Map).
  // If the iteration is not performance critical, use the non-templatized
  // version.
  void Iterate(ObjectVisitor* v);

  template <typename ObjectVisitor>
  inline void IterateFast(ObjectVisitor* v);

  // Iterates over all pointers contained in the object except the
  // first map pointer.  The object type is given in the first
  // parameter. This function does not access the map pointer in the
  // object, and so is safe to call while the map pointer is modified.
  // If the iteration is not performance critical, use the non-templatized
  // version.
  void IterateBody(ObjectVisitor* v);
  void IterateBody(Map map, int object_size, ObjectVisitor* v);

  template <typename ObjectVisitor>
  inline void IterateBodyFast(ObjectVisitor* v);

  template <typename ObjectVisitor>
  inline void IterateBodyFast(Map map, int object_size, ObjectVisitor* v);

  // Returns true if the object contains a tagged value at given offset.
  // It is used for invalid slots filtering. If the offset points outside
  // of the object or to the map word, the result is UNDEFINED (!!!).
  V8_EXPORT_PRIVATE bool IsValidSlot(Map map, int offset);

  // Returns the heap object's size in bytes.
  inline int Size() const;

  // Given a heap object's map pointer, returns the object's size in bytes.
  // Useful when the map pointer field is used for other purposes.
  // GC internal.
  V8_EXPORT_PRIVATE int SizeFromMap(Map map) const;

  // Returns the field at offset in obj, as a read/write Object reference.
  // Does no checking, and is safe to use during GC, while maps are invalid.
  // Does not invoke write barrier, so should only be assigned to
  // during marking GC.
  inline ObjectSlot RawField(int byte_offset) const;
  inline MaybeObjectSlot RawMaybeWeakField(int byte_offset) const;

  DECL_CAST(HeapObject)

  // Return the write barrier mode for this object. Callers of this function
  // must be able to present a reference to a DisallowHeapAllocation object as
  // a sign that they are not going to use this function from code that
  // allocates and thus invalidates the returned write barrier mode.
  inline WriteBarrierMode GetWriteBarrierMode(
      const DisallowHeapAllocation& promise);
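  // Illustrative usage, not part of this class (host and set_foo are
  // hypothetical; set_foo is assumed to take a WriteBarrierMode):
  //   DisallowHeapAllocation no_gc;
  //   WriteBarrierMode mode = host.GetWriteBarrierMode(no_gc);
  //   host.set_foo(value, mode);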

  // Dispatched behavior.
  void HeapObjectShortPrint(std::ostream& os);  // NOLINT
#ifdef OBJECT_PRINT
  void PrintHeader(std::ostream& os, const char* id);  // NOLINT
#endif
  DECL_PRINTER(HeapObject)
  EXPORT_DECL_VERIFIER(HeapObject)
#ifdef VERIFY_HEAP
  inline void VerifyObjectField(Isolate* isolate, int offset);
  inline void VerifySmiField(int offset);
  inline void VerifyMaybeObjectField(Isolate* isolate, int offset);

  // Verify a pointer is a valid HeapObject pointer that points to object
  // areas in the heap.
  static void VerifyHeapPointer(Isolate* isolate, Object p);
#endif

  static inline AllocationAlignment RequiredAlignment(Map map);

  // Whether the object needs rehashing. That is the case if the object's
  // content depends on FLAG_hash_seed. When the object is deserialized into
  // a heap with a different hash seed, these objects need to adapt.
  bool NeedsRehashing() const;

  // Rehashing support is not implemented for all objects that need rehashing.
  // With objects that need rehashing but cannot be rehashed, rehashing has to
  // be disabled.
  bool CanBeRehashed() const;

  // Rehash the object based on the layout inferred from its map.
  void RehashBasedOnMap(ReadOnlyRoots root);

  // Layout description.
#define HEAP_OBJECT_FIELDS(V) \
  V(kMapOffset, kTaggedSize)  \
  /* Header size. */          \
  V(kHeaderSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(Object::kHeaderSize, HEAP_OBJECT_FIELDS)
#undef HEAP_OBJECT_FIELDS

  STATIC_ASSERT(kMapOffset == Internals::kHeapObjectMapOffset);
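  // Note (assuming Object::kHeaderSize is 0, as defined on Object): this
  // expands to kMapOffset == 0 and kHeaderSize == kTaggedSize, i.e. the map
  // word is the sole header field and map_slot() is the slot at address().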

  using MapField = TaggedField<MapWord, HeapObject::kMapOffset>;

  inline Address GetFieldAddress(int field_offset) const;

 protected:
  // Special-purpose constructor for subclasses that have fast paths where
  // their ptr() is a Smi.
  enum class AllowInlineSmiStorage { kRequireHeapObjectTag, kAllowBeingASmi };
  inline HeapObject(Address ptr, AllowInlineSmiStorage allow_smi);

  OBJECT_CONSTRUCTORS(HeapObject, Object);
};

OBJECT_CONSTRUCTORS_IMPL(HeapObject, Object)
CAST_ACCESSOR(HeapObject)

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_HEAP_OBJECT_H_