1 // Copyright 2018 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_HEAP_HEAP_WRITE_BARRIER_INL_H_
6 #define V8_HEAP_HEAP_WRITE_BARRIER_INL_H_
7
8 // Clients of this interface shouldn't depend on lots of heap internals.
9 // Do not include anything from src/heap here!
10
11 #include "src/heap/heap-write-barrier.h"
12
13 #include "src/common/globals.h"
14 #include "src/objects/code.h"
15 #include "src/objects/compressed-slots-inl.h"
16 #include "src/objects/fixed-array.h"
17 #include "src/objects/heap-object.h"
18 #include "src/objects/maybe-object-inl.h"
19 #include "src/objects/slots-inl.h"
20
21 namespace v8 {
22 namespace internal {
23
// Defined in heap.cc.
// Free-function shims for the slow paths of the write barriers. They exist so
// this header can dispatch into the heap without including heap internals
// (see the comment at the top of the file).

// Debug-only consistency check used by the barrier DCHECKs below.
V8_EXPORT_PRIVATE bool Heap_PageFlagsAreConsistent(HeapObject object);
// Slow path of the generational (old-to-new remembered set) barrier.
V8_EXPORT_PRIVATE void Heap_GenerationalBarrierSlow(HeapObject object,
                                                    Address slot,
                                                    HeapObject value);
// Slow path of the incremental-marking barrier.
V8_EXPORT_PRIVATE void Heap_MarkingBarrierSlow(HeapObject object, Address slot,
                                               HeapObject value);
// Slow path applied to every embedded pointer of a Code object.
V8_EXPORT_PRIVATE void Heap_WriteBarrierForCodeSlow(Code host);

// Marking barrier for the off-heap ArrayBufferExtension of |object|.
V8_EXPORT_PRIVATE void Heap_MarkingBarrierForArrayBufferExtensionSlow(
    HeapObject object, ArrayBufferExtension* extension);

// Barriers for pointers embedded in code via relocation info.
V8_EXPORT_PRIVATE void Heap_GenerationalBarrierForCodeSlow(Code host,
                                                           RelocInfo* rinfo,
                                                           HeapObject object);
V8_EXPORT_PRIVATE void Heap_MarkingBarrierForCodeSlow(Code host,
                                                      RelocInfo* rinfo,
                                                      HeapObject object);
// Marking barrier specialized for descriptor arrays, which track how many
// descriptors have already been marked.
V8_EXPORT_PRIVATE void Heap_MarkingBarrierForDescriptorArraySlow(
    Heap* heap, HeapObject host, HeapObject descriptor_array,
    int number_of_own_descriptors);

// Generational barrier for ephemeron (weak hash table) key slots.
V8_EXPORT_PRIVATE void Heap_GenerationalEphemeronKeyBarrierSlow(
    Heap* heap, EphemeronHashTable table, Address slot);
48
49 // Do not use these internal details anywhere outside of this file. These
50 // internals are only intended to shortcut write barrier checks.
51 namespace heap_internals {
52
// Minimal stand-in for the real MemoryChunk that peeks at a few header fields
// of the page an object lives on. The offsets and flag bits below must stay
// in sync with the actual MemoryChunk layout in src/heap — TODO confirm
// against src/heap when touching either side.
struct MemoryChunk {
  // Byte offset of the page flags word within the chunk header.
  static constexpr uintptr_t kFlagsOffset = kSizetSize;
  // Byte offset of the Heap* back-pointer within the chunk header.
  static constexpr uintptr_t kHeapOffset =
      kSizetSize + kUIntptrSize + kSystemPointerSize;
  // Individual flag bits inside the flags word.
  static constexpr uintptr_t kMarkingBit = uintptr_t{1} << 18;
  static constexpr uintptr_t kFromPageBit = uintptr_t{1} << 3;
  static constexpr uintptr_t kToPageBit = uintptr_t{1} << 4;
  static constexpr uintptr_t kReadOnlySpaceBit = uintptr_t{1} << 21;

  // Maps an object to its containing page by masking off the low address
  // bits. Not valid with a third-party heap, which has no V8 page layout.
  V8_INLINE static heap_internals::MemoryChunk* FromHeapObject(
      HeapObject object) {
    DCHECK(!V8_ENABLE_THIRD_PARTY_HEAP_BOOL);
    return reinterpret_cast<MemoryChunk*>(object.ptr() & ~kPageAlignmentMask);
  }

  // True while incremental marking is active on this page.
  V8_INLINE bool IsMarking() const { return GetFlags() & kMarkingBit; }

  // True if this page belongs to the young generation (either semispace).
  V8_INLINE bool InYoungGeneration() const {
    if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return false;
    constexpr uintptr_t kYoungGenerationMask = kFromPageBit | kToPageBit;
    return GetFlags() & kYoungGenerationMask;
  }

  // Reads the raw flags word from the chunk header.
  V8_INLINE uintptr_t GetFlags() const {
    return *reinterpret_cast<const uintptr_t*>(reinterpret_cast<Address>(this) +
                                               kFlagsOffset);
  }

  // Reads the owning Heap* from the chunk header; never null.
  V8_INLINE Heap* GetHeap() {
    Heap* heap = *reinterpret_cast<Heap**>(reinterpret_cast<Address>(this) +
                                           kHeapOffset);
    DCHECK_NOT_NULL(heap);
    return heap;
  }

  // True if this page lives in the read-only space.
  V8_INLINE bool InReadOnlySpace() const {
    return GetFlags() & kReadOnlySpaceBit;
  }
};
92
GenerationalBarrierInternal(HeapObject object,Address slot,HeapObject value)93 inline void GenerationalBarrierInternal(HeapObject object, Address slot,
94 HeapObject value) {
95 DCHECK(Heap_PageFlagsAreConsistent(object));
96 heap_internals::MemoryChunk* value_chunk =
97 heap_internals::MemoryChunk::FromHeapObject(value);
98 heap_internals::MemoryChunk* object_chunk =
99 heap_internals::MemoryChunk::FromHeapObject(object);
100
101 if (!value_chunk->InYoungGeneration() || object_chunk->InYoungGeneration()) {
102 return;
103 }
104
105 Heap_GenerationalBarrierSlow(object, slot, value);
106 }
107
GenerationalEphemeronKeyBarrierInternal(EphemeronHashTable table,Address slot,HeapObject value)108 inline void GenerationalEphemeronKeyBarrierInternal(EphemeronHashTable table,
109 Address slot,
110 HeapObject value) {
111 DCHECK(Heap::PageFlagsAreConsistent(table));
112 heap_internals::MemoryChunk* value_chunk =
113 heap_internals::MemoryChunk::FromHeapObject(value);
114 heap_internals::MemoryChunk* table_chunk =
115 heap_internals::MemoryChunk::FromHeapObject(table);
116
117 if (!value_chunk->InYoungGeneration() || table_chunk->InYoungGeneration()) {
118 return;
119 }
120
121 Heap_GenerationalEphemeronKeyBarrierSlow(table_chunk->GetHeap(), table, slot);
122 }
123
MarkingBarrierInternal(HeapObject object,Address slot,HeapObject value)124 inline void MarkingBarrierInternal(HeapObject object, Address slot,
125 HeapObject value) {
126 DCHECK(Heap_PageFlagsAreConsistent(object));
127 heap_internals::MemoryChunk* value_chunk =
128 heap_internals::MemoryChunk::FromHeapObject(value);
129
130 if (!value_chunk->IsMarking()) return;
131
132 Heap_MarkingBarrierSlow(object, slot, value);
133 }
134
135 } // namespace heap_internals
136
WriteBarrierForCode(Code host,RelocInfo * rinfo,Object value)137 inline void WriteBarrierForCode(Code host, RelocInfo* rinfo, Object value) {
138 DCHECK(!HasWeakHeapObjectTag(value));
139 if (!value.IsHeapObject()) return;
140 WriteBarrierForCode(host, rinfo, HeapObject::cast(value));
141 }
142
// Applies both barriers to a heap-object pointer embedded in |host| code:
// the generational barrier (old-to-new bookkeeping) and the marking barrier
// (incremental marking bookkeeping).
inline void WriteBarrierForCode(Code host, RelocInfo* rinfo, HeapObject value) {
  GenerationalBarrierForCode(host, rinfo, value);
  MarkingBarrierForCode(host, rinfo, value);
}
147
// Whole-object write barrier for |host|: unconditionally delegates to the
// slow path, which visits every embedded pointer of the Code object.
inline void WriteBarrierForCode(Code host) {
  Heap_WriteBarrierForCodeSlow(host);
}
151
MarkingBarrierForArrayBufferExtension(HeapObject object,ArrayBufferExtension * extension)152 inline void MarkingBarrierForArrayBufferExtension(
153 HeapObject object, ArrayBufferExtension* extension) {
154 heap_internals::MemoryChunk* object_chunk =
155 heap_internals::MemoryChunk::FromHeapObject(object);
156 if (!extension || !object_chunk->IsMarking()) return;
157 Heap_MarkingBarrierForArrayBufferExtensionSlow(object, extension);
158 }
159
GenerationalBarrier(HeapObject object,ObjectSlot slot,Object value)160 inline void GenerationalBarrier(HeapObject object, ObjectSlot slot,
161 Object value) {
162 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
163 DCHECK(!HasWeakHeapObjectTag(value));
164 if (!value.IsHeapObject()) return;
165 GenerationalBarrier(object, slot, HeapObject::cast(value));
166 }
167
GenerationalBarrier(HeapObject object,ObjectSlot slot,HeapObject value)168 inline void GenerationalBarrier(HeapObject object, ObjectSlot slot,
169 HeapObject value) {
170 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
171 DCHECK(!HasWeakHeapObjectTag(*slot));
172 heap_internals::GenerationalBarrierInternal(object, slot.address(),
173 HeapObject::cast(value));
174 }
175
GenerationalEphemeronKeyBarrier(EphemeronHashTable table,ObjectSlot slot,Object value)176 inline void GenerationalEphemeronKeyBarrier(EphemeronHashTable table,
177 ObjectSlot slot, Object value) {
178 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
179 DCHECK(!HasWeakHeapObjectTag(*slot));
180 DCHECK(!HasWeakHeapObjectTag(value));
181 DCHECK(value.IsHeapObject());
182 heap_internals::GenerationalEphemeronKeyBarrierInternal(
183 table, slot.address(), HeapObject::cast(value));
184 }
185
GenerationalBarrier(HeapObject object,MaybeObjectSlot slot,MaybeObject value)186 inline void GenerationalBarrier(HeapObject object, MaybeObjectSlot slot,
187 MaybeObject value) {
188 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
189 HeapObject value_heap_object;
190 if (!value->GetHeapObject(&value_heap_object)) return;
191 heap_internals::GenerationalBarrierInternal(object, slot.address(),
192 value_heap_object);
193 }
194
GenerationalBarrierForCode(Code host,RelocInfo * rinfo,HeapObject object)195 inline void GenerationalBarrierForCode(Code host, RelocInfo* rinfo,
196 HeapObject object) {
197 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
198 heap_internals::MemoryChunk* object_chunk =
199 heap_internals::MemoryChunk::FromHeapObject(object);
200 if (!object_chunk->InYoungGeneration()) return;
201 Heap_GenerationalBarrierForCodeSlow(host, rinfo, object);
202 }
203
MarkingBarrier(HeapObject object,ObjectSlot slot,Object value)204 inline void MarkingBarrier(HeapObject object, ObjectSlot slot, Object value) {
205 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
206 DCHECK(!HasWeakHeapObjectTag(value));
207 if (!value.IsHeapObject()) return;
208 MarkingBarrier(object, slot, HeapObject::cast(value));
209 }
210
MarkingBarrier(HeapObject object,ObjectSlot slot,HeapObject value)211 inline void MarkingBarrier(HeapObject object, ObjectSlot slot,
212 HeapObject value) {
213 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
214 DCHECK_IMPLIES(slot.address() != kNullAddress, !HasWeakHeapObjectTag(*slot));
215 heap_internals::MarkingBarrierInternal(object, slot.address(),
216 HeapObject::cast(value));
217 }
218
MarkingBarrier(HeapObject object,MaybeObjectSlot slot,MaybeObject value)219 inline void MarkingBarrier(HeapObject object, MaybeObjectSlot slot,
220 MaybeObject value) {
221 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
222 HeapObject value_heap_object;
223 if (!value->GetHeapObject(&value_heap_object)) return;
224 heap_internals::MarkingBarrierInternal(object, slot.address(),
225 value_heap_object);
226 }
227
MarkingBarrierForCode(Code host,RelocInfo * rinfo,HeapObject object)228 inline void MarkingBarrierForCode(Code host, RelocInfo* rinfo,
229 HeapObject object) {
230 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
231 DCHECK(!HasWeakHeapObjectTag(object));
232 heap_internals::MemoryChunk* object_chunk =
233 heap_internals::MemoryChunk::FromHeapObject(object);
234 if (!object_chunk->IsMarking()) return;
235 Heap_MarkingBarrierForCodeSlow(host, rinfo, object);
236 }
237
MarkingBarrierForDescriptorArray(Heap * heap,HeapObject host,HeapObject descriptor_array,int number_of_own_descriptors)238 inline void MarkingBarrierForDescriptorArray(Heap* heap, HeapObject host,
239 HeapObject descriptor_array,
240 int number_of_own_descriptors) {
241 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
242 heap_internals::MemoryChunk* chunk =
243 heap_internals::MemoryChunk::FromHeapObject(descriptor_array);
244 if (!chunk->IsMarking()) return;
245
246 Heap_MarkingBarrierForDescriptorArraySlow(heap, host, descriptor_array,
247 number_of_own_descriptors);
248 }
249
GetWriteBarrierModeForObject(HeapObject object,const DisallowHeapAllocation * promise)250 inline WriteBarrierMode GetWriteBarrierModeForObject(
251 HeapObject object, const DisallowHeapAllocation* promise) {
252 if (FLAG_disable_write_barriers) return SKIP_WRITE_BARRIER;
253 DCHECK(Heap_PageFlagsAreConsistent(object));
254 heap_internals::MemoryChunk* chunk =
255 heap_internals::MemoryChunk::FromHeapObject(object);
256 if (chunk->IsMarking()) return UPDATE_WRITE_BARRIER;
257 if (chunk->InYoungGeneration()) return SKIP_WRITE_BARRIER;
258 return UPDATE_WRITE_BARRIER;
259 }
260
ObjectInYoungGeneration(Object object)261 inline bool ObjectInYoungGeneration(Object object) {
262 // TODO(rong): Fix caller of this function when we deploy
263 // v8_use_third_party_heap.
264 if (FLAG_single_generation) return false;
265 if (object.IsSmi()) return false;
266 return heap_internals::MemoryChunk::FromHeapObject(HeapObject::cast(object))
267 ->InYoungGeneration();
268 }
269
IsReadOnlyHeapObject(HeapObject object)270 inline bool IsReadOnlyHeapObject(HeapObject object) {
271 heap_internals::MemoryChunk* chunk =
272 heap_internals::MemoryChunk::FromHeapObject(object);
273 return chunk->InReadOnlySpace();
274 }
275
276 } // namespace internal
277 } // namespace v8
278
279 #endif // V8_HEAP_HEAP_WRITE_BARRIER_INL_H_
280