// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Note 1: Any file that includes this one should include object-macros-undef.h
// at the bottom.

// Note 2: This file is deliberately missing the include guards (the undeffing
// approach wouldn't work otherwise).
//
// PRESUBMIT_INTENTIONALLY_MISSING_INCLUDE_GUARD

// The accessors with RELAXED_, ACQUIRE_, and RELEASE_ prefixes should be used
// for fields that can be written to and read from multiple threads at the same
// time. See comments in src/base/atomicops.h for the memory ordering semantics.
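//
// Illustrative sketch only (holder, value and kSomeOffset are placeholders,
// not real V8 names): a tagged field that a background thread may read while
// the main thread writes it would be accessed via the prefixed variants
// defined below, e.g.
//
//   Object o = RELAXED_READ_FIELD(holder, kSomeOffset);
//   RELAXED_WRITE_FIELD(holder, kSomeOffset, value);
//
// rather than via the plain, non-atomic WRITE_FIELD.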

#include "src/base/memory.h"

// Since this changes visibility, it should always be last in a class
// definition.
#define OBJECT_CONSTRUCTORS(Type, ...)             \
 public:                                           \
  constexpr Type() : __VA_ARGS__() {}              \
                                                   \
 protected:                                        \
  template <typename TFieldType, int kFieldOffset> \
  friend class TaggedField;                        \
                                                   \
  explicit inline Type(Address ptr)

#define OBJECT_CONSTRUCTORS_IMPL(Type, Super) \
  inline Type::Type(Address ptr) : Super(ptr) { SLOW_DCHECK(Is##Type()); }
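
// Usage sketch (Foo is a placeholder, not a real V8 class): the class
// declaration of Foo ends with OBJECT_CONSTRUCTORS(Foo, HeapObject); and the
// corresponding -inl.h file contains OBJECT_CONSTRUCTORS_IMPL(Foo, HeapObject),
// which defines Foo::Foo(Address ptr) and SLOW_DCHECKs IsFoo().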
// In these cases, we don't have our own instance type to check, so check the
// supertype instead. This happens for types denoting a NativeContext-dependent
// set of maps.
#define OBJECT_CONSTRUCTORS_IMPL_CHECK_SUPER(Type, Super) \
  inline Type::Type(Address ptr) : Super(ptr) { SLOW_DCHECK(Is##Super()); }

#define NEVER_READ_ONLY_SPACE   \
  inline Heap* GetHeap() const; \
  inline Isolate* GetIsolate() const;

// TODO(leszeks): Add checks in the factory that we never allocate these
// objects in RO space.
#define NEVER_READ_ONLY_SPACE_IMPL(Type)                                   \
  Heap* Type::GetHeap() const { return GetHeapFromWritableObject(*this); } \
  Isolate* Type::GetIsolate() const {                                      \
    return GetIsolateFromWritableObject(*this);                            \
  }
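
// NEVER_READ_ONLY_SPACE is intended for classes whose instances are never
// allocated in read-only space; that is what makes deriving the Heap and
// Isolate from a writable object pointer valid here.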

#define DECL_PRIMITIVE_ACCESSORS(name, type) \
  inline type name() const;                  \
  inline void set_##name(type value);

#define DECL_SYNCHRONIZED_PRIMITIVE_ACCESSORS(name, type) \
  inline type synchronized_##name() const;                \
  inline void synchronized_set_##name(type value);

#define DECL_BOOLEAN_ACCESSORS(name) DECL_PRIMITIVE_ACCESSORS(name, bool)

#define DECL_INT_ACCESSORS(name) DECL_PRIMITIVE_ACCESSORS(name, int)

#define DECL_SYNCHRONIZED_INT_ACCESSORS(name) \
  DECL_SYNCHRONIZED_PRIMITIVE_ACCESSORS(name, int)

#define DECL_INT32_ACCESSORS(name) DECL_PRIMITIVE_ACCESSORS(name, int32_t)

#define DECL_UINT16_ACCESSORS(name) \
  inline uint16_t name() const;     \
  inline void set_##name(int value);

#define DECL_INT16_ACCESSORS(name) \
  inline int16_t name() const;     \
  inline void set_##name(int16_t value);

#define DECL_UINT8_ACCESSORS(name) \
  inline uint8_t name() const;     \
  inline void set_##name(int value);
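
// For example (length is a placeholder field name), DECL_INT_ACCESSORS(length)
// declares
//
//   inline int length() const;
//   inline void set_length(int value);
//
// with the definitions typically supplied in the -inl.h file, e.g. via
// INT_ACCESSORS(Holder, length, kLengthOffset) below (Holder and kLengthOffset
// are placeholders as well).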

// TODO(ishell): eventually isolate-less getters should not be used anymore.
// For full pointer-mode the C++ compiler should optimize away the unused
// isolate parameter.
#define DECL_GETTER(name, type) \
  inline type name() const;     \
  inline type name(const Isolate* isolate) const;

#define DEF_GETTER(holder, name, type)                     \
  type holder::name() const {                              \
    const Isolate* isolate = GetIsolateForPtrCompr(*this); \
    return holder::name(isolate);                          \
  }                                                        \
  type holder::name(const Isolate* isolate) const
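
// DEF_GETTER is used at definition sites to emit both overloads at once.
// A sketch (JSFoo and kBarOffset are placeholders):
//
//   DEF_GETTER(JSFoo, bar, Object) {
//     return TaggedField<Object, kBarOffset>::load(isolate, *this);
//   }
//
// defines JSFoo::bar() const, which derives the isolate itself, as well as
// JSFoo::bar(const Isolate*) const.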

#define DECL_ACCESSORS(name, type)   \
  DECL_GETTER(name, type)            \
  inline void set_##name(type value, \
                         WriteBarrierMode mode = UPDATE_WRITE_BARRIER);

#define DECL_CAST(Type)                                 \
  V8_INLINE static Type cast(Object object);            \
  V8_INLINE static Type unchecked_cast(Object object) { \
    return bit_cast<Type>(object);                      \
  }

#define CAST_ACCESSOR(Type) \
  Type Type::cast(Object object) { return Type(object.ptr()); }
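
// Note that CAST_ACCESSOR merely re-wraps the raw pointer; any type check
// happens in the Type(Address) constructor generated by
// OBJECT_CONSTRUCTORS_IMPL above, and only when slow DCHECKs are enabled.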

#define INT_ACCESSORS(holder, name, offset)                   \
  int holder::name() const { return ReadField<int>(offset); } \
  void holder::set_##name(int value) { WriteField<int>(offset, value); }

#define INT32_ACCESSORS(holder, name, offset)                         \
  int32_t holder::name() const { return ReadField<int32_t>(offset); } \
  void holder::set_##name(int32_t value) { WriteField<int32_t>(offset, value); }

#define RELAXED_INT32_ACCESSORS(holder, name, offset) \
  int32_t holder::name() const {                      \
    return RELAXED_READ_INT32_FIELD(*this, offset);   \
  }                                                   \
  void holder::set_##name(int32_t value) {            \
    RELAXED_WRITE_INT32_FIELD(*this, offset, value);  \
  }

#define UINT16_ACCESSORS(holder, name, offset)                          \
  uint16_t holder::name() const { return ReadField<uint16_t>(offset); } \
  void holder::set_##name(int value) {                                  \
    DCHECK_GE(value, 0);                                                \
    DCHECK_LE(value, static_cast<uint16_t>(-1));                        \
    WriteField<uint16_t>(offset, value);                                \
  }

#define UINT8_ACCESSORS(holder, name, offset)                         \
  uint8_t holder::name() const { return ReadField<uint8_t>(offset); } \
  void holder::set_##name(int value) {                                \
    DCHECK_GE(value, 0);                                              \
    DCHECK_LE(value, static_cast<uint8_t>(-1));                       \
    WriteField<uint8_t>(offset, value);                               \
  }

#define ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \
                           set_condition)                             \
  DEF_GETTER(holder, name, type) {                                    \
    type value = TaggedField<type, offset>::load(isolate, *this);     \
    DCHECK(get_condition);                                            \
    return value;                                                     \
  }                                                                   \
  void holder::set_##name(type value, WriteBarrierMode mode) {        \
    DCHECK(set_condition);                                            \
    TaggedField<type, offset>::store(*this, value);                   \
    CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);            \
  }

#define ACCESSORS_CHECKED(holder, name, type, offset, condition) \
  ACCESSORS_CHECKED2(holder, name, type, offset, condition, condition)

#define ACCESSORS(holder, name, type, offset) \
  ACCESSORS_CHECKED(holder, name, type, offset, true)
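
// Typical pairing (JSFoo and kBarOffset are placeholders): the class
// declaration uses DECL_ACCESSORS(bar, Object) and the -inl.h file uses
// ACCESSORS(JSFoo, bar, Object, kBarOffset), which emits a getter/setter pair
// built on TaggedField loads/stores plus the conditional write barrier.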

#define SYNCHRONIZED_ACCESSORS_CHECKED2(holder, name, type, offset,       \
                                        get_condition, set_condition)     \
  DEF_GETTER(holder, name, type) {                                        \
    type value = TaggedField<type, offset>::Acquire_Load(isolate, *this); \
    DCHECK(get_condition);                                                \
    return value;                                                         \
  }                                                                       \
  void holder::set_##name(type value, WriteBarrierMode mode) {            \
    DCHECK(set_condition);                                                \
    TaggedField<type, offset>::Release_Store(*this, value);               \
    CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);                \
  }

#define SYNCHRONIZED_ACCESSORS_CHECKED(holder, name, type, offset, condition) \
  SYNCHRONIZED_ACCESSORS_CHECKED2(holder, name, type, offset, condition,      \
                                  condition)

#define SYNCHRONIZED_ACCESSORS(holder, name, type, offset) \
  SYNCHRONIZED_ACCESSORS_CHECKED(holder, name, type, offset, true)
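
// The SYNCHRONIZED_* variants differ from ACCESSORS only in using an acquire
// load in the getter and a release store in the setter, which makes them the
// variants to pick for tagged fields published to other threads.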

#define WEAK_ACCESSORS_CHECKED2(holder, name, offset, get_condition,  \
                                set_condition)                        \
  DEF_GETTER(holder, name, MaybeObject) {                             \
    MaybeObject value =                                               \
        TaggedField<MaybeObject, offset>::load(isolate, *this);       \
    DCHECK(get_condition);                                            \
    return value;                                                     \
  }                                                                   \
  void holder::set_##name(MaybeObject value, WriteBarrierMode mode) { \
    DCHECK(set_condition);                                            \
    TaggedField<MaybeObject, offset>::store(*this, value);            \
    CONDITIONAL_WEAK_WRITE_BARRIER(*this, offset, value, mode);       \
  }

#define WEAK_ACCESSORS_CHECKED(holder, name, offset, condition) \
  WEAK_ACCESSORS_CHECKED2(holder, name, offset, condition, condition)

#define WEAK_ACCESSORS(holder, name, offset) \
  WEAK_ACCESSORS_CHECKED(holder, name, offset, true)

// Accessors that read a Smi field as an int and write an int as a Smi.
#define SMI_ACCESSORS_CHECKED(holder, name, offset, condition)   \
  int holder::name() const {                                     \
    DCHECK(condition);                                           \
    Smi value = TaggedField<Smi, offset>::load(*this);           \
    return value.value();                                        \
  }                                                              \
  void holder::set_##name(int value) {                           \
    DCHECK(condition);                                           \
    TaggedField<Smi, offset>::store(*this, Smi::FromInt(value)); \
  }

#define SMI_ACCESSORS(holder, name, offset) \
  SMI_ACCESSORS_CHECKED(holder, name, offset, true)
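
// For example (JSFoo and kLengthOffset are placeholders),
// SMI_ACCESSORS(JSFoo, length, kLengthOffset) lets callers use foo.length()
// and foo.set_length(3) while the field itself holds the Smi encoding of the
// value.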

#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)                 \
  int holder::synchronized_##name() const {                              \
    Smi value = TaggedField<Smi, offset>::Acquire_Load(*this);           \
    return value.value();                                                \
  }                                                                      \
  void holder::synchronized_set_##name(int value) {                      \
    TaggedField<Smi, offset>::Release_Store(*this, Smi::FromInt(value)); \
  }

#define RELAXED_SMI_ACCESSORS(holder, name, offset)                      \
  int holder::relaxed_read_##name() const {                              \
    Smi value = TaggedField<Smi, offset>::Relaxed_Load(*this);           \
    return value.value();                                                \
  }                                                                      \
  void holder::relaxed_write_##name(int value) {                         \
    TaggedField<Smi, offset>::Relaxed_Store(*this, Smi::FromInt(value)); \
  }

#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() const { return BooleanBit::get(field(), offset); }

#define BOOL_ACCESSORS(holder, field, name, offset)                      \
  bool holder::name() const { return BooleanBit::get(field(), offset); } \
  void holder::set_##name(bool value) {                                  \
    set_##field(BooleanBit::set(field(), offset, value));                \
  }

#define BIT_FIELD_ACCESSORS(holder, field, name, BitField)      \
  typename BitField::FieldType holder::name() const {           \
    return BitField::decode(field());                           \
  }                                                             \
  void holder::set_##name(typename BitField::FieldType value) { \
    set_##field(BitField::update(field(), value));              \
  }
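
// Usage sketch (Widget, flags and HasFooBit are placeholders): given an
// int-valued flags() accessor and a bit field definition such as
//
//   using HasFooBit = base::BitField<bool, 0, 1>;
//
// BIT_FIELD_ACCESSORS(Widget, flags, has_foo, HasFooBit) defines
// Widget::has_foo() and Widget::set_has_foo(), which decode and update the
// corresponding bits inside flags().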

#define INSTANCE_TYPE_CHECKER(type, forinstancetype)    \
  V8_INLINE bool Is##type(InstanceType instance_type) { \
    return instance_type == forinstancetype;            \
  }

#define TYPE_CHECKER(type, ...)                                         \
  DEF_GETTER(HeapObject, Is##type, bool) {                              \
    return InstanceTypeChecker::Is##type(map(isolate).instance_type()); \
  }
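
// E.g. TYPE_CHECKER(Foo) (Foo is a placeholder) defines HeapObject::IsFoo()
// by reading the instance type from the object's map and dispatching to the
// matching InstanceTypeChecker::IsFoo() predicate.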

#define RELAXED_INT16_ACCESSORS(holder, name, offset) \
  int16_t holder::name() const {                      \
    return RELAXED_READ_INT16_FIELD(*this, offset);   \
  }                                                   \
  void holder::set_##name(int16_t value) {            \
    RELAXED_WRITE_INT16_FIELD(*this, offset, value);  \
  }

#define FIELD_ADDR(p, offset) ((p).ptr() + offset - kHeapObjectTag)
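
// FIELD_ADDR converts a tagged object pointer plus a field offset into the
// untagged address of that field by stripping the heap object tag.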

#define ACQUIRE_READ_FIELD(p, offset) \
  TaggedField<Object>::Acquire_Load(p, offset)

#define RELAXED_READ_FIELD(p, offset) \
  TaggedField<Object>::Relaxed_Load(p, offset)

#define RELAXED_READ_WEAK_FIELD(p, offset) \
  TaggedField<MaybeObject>::Relaxed_Load(p, offset)

#define WRITE_FIELD(p, offset, value) \
  TaggedField<Object>::store(p, offset, value)

#define RELEASE_WRITE_FIELD(p, offset, value) \
  TaggedField<Object>::Release_Store(p, offset, value)

#define RELAXED_WRITE_FIELD(p, offset, value) \
  TaggedField<Object>::Relaxed_Store(p, offset, value)

#define RELAXED_WRITE_WEAK_FIELD(p, offset, value) \
  TaggedField<MaybeObject>::Relaxed_Store(p, offset, value)

#ifdef V8_DISABLE_WRITE_BARRIERS
#define WRITE_BARRIER(object, offset, value)
#else
#define WRITE_BARRIER(object, offset, value)                       \
  do {                                                             \
    DCHECK_NOT_NULL(GetHeapFromWritableObject(object));            \
    MarkingBarrier(object, (object).RawField(offset), value);      \
    GenerationalBarrier(object, (object).RawField(offset), value); \
  } while (false)
#endif

#ifdef V8_DISABLE_WRITE_BARRIERS
#define WEAK_WRITE_BARRIER(object, offset, value)
#else
#define WEAK_WRITE_BARRIER(object, offset, value)                           \
  do {                                                                      \
    DCHECK_NOT_NULL(GetHeapFromWritableObject(object));                     \
    MarkingBarrier(object, (object).RawMaybeWeakField(offset), value);      \
    GenerationalBarrier(object, (object).RawMaybeWeakField(offset), value); \
  } while (false)
#endif

#ifdef V8_DISABLE_WRITE_BARRIERS
#define EPHEMERON_KEY_WRITE_BARRIER(object, offset, value)
#else
#define EPHEMERON_KEY_WRITE_BARRIER(object, offset, value)                    \
  do {                                                                        \
    DCHECK_NOT_NULL(GetHeapFromWritableObject(object));                       \
    EphemeronHashTable table = EphemeronHashTable::cast(object);              \
    MarkingBarrier(object, (object).RawField(offset), value);                 \
    GenerationalEphemeronKeyBarrier(table, (object).RawField(offset), value); \
  } while (false)
#endif
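
// The CONDITIONAL_* barrier variants below take the WriteBarrierMode that the
// ACCESSORS-style setters thread through: SKIP_WRITE_BARRIER elides both
// barriers, UPDATE_WRITE_BARRIER performs both the marking and the
// generational barrier, and any other (non-skip) mode still performs the
// generational barrier.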

#ifdef V8_DISABLE_WRITE_BARRIERS
#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode)
#else
#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode)       \
  do {                                                               \
    DCHECK_NOT_NULL(GetHeapFromWritableObject(object));              \
    DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER);             \
    if (mode != SKIP_WRITE_BARRIER) {                                \
      if (mode == UPDATE_WRITE_BARRIER) {                            \
        MarkingBarrier(object, (object).RawField(offset), value);    \
      }                                                              \
      GenerationalBarrier(object, (object).RawField(offset), value); \
    }                                                                \
  } while (false)
#endif

#ifdef V8_DISABLE_WRITE_BARRIERS
#define CONDITIONAL_WEAK_WRITE_BARRIER(object, offset, value, mode)
#else
#define CONDITIONAL_WEAK_WRITE_BARRIER(object, offset, value, mode)           \
  do {                                                                        \
    DCHECK_NOT_NULL(GetHeapFromWritableObject(object));                       \
    DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER);                      \
    if (mode != SKIP_WRITE_BARRIER) {                                         \
      if (mode == UPDATE_WRITE_BARRIER) {                                     \
        MarkingBarrier(object, (object).RawMaybeWeakField(offset), value);    \
      }                                                                       \
      GenerationalBarrier(object, (object).RawMaybeWeakField(offset), value); \
    }                                                                         \
  } while (false)
#endif

#ifdef V8_DISABLE_WRITE_BARRIERS
#define CONDITIONAL_EPHEMERON_KEY_WRITE_BARRIER(object, offset, value, mode)
#else
#define CONDITIONAL_EPHEMERON_KEY_WRITE_BARRIER(object, offset, value, mode) \
  do {                                                                       \
    DCHECK_NOT_NULL(GetHeapFromWritableObject(object));                      \
    DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER);                     \
    EphemeronHashTable table = EphemeronHashTable::cast(object);             \
    if (mode != SKIP_WRITE_BARRIER) {                                        \
      if (mode == UPDATE_WRITE_BARRIER) {                                    \
        MarkingBarrier(object, (object).RawField(offset), value);            \
      }                                                                      \
      GenerationalEphemeronKeyBarrier(table, (object).RawField(offset),      \
                                      value);                                \
    }                                                                        \
  } while (false)
#endif

#define ACQUIRE_READ_INT32_FIELD(p, offset) \
  static_cast<int32_t>(base::Acquire_Load(  \
      reinterpret_cast<const base::Atomic32*>(FIELD_ADDR(p, offset))))

#define RELAXED_WRITE_INT8_FIELD(p, offset, value)                             \
  base::Relaxed_Store(reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
                      static_cast<base::Atomic8>(value));
#define RELAXED_READ_INT8_FIELD(p, offset) \
  static_cast<int8_t>(base::Relaxed_Load(  \
      reinterpret_cast<const base::Atomic8*>(FIELD_ADDR(p, offset))))

#define RELAXED_READ_INT16_FIELD(p, offset) \
  static_cast<int16_t>(base::Relaxed_Load(  \
      reinterpret_cast<const base::Atomic16*>(FIELD_ADDR(p, offset))))

#define RELAXED_WRITE_INT16_FIELD(p, offset, value)             \
  base::Relaxed_Store(                                          \
      reinterpret_cast<base::Atomic16*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic16>(value));

#define RELAXED_READ_UINT32_FIELD(p, offset) \
  static_cast<uint32_t>(base::Relaxed_Load(  \
      reinterpret_cast<const base::Atomic32*>(FIELD_ADDR(p, offset))))

#define RELAXED_WRITE_UINT32_FIELD(p, offset, value)            \
  base::Relaxed_Store(                                          \
      reinterpret_cast<base::Atomic32*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic32>(value));

#define RELAXED_READ_INT32_FIELD(p, offset) \
  static_cast<int32_t>(base::Relaxed_Load(  \
      reinterpret_cast<const base::Atomic32*>(FIELD_ADDR(p, offset))))

#define RELEASE_WRITE_INT32_FIELD(p, offset, value)             \
  base::Release_Store(                                          \
      reinterpret_cast<base::Atomic32*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic32>(value))

#define RELAXED_WRITE_INT32_FIELD(p, offset, value)             \
  base::Relaxed_Store(                                          \
      reinterpret_cast<base::Atomic32*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic32>(value));

#define RELAXED_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::Relaxed_Load(    \
      reinterpret_cast<const base::Atomic8*>(FIELD_ADDR(p, offset))))

#define RELAXED_WRITE_BYTE_FIELD(p, offset, value)                             \
  base::Relaxed_Store(reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
                      static_cast<base::Atomic8>(value));
#ifdef OBJECT_PRINT
#define DECL_PRINTER(Name) void Name##Print(std::ostream& os);  // NOLINT
#else
#define DECL_PRINTER(Name)
#endif

#ifdef VERIFY_HEAP
#define DECL_VERIFIER(Name) void Name##Verify(Isolate* isolate);
#define EXPORT_DECL_VERIFIER(Name) \
  V8_EXPORT_PRIVATE void Name##Verify(Isolate* isolate);
#else
#define DECL_VERIFIER(Name)
#define EXPORT_DECL_VERIFIER(Name)
#endif

#define DEFINE_DEOPT_ELEMENT_ACCESSORS(name, type) \
  type DeoptimizationData::name() const {          \
    return type::cast(get(k##name##Index));        \
  }                                                \
  void DeoptimizationData::Set##name(type value) { set(k##name##Index, value); }

#define DEFINE_DEOPT_ENTRY_ACCESSORS(name, type)                \
  type DeoptimizationData::name(int i) const {                  \
    return type::cast(get(IndexForEntry(i) + k##name##Offset)); \
  }                                                             \
  void DeoptimizationData::Set##name(int i, type value) {       \
    set(IndexForEntry(i) + k##name##Offset, value);             \
  }

#define TQ_OBJECT_CONSTRUCTORS(Type)               \
 public:                                           \
  constexpr Type() = default;                      \
                                                   \
 protected:                                        \
  template <typename TFieldType, int kFieldOffset> \
  friend class TaggedField;                        \
                                                   \
  inline explicit Type(Address ptr);               \
  friend class TorqueGenerated##Type<Type, Super>;

#define TQ_OBJECT_CONSTRUCTORS_IMPL(Type) \
  inline Type::Type(Address ptr)          \
      : TorqueGenerated##Type<Type, Type::Super>(ptr) {}
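
// Torque-generated classes use the TQ_ variants analogously (Foo is a
// placeholder): the class declaration of Foo, which derives from
// TorqueGeneratedFoo<Foo, Super>, ends with TQ_OBJECT_CONSTRUCTORS(Foo) and
// the -inl.h file contains TQ_OBJECT_CONSTRUCTORS_IMPL(Foo).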