// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_MAP_INL_H_
#define V8_OBJECTS_MAP_INL_H_

#include "src/heap/heap-write-barrier-inl.h"
#include "src/objects/api-callbacks-inl.h"
#include "src/objects/cell-inl.h"
#include "src/objects/descriptor-array-inl.h"
#include "src/objects/field-type.h"
#include "src/objects/instance-type-inl.h"
#include "src/objects/js-function-inl.h"
#include "src/objects/map.h"
#include "src/objects/objects-inl.h"
#include "src/objects/property.h"
#include "src/objects/prototype-info-inl.h"
#include "src/objects/shared-function-info-inl.h"
#include "src/objects/templates-inl.h"
#include "src/objects/transitions-inl.h"
#include "src/objects/transitions.h"

#if V8_ENABLE_WEBASSEMBLY
#include "src/wasm/wasm-objects-inl.h"
#endif  // V8_ENABLE_WEBASSEMBLY

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

#include "torque-generated/src/objects/map-tq-inl.inc"

TQ_OBJECT_CONSTRUCTORS_IMPL(Map)

ACCESSORS(Map, instance_descriptors, DescriptorArray,
          kInstanceDescriptorsOffset)
RELAXED_ACCESSORS(Map, instance_descriptors, DescriptorArray,
                  kInstanceDescriptorsOffset)
RELEASE_ACQUIRE_ACCESSORS(Map, instance_descriptors, DescriptorArray,
                          kInstanceDescriptorsOffset)
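
// Note: |instance_descriptors| deliberately comes in three accessor flavors:
// plain for main-thread-only code, relaxed for racy reads where no ordering
// is required, and release/acquire for publishing a freshly allocated
// descriptor array so concurrent threads observe its initializing stores.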

// A freshly allocated layout descriptor can be set on an existing map.
// We need to use release-store and acquire-load accessor pairs to ensure
// that the concurrent marking thread observes initializing stores of the
// layout descriptor.
WEAK_ACCESSORS(Map, raw_transitions, kTransitionsOrPrototypeInfoOffset)
RELEASE_ACQUIRE_WEAK_ACCESSORS(Map, raw_transitions,
                               kTransitionsOrPrototypeInfoOffset)

ACCESSORS_CHECKED2(Map, prototype, HeapObject, kPrototypeOffset, true,
                   value.IsNull() || value.IsJSReceiver())

DEF_GETTER(Map, prototype_info, Object) {
  Object value = TaggedField<Object, kTransitionsOrPrototypeInfoOffset>::load(
      cage_base, *this);
  DCHECK(this->is_prototype_map());
  return value;
}
RELEASE_ACQUIRE_ACCESSORS(Map, prototype_info, Object,
                          kTransitionsOrPrototypeInfoOffset)

// |bit_field| fields.
// Concurrent access to |has_prototype_slot| and |has_non_instance_prototype|
// is explicitly allowlisted here. The former is never modified after the map
// is set up, but it is read by the concurrent marker when pointer compression
// is enabled. The latter bit can be modified on live objects.
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_non_instance_prototype,
                    Map::Bits1::HasNonInstancePrototypeBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_prototype_slot,
                    Map::Bits1::HasPrototypeSlotBit)

// These can be written non-atomically since we don't have data races.
// However, they have to be read atomically from the background since the
// |bit_field| as a whole can mutate when using the above setters.
BIT_FIELD_ACCESSORS2(Map, relaxed_bit_field, bit_field, is_callable,
                     Map::Bits1::IsCallableBit)
BIT_FIELD_ACCESSORS2(Map, relaxed_bit_field, bit_field, has_named_interceptor,
                     Map::Bits1::HasNamedInterceptorBit)
BIT_FIELD_ACCESSORS2(Map, relaxed_bit_field, bit_field, has_indexed_interceptor,
                     Map::Bits1::HasIndexedInterceptorBit)
BIT_FIELD_ACCESSORS2(Map, relaxed_bit_field, bit_field, is_undetectable,
                     Map::Bits1::IsUndetectableBit)
BIT_FIELD_ACCESSORS2(Map, relaxed_bit_field, bit_field, is_access_check_needed,
                     Map::Bits1::IsAccessCheckNeededBit)
BIT_FIELD_ACCESSORS2(Map, relaxed_bit_field, bit_field, is_constructor,
                     Map::Bits1::IsConstructorBit)

// |bit_field2| fields.
BIT_FIELD_ACCESSORS(Map, bit_field2, new_target_is_base,
                    Map::Bits2::NewTargetIsBaseBit)
BIT_FIELD_ACCESSORS(Map, bit_field2, is_immutable_proto,
                    Map::Bits2::IsImmutablePrototypeBit)

// |bit_field3| fields.
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field3, owns_descriptors,
                    Map::Bits3::OwnsDescriptorsBit)
BIT_FIELD_ACCESSORS(Map, release_acquire_bit_field3, is_deprecated,
                    Map::Bits3::IsDeprecatedBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field3, is_in_retained_map_list,
                    Map::Bits3::IsInRetainedMapListBit)
BIT_FIELD_ACCESSORS(Map, release_acquire_bit_field3, is_prototype_map,
                    Map::Bits3::IsPrototypeMapBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field3, is_migration_target,
                    Map::Bits3::IsMigrationTargetBit)
BIT_FIELD_ACCESSORS2(Map, relaxed_bit_field3, bit_field3, is_extensible,
                     Map::Bits3::IsExtensibleBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, may_have_interesting_symbols,
                    Map::Bits3::MayHaveInterestingSymbolsBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field3, construction_counter,
                    Map::Bits3::ConstructionCounterBits)

DEF_GETTER(Map, GetNamedInterceptor, InterceptorInfo) {
  DCHECK(has_named_interceptor());
  FunctionTemplateInfo info = GetFunctionTemplateInfo(cage_base);
  return InterceptorInfo::cast(info.GetNamedPropertyHandler(cage_base));
}

DEF_GETTER(Map, GetIndexedInterceptor, InterceptorInfo) {
  DCHECK(has_indexed_interceptor());
  FunctionTemplateInfo info = GetFunctionTemplateInfo(cage_base);
  return InterceptorInfo::cast(info.GetIndexedPropertyHandler(cage_base));
}

// static
bool Map::IsMostGeneralFieldType(Representation representation,
                                 FieldType field_type) {
  return !representation.IsHeapObject() || field_type.IsAny();
}

// static
bool Map::FieldTypeIsCleared(Representation rep, FieldType type) {
  return type.IsNone() && rep.IsHeapObject();
}

// static
bool Map::CanHaveFastTransitionableElementsKind(InstanceType instance_type) {
  return instance_type == JS_ARRAY_TYPE ||
         instance_type == JS_PRIMITIVE_WRAPPER_TYPE ||
         instance_type == JS_ARGUMENTS_OBJECT_TYPE;
}

bool Map::CanHaveFastTransitionableElementsKind() const {
  return CanHaveFastTransitionableElementsKind(instance_type());
}

bool Map::IsDetached(Isolate* isolate) const {
  if (is_prototype_map()) return true;
  return instance_type() == JS_OBJECT_TYPE && NumberOfOwnDescriptors() > 0 &&
         GetBackPointer().IsUndefined(isolate);
}

// static
void Map::GeneralizeIfCanHaveTransitionableFastElementsKind(
    Isolate* isolate, InstanceType instance_type,
    Representation* representation, Handle<FieldType>* field_type) {
  if (CanHaveFastTransitionableElementsKind(instance_type)) {
    // We don't support propagation of field generalization through elements
    // kind transitions because they are inserted into the transition tree
    // before field transitions. In order to avoid the complexity of handling
    // such a case we ensure that all maps with transitionable elements kinds
    // have the most general field representation and type.
    *field_type = FieldType::Any(isolate);
    *representation = Representation::Tagged();
  }
}

Handle<Map> Map::Normalize(Isolate* isolate, Handle<Map> fast_map,
                           PropertyNormalizationMode mode, const char* reason) {
  return Normalize(isolate, fast_map, fast_map->elements_kind(), mode, reason);
}

bool Map::EquivalentToForNormalization(const Map other,
                                       PropertyNormalizationMode mode) const {
  return EquivalentToForNormalization(other, elements_kind(), mode);
}

bool Map::TooManyFastProperties(StoreOrigin store_origin) const {
  if (UnusedPropertyFields() != 0) return false;
  if (is_prototype_map()) return false;
  if (store_origin == StoreOrigin::kNamed) {
    int limit = std::max({kMaxFastProperties, GetInObjectProperties()});
    FieldCounts counts = GetFieldCounts();
    // Only count mutable fields so that objects with large numbers of
    // constant functions do not go to dictionary mode. That would be bad
    // because such objects have often been used as modules.
    int external = counts.mutable_count() - GetInObjectProperties();
    return external > limit || counts.GetTotal() > kMaxNumberOfDescriptors;
  } else {
    int limit = std::max({kFastPropertiesSoftLimit, GetInObjectProperties()});
    int external = NumberOfFields(ConcurrencyMode::kNotConcurrent) -
                   GetInObjectProperties();
    return external > limit;
  }
}
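
// Illustrative numbers (the constants live in map.h and may change): with 4
// in-object properties and an assumed kMaxFastProperties of 128, a named
// store keeps the map fast while the mutable out-of-object field count stays
// at or below 128 and the total descriptor count stays within
// kMaxNumberOfDescriptors.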

Name Map::GetLastDescriptorName(Isolate* isolate) const {
  return instance_descriptors(isolate).GetKey(LastAdded());
}

PropertyDetails Map::GetLastDescriptorDetails(Isolate* isolate) const {
  return instance_descriptors(isolate).GetDetails(LastAdded());
}

InternalIndex Map::LastAdded() const {
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK_GT(number_of_own_descriptors, 0);
  return InternalIndex(number_of_own_descriptors - 1);
}

int Map::NumberOfOwnDescriptors() const {
  return Bits3::NumberOfOwnDescriptorsBits::decode(
      release_acquire_bit_field3());
}

void Map::SetNumberOfOwnDescriptors(int number) {
  DCHECK_LE(number, instance_descriptors().number_of_descriptors());
  CHECK_LE(static_cast<unsigned>(number),
           static_cast<unsigned>(kMaxNumberOfDescriptors));
  set_release_acquire_bit_field3(
      Bits3::NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}

InternalIndex::Range Map::IterateOwnDescriptors() const {
  return InternalIndex::Range(NumberOfOwnDescriptors());
}

int Map::EnumLength() const {
  return Bits3::EnumLengthBits::decode(bit_field3());
}

void Map::SetEnumLength(int length) {
  if (length != kInvalidEnumCacheSentinel) {
    DCHECK_LE(length, NumberOfOwnDescriptors());
    CHECK_LE(static_cast<unsigned>(length),
             static_cast<unsigned>(kMaxNumberOfDescriptors));
  }
  set_relaxed_bit_field3(Bits3::EnumLengthBits::update(bit_field3(), length));
}

FixedArrayBase Map::GetInitialElements() const {
  FixedArrayBase result;
  if (has_fast_elements() || has_fast_string_wrapper_elements() ||
      has_any_nonextensible_elements()) {
    result = GetReadOnlyRoots().empty_fixed_array();
  } else if (has_typed_array_or_rab_gsab_typed_array_elements()) {
    result = GetReadOnlyRoots().empty_byte_array();
  } else if (has_dictionary_elements()) {
    result = GetReadOnlyRoots().empty_slow_element_dictionary();
  } else {
    UNREACHABLE();
  }
  DCHECK(!ObjectInYoungGeneration(result));
  return result;
}

VisitorId Map::visitor_id() const {
  return static_cast<VisitorId>(
      RELAXED_READ_BYTE_FIELD(*this, kVisitorIdOffset));
}

void Map::set_visitor_id(VisitorId id) {
  CHECK_LT(static_cast<unsigned>(id), 256);
  RELAXED_WRITE_BYTE_FIELD(*this, kVisitorIdOffset, static_cast<byte>(id));
}

int Map::instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(*this, kInstanceSizeInWordsOffset);
}

void Map::set_instance_size_in_words(int value) {
  RELAXED_WRITE_BYTE_FIELD(*this, kInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

int Map::instance_size() const {
  return instance_size_in_words() << kTaggedSizeLog2;
}

void Map::set_instance_size(int value) {
  CHECK(IsAligned(value, kTaggedSize));
  value >>= kTaggedSizeLog2;
  CHECK_LT(static_cast<unsigned>(value), 256);
  set_instance_size_in_words(value);
}
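
// Sizes are stored as a byte count of tagged words; e.g. with kTaggedSize ==
// 8 (so kTaggedSizeLog2 == 3), set_instance_size(80) stores 10 and
// instance_size() reads back 10 << 3 == 80 bytes (illustrative values).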

int Map::inobject_properties_start_or_constructor_function_index() const {
  // TODO(solanes, v8:7790, v8:11353): Make this and the setter non-atomic
  // when TSAN sees the map's store synchronization.
  return RELAXED_READ_BYTE_FIELD(
      *this, kInobjectPropertiesStartOrConstructorFunctionIndexOffset);
}

void Map::set_inobject_properties_start_or_constructor_function_index(
    int value) {
  CHECK_LT(static_cast<unsigned>(value), 256);
  RELAXED_WRITE_BYTE_FIELD(
      *this, kInobjectPropertiesStartOrConstructorFunctionIndexOffset,
      static_cast<byte>(value));
}

int Map::GetInObjectPropertiesStartInWords() const {
  DCHECK(IsJSObjectMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetInObjectPropertiesStartInWords(int value) {
  CHECK(IsJSObjectMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

int Map::GetInObjectProperties() const {
  DCHECK(IsJSObjectMap());
  return instance_size_in_words() - GetInObjectPropertiesStartInWords();
}

int Map::GetConstructorFunctionIndex() const {
  DCHECK(IsPrimitiveMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetConstructorFunctionIndex(int value) {
  CHECK(IsPrimitiveMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

int Map::GetInObjectPropertyOffset(int index) const {
  return (GetInObjectPropertiesStartInWords() + index) * kTaggedSize;
}
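
// E.g. if in-object properties start at word 3 and kTaggedSize is 8,
// GetInObjectPropertyOffset(2) == (3 + 2) * 8 == 40 bytes from the start of
// the object (illustrative values).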

Handle<Map> Map::AddMissingTransitionsForTesting(
    Isolate* isolate, Handle<Map> split_map,
    Handle<DescriptorArray> descriptors) {
  return AddMissingTransitions(isolate, split_map, descriptors);
}

InstanceType Map::instance_type() const {
  // TODO(solanes, v8:7790, v8:11353, v8:11945): Make this and the setter
  // non-atomic when TSAN sees the map's store synchronization.
  return static_cast<InstanceType>(
      RELAXED_READ_UINT16_FIELD(*this, kInstanceTypeOffset));
}

void Map::set_instance_type(InstanceType value) {
  RELAXED_WRITE_UINT16_FIELD(*this, kInstanceTypeOffset, value);
}

int Map::UnusedPropertyFields() const {
  int value = used_or_unused_instance_size_in_words();
  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
  int unused;
  if (value >= JSObject::kFieldsAdded) {
    unused = instance_size_in_words() - value;
  } else {
    // For out-of-object properties, the
    // "used_or_unused_instance_size_in_words" byte encodes the slack in the
    // property array.
    unused = value;
  }
  return unused;
}
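
// The |used_or_unused_instance_size_in_words| byte is overloaded; a sketch of
// the encoding with illustrative numbers: a value >= JSObject::kFieldsAdded
// is the used instance size in words, so a 10-word instance with 8 used
// words has 10 - 8 == 2 unused in-object fields, while a smaller value
// directly encodes the slack left in the out-of-object property array.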

int Map::UnusedInObjectProperties() const {
  // Like Map::UnusedPropertyFields(), but returns 0 for out-of-object
  // properties.
  int value = used_or_unused_instance_size_in_words();
  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
  if (value >= JSObject::kFieldsAdded) {
    return instance_size_in_words() - value;
  }
  return 0;
}

int Map::used_or_unused_instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(*this, kUsedOrUnusedInstanceSizeInWordsOffset);
}

void Map::set_used_or_unused_instance_size_in_words(int value) {
  CHECK_LE(static_cast<unsigned>(value), 255);
  RELAXED_WRITE_BYTE_FIELD(*this, kUsedOrUnusedInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

int Map::UsedInstanceSize() const {
  int words = used_or_unused_instance_size_in_words();
  if (words < JSObject::kFieldsAdded) {
    // All in-object properties are used and this value tracks the slack
    // in the property array.
    return instance_size();
  }
  return words * kTaggedSize;
}

void Map::SetInObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
  if (!IsJSObjectMap()) {
    CHECK_EQ(0, value);
    set_used_or_unused_instance_size_in_words(0);
    DCHECK_EQ(0, UnusedPropertyFields());
    return;
  }
  CHECK_LE(0, value);
  DCHECK_LE(value, GetInObjectProperties());
  int used_inobject_properties = GetInObjectProperties() - value;
  set_used_or_unused_instance_size_in_words(
      GetInObjectPropertyOffset(used_inobject_properties) / kTaggedSize);
  DCHECK_EQ(value, UnusedPropertyFields());
}

void Map::SetOutOfObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
  CHECK_LT(static_cast<unsigned>(value), JSObject::kFieldsAdded);
  // For out-of-object properties, the "used_or_unused_instance_size_in_words"
  // byte encodes the slack in the property array.
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(value, UnusedPropertyFields());
}

void Map::CopyUnusedPropertyFields(Map map) {
  set_used_or_unused_instance_size_in_words(
      map.used_or_unused_instance_size_in_words());
  DCHECK_EQ(UnusedPropertyFields(), map.UnusedPropertyFields());
}

void Map::CopyUnusedPropertyFieldsAdjustedForInstanceSize(Map map) {
  int value = map.used_or_unused_instance_size_in_words();
  if (value >= JSPrimitiveWrapper::kFieldsAdded) {
    // Unused in-object fields. Adjust the offset from the object's start
    // so it matches the distance to the object's end.
    value += instance_size_in_words() - map.instance_size_in_words();
  }
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(UnusedPropertyFields(), map.UnusedPropertyFields());
}

void Map::AccountAddedPropertyField() {
  // Update the used instance size and the number of unused property fields.
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
#ifdef DEBUG
  int new_unused = UnusedPropertyFields() - 1;
  if (new_unused < 0) new_unused += JSObject::kFieldsAdded;
#endif
  int value = used_or_unused_instance_size_in_words();
  if (value >= JSObject::kFieldsAdded) {
    if (value == instance_size_in_words()) {
      AccountAddedOutOfObjectPropertyField(0);
    } else {
      // The property is added in-object, so simply increment the counter.
      set_used_or_unused_instance_size_in_words(value + 1);
    }
  } else {
    AccountAddedOutOfObjectPropertyField(value);
  }
  DCHECK_EQ(new_unused, UnusedPropertyFields());
}

void Map::AccountAddedOutOfObjectPropertyField(int unused_in_property_array) {
  unused_in_property_array--;
  if (unused_in_property_array < 0) {
    unused_in_property_array += JSObject::kFieldsAdded;
  }
  CHECK_LT(static_cast<unsigned>(unused_in_property_array),
           JSObject::kFieldsAdded);
  set_used_or_unused_instance_size_in_words(unused_in_property_array);
  DCHECK_EQ(unused_in_property_array, UnusedPropertyFields());
}
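
// Worked example (illustrative, assuming JSObject::kFieldsAdded == 3): when
// the property array has no slack left, adding a field wraps the counter to
// 0 - 1 + 3 == 2, matching the two spare slots left over after the property
// array grows by kFieldsAdded entries.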

#if V8_ENABLE_WEBASSEMBLY
uint8_t Map::WasmByte1() const {
  DCHECK(IsWasmObjectMap());
  return inobject_properties_start_or_constructor_function_index();
}

uint8_t Map::WasmByte2() const {
  DCHECK(IsWasmObjectMap());
  return used_or_unused_instance_size_in_words();
}

void Map::SetWasmByte1(uint8_t value) {
  CHECK(IsWasmObjectMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

void Map::SetWasmByte2(uint8_t value) {
  CHECK(IsWasmObjectMap());
  set_used_or_unused_instance_size_in_words(value);
}
#endif  // V8_ENABLE_WEBASSEMBLY

byte Map::bit_field() const {
  // TODO(solanes, v8:7790, v8:11353): Make this non-atomic when TSAN sees the
  // map's store synchronization.
  return relaxed_bit_field();
}

void Map::set_bit_field(byte value) {
  // TODO(solanes, v8:7790, v8:11353): Make this non-atomic when TSAN sees the
  // map's store synchronization.
  set_relaxed_bit_field(value);
}

byte Map::relaxed_bit_field() const {
  return RELAXED_READ_BYTE_FIELD(*this, kBitFieldOffset);
}

void Map::set_relaxed_bit_field(byte value) {
  RELAXED_WRITE_BYTE_FIELD(*this, kBitFieldOffset, value);
}

byte Map::bit_field2() const { return ReadField<byte>(kBitField2Offset); }

void Map::set_bit_field2(byte value) {
  WriteField<byte>(kBitField2Offset, value);
}

uint32_t Map::bit_field3() const {
  // TODO(solanes, v8:7790, v8:11353): Make this and the setter non-atomic
  // when TSAN sees the map's store synchronization.
  return relaxed_bit_field3();
}

void Map::set_bit_field3(uint32_t value) { set_relaxed_bit_field3(value); }

uint32_t Map::relaxed_bit_field3() const {
  return RELAXED_READ_UINT32_FIELD(*this, kBitField3Offset);
}

void Map::set_relaxed_bit_field3(uint32_t value) {
  RELAXED_WRITE_UINT32_FIELD(*this, kBitField3Offset, value);
}

uint32_t Map::release_acquire_bit_field3() const {
  return ACQUIRE_READ_UINT32_FIELD(*this, kBitField3Offset);
}

void Map::set_release_acquire_bit_field3(uint32_t value) {
  RELEASE_WRITE_UINT32_FIELD(*this, kBitField3Offset, value);
}

bool Map::is_abandoned_prototype_map() const {
  return is_prototype_map() && !owns_descriptors();
}

bool Map::should_be_fast_prototype_map() const {
  if (!prototype_info().IsPrototypeInfo()) return false;
  return PrototypeInfo::cast(prototype_info()).should_be_fast_map();
}

void Map::set_elements_kind(ElementsKind elements_kind) {
  CHECK_LT(static_cast<int>(elements_kind), kElementsKindCount);
  set_bit_field2(
      Map::Bits2::ElementsKindBits::update(bit_field2(), elements_kind));
}

ElementsKind Map::elements_kind() const {
  return Map::Bits2::ElementsKindBits::decode(bit_field2());
}

bool Map::has_fast_smi_elements() const {
  return IsSmiElementsKind(elements_kind());
}

bool Map::has_fast_object_elements() const {
  return IsObjectElementsKind(elements_kind());
}

bool Map::has_fast_smi_or_object_elements() const {
  return IsSmiOrObjectElementsKind(elements_kind());
}

bool Map::has_fast_double_elements() const {
  return IsDoubleElementsKind(elements_kind());
}

bool Map::has_fast_elements() const {
  return IsFastElementsKind(elements_kind());
}

bool Map::has_sloppy_arguments_elements() const {
  return IsSloppyArgumentsElementsKind(elements_kind());
}

bool Map::has_fast_sloppy_arguments_elements() const {
  return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}

bool Map::has_fast_string_wrapper_elements() const {
  return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
}

bool Map::has_typed_array_elements() const {
  return IsTypedArrayElementsKind(elements_kind());
}

bool Map::has_rab_gsab_typed_array_elements() const {
  return IsRabGsabTypedArrayElementsKind(elements_kind());
}

bool Map::has_typed_array_or_rab_gsab_typed_array_elements() const {
  return IsTypedArrayOrRabGsabTypedArrayElementsKind(elements_kind());
}

bool Map::has_any_typed_array_or_wasm_array_elements() const {
  ElementsKind kind = elements_kind();
  return IsTypedArrayOrRabGsabTypedArrayElementsKind(kind) ||
#if V8_ENABLE_WEBASSEMBLY
         IsWasmArrayElementsKind(kind) ||
#endif  // V8_ENABLE_WEBASSEMBLY
         false;
}

bool Map::has_dictionary_elements() const {
  return IsDictionaryElementsKind(elements_kind());
}

bool Map::has_any_nonextensible_elements() const {
  return IsAnyNonextensibleElementsKind(elements_kind());
}

bool Map::has_nonextensible_elements() const {
  return IsNonextensibleElementsKind(elements_kind());
}

bool Map::has_sealed_elements() const {
  return IsSealedElementsKind(elements_kind());
}

bool Map::has_frozen_elements() const {
  return IsFrozenElementsKind(elements_kind());
}

void Map::set_is_dictionary_map(bool value) {
  uint32_t new_bit_field3 =
      Bits3::IsDictionaryMapBit::update(bit_field3(), value);
  new_bit_field3 = Bits3::IsUnstableBit::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}

bool Map::is_dictionary_map() const {
  return Bits3::IsDictionaryMapBit::decode(relaxed_bit_field3());
}

void Map::mark_unstable() {
  set_release_acquire_bit_field3(
      Bits3::IsUnstableBit::update(bit_field3(), true));
}

bool Map::is_stable() const {
  return !Bits3::IsUnstableBit::decode(release_acquire_bit_field3());
}

bool Map::CanBeDeprecated() const {
  for (InternalIndex i : IterateOwnDescriptors()) {
    PropertyDetails details = instance_descriptors(kRelaxedLoad).GetDetails(i);
    if (details.representation().MightCauseMapDeprecation()) return true;
    if (details.kind() == kData &&
        details.location() == PropertyLocation::kDescriptor) {
      return true;
    }
  }
  return false;
}

void Map::NotifyLeafMapLayoutChange(Isolate* isolate) {
  if (is_stable()) {
    mark_unstable();
    dependent_code().DeoptimizeDependentCodeGroup(
        DependentCode::kPrototypeCheckGroup);
  }
}

bool Map::CanTransition() const {
  // Only JSObject and subtypes have map transitions and back pointers.
  return InstanceTypeChecker::IsJSObject(instance_type());
}

#define DEF_TESTER(Type, ...)                              \
  bool Map::Is##Type##Map() const {                        \
    return InstanceTypeChecker::Is##Type(instance_type()); \
  }
INSTANCE_TYPE_CHECKERS(DEF_TESTER)
#undef DEF_TESTER

bool Map::IsBooleanMap() const {
  return *this == GetReadOnlyRoots().boolean_map();
}

bool Map::IsNullOrUndefinedMap() const {
  return *this == GetReadOnlyRoots().null_map() ||
         *this == GetReadOnlyRoots().undefined_map();
}

bool Map::IsPrimitiveMap() const {
  return instance_type() <= LAST_PRIMITIVE_HEAP_OBJECT_TYPE;
}

void Map::UpdateDescriptors(Isolate* isolate, DescriptorArray descriptors,
                            int number_of_own_descriptors) {
  SetInstanceDescriptors(isolate, descriptors, number_of_own_descriptors);
}

void Map::InitializeDescriptors(Isolate* isolate, DescriptorArray descriptors) {
  SetInstanceDescriptors(isolate, descriptors,
                         descriptors.number_of_descriptors());
}

void Map::clear_padding() {
  if (FIELD_SIZE(kOptionalPaddingOffset) == 0) return;
  DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
  memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
         FIELD_SIZE(kOptionalPaddingOffset));
}

void Map::AppendDescriptor(Isolate* isolate, Descriptor* desc) {
  DescriptorArray descriptors = instance_descriptors(isolate);
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors.number_of_descriptors() == number_of_own_descriptors);
  {
    // The following two operations need to happen before the marking write
    // barrier.
    descriptors.Append(desc);
    SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
#ifndef V8_DISABLE_WRITE_BARRIERS
    WriteBarrier::Marking(descriptors, number_of_own_descriptors + 1);
#endif
  }
  // Properly mark the map if the {desc} is an "interesting symbol".
  if (desc->GetKey()->IsInterestingSymbol()) {
    set_may_have_interesting_symbols(true);
  }
  PropertyDetails details = desc->GetDetails();
  if (details.location() == PropertyLocation::kField) {
    DCHECK_GT(UnusedPropertyFields(), 0);
    AccountAddedPropertyField();
  }

// This function does not support appending double field descriptors and
// it should never try to (otherwise, the layout descriptor would have to be
// updated too).
#ifdef DEBUG
  DCHECK(details.location() != PropertyLocation::kField ||
         !details.representation().IsDouble());
#endif
}

bool Map::ConcurrentIsMap(PtrComprCageBase cage_base,
                          const Object& object) const {
  return object.IsHeapObject() && HeapObject::cast(object).map(cage_base) ==
                                      GetReadOnlyRoots(cage_base).meta_map();
}

DEF_GETTER(Map, GetBackPointer, HeapObject) {
  Object object = constructor_or_back_pointer(cage_base, kRelaxedLoad);
  if (ConcurrentIsMap(cage_base, object)) {
    return Map::cast(object);
  }
  return GetReadOnlyRoots(cage_base).undefined_value();
}

void Map::SetBackPointer(HeapObject value, WriteBarrierMode mode) {
  CHECK_GE(instance_type(), FIRST_JS_RECEIVER_TYPE);
  CHECK(value.IsMap());
  CHECK(GetBackPointer().IsUndefined());
  CHECK_EQ(Map::cast(value).GetConstructor(), constructor_or_back_pointer());
  set_constructor_or_back_pointer(value, mode);
}
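
// |constructor_or_back_pointer| is an overloaded slot: a non-root map in a
// transition tree stores its parent map here (the back pointer), while the
// root map stores the constructor (or related metadata). GetConstructor
// below therefore chases back pointers until the value is no longer a Map.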

// static
Map Map::ElementsTransitionMap(Isolate* isolate, ConcurrencyMode cmode) {
  DisallowGarbageCollection no_gc;
  return TransitionsAccessor(isolate, *this, &no_gc,
                             cmode == ConcurrencyMode::kConcurrent)
      .SearchSpecial(ReadOnlyRoots(isolate).elements_transition_symbol());
}

ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, prototype_validity_cell, Object, kPrototypeValidityCellOffset)
ACCESSORS_CHECKED2(Map, constructor_or_back_pointer, Object,
                   kConstructorOrBackPointerOrNativeContextOffset,
                   !IsContextMap(), value.IsNull() || !IsContextMap())
RELAXED_ACCESSORS_CHECKED2(Map, constructor_or_back_pointer, Object,
                           kConstructorOrBackPointerOrNativeContextOffset,
                           !IsContextMap(), value.IsNull() || !IsContextMap())
ACCESSORS_CHECKED(Map, native_context, NativeContext,
                  kConstructorOrBackPointerOrNativeContextOffset,
                  IsContextMap())
ACCESSORS_CHECKED(Map, native_context_or_null, Object,
                  kConstructorOrBackPointerOrNativeContextOffset,
                  (value.IsNull() || value.IsNativeContext()) && IsContextMap())
#if V8_ENABLE_WEBASSEMBLY
ACCESSORS_CHECKED(Map, wasm_type_info, WasmTypeInfo,
                  kConstructorOrBackPointerOrNativeContextOffset,
                  IsWasmStructMap() || IsWasmArrayMap())
#endif  // V8_ENABLE_WEBASSEMBLY

bool Map::IsPrototypeValidityCellValid() const {
  Object validity_cell = prototype_validity_cell();
  Object value = validity_cell.IsSmi() ? Smi::cast(validity_cell)
                                       : Cell::cast(validity_cell).value();
  return value == Smi::FromInt(Map::kPrototypeChainValid);
}

DEF_GETTER(Map, GetConstructor, Object) {
  Object maybe_constructor = constructor_or_back_pointer(cage_base);
  // Follow any back pointers.
  while (ConcurrentIsMap(cage_base, maybe_constructor)) {
    maybe_constructor =
        Map::cast(maybe_constructor).constructor_or_back_pointer(cage_base);
  }
  return maybe_constructor;
}

Object Map::TryGetConstructor(Isolate* isolate, int max_steps) {
  Object maybe_constructor = constructor_or_back_pointer(isolate);
  // Follow any back pointers.
  while (maybe_constructor.IsMap(isolate)) {
    if (max_steps-- == 0) return Smi::FromInt(0);
    maybe_constructor =
        Map::cast(maybe_constructor).constructor_or_back_pointer(isolate);
  }
  return maybe_constructor;
}

DEF_GETTER(Map, GetFunctionTemplateInfo, FunctionTemplateInfo) {
  Object constructor = GetConstructor(cage_base);
  if (constructor.IsJSFunction(cage_base)) {
    // TODO(ishell): IsApiFunction(isolate) and get_api_func_data(isolate)
    DCHECK(JSFunction::cast(constructor).shared(cage_base).IsApiFunction());
    return JSFunction::cast(constructor).shared(cage_base).get_api_func_data();
  }
  DCHECK(constructor.IsFunctionTemplateInfo(cage_base));
  return FunctionTemplateInfo::cast(constructor);
}

void Map::SetConstructor(Object constructor, WriteBarrierMode mode) {
  // Never overwrite a back pointer with a constructor.
  CHECK(!constructor_or_back_pointer().IsMap());
  set_constructor_or_back_pointer(constructor, mode);
}

Handle<Map> Map::CopyInitialMap(Isolate* isolate, Handle<Map> map) {
  return CopyInitialMap(isolate, map, map->instance_size(),
                        map->GetInObjectProperties(),
                        map->UnusedPropertyFields());
}

bool Map::IsInobjectSlackTrackingInProgress() const {
  return construction_counter() != Map::kNoSlackTracking;
}

void Map::InobjectSlackTrackingStep(Isolate* isolate) {
  DisallowGarbageCollection no_gc;
  // Slack tracking should only be performed on an initial map.
  DCHECK(GetBackPointer().IsUndefined());
  if (!IsInobjectSlackTrackingInProgress()) return;
  int counter = construction_counter();
  set_construction_counter(counter - 1);
  if (counter == kSlackTrackingCounterEnd) {
    CompleteInobjectSlackTracking(isolate);
  }
}
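
// Sketch of the counter's lifecycle (constants assumed to be defined in
// map.h): an initial map starts at kSlackTrackingCounterStart, each
// constructed instance decrements it, and once the pre-decrement value hits
// kSlackTrackingCounterEnd the unused in-object space is shrunk away and the
// counter rests at kNoSlackTracking.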

int Map::SlackForArraySize(int old_size, int size_limit) {
  const int max_slack = size_limit - old_size;
  CHECK_LE(0, max_slack);
  if (old_size < 4) {
    DCHECK_LE(1, max_slack);
    return 1;
  }
  return std::min(max_slack, old_size / 4);
}
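
// Worked example (illustrative): growing an 8-element array with a size
// limit of 20 yields std::min(20 - 8, 8 / 4) == 2 slack slots; arrays with
// fewer than 4 elements always get exactly 1.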

int Map::InstanceSizeFromSlack(int slack) const {
  return instance_size() - slack * kTaggedSize;
}

OBJECT_CONSTRUCTORS_IMPL(NormalizedMapCache, WeakFixedArray)
CAST_ACCESSOR(NormalizedMapCache)
NEVER_READ_ONLY_SPACE_IMPL(NormalizedMapCache)

int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}
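
// E.g. assuming kEntries == 64 (see NormalizedMapCache for the real
// constant), a map whose Hash() is 1000 lands in slot 1000 % 64 == 40.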

DEF_GETTER(HeapObject, IsNormalizedMapCache, bool) {
  if (!IsWeakFixedArray(cage_base)) return false;
  if (WeakFixedArray::cast(*this).length() != NormalizedMapCache::kEntries) {
    return false;
  }
  return true;
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_MAP_INL_H_