// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/snapshot/deserializer.h"

#include "src/assembler-inl.h"
#include "src/isolate.h"
#include "src/objects/api-callbacks.h"
#include "src/objects/hash-table.h"
#include "src/objects/maybe-object.h"
#include "src/objects/string.h"
#include "src/snapshot/builtin-deserializer-allocator.h"
#include "src/snapshot/natives.h"
#include "src/snapshot/snapshot.h"

namespace v8 {
namespace internal {

template <class AllocatorT>
void Deserializer<AllocatorT>::Initialize(Isolate* isolate) {
  DCHECK_NULL(isolate_);
  DCHECK_NOT_NULL(isolate);
  isolate_ = isolate;
  DCHECK_NULL(external_reference_table_);
  external_reference_table_ = isolate->heap()->external_reference_table();
#ifdef DEBUG
  // Count the number of external references registered through the API.
  num_api_references_ = 0;
  if (isolate_->api_external_references() != nullptr) {
    while (isolate_->api_external_references()[num_api_references_] != 0) {
      num_api_references_++;
    }
  }
#endif  // DEBUG
  CHECK_EQ(magic_number_,
           SerializedData::ComputeMagicNumber(external_reference_table_));
}

template <class AllocatorT>
bool Deserializer<AllocatorT>::IsLazyDeserializationEnabled() const {
  return FLAG_lazy_deserialization && !isolate()->serializer_enabled();
}

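// Rehash all objects collected in to_rehash_ during deserialization (see
// PostProcessNewObject), based on their maps.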
template <class AllocatorT>
void Deserializer<AllocatorT>::Rehash() {
  DCHECK(can_rehash() || deserializing_user_code());
  for (const auto& item : to_rehash_) item->RehashBasedOnMap();
}

template <class AllocatorT>
Deserializer<AllocatorT>::~Deserializer() {
#ifdef DEBUG
  // Do not perform checks if we aborted deserialization.
  if (source_.position() == 0) return;
  // Check that we only have padding bytes remaining.
  while (source_.HasMore()) DCHECK_EQ(kNop, source_.Get());
  // Check that we've fully used all reserved space.
  DCHECK(allocator()->ReservationsAreFullyUsed());
#endif  // DEBUG
}

// This is called on the roots.  It is the driver of the deserialization
// process.  It is also called on the body of each function.
template <class AllocatorT>
void Deserializer<AllocatorT>::VisitRootPointers(Root root,
                                                 const char* description,
                                                 Object** start, Object** end) {
  // Builtins and bytecode handlers are deserialized in a separate pass by the
  // BuiltinDeserializer.
  if (root == Root::kBuiltins || root == Root::kDispatchTable) return;

  // The space must be new space.  Any other space would cause ReadChunk to try
  // to update the remembered set using nullptr as the address.
  ReadData(reinterpret_cast<MaybeObject**>(start),
           reinterpret_cast<MaybeObject**>(end), NEW_SPACE, kNullAddress);
}

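// Called at synchronization points while visiting roots; verifies that the
// next bytecode in the stream is kSynchronize, catching serializer /
// deserializer mismatches early.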
template <class AllocatorT>
void Deserializer<AllocatorT>::Synchronize(
    VisitorSynchronization::SyncTag tag) {
  static const byte expected = kSynchronize;
  CHECK_EQ(expected, source_.Get());
}

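// Deferred object bodies are deserialized here in a separate pass. Each entry
// in the stream optionally starts with an alignment prefix and otherwise
// encodes kNewObject plus the space of an already-allocated object, whose
// body (everything after the map word) is filled in and post-processed. The
// sequence is terminated by kSynchronize.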
template <class AllocatorT>
void Deserializer<AllocatorT>::DeserializeDeferredObjects() {
  for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) {
    switch (code) {
      case kAlignmentPrefix:
      case kAlignmentPrefix + 1:
      case kAlignmentPrefix + 2: {
        int alignment = code - (SerializerDeserializer::kAlignmentPrefix - 1);
        allocator()->SetAlignment(static_cast<AllocationAlignment>(alignment));
        break;
      }
      default: {
        int space = code & kSpaceMask;
        DCHECK_LE(space, kNumberOfSpaces);
        DCHECK_EQ(code - space, kNewObject);
        HeapObject* object = GetBackReferencedObject(space);
        int size = source_.GetInt() << kPointerSizeLog2;
        Address obj_address = object->address();
        MaybeObject** start =
            reinterpret_cast<MaybeObject**>(obj_address + kPointerSize);
        MaybeObject** end = reinterpret_cast<MaybeObject**>(obj_address + size);
        bool filled = ReadData(start, end, space, obj_address);
        CHECK(filled);
        DCHECK(CanBeDeferred(object));
        PostProcessNewObject(object, space);
      }
    }
  }
}

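// StringTableInsertionKey is used to canonicalize deserialized internalized
// strings against the isolate's existing string table (see
// PostProcessNewObject).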
StringTableInsertionKey::StringTableInsertionKey(String* string)
    : StringTableKey(ComputeHashField(string)), string_(string) {
  DCHECK(string->IsInternalizedString());
}

bool StringTableInsertionKey::IsMatch(Object* string) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (Hash() != String::cast(string)->Hash()) return false;
  // We want to compare the content of two internalized strings here.
  return string_->SlowEquals(String::cast(string));
}

Handle<String> StringTableInsertionKey::AsHandle(Isolate* isolate) {
  return handle(string_, isolate);
}

uint32_t StringTableInsertionKey::ComputeHashField(String* string) {
  // Make sure hash_field() is computed.
  string->Hash();
  return string->hash_field();
}

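// Post-processing fixes up state that cannot be represented directly in the
// snapshot: string hash fields are reset, internalized strings are
// canonicalized against the string table, allocation sites are linked into
// the heap's list, and external pointers (external string resources, typed
// array and array buffer backing stores) are restored. Objects that need
// rehashing are collected in to_rehash_.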
template <class AllocatorT>
HeapObject* Deserializer<AllocatorT>::PostProcessNewObject(HeapObject* obj,
                                                           int space) {
  if ((FLAG_rehash_snapshot && can_rehash_) || deserializing_user_code()) {
    if (obj->IsString()) {
      // Uninitialize hash field as we need to recompute the hash.
      String* string = String::cast(obj);
      string->set_hash_field(String::kEmptyHashField);
    } else if (obj->NeedsRehashing()) {
      to_rehash_.push_back(obj);
    }
  }

  if (deserializing_user_code()) {
    if (obj->IsString()) {
      String* string = String::cast(obj);
      if (string->IsInternalizedString()) {
        // Canonicalize the internalized string. If it already exists in the
        // string table, set it to forward to the existing one.
        StringTableInsertionKey key(string);
        String* canonical =
            StringTable::ForwardStringIfExists(isolate_, &key, string);

        if (canonical != nullptr) return canonical;

        new_internalized_strings_.push_back(handle(string));
        return string;
      }
    } else if (obj->IsScript()) {
      new_scripts_.push_back(handle(Script::cast(obj)));
    } else {
      DCHECK(CanBeDeferred(obj));
    }
  }

  if (obj->IsAllocationSite()) {
    // Allocation sites are present in the snapshot, and must be linked into
    // a list at deserialization time.
    AllocationSite* site = AllocationSite::cast(obj);
    // TODO(mvstanton): consider treating the heap()->allocation_sites_list()
    // as a (weak) root. If this root is relocated correctly, this becomes
    // unnecessary.
    if (isolate_->heap()->allocation_sites_list() == Smi::kZero) {
      site->set_weak_next(isolate_->heap()->undefined_value());
    } else {
      site->set_weak_next(isolate_->heap()->allocation_sites_list());
    }
    isolate_->heap()->set_allocation_sites_list(site);
  } else if (obj->IsCode()) {
    // We flush all code pages after deserializing the startup snapshot. In that
    // case, we only need to remember code objects in the large object space.
    // When deserializing user code, remember each individual code object.
    if (deserializing_user_code() || space == LO_SPACE) {
      new_code_objects_.push_back(Code::cast(obj));
    }
  } else if (obj->IsAccessorInfo()) {
#ifdef USE_SIMULATOR
    accessor_infos_.push_back(AccessorInfo::cast(obj));
#endif
  } else if (obj->IsCallHandlerInfo()) {
#ifdef USE_SIMULATOR
    call_handler_infos_.push_back(CallHandlerInfo::cast(obj));
#endif
  } else if (obj->IsExternalString()) {
    if (obj->map() == isolate_->heap()->native_source_string_map()) {
      ExternalOneByteString* string = ExternalOneByteString::cast(obj);
      DCHECK(string->is_short());
      string->set_resource(
          NativesExternalStringResource::DecodeForDeserialization(
              string->resource()));
    } else {
      ExternalString* string = ExternalString::cast(obj);
      uint32_t index = string->resource_as_uint32();
      Address address =
          static_cast<Address>(isolate_->api_external_references()[index]);
      string->set_address_as_resource(address);
    }
    isolate_->heap()->RegisterExternalString(String::cast(obj));
  } else if (obj->IsJSTypedArray()) {
    JSTypedArray* typed_array = JSTypedArray::cast(obj);
    CHECK(typed_array->byte_offset()->IsSmi());
    int32_t byte_offset = NumberToInt32(typed_array->byte_offset());
    if (byte_offset > 0) {
      FixedTypedArrayBase* elements =
          FixedTypedArrayBase::cast(typed_array->elements());
      // Must be off-heap layout.
      DCHECK(!typed_array->is_on_heap());

      void* pointer_with_offset = reinterpret_cast<void*>(
          reinterpret_cast<intptr_t>(elements->external_pointer()) +
          byte_offset);
      elements->set_external_pointer(pointer_with_offset);
    }
  } else if (obj->IsJSArrayBuffer()) {
    JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
    // Only fixup for the off-heap case.
    if (buffer->backing_store() != nullptr) {
      Smi* store_index = reinterpret_cast<Smi*>(buffer->backing_store());
      void* backing_store = off_heap_backing_stores_[store_index->value()];

      buffer->set_backing_store(backing_store);
      isolate_->heap()->RegisterNewArrayBuffer(buffer);
    }
  } else if (obj->IsFixedTypedArrayBase()) {
    FixedTypedArrayBase* fta = FixedTypedArrayBase::cast(obj);
    // Only fixup for the off-heap case.
    if (fta->base_pointer() == nullptr) {
      Smi* store_index = reinterpret_cast<Smi*>(fta->external_pointer());
      void* backing_store = off_heap_backing_stores_[store_index->value()];
      fta->set_external_pointer(backing_store);
    }
  } else if (obj->IsBytecodeArray()) {
    // TODO(mythria): Remove these once we store the default values for these
    // fields in the serializer.
    BytecodeArray* bytecode_array = BytecodeArray::cast(obj);
    bytecode_array->set_interrupt_budget(
        interpreter::Interpreter::InterruptBudget());
    bytecode_array->set_osr_loop_nesting_level(0);
  }

  // Check alignment.
  DCHECK_EQ(0, Heap::GetFillToAlign(obj->address(),
                                    HeapObject::RequiredAlignment(obj->map())));
  return obj;
}

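// With lazy deserialization enabled, references to builtins that can be
// deserialized lazily are redirected to the DeserializeLazy builtin.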
template <class AllocatorT>
int Deserializer<AllocatorT>::MaybeReplaceWithDeserializeLazy(int builtin_id) {
  DCHECK(Builtins::IsBuiltinId(builtin_id));
  return IsLazyDeserializationEnabled() && Builtins::IsLazy(builtin_id)
             ? Builtins::kDeserializeLazy
             : builtin_id;
}

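// A back-reference identifies an object that was deserialized earlier. Maps
// and large objects are addressed by dedicated indices; other spaces use a
// chunk index and offset. Read-only space is resolved by walking its pages
// directly once the heap's deserialization is complete. The referenced object
// is also recorded as a hot object.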
template <class AllocatorT>
HeapObject* Deserializer<AllocatorT>::GetBackReferencedObject(int space) {
  HeapObject* obj;
  SerializerReference back_reference =
      SerializerReference::FromBitfield(source_.GetInt());

  switch (space) {
    case LO_SPACE:
      obj = allocator()->GetLargeObject(back_reference.large_object_index());
      break;
    case MAP_SPACE:
      obj = allocator()->GetMap(back_reference.map_index());
      break;
    case RO_SPACE:
      if (isolate()->heap()->deserialization_complete()) {
        PagedSpace* read_only_space = isolate()->heap()->read_only_space();
        Page* page = read_only_space->FirstPage();
        for (uint32_t i = 0; i < back_reference.chunk_index(); ++i) {
          page = page->next_page();
        }
        Address address = page->OffsetToAddress(back_reference.chunk_offset());
        obj = HeapObject::FromAddress(address);
        break;
      }
      V8_FALLTHROUGH;
    default:
      obj = allocator()->GetObject(static_cast<AllocationSpace>(space),
                                   back_reference.chunk_index(),
                                   back_reference.chunk_offset());
      break;
  }

  if (deserializing_user_code() && obj->IsThinString()) {
    obj = ThinString::cast(obj)->actual();
  }

  hot_objects_.Add(obj);
  DCHECK(!HasWeakHeapObjectTag(obj));
  return obj;
}

// This routine writes the new object into the slot provided, as a strong or
// weak reference depending on |reference_type|. The object is written back
// through the slot rather than returned because otherwise the object is
// written very late, which means the FreeSpace map is not set up by the
// time we need to use it to mark the space at the end of a page free.
template <class AllocatorT>
void Deserializer<AllocatorT>::ReadObject(
    int space_number, MaybeObject** write_back,
    HeapObjectReferenceType reference_type) {
  const int size = source_.GetInt() << kObjectAlignmentBits;

  Address address =
      allocator()->Allocate(static_cast<AllocationSpace>(space_number), size);
  HeapObject* obj = HeapObject::FromAddress(address);

  isolate_->heap()->OnAllocationEvent(obj, size);
  MaybeObject** current = reinterpret_cast<MaybeObject**>(address);
  MaybeObject** limit = current + (size >> kPointerSizeLog2);

  if (ReadData(current, limit, space_number, address)) {
    // Only post process if object content has not been deferred.
    obj = PostProcessNewObject(obj, space_number);
  }

  MaybeObject* write_back_obj =
      reference_type == HeapObjectReferenceType::STRONG
          ? HeapObjectReference::Strong(obj)
          : HeapObjectReference::Weak(obj);
  UnalignedCopy(write_back, &write_back_obj);
#ifdef DEBUG
  if (obj->IsCode()) {
    DCHECK(space_number == CODE_SPACE || space_number == LO_SPACE);
  } else {
    DCHECK(space_number != CODE_SPACE);
  }
#endif  // DEBUG
}

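// Reads a single object from the stream by running ReadData over a one-slot
// buffer. The result must be a strong reference to a heap object.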
template <class AllocatorT>
Object* Deserializer<AllocatorT>::ReadDataSingle() {
  MaybeObject* o;
  MaybeObject** start = &o;
  MaybeObject** end = start + 1;
  int source_space = NEW_SPACE;
  Address current_object = kNullAddress;

  CHECK(ReadData(start, end, source_space, current_object));
  HeapObject* heap_object;
  bool success = o->ToStrongHeapObject(&heap_object);
  DCHECK(success);
  USE(success);
  return heap_object;
}

static void NoExternalReferencesCallback() {
  // The following check will trigger if a function or object template
  // with references to native functions has been deserialized from the
  // snapshot, but no actual external references were provided when the
  // isolate was created.
  CHECK_WITH_MSG(false, "No external references provided via API");
}
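// ReadData is the core dispatch loop of the deserializer. It interprets
// bytecodes from the source stream and fills the slots from |current| up to
// |limit|. It returns false if the object's body has been deferred (see
// kDeferred) and true once all slots have been written.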
template <class AllocatorT>
bool Deserializer<AllocatorT>::ReadData(MaybeObject** current,
                                        MaybeObject** limit, int source_space,
                                        Address current_object_address) {
  Isolate* const isolate = isolate_;
  // Write barrier support costs around 1% in startup time.  In fact there
  // are no new space objects in current boot snapshots, so it's not needed,
  // but that may change.
  bool write_barrier_needed =
      (current_object_address != kNullAddress && source_space != NEW_SPACE &&
       source_space != CODE_SPACE);
  while (current < limit) {
    byte data = source_.Get();
    switch (data) {
#define CASE_STATEMENT(where, how, within, space_number) \
  case where + how + within + space_number:              \
    STATIC_ASSERT((where & ~kWhereMask) == 0);           \
    STATIC_ASSERT((how & ~kHowToCodeMask) == 0);         \
    STATIC_ASSERT((within & ~kWhereToPointMask) == 0);   \
    STATIC_ASSERT((space_number & ~kSpaceMask) == 0);

#define CASE_BODY(where, how, within, space_number_if_any)                   \
  current = ReadDataCase<where, how, within, space_number_if_any>(           \
      isolate, current, current_object_address, data, write_barrier_needed); \
  break;

// This generates a case and a body for the new space (which has to do extra
// write barrier handling) and handles the other spaces with fall-through cases
// and one body.
#define ALL_SPACES(where, how, within)           \
  CASE_STATEMENT(where, how, within, NEW_SPACE)  \
  CASE_BODY(where, how, within, NEW_SPACE)       \
  CASE_STATEMENT(where, how, within, OLD_SPACE)  \
  V8_FALLTHROUGH;                                \
  CASE_STATEMENT(where, how, within, CODE_SPACE) \
  V8_FALLTHROUGH;                                \
  CASE_STATEMENT(where, how, within, MAP_SPACE)  \
  V8_FALLTHROUGH;                                \
  CASE_STATEMENT(where, how, within, LO_SPACE)   \
  V8_FALLTHROUGH;                                \
  CASE_STATEMENT(where, how, within, RO_SPACE)   \
  CASE_BODY(where, how, within, kAnyOldSpace)

#define FOUR_CASES(byte_code) \
  case byte_code:             \
  case byte_code + 1:         \
  case byte_code + 2:         \
  case byte_code + 3:

#define SIXTEEN_CASES(byte_code) \
  FOUR_CASES(byte_code)          \
  FOUR_CASES(byte_code + 4)      \
  FOUR_CASES(byte_code + 8)      \
  FOUR_CASES(byte_code + 12)

#define SINGLE_CASE(where, how, within, space) \
  CASE_STATEMENT(where, how, within, space)    \
  CASE_BODY(where, how, within, space)

      // Deserialize a new object and write a pointer to it to the current
      // object.
      ALL_SPACES(kNewObject, kPlain, kStartOfObject)
      // Deserialize a new code object and write a pointer to its first
      // instruction to the current code object.
      ALL_SPACES(kNewObject, kFromCode, kInnerPointer)
      // Find a recently deserialized object using its offset from the current
      // allocation point and write a pointer to it to the current object.
      ALL_SPACES(kBackref, kPlain, kStartOfObject)
      ALL_SPACES(kBackrefWithSkip, kPlain, kStartOfObject)
#if V8_CODE_EMBEDS_OBJECT_POINTER
      // Deserialize a new object from pointer found in code and write
      // a pointer to it to the current object. Required only for MIPS, PPC, ARM
      // or S390 with embedded constant pool, and omitted on the other
      // architectures because it is fully unrolled and would cause bloat.
      ALL_SPACES(kNewObject, kFromCode, kStartOfObject)
      // Find a recently deserialized code object using its offset from the
      // current allocation point and write a pointer to it to the current
      // object. Required only for MIPS, PPC, ARM or S390 with embedded
      // constant pool.
      ALL_SPACES(kBackref, kFromCode, kStartOfObject)
      ALL_SPACES(kBackrefWithSkip, kFromCode, kStartOfObject)
#endif
      // Find a recently deserialized code object using its offset from the
      // current allocation point and write a pointer to its first instruction
      // to the current code object or the instruction pointer in a function
      // object.
      ALL_SPACES(kBackref, kFromCode, kInnerPointer)
      ALL_SPACES(kBackrefWithSkip, kFromCode, kInnerPointer)
      // Find an object in the roots array and write a pointer to it to the
      // current object.
      SINGLE_CASE(kRootArray, kPlain, kStartOfObject, 0)
#if V8_CODE_EMBEDS_OBJECT_POINTER
      // Find an object in the roots array and write a pointer to it into code.
      SINGLE_CASE(kRootArray, kFromCode, kStartOfObject, 0)
#endif
      // Find an object in the partial snapshots cache and write a pointer to it
      // to the current object.
      SINGLE_CASE(kPartialSnapshotCache, kPlain, kStartOfObject, 0)
      SINGLE_CASE(kPartialSnapshotCache, kFromCode, kStartOfObject, 0)
      SINGLE_CASE(kPartialSnapshotCache, kFromCode, kInnerPointer, 0)
      // Find an object in the attached references and write a pointer to it to
      // the current object.
      SINGLE_CASE(kAttachedReference, kPlain, kStartOfObject, 0)
      SINGLE_CASE(kAttachedReference, kFromCode, kStartOfObject, 0)
      SINGLE_CASE(kAttachedReference, kFromCode, kInnerPointer, 0)
      // Find a builtin and write a pointer to it to the current object.
      SINGLE_CASE(kBuiltin, kPlain, kStartOfObject, 0)
      SINGLE_CASE(kBuiltin, kFromCode, kStartOfObject, 0)
      SINGLE_CASE(kBuiltin, kFromCode, kInnerPointer, 0)

#undef CASE_STATEMENT
#undef CASE_BODY
#undef ALL_SPACES

      case kSkip: {
        int size = source_.GetInt();
        current = reinterpret_cast<MaybeObject**>(
            reinterpret_cast<Address>(current) + size);
        break;
      }

      // Find an external reference and write a pointer to it to the current
      // object.
      case kExternalReference + kPlain + kStartOfObject:
        current = reinterpret_cast<MaybeObject**>(ReadExternalReferenceCase(
            kPlain, isolate, reinterpret_cast<void**>(current),
            current_object_address));
        break;
      // Find an external reference and write a pointer to it in the current
      // code object.
      case kExternalReference + kFromCode + kStartOfObject:
        current = reinterpret_cast<MaybeObject**>(ReadExternalReferenceCase(
            kFromCode, isolate, reinterpret_cast<void**>(current),
            current_object_address));
        break;

      case kInternalReferenceEncoded:
      case kInternalReference: {
        // Internal reference address is not encoded via skip, but by offset
        // from code entry.
        int pc_offset = source_.GetInt();
        int target_offset = source_.GetInt();
        Code* code =
            Code::cast(HeapObject::FromAddress(current_object_address));
        DCHECK(0 <= pc_offset && pc_offset <= code->raw_instruction_size());
        DCHECK(0 <= target_offset &&
               target_offset <= code->raw_instruction_size());
        Address pc = code->entry() + pc_offset;
        Address target = code->entry() + target_offset;
        Assembler::deserialization_set_target_internal_reference_at(
            pc, target,
            data == kInternalReference ? RelocInfo::INTERNAL_REFERENCE
                                       : RelocInfo::INTERNAL_REFERENCE_ENCODED);
        break;
      }

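      // Write a reference to an off-heap (embedded) builtin, either by
      // patching the current code object or by writing the builtin's
      // instruction start directly into the current slot.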
      case kOffHeapTarget: {
#ifdef V8_EMBEDDED_BUILTINS
        int skip = source_.GetInt();
        int builtin_index = source_.GetInt();
        DCHECK(Builtins::IsBuiltinId(builtin_index));

        current = reinterpret_cast<MaybeObject**>(
            reinterpret_cast<Address>(current) + skip);

        CHECK_NOT_NULL(isolate->embedded_blob());
        EmbeddedData d = EmbeddedData::FromBlob();
        Address address = d.InstructionStartOfBuiltin(builtin_index);
        CHECK_NE(kNullAddress, address);

        if (RelocInfo::OffHeapTargetIsCodedSpecially()) {
          Address location_of_branch_data = reinterpret_cast<Address>(current);
          int skip = Assembler::deserialization_special_target_size(
              location_of_branch_data);
          Assembler::deserialization_set_special_target_at(
              location_of_branch_data,
              Code::cast(HeapObject::FromAddress(current_object_address)),
              address);
          location_of_branch_data += skip;
          current = reinterpret_cast<MaybeObject**>(location_of_branch_data);
        } else {
          MaybeObject* o = reinterpret_cast<MaybeObject*>(address);
          UnalignedCopy(current, &o);
          current++;
        }
#else
        UNREACHABLE();
#endif
        break;
      }

      case kNop:
        break;

      case kNextChunk: {
        int space = source_.Get();
        allocator()->MoveToNextChunk(static_cast<AllocationSpace>(space));
        break;
      }

      case kDeferred: {
        // Deferred can only occur right after the heap object header.
        DCHECK_EQ(current, reinterpret_cast<MaybeObject**>(
                               current_object_address + kPointerSize));
        HeapObject* obj = HeapObject::FromAddress(current_object_address);
        // If the deferred object is a map, its instance type may be used
        // during deserialization. Initialize it with a temporary value.
        if (obj->IsMap()) Map::cast(obj)->set_instance_type(FILLER_TYPE);
        current = limit;
        return false;
      }

      case kSynchronize:
        // If we get here then that indicates that you have a mismatch between
        // the number of GC roots when serializing and deserializing.
        UNREACHABLE();

      // Deserialize raw data of variable length.
      case kVariableRawData: {
        int size_in_bytes = source_.GetInt();
        byte* raw_data_out = reinterpret_cast<byte*>(current);
        source_.CopyRaw(raw_data_out, size_in_bytes);
        current = reinterpret_cast<MaybeObject**>(
            reinterpret_cast<intptr_t>(current) + size_in_bytes);
        break;
      }

      // Deserialize raw code directly into the body of the code object.
      // Do not move current.
      case kVariableRawCode: {
        int size_in_bytes = source_.GetInt();
        source_.CopyRaw(
            reinterpret_cast<byte*>(current_object_address + Code::kDataStart),
            size_in_bytes);
        break;
      }

      case kVariableRepeat: {
        int repeats = source_.GetInt();
        MaybeObject* object = current[-1];
        DCHECK(!isolate->heap()->InNewSpace(object));
        DCHECK(!allocator()->next_reference_is_weak());
        for (int i = 0; i < repeats; i++) UnalignedCopy(current++, &object);
        break;
      }

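      // Allocate an off-heap backing store and fill it with raw data from the
      // stream. JSArrayBuffers and FixedTypedArrays refer to these stores by
      // index during post-processing (see PostProcessNewObject).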
      case kOffHeapBackingStore: {
        int byte_length = source_.GetInt();
        byte* backing_store = static_cast<byte*>(
            isolate->array_buffer_allocator()->AllocateUninitialized(
                byte_length));
        CHECK_NOT_NULL(backing_store);
        source_.CopyRaw(backing_store, byte_length);
        off_heap_backing_stores_.push_back(backing_store);
        break;
      }

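      // Write an external reference that was provided through the API when
      // the isolate was created. If no references were provided, a callback
      // that aborts on use is written instead.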
      case kApiReference: {
        int skip = source_.GetInt();
        current = reinterpret_cast<MaybeObject**>(
            reinterpret_cast<Address>(current) + skip);
        uint32_t reference_id = static_cast<uint32_t>(source_.GetInt());
        Address address;
        if (isolate->api_external_references()) {
          DCHECK_WITH_MSG(
              reference_id < num_api_references_,
              "too few external references provided through the API");
          address = static_cast<Address>(
              isolate->api_external_references()[reference_id]);
        } else {
          address = reinterpret_cast<Address>(NoExternalReferencesCallback);
        }
        memcpy(current, &address, kPointerSize);
        current++;
        break;
      }

      case kWeakPrefix:
        DCHECK(!allocator()->next_reference_is_weak());
        allocator()->set_next_reference_is_weak(true);
        break;

      case kAlignmentPrefix:
      case kAlignmentPrefix + 1:
      case kAlignmentPrefix + 2: {
        int alignment = data - (SerializerDeserializer::kAlignmentPrefix - 1);
        allocator()->SetAlignment(static_cast<AllocationAlignment>(alignment));
        break;
      }

      STATIC_ASSERT(kNumberOfRootArrayConstants == Heap::kOldSpaceRoots);
      STATIC_ASSERT(kNumberOfRootArrayConstants == 32);
      SIXTEEN_CASES(kRootArrayConstantsWithSkip)
      SIXTEEN_CASES(kRootArrayConstantsWithSkip + 16) {
        int skip = source_.GetInt();
        current = reinterpret_cast<MaybeObject**>(
            reinterpret_cast<intptr_t>(current) + skip);
        V8_FALLTHROUGH;
      }

      SIXTEEN_CASES(kRootArrayConstants)
      SIXTEEN_CASES(kRootArrayConstants + 16) {
        int id = data & kRootArrayConstantsMask;
        Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(id);
        MaybeObject* object =
            MaybeObject::FromObject(isolate->heap()->root(root_index));
        DCHECK(!isolate->heap()->InNewSpace(object));
        DCHECK(!allocator()->next_reference_is_weak());
        UnalignedCopy(current++, &object);
        break;
      }

      STATIC_ASSERT(kNumberOfHotObjects == 8);
      FOUR_CASES(kHotObjectWithSkip)
      FOUR_CASES(kHotObjectWithSkip + 4) {
        int skip = source_.GetInt();
        current = reinterpret_cast<MaybeObject**>(
            reinterpret_cast<Address>(current) + skip);
        V8_FALLTHROUGH;
      }

      FOUR_CASES(kHotObject)
      FOUR_CASES(kHotObject + 4) {
        int index = data & kHotObjectMask;
        Object* hot_object = hot_objects_.Get(index);
        MaybeObject* hot_maybe_object = MaybeObject::FromObject(hot_object);
        if (allocator()->GetAndClearNextReferenceIsWeak()) {
          hot_maybe_object = MaybeObject::MakeWeak(hot_maybe_object);
        }

        UnalignedCopy(current, &hot_maybe_object);
        if (write_barrier_needed && isolate->heap()->InNewSpace(hot_object)) {
          Address current_address = reinterpret_cast<Address>(current);
          isolate->heap()->RecordWrite(
              HeapObject::FromAddress(current_object_address),
              reinterpret_cast<MaybeObject**>(current_address),
              hot_maybe_object);
        }
        current++;
        break;
      }

      // Deserialize raw data of fixed length from 1 to 32 words.
      STATIC_ASSERT(kNumberOfFixedRawData == 32);
      SIXTEEN_CASES(kFixedRawData)
      SIXTEEN_CASES(kFixedRawData + 16) {
        byte* raw_data_out = reinterpret_cast<byte*>(current);
        int size_in_bytes = (data - kFixedRawDataStart) << kPointerSizeLog2;
        source_.CopyRaw(raw_data_out, size_in_bytes);
        current = reinterpret_cast<MaybeObject**>(raw_data_out + size_in_bytes);
        break;
      }

      STATIC_ASSERT(kNumberOfFixedRepeat == 16);
      SIXTEEN_CASES(kFixedRepeat) {
        int repeats = data - kFixedRepeatStart;
        MaybeObject* object;
        DCHECK(!allocator()->next_reference_is_weak());
        UnalignedCopy(&object, current - 1);
        DCHECK(!isolate->heap()->InNewSpace(object));
        for (int i = 0; i < repeats; i++) UnalignedCopy(current++, &object);
        break;
      }

#ifdef DEBUG
#define UNUSED_CASE(byte_code) \
  case byte_code:              \
    UNREACHABLE();
      UNUSED_SERIALIZER_BYTE_CODES(UNUSED_CASE)
#endif
#undef UNUSED_CASE

#undef SIXTEEN_CASES
#undef FOUR_CASES
#undef SINGLE_CASE
    }
  }
  CHECK_EQ(limit, current);
  return true;
}

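// Reads an external reference, encoded as an optional skip followed by an
// index into the external reference table. For kFromCode the address is
// patched into the current code object via the Assembler; otherwise it is
// written directly into the current slot.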
template <class AllocatorT>
void** Deserializer<AllocatorT>::ReadExternalReferenceCase(
    HowToCode how, Isolate* isolate, void** current,
    Address current_object_address) {
  int skip = source_.GetInt();
  current = reinterpret_cast<void**>(reinterpret_cast<Address>(current) + skip);
  uint32_t reference_id = static_cast<uint32_t>(source_.GetInt());
  Address address = external_reference_table_->address(reference_id);

  if (how == kFromCode) {
    Address location_of_branch_data = reinterpret_cast<Address>(current);
    int skip =
        Assembler::deserialization_special_target_size(location_of_branch_data);
    Assembler::deserialization_set_special_target_at(
        location_of_branch_data,
        Code::cast(HeapObject::FromAddress(current_object_address)), address);
    location_of_branch_data += skip;
    current = reinterpret_cast<void**>(location_of_branch_data);
  } else {
    void* new_current = reinterpret_cast<void**>(address);
    UnalignedCopy(current, &new_current);
    ++current;
  }
  return current;
}

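// ReadDataCase handles a single serializer bytecode. It is parameterized by
// where the referenced object comes from (new object, back-reference, root
// array, partial snapshot cache, attached reference or builtin), how the slot
// is written (plain pointer or patched into code), whether the reference
// points to the start of the object or to an inner pointer, and by the
// allocation space.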
template <class AllocatorT>
template <int where, int how, int within, int space_number_if_any>
MaybeObject** Deserializer<AllocatorT>::ReadDataCase(
    Isolate* isolate, MaybeObject** current, Address current_object_address,
    byte data, bool write_barrier_needed) {
  bool emit_write_barrier = false;
  bool current_was_incremented = false;
  int space_number = space_number_if_any == kAnyOldSpace ? (data & kSpaceMask)
                                                         : space_number_if_any;
  HeapObjectReferenceType reference_type = HeapObjectReferenceType::STRONG;
  if (where == kNewObject && how == kPlain && within == kStartOfObject) {
    if (allocator()->GetAndClearNextReferenceIsWeak()) {
      reference_type = HeapObjectReferenceType::WEAK;
    }
    ReadObject(space_number, current, reference_type);
    emit_write_barrier = (space_number == NEW_SPACE);
  } else {
    Object* new_object = nullptr; /* May not be a real Object pointer. */
    if (where == kNewObject) {
      ReadObject(space_number, reinterpret_cast<MaybeObject**>(&new_object),
                 HeapObjectReferenceType::STRONG);
    } else if (where == kBackref) {
      emit_write_barrier = (space_number == NEW_SPACE);
      new_object = GetBackReferencedObject(data & kSpaceMask);
    } else if (where == kBackrefWithSkip) {
      int skip = source_.GetInt();
      current = reinterpret_cast<MaybeObject**>(
          reinterpret_cast<Address>(current) + skip);
      emit_write_barrier = (space_number == NEW_SPACE);
      new_object = GetBackReferencedObject(data & kSpaceMask);
    } else if (where == kRootArray) {
      int id = source_.GetInt();
      Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(id);
      new_object = isolate->heap()->root(root_index);
      emit_write_barrier = isolate->heap()->InNewSpace(new_object);
      hot_objects_.Add(HeapObject::cast(new_object));
    } else if (where == kPartialSnapshotCache) {
      int cache_index = source_.GetInt();
      new_object = isolate->partial_snapshot_cache()->at(cache_index);
      emit_write_barrier = isolate->heap()->InNewSpace(new_object);
    } else if (where == kAttachedReference) {
      int index = source_.GetInt();
      new_object = *attached_objects_[index];
      emit_write_barrier = isolate->heap()->InNewSpace(new_object);
    } else {
      DCHECK_EQ(where, kBuiltin);
      int builtin_id = MaybeReplaceWithDeserializeLazy(source_.GetInt());
      new_object = isolate->builtins()->builtin(builtin_id);
      emit_write_barrier = false;
    }
    if (within == kInnerPointer) {
      DCHECK_EQ(how, kFromCode);
      if (where == kBuiltin) {
        // At this point, new_object may still be uninitialized, thus the
        // unchecked Code cast.
        new_object = reinterpret_cast<Object*>(
            reinterpret_cast<Code*>(new_object)->raw_instruction_start());
      } else if (new_object->IsCode()) {
        new_object = reinterpret_cast<Object*>(
            Code::cast(new_object)->raw_instruction_start());
      } else {
        Cell* cell = Cell::cast(new_object);
        new_object = reinterpret_cast<Object*>(cell->ValueAddress());
      }
    }
    if (how == kFromCode) {
      DCHECK(!allocator()->next_reference_is_weak());
      Address location_of_branch_data = reinterpret_cast<Address>(current);
      int skip = Assembler::deserialization_special_target_size(
          location_of_branch_data);
      Assembler::deserialization_set_special_target_at(
          location_of_branch_data,
          Code::cast(HeapObject::FromAddress(current_object_address)),
          reinterpret_cast<Address>(new_object));
      location_of_branch_data += skip;
      current = reinterpret_cast<MaybeObject**>(location_of_branch_data);
      current_was_incremented = true;
    } else {
      MaybeObject* new_maybe_object = MaybeObject::FromObject(new_object);
      if (allocator()->GetAndClearNextReferenceIsWeak()) {
        new_maybe_object = MaybeObject::MakeWeak(new_maybe_object);
      }
      UnalignedCopy(current, &new_maybe_object);
    }
  }
  if (emit_write_barrier && write_barrier_needed) {
    Address current_address = reinterpret_cast<Address>(current);
    SLOW_DCHECK(isolate->heap()->ContainsSlow(current_object_address));
    isolate->heap()->RecordWrite(
        HeapObject::FromAddress(current_object_address),
        reinterpret_cast<MaybeObject**>(current_address),
        *reinterpret_cast<MaybeObject**>(current_address));
  }
  if (!current_was_incremented) {
    current++;
  }

  return current;
}

// Explicit instantiation.
template class Deserializer<BuiltinDeserializerAllocator>;
template class Deserializer<DefaultDeserializerAllocator>;

}  // namespace internal
}  // namespace v8