// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/execution/isolate.h"

#include <stdlib.h>

#include <atomic>
#include <fstream>
#include <memory>
#include <sstream>
#include <string>
#include <unordered_map>
#include <utility>

#include "include/v8-template.h"
#include "src/api/api-inl.h"
#include "src/ast/ast-value-factory.h"
#include "src/ast/scopes.h"
#include "src/base/hashmap.h"
#include "src/base/logging.h"
#include "src/base/platform/mutex.h"
#include "src/base/platform/platform.h"
#include "src/base/sys-info.h"
#include "src/base/utils/random-number-generator.h"
#include "src/baseline/baseline-batch-compiler.h"
#include "src/bigint/bigint.h"
#include "src/builtins/builtins-promise.h"
#include "src/builtins/constants-table-builder.h"
#include "src/codegen/assembler-inl.h"
#include "src/codegen/compilation-cache.h"
#include "src/codegen/flush-instruction-cache.h"
#include "src/common/assert-scope.h"
#include "src/common/ptr-compr.h"
#include "src/compiler-dispatcher/lazy-compile-dispatcher.h"
#include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
#include "src/date/date.h"
#include "src/debug/debug-frames.h"
#include "src/debug/debug.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/deoptimizer/materialized-object-store.h"
#include "src/diagnostics/basic-block-profiler.h"
#include "src/diagnostics/compilation-statistics.h"
#include "src/execution/frames-inl.h"
#include "src/execution/isolate-inl.h"
#include "src/execution/local-isolate.h"
#include "src/execution/messages.h"
#include "src/execution/microtask-queue.h"
#include "src/execution/protectors-inl.h"
#include "src/execution/runtime-profiler.h"
#include "src/execution/simulator.h"
#include "src/execution/v8threads.h"
#include "src/execution/vm-state-inl.h"
#include "src/handles/global-handles-inl.h"
#include "src/handles/persistent-handles.h"
#include "src/heap/heap-inl.h"
#include "src/heap/read-only-heap.h"
#include "src/ic/stub-cache.h"
#include "src/init/bootstrapper.h"
#include "src/init/setup-isolate.h"
#include "src/init/v8.h"
#include "src/interpreter/interpreter.h"
#include "src/libsampler/sampler.h"
#include "src/logging/counters.h"
#include "src/logging/log.h"
#include "src/logging/metrics.h"
#include "src/logging/runtime-call-stats-scope.h"
#include "src/numbers/hash-seed-inl.h"
#include "src/objects/backing-store.h"
#include "src/objects/elements.h"
#include "src/objects/feedback-vector.h"
#include "src/objects/hash-table-inl.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/js-generator-inl.h"
#include "src/objects/js-weak-refs-inl.h"
#include "src/objects/managed-inl.h"
#include "src/objects/module-inl.h"
#include "src/objects/promise-inl.h"
#include "src/objects/prototype.h"
#include "src/objects/slots.h"
#include "src/objects/smi.h"
#include "src/objects/source-text-module-inl.h"
#include "src/objects/stack-frame-info-inl.h"
#include "src/objects/visitors.h"
#include "src/profiler/heap-profiler.h"
#include "src/profiler/tracing-cpu-profiler.h"
#include "src/regexp/regexp-stack.h"
#include "src/snapshot/embedded/embedded-data.h"
#include "src/snapshot/embedded/embedded-file-writer-interface.h"
#include "src/snapshot/read-only-deserializer.h"
#include "src/snapshot/startup-deserializer.h"
#include "src/strings/string-builder-inl.h"
#include "src/strings/string-stream.h"
#include "src/tasks/cancelable-task.h"
#include "src/tracing/tracing-category-observer.h"
#include "src/utils/address-map.h"
#include "src/utils/ostreams.h"
#include "src/utils/version.h"
#include "src/zone/accounting-allocator.h"
#include "src/zone/type-stats.h"
#ifdef V8_INTL_SUPPORT
#include "unicode/uobject.h"
#endif  // V8_INTL_SUPPORT

#if V8_ENABLE_WEBASSEMBLY
#include "src/trap-handler/trap-handler.h"
#include "src/wasm/wasm-code-manager.h"
#include "src/wasm/wasm-engine.h"
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-objects.h"
#endif  // V8_ENABLE_WEBASSEMBLY

#if defined(V8_OS_WIN64)
#include "src/diagnostics/unwinding-info-win64.h"
#endif  // V8_OS_WIN64

#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
#include "src/base/platform/wrappers.h"
#include "src/heap/conservative-stack-visitor.h"
#endif

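// These symbols are defined by the binary-embedded blob, which is generated
// by mksnapshot and linked into the binary (e.g. as the generated embedded.S
// assembly file).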
124 extern "C" const uint8_t* v8_Default_embedded_blob_code_;
125 extern "C" uint32_t v8_Default_embedded_blob_code_size_;
126 extern "C" const uint8_t* v8_Default_embedded_blob_data_;
127 extern "C" uint32_t v8_Default_embedded_blob_data_size_;
128 
129 namespace v8 {
130 namespace internal {
131 
132 #ifdef DEBUG
133 #define TRACE_ISOLATE(tag)                                                  \
134   do {                                                                      \
135     if (FLAG_trace_isolates) {                                              \
136       PrintF("Isolate %p (id %d)" #tag "\n", reinterpret_cast<void*>(this), \
137              id());                                                         \
138     }                                                                       \
139   } while (false)
140 #else
141 #define TRACE_ISOLATE(tag)
142 #endif
143 
DefaultEmbeddedBlobCode()144 const uint8_t* DefaultEmbeddedBlobCode() {
145   return v8_Default_embedded_blob_code_;
146 }
DefaultEmbeddedBlobCodeSize()147 uint32_t DefaultEmbeddedBlobCodeSize() {
148   return v8_Default_embedded_blob_code_size_;
149 }
DefaultEmbeddedBlobData()150 const uint8_t* DefaultEmbeddedBlobData() {
151   return v8_Default_embedded_blob_data_;
152 }
DefaultEmbeddedBlobDataSize()153 uint32_t DefaultEmbeddedBlobDataSize() {
154   return v8_Default_embedded_blob_data_size_;
155 }
156 
157 namespace {
158 // These variables provide access to the current embedded blob without requiring
159 // an isolate instance. This is needed e.g. by Code::InstructionStart, which may
160 // not have access to an isolate but still needs to access the embedded blob.
161 // The variables are initialized by each isolate in Init(). Writes and reads are
162 // relaxed since we can guarantee that the current thread has initialized these
163 // variables before accessing them. Different threads may race, but this is fine
164 // since they all attempt to set the same values of the blob pointer and size.
165 
166 std::atomic<const uint8_t*> current_embedded_blob_code_(nullptr);
167 std::atomic<uint32_t> current_embedded_blob_code_size_(0);
168 std::atomic<const uint8_t*> current_embedded_blob_data_(nullptr);
169 std::atomic<uint32_t> current_embedded_blob_data_size_(0);
170 
171 // The various workflows around embedded snapshots are fairly complex. We need
172 // to support plain old snapshot builds, nosnap builds, and the requirements of
// subtly different serialization tests. There are two related knobs to twiddle:
//
// - The default embedded blob may be overridden by setting the sticky embedded
// blob. This is set automatically whenever we create a new embedded blob.
//
// - Lifecycle management can be either manual or set to refcounting.
//
// A few situations to demonstrate their use:
//
// - A plain old snapshot build neither overrides the default blob nor
// refcounts.
//
// - mksnapshot sets the sticky blob and manually frees the embedded
// blob once done.
//
// - Most serializer tests do the same.
//
// - Nosnapshot builds set the sticky blob and enable refcounting.

// This mutex protects access to the following variables:
// - sticky_embedded_blob_code_
// - sticky_embedded_blob_code_size_
// - sticky_embedded_blob_data_
// - sticky_embedded_blob_data_size_
// - enable_embedded_blob_refcounting_
// - current_embedded_blob_refs_
base::LazyMutex current_embedded_blob_refcount_mutex_ = LAZY_MUTEX_INITIALIZER;

const uint8_t* sticky_embedded_blob_code_ = nullptr;
uint32_t sticky_embedded_blob_code_size_ = 0;
const uint8_t* sticky_embedded_blob_data_ = nullptr;
uint32_t sticky_embedded_blob_data_size_ = 0;

bool enable_embedded_blob_refcounting_ = true;
int current_embedded_blob_refs_ = 0;

const uint8_t* StickyEmbeddedBlobCode() { return sticky_embedded_blob_code_; }
uint32_t StickyEmbeddedBlobCodeSize() {
  return sticky_embedded_blob_code_size_;
}
const uint8_t* StickyEmbeddedBlobData() { return sticky_embedded_blob_data_; }
uint32_t StickyEmbeddedBlobDataSize() {
  return sticky_embedded_blob_data_size_;
}

void SetStickyEmbeddedBlob(const uint8_t* code, uint32_t code_size,
                           const uint8_t* data, uint32_t data_size) {
  sticky_embedded_blob_code_ = code;
  sticky_embedded_blob_code_size_ = code_size;
  sticky_embedded_blob_data_ = data;
  sticky_embedded_blob_data_size_ = data_size;
}

}  // namespace

void DisableEmbeddedBlobRefcounting() {
  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
  enable_embedded_blob_refcounting_ = false;
}

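// Frees the off-heap instruction stream of the current (sticky) embedded blob,
// if one is set. Only used once refcounting has been disabled, e.g. by
// mksnapshot and serializer tests (see the lifecycle comments above).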
void FreeCurrentEmbeddedBlob() {
  CHECK(!enable_embedded_blob_refcounting_);
  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());

  if (StickyEmbeddedBlobCode() == nullptr) return;

  CHECK_EQ(StickyEmbeddedBlobCode(), Isolate::CurrentEmbeddedBlobCode());
  CHECK_EQ(StickyEmbeddedBlobData(), Isolate::CurrentEmbeddedBlobData());

  InstructionStream::FreeOffHeapInstructionStream(
      const_cast<uint8_t*>(Isolate::CurrentEmbeddedBlobCode()),
      Isolate::CurrentEmbeddedBlobCodeSize(),
      const_cast<uint8_t*>(Isolate::CurrentEmbeddedBlobData()),
      Isolate::CurrentEmbeddedBlobDataSize());

  current_embedded_blob_code_.store(nullptr, std::memory_order_relaxed);
  current_embedded_blob_code_size_.store(0, std::memory_order_relaxed);
  current_embedded_blob_data_.store(nullptr, std::memory_order_relaxed);
  current_embedded_blob_data_size_.store(0, std::memory_order_relaxed);
  sticky_embedded_blob_code_ = nullptr;
  sticky_embedded_blob_code_size_ = 0;
  sticky_embedded_blob_data_ = nullptr;
  sticky_embedded_blob_data_size_ = 0;
}

// static
bool Isolate::CurrentEmbeddedBlobIsBinaryEmbedded() {
  // In some situations, we must be able to rely on the embedded blob being
  // immortal immovable. This is the case if the blob is binary-embedded.
  // See blob lifecycle controls above for descriptions of when the current
  // embedded blob may change (e.g. in tests or mksnapshot). If the blob is
  // binary-embedded, it is immortal immovable.
  const uint8_t* code =
      current_embedded_blob_code_.load(std::memory_order::memory_order_relaxed);
  if (code == nullptr) return false;
  return code == DefaultEmbeddedBlobCode();
}

void Isolate::SetEmbeddedBlob(const uint8_t* code, uint32_t code_size,
                              const uint8_t* data, uint32_t data_size) {
  CHECK_NOT_NULL(code);
  CHECK_NOT_NULL(data);

  embedded_blob_code_ = code;
  embedded_blob_code_size_ = code_size;
  embedded_blob_data_ = data;
  embedded_blob_data_size_ = data_size;
  current_embedded_blob_code_.store(code, std::memory_order_relaxed);
  current_embedded_blob_code_size_.store(code_size, std::memory_order_relaxed);
  current_embedded_blob_data_.store(data, std::memory_order_relaxed);
  current_embedded_blob_data_size_.store(data_size, std::memory_order_relaxed);

#ifdef DEBUG
  // Verify that the contents of the embedded blob are unchanged from
  // serialization-time, just to ensure the compiler isn't messing with us.
  EmbeddedData d = EmbeddedData::FromBlob();
  if (d.EmbeddedBlobDataHash() != d.CreateEmbeddedBlobDataHash()) {
    FATAL(
        "Embedded blob data section checksum verification failed. This "
        "indicates that the embedded blob has been modified since compilation "
        "time.");
  }
  if (FLAG_text_is_readable) {
    if (d.EmbeddedBlobCodeHash() != d.CreateEmbeddedBlobCodeHash()) {
      FATAL(
          "Embedded blob code section checksum verification failed. This "
          "indicates that the embedded blob has been modified since "
          "compilation time. A common cause is a debugging breakpoint set "
          "within builtin code.");
    }
  }
#endif  // DEBUG
}

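// Detaches this isolate from the current embedded blob and resets both the
// process-wide and sticky blob state. Only valid while blob refcounting is
// enabled.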
void Isolate::ClearEmbeddedBlob() {
  CHECK(enable_embedded_blob_refcounting_);
  CHECK_EQ(embedded_blob_code_, CurrentEmbeddedBlobCode());
  CHECK_EQ(embedded_blob_code_, StickyEmbeddedBlobCode());
  CHECK_EQ(embedded_blob_data_, CurrentEmbeddedBlobData());
  CHECK_EQ(embedded_blob_data_, StickyEmbeddedBlobData());

  embedded_blob_code_ = nullptr;
  embedded_blob_code_size_ = 0;
  embedded_blob_data_ = nullptr;
  embedded_blob_data_size_ = 0;
  current_embedded_blob_code_.store(nullptr, std::memory_order_relaxed);
  current_embedded_blob_code_size_.store(0, std::memory_order_relaxed);
  current_embedded_blob_data_.store(nullptr, std::memory_order_relaxed);
  current_embedded_blob_data_size_.store(0, std::memory_order_relaxed);
  sticky_embedded_blob_code_ = nullptr;
  sticky_embedded_blob_code_size_ = 0;
  sticky_embedded_blob_data_ = nullptr;
  sticky_embedded_blob_data_size_ = 0;
}

const uint8_t* Isolate::embedded_blob_code() const {
  return embedded_blob_code_;
}
uint32_t Isolate::embedded_blob_code_size() const {
  return embedded_blob_code_size_;
}
const uint8_t* Isolate::embedded_blob_data() const {
  return embedded_blob_data_;
}
uint32_t Isolate::embedded_blob_data_size() const {
  return embedded_blob_data_size_;
}

// static
const uint8_t* Isolate::CurrentEmbeddedBlobCode() {
  return current_embedded_blob_code_.load(
      std::memory_order::memory_order_relaxed);
}

// static
uint32_t Isolate::CurrentEmbeddedBlobCodeSize() {
  return current_embedded_blob_code_size_.load(
      std::memory_order::memory_order_relaxed);
}

// static
const uint8_t* Isolate::CurrentEmbeddedBlobData() {
  return current_embedded_blob_data_.load(
      std::memory_order::memory_order_relaxed);
}

// static
uint32_t Isolate::CurrentEmbeddedBlobDataSize() {
  return current_embedded_blob_data_size_.load(
      std::memory_order::memory_order_relaxed);
}

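// Hashes the isolate-independent data fields of all builtin Code headers plus
// the length of the builtins constants table. The result can be used to check
// that an embedded blob is compatible with the isolate using it.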
size_t Isolate::HashIsolateForEmbeddedBlob() {
  DCHECK(builtins_.is_initialized());
  DCHECK(Builtins::AllBuiltinsAreIsolateIndependent());

  DisallowGarbageCollection no_gc;

  static constexpr size_t kSeed = 0;
  size_t hash = kSeed;

  // Hash data sections of builtin code objects.
  for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLast;
       ++builtin) {
    Code code = heap_.builtin(builtin);

    DCHECK(Internals::HasHeapObjectTag(code.ptr()));
    uint8_t* const code_ptr =
        reinterpret_cast<uint8_t*>(code.ptr() - kHeapObjectTag);

    // These static asserts ensure we don't miss relevant fields. We don't hash
    // instruction/metadata size and flags since they change when creating the
    // off-heap trampolines. Other data fields must remain the same.
    STATIC_ASSERT(Code::kInstructionSizeOffset == Code::kDataStart);
    STATIC_ASSERT(Code::kMetadataSizeOffset ==
                  Code::kInstructionSizeOffsetEnd + 1);
    STATIC_ASSERT(Code::kFlagsOffset == Code::kMetadataSizeOffsetEnd + 1);
    STATIC_ASSERT(Code::kBuiltinIndexOffset == Code::kFlagsOffsetEnd + 1);
    static constexpr int kStartOffset = Code::kBuiltinIndexOffset;

    for (int j = kStartOffset; j < Code::kUnalignedHeaderSize; j++) {
      hash = base::hash_combine(hash, size_t{code_ptr[j]});
    }
  }

  // The builtins constants table is also tightly tied to embedded builtins.
  hash = base::hash_combine(
      hash, static_cast<size_t>(heap_.builtins_constants_table().length()));

  return hash;
}

base::Thread::LocalStorageKey Isolate::isolate_key_;
base::Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_;
#if DEBUG
std::atomic<bool> Isolate::isolate_key_created_{false};
#endif

namespace {
// A global counter for all generated Isolates, might overflow.
std::atomic<int> isolate_counter{0};
}  // namespace

Isolate::PerIsolateThreadData*
Isolate::FindOrAllocatePerThreadDataForThisThread() {
  ThreadId thread_id = ThreadId::Current();
  PerIsolateThreadData* per_thread = nullptr;
  {
    base::MutexGuard lock_guard(&thread_data_table_mutex_);
    per_thread = thread_data_table_.Lookup(thread_id);
    if (per_thread == nullptr) {
      if (FLAG_adjust_os_scheduling_parameters) {
        base::OS::AdjustSchedulingParams();
      }
      per_thread = new PerIsolateThreadData(this, thread_id);
      thread_data_table_.Insert(per_thread);
    }
    DCHECK(thread_data_table_.Lookup(thread_id) == per_thread);
  }
  return per_thread;
}

void Isolate::DiscardPerThreadDataForThisThread() {
  ThreadId thread_id = ThreadId::TryGetCurrent();
  if (thread_id.IsValid()) {
    DCHECK_NE(thread_manager_->mutex_owner_.load(std::memory_order_relaxed),
              thread_id);
    base::MutexGuard lock_guard(&thread_data_table_mutex_);
    PerIsolateThreadData* per_thread = thread_data_table_.Lookup(thread_id);
    if (per_thread) {
      DCHECK(!per_thread->thread_state_);
      thread_data_table_.Remove(per_thread);
    }
  }
}

Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThisThread() {
  ThreadId thread_id = ThreadId::Current();
  return FindPerThreadDataForThread(thread_id);
}

Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThread(
    ThreadId thread_id) {
  PerIsolateThreadData* per_thread = nullptr;
  {
    base::MutexGuard lock_guard(&thread_data_table_mutex_);
    per_thread = thread_data_table_.Lookup(thread_id);
  }
  return per_thread;
}

void Isolate::InitializeOncePerProcess() {
  isolate_key_ = base::Thread::CreateThreadLocalKey();
#if DEBUG
  bool expected = false;
  DCHECK_EQ(true, isolate_key_created_.compare_exchange_strong(
                      expected, true, std::memory_order_relaxed));
#endif
  per_isolate_thread_data_key_ = base::Thread::CreateThreadLocalKey();
}

Address Isolate::get_address_from_id(IsolateAddressId id) {
  return isolate_addresses_[id];
}

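// Visits the ThreadLocalTop archived in {thread_storage} and returns the
// address just past the consumed bytes.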
char* Isolate::Iterate(RootVisitor* v, char* thread_storage) {
  ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(thread_storage);
  Iterate(v, thread);
  return thread_storage + sizeof(ThreadLocalTop);
}

void Isolate::IterateThread(ThreadVisitor* v, char* t) {
  ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(t);
  v->VisitThread(this, thread);
}

void Isolate::Iterate(RootVisitor* v, ThreadLocalTop* thread) {
  // Visit the roots from the top for a given thread.
  v->VisitRootPointer(Root::kStackRoots, nullptr,
                      FullObjectSlot(&thread->pending_exception_));
  v->VisitRootPointer(Root::kStackRoots, nullptr,
                      FullObjectSlot(&thread->pending_message_));
  v->VisitRootPointer(Root::kStackRoots, nullptr,
                      FullObjectSlot(&thread->context_));
  v->VisitRootPointer(Root::kStackRoots, nullptr,
                      FullObjectSlot(&thread->scheduled_exception_));

  for (v8::TryCatch* block = thread->try_catch_handler_; block != nullptr;
       block = block->next_) {
    // TODO(3770): Make TryCatch::exception_ an Address (and message_obj_ too).
    v->VisitRootPointer(
        Root::kStackRoots, nullptr,
        FullObjectSlot(reinterpret_cast<Address>(&(block->exception_))));
    v->VisitRootPointer(
        Root::kStackRoots, nullptr,
        FullObjectSlot(reinterpret_cast<Address>(&(block->message_obj_))));
  }

#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
  ConservativeStackVisitor stack_visitor(this, v);
  thread_local_top()->stack_.IteratePointers(&stack_visitor);
#endif

  // Iterate over pointers on native execution stack.
#if V8_ENABLE_WEBASSEMBLY
  wasm::WasmCodeRefScope wasm_code_ref_scope;
#endif  // V8_ENABLE_WEBASSEMBLY
  for (StackFrameIterator it(this, thread); !it.done(); it.Advance()) {
    it.frame()->Iterate(v);
  }
}

void Isolate::Iterate(RootVisitor* v) {
  ThreadLocalTop* current_t = thread_local_top();
  Iterate(v, current_t);
}

void Isolate::RegisterTryCatchHandler(v8::TryCatch* that) {
  thread_local_top()->try_catch_handler_ = that;
}

void Isolate::UnregisterTryCatchHandler(v8::TryCatch* that) {
  DCHECK(thread_local_top()->try_catch_handler_ == that);
  thread_local_top()->try_catch_handler_ = that->next_;
}

Handle<String> Isolate::StackTraceString() {
  if (stack_trace_nesting_level_ == 0) {
    stack_trace_nesting_level_++;
    HeapStringAllocator allocator;
    StringStream::ClearMentionedObjectCache(this);
    StringStream accumulator(&allocator);
    incomplete_message_ = &accumulator;
    PrintStack(&accumulator);
    Handle<String> stack_trace = accumulator.ToString(this);
    incomplete_message_ = nullptr;
    stack_trace_nesting_level_ = 0;
    return stack_trace;
  } else if (stack_trace_nesting_level_ == 1) {
    stack_trace_nesting_level_++;
    base::OS::PrintError(
        "\n\nAttempt to print stack while printing stack (double fault)\n");
    base::OS::PrintError(
        "If you are lucky you may find a partial stack dump on stdout.\n\n");
    incomplete_message_->OutputToStdOut();
    return factory()->empty_string();
  } else {
    base::OS::Abort();
  }
}

void Isolate::PushStackTraceAndDie(void* ptr1, void* ptr2, void* ptr3,
                                   void* ptr4) {
  StackTraceFailureMessage message(this, ptr1, ptr2, ptr3, ptr4);
  message.Print();
  base::OS::Abort();
}

void StackTraceFailureMessage::Print() volatile {
  // Print the details of this failure message object, including its own address
  // to force stack allocation.
  base::OS::PrintError(
      "Stacktrace:\n   ptr1=%p\n    ptr2=%p\n    ptr3=%p\n    ptr4=%p\n    "
      "failure_message_object=%p\n%s",
      ptr1_, ptr2_, ptr3_, ptr4_, this, &js_stack_trace_[0]);
}

StackTraceFailureMessage::StackTraceFailureMessage(Isolate* isolate, void* ptr1,
                                                   void* ptr2, void* ptr3,
                                                   void* ptr4) {
  isolate_ = isolate;
  ptr1_ = ptr1;
  ptr2_ = ptr2;
  ptr3_ = ptr3;
  ptr4_ = ptr4;
  // Write a stack trace into the {js_stack_trace_} buffer.
  const size_t buffer_length = arraysize(js_stack_trace_);
  memset(&js_stack_trace_, 0, buffer_length);
  FixedStringAllocator fixed(&js_stack_trace_[0], buffer_length - 1);
  StringStream accumulator(&fixed, StringStream::kPrintObjectConcise);
  isolate->PrintStack(&accumulator, Isolate::kPrintStackVerbose);
  // Keep a reference to the last code objects to increase the likelihood that
  // they get included in the minidump.
  const size_t code_objects_length = arraysize(code_objects_);
  size_t i = 0;
  StackFrameIterator it(isolate);
  for (; !it.done() && i < code_objects_length; it.Advance()) {
    code_objects_[i++] =
        reinterpret_cast<void*>(it.frame()->unchecked_code().ptr());
  }
}

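// Helper that accumulates StackFrameInfo objects into a FixedArray while
// honoring the frame limit, frame skipping, and security-context filtering
// modes passed to its constructor.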
class StackTraceBuilder {
 public:
  enum FrameFilterMode { ALL, CURRENT_SECURITY_CONTEXT };

  StackTraceBuilder(Isolate* isolate, FrameSkipMode mode, int limit,
                    Handle<Object> caller, FrameFilterMode filter_mode)
      : isolate_(isolate),
        mode_(mode),
        limit_(limit),
        caller_(caller),
        skip_next_frame_(mode != SKIP_NONE),
        check_security_context_(filter_mode == CURRENT_SECURITY_CONTEXT) {
    DCHECK_IMPLIES(mode_ == SKIP_UNTIL_SEEN, caller_->IsJSFunction());
    // Modern web applications are usually built with multiple layers of
    // framework and library code, and stack depth tends to be more than
    // a dozen frames, so we over-allocate a bit here to avoid growing
    // the elements array in the common case.
    elements_ = isolate->factory()->NewFixedArray(std::min(64, limit));
  }

  void AppendAsyncFrame(Handle<JSGeneratorObject> generator_object) {
    Handle<JSFunction> function(generator_object->function(), isolate_);
    if (!IsVisibleInStackTrace(function)) return;
    int flags = StackFrameInfo::kIsAsync;
    if (IsStrictFrame(function)) flags |= StackFrameInfo::kIsStrict;

    Handle<Object> receiver(generator_object->receiver(), isolate_);
    Handle<BytecodeArray> code(function->shared().GetBytecodeArray(isolate_),
                               isolate_);
    // The stored bytecode offset is relative to a different base than what
    // is used in the source position table, hence the subtraction.
    int offset = Smi::ToInt(generator_object->input_or_debug_pos()) -
                 (BytecodeArray::kHeaderSize - kHeapObjectTag);

    Handle<FixedArray> parameters = isolate_->factory()->empty_fixed_array();
    if (V8_UNLIKELY(FLAG_detailed_error_stack_trace)) {
      parameters = isolate_->factory()->CopyFixedArrayUpTo(
          handle(generator_object->parameters_and_registers(), isolate_),
          function->shared()
              .internal_formal_parameter_count_without_receiver());
    }

    AppendFrame(receiver, function, code, offset, flags, parameters);
  }

  void AppendPromiseCombinatorFrame(Handle<JSFunction> element_function,
                                    Handle<JSFunction> combinator) {
    if (!IsVisibleInStackTrace(combinator)) return;
    int flags =
        StackFrameInfo::kIsAsync | StackFrameInfo::kIsSourcePositionComputed;

    Handle<Object> receiver(combinator->native_context().promise_function(),
                            isolate_);
    Handle<Code> code(combinator->code(), isolate_);

    // TODO(mmarchini) save Promises list from the Promise combinator
    Handle<FixedArray> parameters = isolate_->factory()->empty_fixed_array();

    // We store the offset of the promise into the element function's
    // hash field for element callbacks.
    int promise_index =
        Smi::ToInt(Smi::cast(element_function->GetIdentityHash())) - 1;

    AppendFrame(receiver, combinator, code, promise_index, flags, parameters);
  }

  void AppendJavaScriptFrame(
      FrameSummary::JavaScriptFrameSummary const& summary) {
    // Filter out internal frames that we do not want to show.
    if (!IsVisibleInStackTrace(summary.function())) return;

    int flags = 0;
    Handle<JSFunction> function = summary.function();
    if (IsStrictFrame(function)) flags |= StackFrameInfo::kIsStrict;
    if (summary.is_constructor()) flags |= StackFrameInfo::kIsConstructor;

    AppendFrame(summary.receiver(), function, summary.abstract_code(),
                summary.code_offset(), flags, summary.parameters());
  }

#if V8_ENABLE_WEBASSEMBLY
  void AppendWasmFrame(FrameSummary::WasmFrameSummary const& summary) {
    if (summary.code()->kind() != wasm::WasmCode::kWasmFunction) return;
    Handle<WasmInstanceObject> instance = summary.wasm_instance();
    int flags = StackFrameInfo::kIsWasm;
    if (instance->module_object().is_asm_js()) {
      flags |= StackFrameInfo::kIsAsmJsWasm;
      if (summary.at_to_number_conversion()) {
        flags |= StackFrameInfo::kIsAsmJsAtNumberConversion;
      }
    }

    auto code = Managed<wasm::GlobalWasmCodeRef>::Allocate(
        isolate_, 0, summary.code(),
        instance->module_object().shared_native_module());
    AppendFrame(instance,
                handle(Smi::FromInt(summary.function_index()), isolate_), code,
                summary.code_offset(), flags,
                isolate_->factory()->empty_fixed_array());
  }
#endif  // V8_ENABLE_WEBASSEMBLY

  void AppendBuiltinExitFrame(BuiltinExitFrame* exit_frame) {
    Handle<JSFunction> function(exit_frame->function(), isolate_);
    if (!IsVisibleInStackTrace(function)) return;

    // TODO(szuend): Remove this check once the flag is enabled
    //               by default.
    if (!FLAG_experimental_stack_trace_frames &&
        function->shared().IsApiFunction()) {
      return;
    }

    Handle<Object> receiver(exit_frame->receiver(), isolate_);
    Handle<Code> code(exit_frame->LookupCode(), isolate_);
    const int offset =
        code->GetOffsetFromInstructionStart(isolate_, exit_frame->pc());

    int flags = 0;
    if (IsStrictFrame(function)) flags |= StackFrameInfo::kIsStrict;
    if (exit_frame->IsConstructor()) flags |= StackFrameInfo::kIsConstructor;

    Handle<FixedArray> parameters = isolate_->factory()->empty_fixed_array();
    if (V8_UNLIKELY(FLAG_detailed_error_stack_trace)) {
      int param_count = exit_frame->ComputeParametersCount();
      parameters = isolate_->factory()->NewFixedArray(param_count);
      for (int i = 0; i < param_count; i++) {
        parameters->set(i, exit_frame->GetParameter(i));
      }
    }

    AppendFrame(receiver, function, code, offset, flags, parameters);
  }

  bool Full() { return index_ >= limit_; }

  Handle<FixedArray> Build() {
    return FixedArray::ShrinkOrEmpty(isolate_, elements_, index_);
  }

 private:
  // Poison stack frames below the first strict mode frame.
  // The stack trace API should not expose receivers and function
  // objects on frames deeper than the top-most one with a strict mode
  // function.
  bool IsStrictFrame(Handle<JSFunction> function) {
    if (!encountered_strict_function_) {
      encountered_strict_function_ =
          is_strict(function->shared().language_mode());
    }
    return encountered_strict_function_;
  }

  // Determines whether the given stack frame should be displayed in a stack
  // trace.
  bool IsVisibleInStackTrace(Handle<JSFunction> function) {
    return ShouldIncludeFrame(function) && IsNotHidden(function) &&
           IsInSameSecurityContext(function);
  }

  // This mechanism excludes a number of uninteresting frames from the stack
  // trace. This can be the first frame (which will be a builtin-exit frame
  // for the error constructor builtin) or every frame until encountering a
  // user-specified function.
  bool ShouldIncludeFrame(Handle<JSFunction> function) {
    switch (mode_) {
      case SKIP_NONE:
        return true;
      case SKIP_FIRST:
        if (!skip_next_frame_) return true;
        skip_next_frame_ = false;
        return false;
      case SKIP_UNTIL_SEEN:
        if (skip_next_frame_ && (*function == *caller_)) {
          skip_next_frame_ = false;
          return false;
        }
        return !skip_next_frame_;
    }
    UNREACHABLE();
  }

  bool IsNotHidden(Handle<JSFunction> function) {
    // Functions not defined in user scripts are not visible unless directly
    // exposed, in which case the native flag is set.
    // The --builtins-in-stack-traces command line flag allows including
    // internal call sites in the stack trace for debugging purposes.
    if (!FLAG_builtins_in_stack_traces &&
        !function->shared().IsUserJavaScript()) {
      return function->shared().native() || function->shared().IsApiFunction();
    }
    return true;
  }

  bool IsInSameSecurityContext(Handle<JSFunction> function) {
    if (!check_security_context_) return true;
    return isolate_->context().HasSameSecurityTokenAs(function->context());
  }

  void AppendFrame(Handle<Object> receiver_or_instance, Handle<Object> function,
                   Handle<HeapObject> code, int offset, int flags,
                   Handle<FixedArray> parameters) {
    DCHECK_LE(index_, elements_->length());
    DCHECK_LE(elements_->length(), limit_);
    if (index_ == elements_->length()) {
      elements_ = isolate_->factory()->CopyFixedArrayAndGrow(
          elements_, std::min(16, limit_ - elements_->length()));
    }
    if (receiver_or_instance->IsTheHole(isolate_)) {
      // TODO(jgruber): Fix all cases in which frames give us a hole value
      // (e.g. the receiver in RegExp constructor frames).
      receiver_or_instance = isolate_->factory()->undefined_value();
    }
    auto info = isolate_->factory()->NewStackFrameInfo(
        receiver_or_instance, function, code, offset, flags, parameters);
    elements_->set(index_++, *info);
  }

  Isolate* isolate_;
  const FrameSkipMode mode_;
  int index_ = 0;
  const int limit_;
  const Handle<Object> caller_;
  bool skip_next_frame_;
  bool encountered_strict_function_ = false;
  const bool check_security_context_;
  Handle<FixedArray> elements_;
};

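// Reads the stack trace limit from the stackTraceLimit property on the Error
// constructor. Returns false if the property is not a number, in which case
// no stack trace is captured at all.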
bool GetStackTraceLimit(Isolate* isolate, int* result) {
  DCHECK(!FLAG_correctness_fuzzer_suppressions);
  Handle<JSObject> error = isolate->error_function();

  Handle<String> key = isolate->factory()->stackTraceLimit_string();
  Handle<Object> stack_trace_limit = JSReceiver::GetDataProperty(error, key);
  if (!stack_trace_limit->IsNumber()) return false;

  // Ensure that limit is not negative.
  *result = std::max(FastD2IChecked(stack_trace_limit->Number()), 0);

  if (*result != FLAG_stack_trace_limit) {
    isolate->CountUsage(v8::Isolate::kErrorStackTraceLimit);
  }

  return true;
}

bool NoExtension(const v8::FunctionCallbackInfo<v8::Value>&) { return false; }

namespace {

bool IsBuiltinFunction(Isolate* isolate, HeapObject object, Builtin builtin) {
  if (!object.IsJSFunction()) return false;
  JSFunction const function = JSFunction::cast(object);
  return function.code() == isolate->builtins()->code(builtin);
}

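// Follows the chain of pending promise reactions starting at {promise} and
// appends async frames to {builder} for the well-known async function,
// async generator, and promise combinator continuations.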
void CaptureAsyncStackTrace(Isolate* isolate, Handle<JSPromise> promise,
                            StackTraceBuilder* builder) {
  while (!builder->Full()) {
    // Check that the {promise} is not settled.
    if (promise->status() != Promise::kPending) return;

    // Check that we have exactly one PromiseReaction on the {promise}.
    if (!promise->reactions().IsPromiseReaction()) return;
    Handle<PromiseReaction> reaction(
        PromiseReaction::cast(promise->reactions()), isolate);
    if (!reaction->next().IsSmi()) return;

    // Check if the {reaction} has one of the known async function or
    // async generator continuations as its fulfill handler.
    if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                          Builtin::kAsyncFunctionAwaitResolveClosure) ||
        IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                          Builtin::kAsyncGeneratorAwaitResolveClosure) ||
        IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                          Builtin::kAsyncGeneratorYieldResolveClosure)) {
      // Now peek into the handlers' AwaitContext to get to
      // the JSGeneratorObject for the async function.
      Handle<Context> context(
          JSFunction::cast(reaction->fulfill_handler()).context(), isolate);
      Handle<JSGeneratorObject> generator_object(
          JSGeneratorObject::cast(context->extension()), isolate);
      CHECK(generator_object->is_suspended());

      // Append async frame corresponding to the {generator_object}.
      builder->AppendAsyncFrame(generator_object);

      // Try to continue from here.
      if (generator_object->IsJSAsyncFunctionObject()) {
        Handle<JSAsyncFunctionObject> async_function_object =
            Handle<JSAsyncFunctionObject>::cast(generator_object);
        promise = handle(async_function_object->promise(), isolate);
      } else {
        Handle<JSAsyncGeneratorObject> async_generator_object =
            Handle<JSAsyncGeneratorObject>::cast(generator_object);
        if (async_generator_object->queue().IsUndefined(isolate)) return;
        Handle<AsyncGeneratorRequest> async_generator_request(
            AsyncGeneratorRequest::cast(async_generator_object->queue()),
            isolate);
        promise = handle(JSPromise::cast(async_generator_request->promise()),
                         isolate);
      }
    } else if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                                 Builtin::kPromiseAllResolveElementClosure)) {
      Handle<JSFunction> function(JSFunction::cast(reaction->fulfill_handler()),
                                  isolate);
      Handle<Context> context(function->context(), isolate);
      Handle<JSFunction> combinator(context->native_context().promise_all(),
                                    isolate);
      builder->AppendPromiseCombinatorFrame(function, combinator);

      // Now peek into the Promise.all() resolve element context to
      // find the promise capability that's being resolved when all
      // the concurrent promises resolve.
      int const index =
          PromiseBuiltins::kPromiseAllResolveElementCapabilitySlot;
      Handle<PromiseCapability> capability(
          PromiseCapability::cast(context->get(index)), isolate);
      if (!capability->promise().IsJSPromise()) return;
      promise = handle(JSPromise::cast(capability->promise()), isolate);
    } else if (IsBuiltinFunction(isolate, reaction->reject_handler(),
                                 Builtin::kPromiseAnyRejectElementClosure)) {
      Handle<JSFunction> function(JSFunction::cast(reaction->reject_handler()),
                                  isolate);
      Handle<Context> context(function->context(), isolate);
      Handle<JSFunction> combinator(context->native_context().promise_any(),
                                    isolate);
      builder->AppendPromiseCombinatorFrame(function, combinator);

      // Now peek into the Promise.any() reject element context to
      // find the promise capability that's being resolved when any of
      // the concurrent promises resolve.
      int const index = PromiseBuiltins::kPromiseAnyRejectElementCapabilitySlot;
      Handle<PromiseCapability> capability(
          PromiseCapability::cast(context->get(index)), isolate);
      if (!capability->promise().IsJSPromise()) return;
      promise = handle(JSPromise::cast(capability->promise()), isolate);
    } else if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                                 Builtin::kPromiseCapabilityDefaultResolve)) {
      Handle<JSFunction> function(JSFunction::cast(reaction->fulfill_handler()),
                                  isolate);
      Handle<Context> context(function->context(), isolate);
      promise =
          handle(JSPromise::cast(context->get(PromiseBuiltins::kPromiseSlot)),
                 isolate);
    } else {
      // We have some generic promise chain here, so try to
      // continue with the chained promise on the reaction
      // (only works for native promise chains).
      Handle<HeapObject> promise_or_capability(
          reaction->promise_or_capability(), isolate);
      if (promise_or_capability->IsJSPromise()) {
        promise = Handle<JSPromise>::cast(promise_or_capability);
      } else if (promise_or_capability->IsPromiseCapability()) {
        Handle<PromiseCapability> capability =
            Handle<PromiseCapability>::cast(promise_or_capability);
        if (!capability->promise().IsJSPromise()) return;
        promise = handle(JSPromise::cast(capability->promise()), isolate);
      } else {
        // Otherwise the {promise_or_capability} must be undefined here.
        CHECK(promise_or_capability->IsUndefined(isolate));
        return;
      }
    }
  }
}

struct CaptureStackTraceOptions {
  int limit;
  // 'filter_mode' and 'skip_mode' are somewhat orthogonal. 'filter_mode'
  // specifies whether to capture all frames, or just frames in the same
  // security context, while 'skip_mode' allows skipping the first frame.
  FrameSkipMode skip_mode;
  StackTraceBuilder::FrameFilterMode filter_mode;

  bool capture_builtin_exit_frames;
  bool capture_only_frames_subject_to_debugging;
  bool async_stack_trace;
};

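// Walks the current stack, summarizing inlined frames, and optionally
// enriches the result with async frames derived from the current microtask.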
Handle<FixedArray> CaptureStackTrace(Isolate* isolate, Handle<Object> caller,
                                     CaptureStackTraceOptions options) {
  DisallowJavascriptExecution no_js(isolate);

  TRACE_EVENT_BEGIN1(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"),
                     "CaptureStackTrace", "maxFrameCount", options.limit);

#if V8_ENABLE_WEBASSEMBLY
  wasm::WasmCodeRefScope code_ref_scope;
#endif  // V8_ENABLE_WEBASSEMBLY

  StackTraceBuilder builder(isolate, options.skip_mode, options.limit, caller,
                            options.filter_mode);

  // Build the regular stack trace, and remember the last relevant
  // frame ID and inlined index (for the async stack trace handling
  // below, which starts from this last frame).
  for (StackFrameIterator it(isolate); !it.done() && !builder.Full();
       it.Advance()) {
    StackFrame* const frame = it.frame();
    switch (frame->type()) {
      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION:
      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
      case StackFrame::OPTIMIZED:
      case StackFrame::INTERPRETED:
      case StackFrame::BASELINE:
      case StackFrame::BUILTIN:
#if V8_ENABLE_WEBASSEMBLY
      case StackFrame::WASM:
#endif  // V8_ENABLE_WEBASSEMBLY
      {
        // A standard frame may include many summarized frames (due to
        // inlining).
        std::vector<FrameSummary> frames;
        CommonFrame::cast(frame)->Summarize(&frames);
        for (size_t i = frames.size(); i-- != 0 && !builder.Full();) {
          auto& summary = frames[i];
          if (options.capture_only_frames_subject_to_debugging &&
              !summary.is_subject_to_debugging()) {
            continue;
          }

          if (summary.IsJavaScript()) {
            //=========================================================
            // Handle a JavaScript frame.
            //=========================================================
            auto const& java_script = summary.AsJavaScript();
            builder.AppendJavaScriptFrame(java_script);
#if V8_ENABLE_WEBASSEMBLY
          } else if (summary.IsWasm()) {
            //=========================================================
            // Handle a Wasm frame.
            //=========================================================
            auto const& wasm = summary.AsWasm();
            builder.AppendWasmFrame(wasm);
#endif  // V8_ENABLE_WEBASSEMBLY
          }
        }
        break;
      }

      case StackFrame::BUILTIN_EXIT:
        if (!options.capture_builtin_exit_frames) continue;

        // BuiltinExitFrames are not standard frames, so they do not have
        // Summarize(). However, they may have one JS frame worth showing.
        builder.AppendBuiltinExitFrame(BuiltinExitFrame::cast(frame));
        break;

      default:
        break;
    }
  }

  // If --async-stack-traces are enabled and the "current microtask" is a
  // PromiseReactionJobTask, we try to enrich the stack trace with async
  // frames.
  if (options.async_stack_trace) {
    Handle<Object> current_microtask = isolate->factory()->current_microtask();
    if (current_microtask->IsPromiseReactionJobTask()) {
      Handle<PromiseReactionJobTask> promise_reaction_job_task =
          Handle<PromiseReactionJobTask>::cast(current_microtask);
      // Check if the {reaction} has one of the known async function or
      // async generator continuations as its fulfill handler.
      if (IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
                            Builtin::kAsyncFunctionAwaitResolveClosure) ||
          IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
                            Builtin::kAsyncGeneratorAwaitResolveClosure) ||
          IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
                            Builtin::kAsyncGeneratorYieldResolveClosure) ||
          IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
                            Builtin::kAsyncFunctionAwaitRejectClosure) ||
          IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
                            Builtin::kAsyncGeneratorAwaitRejectClosure)) {
        // Now peek into the handlers' AwaitContext to get to
        // the JSGeneratorObject for the async function.
        Handle<Context> context(
            JSFunction::cast(promise_reaction_job_task->handler()).context(),
            isolate);
        Handle<JSGeneratorObject> generator_object(
            JSGeneratorObject::cast(context->extension()), isolate);
        if (generator_object->is_executing()) {
          if (generator_object->IsJSAsyncFunctionObject()) {
            Handle<JSAsyncFunctionObject> async_function_object =
                Handle<JSAsyncFunctionObject>::cast(generator_object);
            Handle<JSPromise> promise(async_function_object->promise(),
                                      isolate);
            CaptureAsyncStackTrace(isolate, promise, &builder);
          } else {
            Handle<JSAsyncGeneratorObject> async_generator_object =
                Handle<JSAsyncGeneratorObject>::cast(generator_object);
            Handle<Object> queue(async_generator_object->queue(), isolate);
            if (!queue->IsUndefined(isolate)) {
              Handle<AsyncGeneratorRequest> async_generator_request =
                  Handle<AsyncGeneratorRequest>::cast(queue);
              Handle<JSPromise> promise(
                  JSPromise::cast(async_generator_request->promise()), isolate);
              CaptureAsyncStackTrace(isolate, promise, &builder);
            }
          }
        }
      } else {
        // The {promise_reaction_job_task} doesn't belong to an await (or
        // yield inside an async generator), but we might still be able to
        // find an async frame if we follow along the chain of promises on
        // the {promise_reaction_job_task}.
        Handle<HeapObject> promise_or_capability(
            promise_reaction_job_task->promise_or_capability(), isolate);
        if (promise_or_capability->IsJSPromise()) {
          Handle<JSPromise> promise =
              Handle<JSPromise>::cast(promise_or_capability);
          CaptureAsyncStackTrace(isolate, promise, &builder);
        }
      }
    }
  }

  Handle<FixedArray> stack_trace = builder.Build();
  TRACE_EVENT_END1(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"),
                   "CaptureStackTrace", "frameCount", stack_trace->length());
  return stack_trace;
}

}  // namespace

Handle<Object> Isolate::CaptureSimpleStackTrace(Handle<JSReceiver> error_object,
                                                FrameSkipMode mode,
                                                Handle<Object> caller) {
  int limit;
  if (FLAG_correctness_fuzzer_suppressions ||
      !GetStackTraceLimit(this, &limit)) {
    return factory()->undefined_value();
  }

  CaptureStackTraceOptions options;
  options.limit = limit;
  options.skip_mode = mode;
  options.capture_builtin_exit_frames = true;
  options.async_stack_trace = FLAG_async_stack_traces;
  options.filter_mode = StackTraceBuilder::CURRENT_SECURITY_CONTEXT;
  options.capture_only_frames_subject_to_debugging = false;

  return CaptureStackTrace(this, caller, options);
}

MaybeHandle<JSReceiver> Isolate::CaptureAndSetDetailedStackTrace(
    Handle<JSReceiver> error_object) {
  if (capture_stack_trace_for_uncaught_exceptions_) {
    // Capture stack trace for a detailed exception message.
    Handle<Name> key = factory()->detailed_stack_trace_symbol();
    Handle<FixedArray> stack_trace = CaptureCurrentStackTrace(
        stack_trace_for_uncaught_exceptions_frame_limit_,
        stack_trace_for_uncaught_exceptions_options_);
    RETURN_ON_EXCEPTION(
        this,
        Object::SetProperty(this, error_object, key, stack_trace,
                            StoreOrigin::kMaybeKeyed,
                            Just(ShouldThrow::kThrowOnError)),
        JSReceiver);
  }
  return error_object;
}

MaybeHandle<JSReceiver> Isolate::CaptureAndSetSimpleStackTrace(
    Handle<JSReceiver> error_object, FrameSkipMode mode,
    Handle<Object> caller) {
  // Capture stack trace for simple stack trace string formatting.
  Handle<Name> key = factory()->stack_trace_symbol();
  Handle<Object> stack_trace =
      CaptureSimpleStackTrace(error_object, mode, caller);
  RETURN_ON_EXCEPTION(this,
                      Object::SetProperty(this, error_object, key, stack_trace,
                                          StoreOrigin::kMaybeKeyed,
                                          Just(ShouldThrow::kThrowOnError)),
                      JSReceiver);
  return error_object;
}

Handle<FixedArray> Isolate::GetDetailedStackTrace(
    Handle<JSObject> error_object) {
  Handle<Name> key_detailed = factory()->detailed_stack_trace_symbol();
  Handle<Object> stack_trace =
      JSReceiver::GetDataProperty(error_object, key_detailed);
  if (stack_trace->IsFixedArray()) return Handle<FixedArray>::cast(stack_trace);
  return Handle<FixedArray>();
}

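// Returns the current abstract program counter (the bytecode address for
// unoptimized frames, the machine pc otherwise) and fills in one-based line
// and column numbers when script information is available.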
Address Isolate::GetAbstractPC(int* line, int* column) {
  JavaScriptFrameIterator it(this);

  if (it.done()) {
    *line = -1;
    *column = -1;
    return kNullAddress;
  }
  JavaScriptFrame* frame = it.frame();
  DCHECK(!frame->is_builtin());

  Handle<SharedFunctionInfo> shared = handle(frame->function().shared(), this);
  SharedFunctionInfo::EnsureSourcePositionsAvailable(this, shared);
  int position = frame->position();

  Object maybe_script = frame->function().shared().script();
  if (maybe_script.IsScript()) {
    Handle<Script> script(Script::cast(maybe_script), this);
    Script::PositionInfo info;
    Script::GetPositionInfo(script, position, &info, Script::WITH_OFFSET);
    *line = info.line + 1;
    *column = info.column + 1;
  } else {
    *line = position;
    *column = -1;
  }

  if (frame->is_unoptimized()) {
    UnoptimizedFrame* iframe = static_cast<UnoptimizedFrame*>(frame);
    Address bytecode_start =
        iframe->GetBytecodeArray().GetFirstBytecodeAddress();
    return bytecode_start + iframe->GetBytecodeOffset();
  }

  return frame->pc();
}

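// Captures the current stack as a FixedArray of frame data, applying the
// embedder-provided frame limit and, unless frames across security origins
// are explicitly exposed, filtering to the current security context.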
Handle<FixedArray> Isolate::CaptureCurrentStackTrace(
    int frame_limit, StackTrace::StackTraceOptions stack_trace_options) {
  CaptureStackTraceOptions options;
  options.limit = std::max(frame_limit, 0);  // Ensure no negative values.
  options.skip_mode = SKIP_NONE;
  options.capture_builtin_exit_frames = false;
  options.async_stack_trace = false;
  options.filter_mode =
      (stack_trace_options & StackTrace::kExposeFramesAcrossSecurityOrigins)
          ? StackTraceBuilder::ALL
          : StackTraceBuilder::CURRENT_SECURITY_CONTEXT;
  options.capture_only_frames_subject_to_debugging = true;

  return CaptureStackTrace(this, factory()->undefined_value(), options);
}

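// Prints the current stack to |out|. The stack_trace_nesting_level_ counter
// guards against reentrancy: if printing the stack itself triggers another
// stack print (a double fault), only the partially accumulated message is
// flushed.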
void Isolate::PrintStack(FILE* out, PrintStackMode mode) {
  if (stack_trace_nesting_level_ == 0) {
    stack_trace_nesting_level_++;
    StringStream::ClearMentionedObjectCache(this);
    HeapStringAllocator allocator;
    StringStream accumulator(&allocator);
    incomplete_message_ = &accumulator;
    PrintStack(&accumulator, mode);
    accumulator.OutputToFile(out);
    InitializeLoggingAndCounters();
    accumulator.Log(this);
    incomplete_message_ = nullptr;
    stack_trace_nesting_level_ = 0;
  } else if (stack_trace_nesting_level_ == 1) {
    stack_trace_nesting_level_++;
    base::OS::PrintError(
        "\n\nAttempt to print stack while printing stack (double fault)\n");
    base::OS::PrintError(
        "If you are lucky you may find a partial stack dump on stdout.\n\n");
    incomplete_message_->OutputToFile(out);
  }
}

static void PrintFrames(Isolate* isolate, StringStream* accumulator,
                        StackFrame::PrintMode mode) {
  StackFrameIterator it(isolate);
  for (int i = 0; !it.done(); it.Advance()) {
    it.frame()->Print(accumulator, mode, i++);
  }
}

void Isolate::PrintStack(StringStream* accumulator, PrintStackMode mode) {
  HandleScope scope(this);
  DCHECK(accumulator->IsMentionedObjectCacheClear(this));

  // Avoid printing anything if there are no frames.
  if (c_entry_fp(thread_local_top()) == 0) return;

  accumulator->Add(
      "\n==== JS stack trace =========================================\n\n");
  PrintFrames(this, accumulator, StackFrame::OVERVIEW);
  if (mode == kPrintStackVerbose) {
    accumulator->Add(
        "\n==== Details ================================================\n\n");
    PrintFrames(this, accumulator, StackFrame::DETAILS);
    accumulator->PrintMentionedObjectCache(this);
  }
  accumulator->Add("=====================\n\n");
}

void Isolate::SetFailedAccessCheckCallback(
    v8::FailedAccessCheckCallback callback) {
  thread_local_top()->failed_access_check_callback_ = callback;
}

void Isolate::ReportFailedAccessCheck(Handle<JSObject> receiver) {
  if (!thread_local_top()->failed_access_check_callback_) {
    return ScheduleThrow(*factory()->NewTypeError(MessageTemplate::kNoAccess));
  }

  DCHECK(receiver->IsAccessCheckNeeded());
  DCHECK(!context().is_null());

  // Get the data object from access check info.
  HandleScope scope(this);
  Handle<Object> data;
  {
    DisallowGarbageCollection no_gc;
    AccessCheckInfo access_check_info = AccessCheckInfo::Get(this, receiver);
    if (access_check_info.is_null()) {
      no_gc.Release();
      return ScheduleThrow(
          *factory()->NewTypeError(MessageTemplate::kNoAccess));
    }
    data = handle(access_check_info.data(), this);
  }

  // Leaving JavaScript.
  VMState<EXTERNAL> state(this);
  thread_local_top()->failed_access_check_callback_(
      v8::Utils::ToLocal(receiver), v8::ACCESS_HAS, v8::Utils::ToLocal(data));
}

bool Isolate::MayAccess(Handle<Context> accessing_context,
                        Handle<JSObject> receiver) {
  DCHECK(receiver->IsJSGlobalProxy() || receiver->IsAccessCheckNeeded());

  // Check for compatibility between the security tokens in the
  // current lexical context and the accessed object.

  // During bootstrapping, callback functions are not enabled yet.
  if (bootstrapper()->IsActive()) return true;
  {
    DisallowGarbageCollection no_gc;

    if (receiver->IsJSGlobalProxy()) {
      Object receiver_context = JSGlobalProxy::cast(*receiver).native_context();
      if (!receiver_context.IsContext()) return false;

      // Get the native context of the current top context. Avoid using
      // Isolate::native_context() because it creates a Handle.
      Context native_context =
          accessing_context->global_object().native_context();
      if (receiver_context == native_context) return true;

      if (Context::cast(receiver_context).security_token() ==
          native_context.security_token())
        return true;
    }
  }

  HandleScope scope(this);
  Handle<Object> data;
  v8::AccessCheckCallback callback = nullptr;
  {
    DisallowGarbageCollection no_gc;
    AccessCheckInfo access_check_info = AccessCheckInfo::Get(this, receiver);
    if (access_check_info.is_null()) return false;
    Object fun_obj = access_check_info.callback();
    callback = v8::ToCData<v8::AccessCheckCallback>(fun_obj);
    data = handle(access_check_info.data(), this);
  }

  LOG(this, ApiSecurityCheck());

  {
    // Leaving JavaScript.
    VMState<EXTERNAL> state(this);
    return callback(v8::Utils::ToLocal(accessing_context),
                    v8::Utils::ToLocal(receiver), v8::Utils::ToLocal(data));
  }
}

Object Isolate::StackOverflow() {
  // Whoever calls this method should not have overflowed the stack limit by
  // too much, otherwise we risk actually running out of stack space.
  // We allow for up to 8 kB of overflow, because we typically allow up to
  // 4 kB of overflow per frame in generated code, but we might call through
  // several smaller frames before reaching this method.
  // If this DCHECK fails, one of the frames on the stack should be augmented
  // by an additional stack check.
#if defined(V8_USE_ADDRESS_SANITIZER) || defined(MEMORY_SANITIZER)
  // Allow for a bit more overflow in sanitizer builds, because C++ frames take
  // significantly more space there.
  DCHECK_GE(GetCurrentStackPosition(), stack_guard()->real_climit() - 32 * KB);
#else
  DCHECK_GE(GetCurrentStackPosition(), stack_guard()->real_climit() - 8 * KB);
#endif

  if (FLAG_correctness_fuzzer_suppressions) {
    FATAL("Aborting on stack overflow");
  }

  DisallowJavascriptExecution no_js(this);
  HandleScope scope(this);

  Handle<JSFunction> fun = range_error_function();
  Handle<Object> msg = factory()->NewStringFromAsciiChecked(
      MessageFormatter::TemplateString(MessageTemplate::kStackOverflow));
  Handle<Object> options = factory()->undefined_value();
  Handle<Object> no_caller;
  Handle<JSObject> exception;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      this, exception,
      ErrorUtils::Construct(this, fun, fun, msg, options, SKIP_NONE, no_caller,
                            ErrorUtils::StackTraceCollection::kSimple));
  JSObject::AddProperty(this, exception, factory()->wasm_uncatchable_symbol(),
                        factory()->true_value(), NONE);

  Throw(*exception);

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap && FLAG_stress_compaction) {
    heap()->CollectAllGarbage(Heap::kNoGCFlags,
                              GarbageCollectionReason::kTesting);
  }
#endif  // VERIFY_HEAP

  return ReadOnlyRoots(heap()).exception();
}

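// Records the script and the start/end source positions from |location| on
// the exception object (under private error symbols) before throwing, so
// that ComputeLocationFromException can later recover the location.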
Object Isolate::ThrowAt(Handle<JSObject> exception, MessageLocation* location) {
  Handle<Name> key_start_pos = factory()->error_start_pos_symbol();
  Object::SetProperty(this, exception, key_start_pos,
                      handle(Smi::FromInt(location->start_pos()), this),
                      StoreOrigin::kMaybeKeyed,
                      Just(ShouldThrow::kThrowOnError))
      .Check();

  Handle<Name> key_end_pos = factory()->error_end_pos_symbol();
  Object::SetProperty(this, exception, key_end_pos,
                      handle(Smi::FromInt(location->end_pos()), this),
                      StoreOrigin::kMaybeKeyed,
                      Just(ShouldThrow::kThrowOnError))
      .Check();

  Handle<Name> key_script = factory()->error_script_symbol();
  Object::SetProperty(this, exception, key_script, location->script(),
                      StoreOrigin::kMaybeKeyed,
                      Just(ShouldThrow::kThrowOnError))
      .Check();

  return ThrowInternal(*exception, location);
}

Object Isolate::TerminateExecution() {
  return Throw(ReadOnlyRoots(this).termination_exception());
}

void Isolate::CancelTerminateExecution() {
  if (try_catch_handler()) {
    try_catch_handler()->has_terminated_ = false;
  }
  if (has_pending_exception() &&
      pending_exception() == ReadOnlyRoots(this).termination_exception()) {
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
  }
  if (has_scheduled_exception() &&
      scheduled_exception() == ReadOnlyRoots(this).termination_exception()) {
    thread_local_top()->external_caught_exception_ = false;
    clear_scheduled_exception();
  }
}

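// Queues |callback| under the execution access lock and arms the stack
// guard, so that the interrupt is serviced at the next interrupt check via
// InvokeApiInterruptCallbacks.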
void Isolate::RequestInterrupt(InterruptCallback callback, void* data) {
  ExecutionAccess access(this);
  api_interrupts_queue_.push(InterruptEntry(callback, data));
  stack_guard()->RequestApiInterrupt();
}

void Isolate::InvokeApiInterruptCallbacks() {
  RCS_SCOPE(this, RuntimeCallCounterId::kInvokeApiInterruptCallbacks);
  // Note: the callback below must be invoked outside of the execution access
  // lock.
  while (true) {
    InterruptEntry entry;
    {
      ExecutionAccess access(this);
      if (api_interrupts_queue_.empty()) return;
      entry = api_interrupts_queue_.front();
      api_interrupts_queue_.pop();
    }
    VMState<EXTERNAL> state(this);
    HandleScope handle_scope(this);
    entry.first(reinterpret_cast<v8::Isolate*>(this), entry.second);
  }
}

namespace {

void ReportBootstrappingException(Handle<Object> exception,
                                  MessageLocation* location) {
  base::OS::PrintError("Exception thrown during bootstrapping\n");
  if (location == nullptr || location->script().is_null()) return;
  // We are bootstrapping and caught an error where the location is set
  // and we have a script for the location.
  // In this case we could have an extension (or an internal error
  // somewhere) and we print out the line number at which the error occurred
  // to the console for easier debugging.
  int line_number =
      location->script()->GetLineNumber(location->start_pos()) + 1;
  if (exception->IsString() && location->script()->name().IsString()) {
    base::OS::PrintError(
        "Extension or internal compilation error: %s in %s at line %d.\n",
        String::cast(*exception).ToCString().get(),
        String::cast(location->script()->name()).ToCString().get(),
        line_number);
  } else if (location->script()->name().IsString()) {
    base::OS::PrintError(
        "Extension or internal compilation error in %s at line %d.\n",
        String::cast(location->script()->name()).ToCString().get(),
        line_number);
  } else if (exception->IsString()) {
    base::OS::PrintError("Extension or internal compilation error: %s.\n",
                         String::cast(*exception).ToCString().get());
  } else {
    base::OS::PrintError("Extension or internal compilation error.\n");
  }
#ifdef OBJECT_PRINT
  // Since comments and empty lines have been stripped from the source of
  // builtins, print the actual source here so that line numbers match.
  if (location->script()->source().IsString()) {
    Handle<String> src(String::cast(location->script()->source()),
                       location->script()->GetIsolate());
    PrintF("Failing script:");
    int len = src->length();
    if (len == 0) {
      PrintF(" <not available>\n");
    } else {
      PrintF("\n");
      line_number = 1;
      PrintF("%5d: ", line_number);
      for (int i = 0; i < len; i++) {
        uint16_t character = src->Get(i);
        PrintF("%c", character);
        if (character == '\n' && i < len - 2) {
          PrintF("%5d: ", ++line_number);
        }
      }
      PrintF("\n");
    }
  }
#endif
}

}  // anonymous namespace

Handle<JSMessageObject> Isolate::CreateMessageOrAbort(
    Handle<Object> exception, MessageLocation* location) {
  Handle<JSMessageObject> message_obj = CreateMessage(exception, location);

  // If the abort-on-uncaught-exception flag is specified, and if the
  // embedder didn't specify a custom uncaught exception callback,
  // or if the custom callback determined that V8 should abort, then
  // abort.
  if (FLAG_abort_on_uncaught_exception) {
    CatchType prediction = PredictExceptionCatcher();
    if ((prediction == NOT_CAUGHT || prediction == CAUGHT_BY_EXTERNAL) &&
        (!abort_on_uncaught_exception_callback_ ||
         abort_on_uncaught_exception_callback_(
             reinterpret_cast<v8::Isolate*>(this)))) {
      // Prevent endless recursion.
      FLAG_abort_on_uncaught_exception = false;
      // This flag is intended for use by JavaScript developers, so
      // print a user-friendly stack trace (not an internal one).
      PrintF(stderr, "%s\n\nFROM\n",
             MessageHandler::GetLocalizedMessage(this, message_obj).get());
      std::ostringstream stack_trace_stream;
      PrintCurrentStackTrace(stack_trace_stream);
      PrintF(stderr, "%s", stack_trace_stream.str().c_str());
      base::OS::Abort();
    }
  }

  return message_obj;
}

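// The central throw path: optionally prints the exception, notifies the
// debugger, creates a pending message unless one is being rethrown, and
// records the exception as the pending exception. Returns the exception
// sentinel that generated code tests for.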
Object Isolate::ThrowInternal(Object raw_exception, MessageLocation* location) {
  DCHECK(!has_pending_exception());
  IF_WASM(DCHECK_IMPLIES, trap_handler::IsTrapHandlerEnabled(),
          !trap_handler::IsThreadInWasm());

  HandleScope scope(this);
  Handle<Object> exception(raw_exception, this);

  if (FLAG_print_all_exceptions) {
    PrintF("=========================================================\n");
    PrintF("Exception thrown:\n");
    if (location) {
      Handle<Script> script = location->script();
      Handle<Object> name(script->GetNameOrSourceURL(), this);
      PrintF("at ");
      if (name->IsString() && String::cast(*name).length() > 0)
        String::cast(*name).PrintOn(stdout);
      else
        PrintF("<anonymous>");
// Script::GetLineNumber and Script::GetColumnNumber can allocate on the heap to
// initialize the line_ends array, so be careful when calling them.
#ifdef DEBUG
      if (AllowGarbageCollection::IsAllowed()) {
#else
      if ((false)) {
#endif
        PrintF(", %d:%d - %d:%d\n",
               Script::GetLineNumber(script, location->start_pos()) + 1,
               Script::GetColumnNumber(script, location->start_pos()),
               Script::GetLineNumber(script, location->end_pos()) + 1,
               Script::GetColumnNumber(script, location->end_pos()));
        // Make sure to update the raw exception pointer in case it moved.
        raw_exception = *exception;
      } else {
        PrintF(", line %d\n", script->GetLineNumber(location->start_pos()) + 1);
      }
    }
    raw_exception.Print();
    PrintF("Stack Trace:\n");
    PrintStack(stdout);
    PrintF("=========================================================\n");
  }

  // Determine whether a message needs to be created for the given exception
  // depending on the following criteria:
  // 1) External v8::TryCatch missing: Always create a message because any
  //    JavaScript handler for a finally-block might re-throw to top-level.
  // 2) External v8::TryCatch exists: Only create a message if the handler
  //    captures messages or is verbose (which reports despite the catch).
  // 3) ReThrow from v8::TryCatch: The message from a previous throw still
  //    exists and we preserve it instead of creating a new message.
  bool requires_message = try_catch_handler() == nullptr ||
                          try_catch_handler()->is_verbose_ ||
                          try_catch_handler()->capture_message_;
  bool rethrowing_message = thread_local_top()->rethrowing_message_;

  thread_local_top()->rethrowing_message_ = false;

  // Notify debugger of exception.
  if (is_catchable_by_javascript(raw_exception)) {
    base::Optional<Object> maybe_exception = debug()->OnThrow(exception);
    if (maybe_exception.has_value()) {
      return *maybe_exception;
    }
  }

  // Generate the message if required.
  if (requires_message && !rethrowing_message) {
    MessageLocation computed_location;
    // If no location was specified we try to use a computed one instead.
    if (location == nullptr && ComputeLocation(&computed_location)) {
      location = &computed_location;
    }
    if (bootstrapper()->IsActive()) {
      // It's not safe to try to make message objects or collect stack traces
      // while the bootstrapper is active since the infrastructure may not have
      // been properly initialized.
      ReportBootstrappingException(exception, location);
    } else {
      Handle<Object> message_obj = CreateMessageOrAbort(exception, location);
      set_pending_message(*message_obj);
    }
  }

  // Set the exception being thrown.
  set_pending_exception(*exception);
  return ReadOnlyRoots(heap()).exception();
}

Object Isolate::ReThrow(Object exception) {
  DCHECK(!has_pending_exception());

  // Set the exception being re-thrown.
  set_pending_exception(exception);
  return ReadOnlyRoots(heap()).exception();
}

namespace {
#if V8_ENABLE_WEBASSEMBLY
// This scope will set the thread-in-wasm flag after the execution of all
// destructors. The thread-in-wasm flag is only set when the scope gets enabled.
class SetThreadInWasmFlagScope {
 public:
  SetThreadInWasmFlagScope() {
    DCHECK_IMPLIES(trap_handler::IsTrapHandlerEnabled(),
                   !trap_handler::IsThreadInWasm());
  }

  ~SetThreadInWasmFlagScope() {
    if (enabled_) trap_handler::SetThreadInWasm();
  }

  void Enable() { enabled_ = true; }

 private:
  bool enabled_ = false;
};
#endif  // V8_ENABLE_WEBASSEMBLY
}  // namespace

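// Walks the stack from the top looking for a frame that can handle the
// pending exception. For the handler found, records the context, code
// entrypoint, frame pointer, and stack pointer in the thread-local top (to
// be consumed by CEntry), clears the pending exception, and returns it.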
Object Isolate::UnwindAndFindHandler() {
#if V8_ENABLE_WEBASSEMBLY
  // Create the {SetThreadInWasmFlagScope} first in this function so that its
  // destructor gets called after all the other destructors. It is important
  // that the destructor sets the thread-in-wasm flag after all other
  // destructors. The other destructors may cause exceptions, e.g. ASan on
  // Windows, which would invalidate the thread-in-wasm flag when the wasm trap
  // handler handles such non-wasm exceptions.
  SetThreadInWasmFlagScope set_thread_in_wasm_flag_scope;
#endif  // V8_ENABLE_WEBASSEMBLY
  Object exception = pending_exception();

  auto FoundHandler = [&](Context context, Address instruction_start,
                          intptr_t handler_offset,
                          Address constant_pool_address, Address handler_sp,
                          Address handler_fp) {
    // Store information to be consumed by the CEntry.
    thread_local_top()->pending_handler_context_ = context;
    thread_local_top()->pending_handler_entrypoint_ =
        instruction_start + handler_offset;
    thread_local_top()->pending_handler_constant_pool_ = constant_pool_address;
    thread_local_top()->pending_handler_fp_ = handler_fp;
    thread_local_top()->pending_handler_sp_ = handler_sp;

    // Return and clear pending exception. The contract is that:
    // (1) the pending exception is stored in one place (no duplication), and
    // (2) within generated-code land, that one place is the return register.
    // If/when we unwind back into C++ (returning to the JSEntry stub,
    // or to Execution::CallWasm), the returned exception will be sent
    // back to isolate->set_pending_exception(...).
    clear_pending_exception();
    return exception;
  };

  // Termination exceptions need special handling: they are uncatchable by
  // JavaScript and Wasm code, so we unwind the handlers until the top ENTRY
  // handler is found.
  bool catchable_by_js = is_catchable_by_javascript(exception);

  // Compute handler and stack unwinding information by performing a full walk
  // over the stack and dispatching according to the frame type.
  for (StackFrameIterator iter(this);; iter.Advance()) {
    // Handler must exist.
    DCHECK(!iter.done());

    StackFrame* frame = iter.frame();

    switch (frame->type()) {
      case StackFrame::ENTRY:
      case StackFrame::CONSTRUCT_ENTRY: {
        // For JSEntry frames we always have a handler.
        StackHandler* handler = frame->top_handler();

        // Restore the next handler.
        thread_local_top()->handler_ = handler->next_address();

        // Gather information from the handler.
        Code code = frame->LookupCode();
        HandlerTable table(code);
        return FoundHandler(Context(), code.InstructionStart(this, frame->pc()),
                            table.LookupReturn(0), code.constant_pool(),
                            handler->address() + StackHandlerConstants::kSize,
                            0);
      }

#if V8_ENABLE_WEBASSEMBLY
      case StackFrame::C_WASM_ENTRY: {
        StackHandler* handler = frame->top_handler();
        thread_local_top()->handler_ = handler->next_address();
        Code code = frame->LookupCode();
        HandlerTable table(code);
        Address instruction_start = code.InstructionStart(this, frame->pc());
        int return_offset = static_cast<int>(frame->pc() - instruction_start);
        int handler_offset = table.LookupReturn(return_offset);
        DCHECK_NE(-1, handler_offset);
        // Compute the stack pointer from the frame pointer. This ensures that
        // argument slots on the stack are dropped as returning would.
        Address return_sp = frame->fp() +
                            StandardFrameConstants::kFixedFrameSizeAboveFp -
                            code.stack_slots() * kSystemPointerSize;
        return FoundHandler(Context(), instruction_start, handler_offset,
                            code.constant_pool(), return_sp, frame->fp());
      }

      case StackFrame::WASM: {
        if (!is_catchable_by_wasm(exception)) break;

        // For WebAssembly frames we perform a lookup in the handler table.
        // This code ref scope is here to avoid a check failure when looking up
        // the code. It's not actually necessary to keep the code alive as it's
        // currently being executed.
        wasm::WasmCodeRefScope code_ref_scope;
        WasmFrame* wasm_frame = static_cast<WasmFrame*>(frame);
        wasm::WasmCode* wasm_code =
            wasm::GetWasmCodeManager()->LookupCode(frame->pc());
        int offset = wasm_frame->LookupExceptionHandlerInTable();
        if (offset < 0) break;
        wasm::GetWasmEngine()->SampleCatchEvent(this);
        // Compute the stack pointer from the frame pointer. This ensures that
        // argument slots on the stack are dropped as returning would.
        Address return_sp = frame->fp() +
                            StandardFrameConstants::kFixedFrameSizeAboveFp -
                            wasm_code->stack_slots() * kSystemPointerSize;

        // This is going to be handled by WebAssembly, so we need to set the TLS
        // flag. The {SetThreadInWasmFlagScope} will set the flag after all
        // destructors have been executed.
        set_thread_in_wasm_flag_scope.Enable();
        return FoundHandler(Context(), wasm_code->instruction_start(), offset,
                            wasm_code->constant_pool(), return_sp, frame->fp());
      }

      case StackFrame::WASM_COMPILE_LAZY: {
        // Can only fail directly on invocation. This happens if an invalid
        // function was validated lazily.
        DCHECK(FLAG_wasm_lazy_validation);
        break;
      }
#endif  // V8_ENABLE_WEBASSEMBLY

      case StackFrame::OPTIMIZED: {
        // For optimized frames we perform a lookup in the handler table.
        if (!catchable_by_js) break;
        OptimizedFrame* js_frame = static_cast<OptimizedFrame*>(frame);
        Code code = frame->LookupCode();
        int offset = js_frame->LookupExceptionHandlerInTable(nullptr, nullptr);
        if (offset < 0) break;
        // Compute the stack pointer from the frame pointer. This ensures
        // that argument slots on the stack are dropped as returning would.
        Address return_sp = frame->fp() +
                            StandardFrameConstants::kFixedFrameSizeAboveFp -
                            code.stack_slots() * kSystemPointerSize;

        // TODO(bmeurer): Turbofanned BUILTIN frames appear as OPTIMIZED,
        // but do not have a code kind of TURBOFAN.
        if (CodeKindCanDeoptimize(code.kind()) &&
            code.marked_for_deoptimization()) {
          // If the target code is lazy deoptimized, we jump to the original
          // return address, but we make a note that we are throwing, so
          // that the deoptimizer can do the right thing.
          offset = static_cast<int>(frame->pc() - code.entry());
          set_deoptimizer_lazy_throw(true);
        }

        return FoundHandler(Context(), code.InstructionStart(this, frame->pc()),
                            offset, code.constant_pool(), return_sp,
                            frame->fp());
      }

      case StackFrame::STUB: {
        // Some stubs are able to handle exceptions.
        if (!catchable_by_js) break;
        StubFrame* stub_frame = static_cast<StubFrame*>(frame);
#if defined(DEBUG) && V8_ENABLE_WEBASSEMBLY
        wasm::WasmCodeRefScope code_ref_scope;
        DCHECK_NULL(wasm::GetWasmCodeManager()->LookupCode(frame->pc()));
#endif  // defined(DEBUG) && V8_ENABLE_WEBASSEMBLY
        Code code = stub_frame->LookupCode();
        if (!code.IsCode() || code.kind() != CodeKind::BUILTIN ||
            !code.has_handler_table() || !code.is_turbofanned()) {
          break;
        }

        int offset = stub_frame->LookupExceptionHandlerInTable();
        if (offset < 0) break;

        // Compute the stack pointer from the frame pointer. This ensures
        // that argument slots on the stack are dropped as returning would.
        Address return_sp = frame->fp() +
                            StandardFrameConstants::kFixedFrameSizeAboveFp -
                            code.stack_slots() * kSystemPointerSize;

        return FoundHandler(Context(), code.InstructionStart(this, frame->pc()),
                            offset, code.constant_pool(), return_sp,
                            frame->fp());
      }

      case StackFrame::INTERPRETED:
      case StackFrame::BASELINE: {
        // For interpreted frames we perform a range lookup in the handler
        // table.
        if (!catchable_by_js) break;
        UnoptimizedFrame* js_frame = UnoptimizedFrame::cast(frame);
        int register_slots = UnoptimizedFrameConstants::RegisterStackSlotCount(
            js_frame->GetBytecodeArray().register_count());
        int context_reg = 0;  // Will contain register index holding context.
        int offset =
            js_frame->LookupExceptionHandlerInTable(&context_reg, nullptr);
        if (offset < 0) break;
        // Compute the stack pointer from the frame pointer. This ensures that
        // argument slots on the stack are dropped as returning would.
        // Note: This is only needed for interpreted frames that have been
        //       materialized by the deoptimizer. If there is a handler frame
        //       in between then {frame->sp()} would already be correct.
        Address return_sp = frame->fp() -
                            InterpreterFrameConstants::kFixedFrameSizeFromFp -
                            register_slots * kSystemPointerSize;

        // Patch the bytecode offset in the interpreted frame to reflect the
        // position of the exception handler. The special builtin below will
        // take care of continuing to dispatch at that position. Also restore
        // the correct context for the handler from the interpreter register.
        Context context =
            Context::cast(js_frame->ReadInterpreterRegister(context_reg));
        DCHECK(context.IsContext());

        if (frame->is_baseline()) {
          BaselineFrame* sp_frame = BaselineFrame::cast(js_frame);
          Code code = sp_frame->LookupCode();
          intptr_t pc_offset = sp_frame->GetPCForBytecodeOffset(offset);
          // Patch the context register directly on the frame, so that we don't
          // need to have a context read + write in the baseline code.
          sp_frame->PatchContext(context);
          return FoundHandler(
              Context(), code.InstructionStart(this, sp_frame->sp()), pc_offset,
              code.constant_pool(), return_sp, sp_frame->fp());
        } else {
          InterpretedFrame::cast(js_frame)->PatchBytecodeOffset(
              static_cast<int>(offset));

          Code code = builtins()->code(Builtin::kInterpreterEnterAtBytecode);
          return FoundHandler(context, code.InstructionStart(), 0,
                              code.constant_pool(), return_sp, frame->fp());
        }
      }

      case StackFrame::BUILTIN:
        // For builtin frames we are guaranteed not to find a handler.
        if (catchable_by_js) {
          CHECK_EQ(-1, BuiltinFrame::cast(frame)->LookupExceptionHandlerInTable(
                           nullptr, nullptr));
        }
        break;

      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: {
        // Builtin continuation frames with catch can handle exceptions.
        if (!catchable_by_js) break;
        JavaScriptBuiltinContinuationWithCatchFrame* js_frame =
            JavaScriptBuiltinContinuationWithCatchFrame::cast(frame);
        js_frame->SetException(exception);

        // Reconstruct the stack pointer from the frame pointer.
        Address return_sp = js_frame->fp() - js_frame->GetSPToFPDelta();
        Code code = js_frame->LookupCode();
        return FoundHandler(Context(), code.InstructionStart(), 0,
                            code.constant_pool(), return_sp, frame->fp());
      }

      default:
        // All other frame types cannot handle exceptions.
        break;
    }

    if (frame->is_optimized()) {
      // Remove per-frame stored materialized objects.
      bool removed = materialized_object_store_->Remove(frame->fp());
      USE(removed);
      // If there were any materialized objects, the code should be
      // marked for deopt.
      DCHECK_IMPLIES(removed, frame->LookupCode().marked_for_deoptimization());
    }
  }

  UNREACHABLE();
}

namespace {
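// Predicts whether an exception thrown at the current position of |frame|
// would be caught, and how, by consulting the handler tables (for optimized
// code, via the frame summaries of the corresponding unoptimized code).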
HandlerTable::CatchPrediction PredictException(JavaScriptFrame* frame) {
  HandlerTable::CatchPrediction prediction;
  if (frame->is_optimized()) {
    if (frame->LookupExceptionHandlerInTable(nullptr, nullptr) > 0) {
      // This optimized frame will catch. Its handler table does not include
      // exception prediction, so we need to use the corresponding handler
      // tables on the unoptimized code objects.
      std::vector<FrameSummary> summaries;
      frame->Summarize(&summaries);
      for (size_t i = summaries.size(); i != 0; i--) {
        const FrameSummary& summary = summaries[i - 1];
        Handle<AbstractCode> code = summary.AsJavaScript().abstract_code();
        if (code->IsCode() && code->kind() == CodeKind::BUILTIN) {
          prediction = code->GetCode().GetBuiltinCatchPrediction();
          if (prediction == HandlerTable::UNCAUGHT) continue;
          return prediction;
        }

        // Must have been constructed from a bytecode array.
        CHECK_EQ(CodeKind::INTERPRETED_FUNCTION, code->kind());
        int code_offset = summary.code_offset();
        HandlerTable table(code->GetBytecodeArray());
        int index = table.LookupRange(code_offset, nullptr, &prediction);
        if (index <= 0) continue;
        if (prediction == HandlerTable::UNCAUGHT) continue;
        return prediction;
      }
    }
  } else if (frame->LookupExceptionHandlerInTable(nullptr, &prediction) > 0) {
    return prediction;
  }
  return HandlerTable::UNCAUGHT;
}

Isolate::CatchType ToCatchType(HandlerTable::CatchPrediction prediction) {
  switch (prediction) {
    case HandlerTable::UNCAUGHT:
      return Isolate::NOT_CAUGHT;
    case HandlerTable::CAUGHT:
      return Isolate::CAUGHT_BY_JAVASCRIPT;
    case HandlerTable::PROMISE:
      return Isolate::CAUGHT_BY_PROMISE;
    case HandlerTable::UNCAUGHT_ASYNC_AWAIT:
    case HandlerTable::ASYNC_AWAIT:
      return Isolate::CAUGHT_BY_ASYNC_AWAIT;
    default:
      UNREACHABLE();
  }
}
}  // anonymous namespace

Isolate::CatchType Isolate::PredictExceptionCatcher() {
  Address external_handler = thread_local_top()->try_catch_handler_address();
  if (IsExternalHandlerOnTop(Object())) return CAUGHT_BY_EXTERNAL;

  // Search for an exception handler by performing a full walk over the stack.
  for (StackFrameIterator iter(this); !iter.done(); iter.Advance()) {
    StackFrame* frame = iter.frame();

    switch (frame->type()) {
      case StackFrame::ENTRY:
      case StackFrame::CONSTRUCT_ENTRY: {
        Address entry_handler = frame->top_handler()->next_address();
        // The exception has been externally caught if and only if there is an
        // external handler which is on top of the top-most JS_ENTRY handler.
        if (external_handler != kNullAddress &&
            !try_catch_handler()->is_verbose_) {
          if (entry_handler == kNullAddress ||
              entry_handler > external_handler) {
            return CAUGHT_BY_EXTERNAL;
          }
        }
      } break;

      // For JavaScript frames we perform a lookup in the handler table.
      case StackFrame::OPTIMIZED:
      case StackFrame::INTERPRETED:
      case StackFrame::BASELINE:
      case StackFrame::BUILTIN: {
        JavaScriptFrame* js_frame = JavaScriptFrame::cast(frame);
        Isolate::CatchType prediction = ToCatchType(PredictException(js_frame));
        if (prediction == NOT_CAUGHT) break;
        return prediction;
      }

      case StackFrame::STUB: {
        Handle<Code> code(frame->LookupCode(), this);
        if (!code->IsCode() || code->kind() != CodeKind::BUILTIN ||
            !code->has_handler_table() || !code->is_turbofanned()) {
          break;
        }

        CatchType prediction = ToCatchType(code->GetBuiltinCatchPrediction());
        if (prediction != NOT_CAUGHT) return prediction;
      } break;

      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: {
        Handle<Code> code(frame->LookupCode(), this);
        CatchType prediction = ToCatchType(code->GetBuiltinCatchPrediction());
        if (prediction != NOT_CAUGHT) return prediction;
      } break;

      default:
        // All other frame types cannot handle exceptions.
        break;
    }
  }

  // Handler not found.
  return NOT_CAUGHT;
}

Object Isolate::ThrowIllegalOperation() {
  if (FLAG_stack_trace_on_illegal) PrintStack(stdout);
  return Throw(ReadOnlyRoots(heap()).illegal_access_string());
}

void Isolate::ScheduleThrow(Object exception) {
  // When scheduling a throw we first throw the exception, so that the error
  // is reported if it is uncaught, before we reschedule it.
  Throw(exception);
  PropagatePendingExceptionToExternalTryCatch();
  if (has_pending_exception()) {
    set_scheduled_exception(pending_exception());
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
  }
}

void Isolate::RestorePendingMessageFromTryCatch(v8::TryCatch* handler) {
  DCHECK(handler == try_catch_handler());
  DCHECK(handler->HasCaught());
  DCHECK(handler->rethrow_);
  DCHECK(handler->capture_message_);
  Object message(reinterpret_cast<Address>(handler->message_obj_));
  DCHECK(message.IsJSMessageObject() || message.IsTheHole(this));
  set_pending_message(message);
}

void Isolate::CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler) {
  DCHECK(has_scheduled_exception());
  if (reinterpret_cast<void*>(scheduled_exception().ptr()) ==
      handler->exception_) {
    DCHECK_NE(scheduled_exception(),
              ReadOnlyRoots(heap()).termination_exception());
    clear_scheduled_exception();
  } else {
    DCHECK_EQ(scheduled_exception(),
              ReadOnlyRoots(heap()).termination_exception());
    // Clear termination once we returned from all V8 frames.
    if (thread_local_top()->CallDepthIsZero()) {
      thread_local_top()->external_caught_exception_ = false;
      clear_scheduled_exception();
    }
  }
  if (reinterpret_cast<void*>(thread_local_top()->pending_message_.ptr()) ==
      handler->message_obj_) {
    clear_pending_message();
  }
}

Object Isolate::PromoteScheduledException() {
  Object thrown = scheduled_exception();
  clear_scheduled_exception();
  // Re-throw the exception to avoid getting repeated error reporting.
  return ReThrow(thrown);
}

void Isolate::PrintCurrentStackTrace(std::ostream& out) {
  CaptureStackTraceOptions options;
  options.limit = 0;
  options.skip_mode = SKIP_NONE;
  options.capture_builtin_exit_frames = true;
  options.async_stack_trace = FLAG_async_stack_traces;
  options.filter_mode = StackTraceBuilder::CURRENT_SECURITY_CONTEXT;
  options.capture_only_frames_subject_to_debugging = false;

  Handle<FixedArray> frames =
      CaptureStackTrace(this, this->factory()->undefined_value(), options);

  IncrementalStringBuilder builder(this);
  for (int i = 0; i < frames->length(); ++i) {
    Handle<StackFrameInfo> frame(StackFrameInfo::cast(frames->get(i)), this);
    SerializeStackFrameInfo(this, frame, &builder);
  }

  Handle<String> stack_trace = builder.Finish().ToHandleChecked();
  stack_trace->PrintOn(out);
}

bool Isolate::ComputeLocation(MessageLocation* target) {
  StackTraceFrameIterator it(this);
  if (it.done()) return false;
  // Compute the location from the function and the relocation info of the
  // baseline code. For optimized code this will use the deoptimization
  // information to get canonical location information.
#if V8_ENABLE_WEBASSEMBLY
  wasm::WasmCodeRefScope code_ref_scope;
#endif  // V8_ENABLE_WEBASSEMBLY
  FrameSummary summary = it.GetTopValidFrame();
  Handle<SharedFunctionInfo> shared;
  Handle<Object> script = summary.script();
  if (!script->IsScript() || Script::cast(*script).source().IsUndefined(this)) {
    return false;
  }

  if (summary.IsJavaScript()) {
    shared = handle(summary.AsJavaScript().function()->shared(), this);
  }
  if (summary.AreSourcePositionsAvailable()) {
    int pos = summary.SourcePosition();
    *target =
        MessageLocation(Handle<Script>::cast(script), pos, pos + 1, shared);
  } else {
    *target = MessageLocation(Handle<Script>::cast(script), shared,
                              summary.code_offset());
  }
  return true;
}

bool Isolate::ComputeLocationFromException(MessageLocation* target,
                                           Handle<Object> exception) {
  if (!exception->IsJSObject()) return false;

  Handle<Name> start_pos_symbol = factory()->error_start_pos_symbol();
  Handle<Object> start_pos = JSReceiver::GetDataProperty(
      Handle<JSObject>::cast(exception), start_pos_symbol);
  if (!start_pos->IsSmi()) return false;
  int start_pos_value = Handle<Smi>::cast(start_pos)->value();

  Handle<Name> end_pos_symbol = factory()->error_end_pos_symbol();
  Handle<Object> end_pos = JSReceiver::GetDataProperty(
      Handle<JSObject>::cast(exception), end_pos_symbol);
  if (!end_pos->IsSmi()) return false;
  int end_pos_value = Handle<Smi>::cast(end_pos)->value();

  Handle<Name> script_symbol = factory()->error_script_symbol();
  Handle<Object> script = JSReceiver::GetDataProperty(
      Handle<JSObject>::cast(exception), script_symbol);
  if (!script->IsScript()) return false;

  Handle<Script> cast_script(Script::cast(*script), this);
  *target = MessageLocation(cast_script, start_pos_value, end_pos_value);
  return true;
}

bool Isolate::ComputeLocationFromStackTrace(MessageLocation* target,
                                            Handle<Object> exception) {
  if (!exception->IsJSObject()) return false;
  Handle<Name> key = factory()->stack_trace_symbol();
  Handle<Object> property =
      JSReceiver::GetDataProperty(Handle<JSObject>::cast(exception), key);
  if (!property->IsFixedArray()) return false;
  Handle<FixedArray> stack = Handle<FixedArray>::cast(property);
  for (int i = 0; i < stack->length(); i++) {
    Handle<StackFrameInfo> frame(StackFrameInfo::cast(stack->get(i)), this);
    if (StackFrameInfo::ComputeLocation(frame, target)) return true;
  }
  return false;
}

Handle<JSMessageObject> Isolate::CreateMessage(Handle<Object> exception,
                                               MessageLocation* location) {
  Handle<FixedArray> stack_trace_object;
  if (capture_stack_trace_for_uncaught_exceptions_) {
    if (exception->IsJSError()) {
      // We fetch the stack trace that corresponds to this error object.
      // If the lookup fails, the exception is probably not a valid Error
      // object. In that case, we fall through and capture the stack trace
      // at this throw site.
      stack_trace_object =
          GetDetailedStackTrace(Handle<JSObject>::cast(exception));
    }
    if (stack_trace_object.is_null()) {
      // Not an error object, we capture stack and location at throw site.
      stack_trace_object = CaptureCurrentStackTrace(
          stack_trace_for_uncaught_exceptions_frame_limit_,
          stack_trace_for_uncaught_exceptions_options_);
    }
  }
  MessageLocation computed_location;
  if (location == nullptr &&
      (ComputeLocationFromException(&computed_location, exception) ||
       ComputeLocationFromStackTrace(&computed_location, exception) ||
       ComputeLocation(&computed_location))) {
    location = &computed_location;
  }

  return MessageHandler::MakeMessageObject(
      this, MessageTemplate::kUncaughtException, location, exception,
      stack_trace_object);
}

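// Returns true if the exception would first reach a JavaScript handler,
// i.e. the top-most JS_ENTRY handler sits above any external v8::TryCatch
// on the stack.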
bool Isolate::IsJavaScriptHandlerOnTop(Object exception) {
  DCHECK_NE(ReadOnlyRoots(heap()).the_hole_value(), exception);

  // For uncatchable exceptions, the JavaScript handler cannot be on top.
  if (!is_catchable_by_javascript(exception)) return false;

  // Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
  Address entry_handler = Isolate::handler(thread_local_top());
  if (entry_handler == kNullAddress) return false;

  // Get the address of the external handler so we can compare the address to
  // determine which one is closer to the top of the stack.
  Address external_handler = thread_local_top()->try_catch_handler_address();
  if (external_handler == kNullAddress) return true;

  // The exception has been externally caught if and only if there is an
  // external handler which is on top of the top-most JS_ENTRY handler.
  //
  // Note, that finally clauses would re-throw an exception unless it's aborted
  // by jumps in control flow (like return, break, etc.) and we'll have another
  // chance to set proper v8::TryCatch later.
  return (entry_handler < external_handler);
}

bool Isolate::IsExternalHandlerOnTop(Object exception) {
  DCHECK_NE(ReadOnlyRoots(heap()).the_hole_value(), exception);

  // Get the address of the external handler so we can compare the address to
  // determine which one is closer to the top of the stack.
  Address external_handler = thread_local_top()->try_catch_handler_address();
  if (external_handler == kNullAddress) return false;

  // For uncatchable exceptions, the external handler is always on top.
  if (!is_catchable_by_javascript(exception)) return true;

  // Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
  Address entry_handler = Isolate::handler(thread_local_top());
  if (entry_handler == kNullAddress) return true;

  // The exception has been externally caught if and only if there is an
  // external handler which is on top of the top-most JS_ENTRY handler.
  //
  // Note, that finally clauses would re-throw an exception unless it's aborted
  // by jumps in control flow (like return, break, etc.) and we'll have another
  // chance to set proper v8::TryCatch later.
  return (entry_handler > external_handler);
}

std::vector<MemoryRange>* Isolate::GetCodePages() const {
  return code_pages_.load(std::memory_order_acquire);
}

void Isolate::SetCodePages(std::vector<MemoryRange>* new_code_pages) {
  code_pages_.store(new_code_pages, std::memory_order_release);
}

void Isolate::ReportPendingMessages() {
  DCHECK(AllowExceptions::IsAllowed(this));

  // The embedder might run script in response to an exception.
  AllowJavascriptExecutionDebugOnly allow_script(this);

  Object exception_obj = pending_exception();

  // Try to propagate the exception to an external v8::TryCatch handler. If
  // propagation was unsuccessful, then we will get another chance at reporting
  // the pending message if the exception is re-thrown.
  bool has_been_propagated = PropagatePendingExceptionToExternalTryCatch();
  if (!has_been_propagated) return;

  // Clear the pending message object early to avoid endless recursion.
  Object message_obj = pending_message();
  clear_pending_message();

  // For uncatchable exceptions we do nothing. If needed, the exception and the
  // message have already been propagated to v8::TryCatch.
  if (!is_catchable_by_javascript(exception_obj)) return;

  // Determine whether the message needs to be reported to all message
  // handlers, depending on whether an external v8::TryCatch or an internal
  // JavaScript handler is on top.
  bool should_report_exception;
  if (IsExternalHandlerOnTop(exception_obj)) {
    // Only report the exception if the external handler is verbose.
    should_report_exception = try_catch_handler()->is_verbose_;
  } else {
    // Report the exception if it isn't caught by JavaScript code.
    should_report_exception = !IsJavaScriptHandlerOnTop(exception_obj);
  }

  // Actually report the pending message to all message handlers.
  if (!message_obj.IsTheHole(this) && should_report_exception) {
    HandleScope scope(this);
    Handle<JSMessageObject> message(JSMessageObject::cast(message_obj), this);
    Handle<Object> exception(exception_obj, this);
    Handle<Script> script(message->script(), this);
    // Clear the exception and restore it afterwards, otherwise
    // CollectSourcePositions will abort.
    clear_pending_exception();
    JSMessageObject::EnsureSourcePositionsAvailable(this, message);
    set_pending_exception(*exception);
    int start_pos = message->GetStartPosition();
    int end_pos = message->GetEndPosition();
    MessageLocation location(script, start_pos, end_pos);
    MessageHandler::ReportMessage(this, &location, message);
  }
}

bool Isolate::OptionalRescheduleException(bool clear_exception) {
  DCHECK(has_pending_exception());
  PropagatePendingExceptionToExternalTryCatch();

  bool is_termination_exception =
      pending_exception() == ReadOnlyRoots(this).termination_exception();

  if (is_termination_exception) {
    if (clear_exception) {
      thread_local_top()->external_caught_exception_ = false;
      clear_pending_exception();
      return false;
    }
  } else if (thread_local_top()->external_caught_exception_) {
    // If the exception is externally caught, clear it if there are no
    // JavaScript frames on the way to the C++ frame that has the
    // external handler.
    DCHECK_NE(thread_local_top()->try_catch_handler_address(), kNullAddress);
    Address external_handler_address =
        thread_local_top()->try_catch_handler_address();
    JavaScriptFrameIterator it(this);
    if (it.done() || (it.frame()->sp() > external_handler_address)) {
      clear_exception = true;
    }
  }

  // Clear the exception if needed.
  if (clear_exception) {
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
    return false;
  }

  // Reschedule the exception.
  set_scheduled_exception(pending_exception());
  clear_pending_exception();
  return true;
}

void Isolate::PushPromise(Handle<JSObject> promise) {
  ThreadLocalTop* tltop = thread_local_top();
  PromiseOnStack* prev = tltop->promise_on_stack_;
  Handle<JSObject> global_promise = global_handles()->Create(*promise);
  tltop->promise_on_stack_ = new PromiseOnStack(global_promise, prev);
}

void Isolate::PopPromise() {
  ThreadLocalTop* tltop = thread_local_top();
  if (tltop->promise_on_stack_ == nullptr) return;
  PromiseOnStack* prev = tltop->promise_on_stack_->prev();
  Handle<Object> global_promise = tltop->promise_on_stack_->promise();
  delete tltop->promise_on_stack_;
  tltop->promise_on_stack_ = prev;
  global_handles()->Destroy(global_promise.location());
}

namespace {
bool PromiseIsRejectHandler(Isolate* isolate, Handle<JSReceiver> handler) {
  // Recurse to the forwarding Promise (e.g. return false) due to
  //  - await reaction forwarding to the throwaway Promise, which has
  //    a dependency edge to the outer Promise.
  //  - PromiseIdResolveHandler forwarding to the output of .then
  //  - Promise.all/Promise.race forwarding to a throwaway Promise, which
  //    has a dependency edge to the generated outer Promise.
  // Otherwise, this is a real reject handler for the Promise.
  Handle<Symbol> key = isolate->factory()->promise_forwarding_handler_symbol();
  Handle<Object> forwarding_handler = JSReceiver::GetDataProperty(handler, key);
  return forwarding_handler->IsUndefined(isolate);
}

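// Walks the reaction list of |promise| and returns true as soon as a real
// (non-forwarding) reject handler, or a dependent promise that itself has a
// user-defined reject handler, is found.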
bool PromiseHasUserDefinedRejectHandlerInternal(Isolate* isolate,
                                                Handle<JSPromise> promise) {
  Handle<Object> current(promise->reactions(), isolate);
  while (!current->IsSmi()) {
    Handle<PromiseReaction> reaction = Handle<PromiseReaction>::cast(current);
    Handle<HeapObject> promise_or_capability(reaction->promise_or_capability(),
                                             isolate);
    if (!promise_or_capability->IsUndefined(isolate)) {
      if (!promise_or_capability->IsJSPromise()) {
        promise_or_capability = handle(
            Handle<PromiseCapability>::cast(promise_or_capability)->promise(),
            isolate);
      }
      promise = Handle<JSPromise>::cast(promise_or_capability);
      if (!reaction->reject_handler().IsUndefined(isolate)) {
        Handle<JSReceiver> reject_handler(
            JSReceiver::cast(reaction->reject_handler()), isolate);
        if (PromiseIsRejectHandler(isolate, reject_handler)) return true;
      }
      if (isolate->PromiseHasUserDefinedRejectHandler(promise)) return true;
    }
    current = handle(reaction->next(), isolate);
  }
  return false;
}

}  // namespace

bool Isolate::PromiseHasUserDefinedRejectHandler(Handle<JSPromise> promise) {
  Handle<Symbol> key = factory()->promise_handled_by_symbol();
  std::stack<Handle<JSPromise>> promises;
  // First descend into the outermost promise and collect the stack of
  // Promises for reverse processing.
  while (true) {
    // If this promise was marked as being handled by a catch block
    // in an async function, then it has a user-defined reject handler.
    if (promise->handled_hint()) return true;
    if (promise->status() == Promise::kPending) {
      promises.push(promise);
    }
    Handle<Object> outer_promise_obj = JSObject::GetDataProperty(promise, key);
    if (!outer_promise_obj->IsJSPromise()) break;
    promise = Handle<JSPromise>::cast(outer_promise_obj);
  }

  while (!promises.empty()) {
    promise = promises.top();
    if (PromiseHasUserDefinedRejectHandlerInternal(this, promise)) return true;
    promises.pop();
  }
  return false;
}

2492 Handle<Object> Isolate::GetPromiseOnStackOnThrow() {
2493   Handle<Object> undefined = factory()->undefined_value();
2494   ThreadLocalTop* tltop = thread_local_top();
2495   if (tltop->promise_on_stack_ == nullptr) return undefined;
2496   // Find the top-most try-catch or try-finally handler.
2497   CatchType prediction = PredictExceptionCatcher();
2498   if (prediction == NOT_CAUGHT || prediction == CAUGHT_BY_EXTERNAL) {
2499     return undefined;
2500   }
2501   Handle<Object> retval = undefined;
2502   PromiseOnStack* promise_on_stack = tltop->promise_on_stack_;
2503   for (StackFrameIterator it(this); !it.done(); it.Advance()) {
2504     StackFrame* frame = it.frame();
2505     HandlerTable::CatchPrediction catch_prediction;
2506     if (frame->is_java_script()) {
2507       catch_prediction = PredictException(JavaScriptFrame::cast(frame));
2508     } else if (frame->type() == StackFrame::STUB) {
2509       Code code = frame->LookupCode();
2510       if (!code.IsCode() || code.kind() != CodeKind::BUILTIN ||
2511           !code.has_handler_table() || !code.is_turbofanned()) {
2512         continue;
2513       }
2514       catch_prediction = code.GetBuiltinCatchPrediction();
2515     } else {
2516       continue;
2517     }
2518 
2519     switch (catch_prediction) {
2520       case HandlerTable::UNCAUGHT:
2521         continue;
2522       case HandlerTable::CAUGHT:
2523         if (retval->IsJSPromise()) {
          // Caught the result of an inner async/await invocation.
          // Mark the inner promise as caught in the "synchronous case" so
          // that Debug::OnException will see it. In the synchronous case,
          // namely in the code in an async function before the first
          // await, the function which raised this exception event has not
          // yet returned, so the generated Promise has not yet been marked
          // by AsyncFunctionAwaitCaught with promiseHandledHintSymbol.
2531           Handle<JSPromise>::cast(retval)->set_handled_hint(true);
2532         }
2533         return retval;
2534       case HandlerTable::PROMISE:
2535         return promise_on_stack
2536                    ? Handle<Object>::cast(promise_on_stack->promise())
2537                    : undefined;
2538       case HandlerTable::UNCAUGHT_ASYNC_AWAIT:
2539       case HandlerTable::ASYNC_AWAIT: {
2540         // If in the initial portion of async/await, continue the loop to pop up
2541         // successive async/await stack frames until an asynchronous one with
2542         // dependents is found, or a non-async stack frame is encountered, in
2543         // order to handle the synchronous async/await catch prediction case:
2544         // assume that async function calls are awaited.
2545         if (!promise_on_stack) return retval;
2546         retval = promise_on_stack->promise();
2547         if (retval->IsJSPromise()) {
2548           if (PromiseHasUserDefinedRejectHandler(
2549                   Handle<JSPromise>::cast(retval))) {
2550             return retval;
2551           }
2552         }
2553         promise_on_stack = promise_on_stack->prev();
2554         continue;
2555       }
2556     }
2557   }
2558   return retval;
2559 }
2560 
2561 void Isolate::SetCaptureStackTraceForUncaughtExceptions(
2562     bool capture, int frame_limit, StackTrace::StackTraceOptions options) {
2563   capture_stack_trace_for_uncaught_exceptions_ = capture;
2564   stack_trace_for_uncaught_exceptions_frame_limit_ = frame_limit;
2565   stack_trace_for_uncaught_exceptions_options_ = options;
2566 }
2567 
2568 bool Isolate::get_capture_stack_trace_for_uncaught_exceptions() const {
2569   return capture_stack_trace_for_uncaught_exceptions_;
2570 }
2571 
2572 void Isolate::SetAbortOnUncaughtExceptionCallback(
2573     v8::Isolate::AbortOnUncaughtExceptionCallback callback) {
2574   abort_on_uncaught_exception_callback_ = callback;
2575 }
2576 
2577 void Isolate::InstallConditionalFeatures(Handle<Context> context) {
2578   Handle<JSGlobalObject> global = handle(context->global_object(), this);
2579   Handle<String> sab_name = factory()->SharedArrayBuffer_string();
2580   if (IsSharedArrayBufferConstructorEnabled(context)) {
2581     if (!JSObject::HasRealNamedProperty(global, sab_name).FromMaybe(true)) {
2582       JSObject::AddProperty(this, global, factory()->SharedArrayBuffer_string(),
2583                             shared_array_buffer_fun(), DONT_ENUM);
2584     }
2585   }
2586 }
2587 
2588 bool Isolate::IsSharedArrayBufferConstructorEnabled(Handle<Context> context) {
2589   if (!FLAG_harmony_sharedarraybuffer) return false;
2590 
2591   if (!FLAG_enable_sharedarraybuffer_per_context) return true;
2592 
2593   if (sharedarraybuffer_constructor_enabled_callback()) {
2594     v8::Local<v8::Context> api_context = v8::Utils::ToLocal(context);
2595     return sharedarraybuffer_constructor_enabled_callback()(api_context);
2596   }
2597   return false;
2598 }
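
// A minimal embedder sketch of the per-context gating above (assuming the
// public v8::Isolate::SetSharedArrayBufferConstructorEnabledCallback API;
// IsCrossOriginIsolated is a hypothetical embedder policy check):
//   isolate->SetSharedArrayBufferConstructorEnabledCallback(
//       [](v8::Local<v8::Context> context) {
//         return IsCrossOriginIsolated(context);
//       });
// With --enable-sharedarraybuffer-per-context, the SharedArrayBuffer
// constructor is then installed only on contexts for which the callback
// returns true.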
2599 
2600 bool Isolate::IsWasmSimdEnabled(Handle<Context> context) {
2601 #if V8_ENABLE_WEBASSEMBLY
2602   if (wasm_simd_enabled_callback()) {
2603     v8::Local<v8::Context> api_context = v8::Utils::ToLocal(context);
2604     return wasm_simd_enabled_callback()(api_context);
2605   }
2606   return FLAG_experimental_wasm_simd;
2607 #else
2608   return false;
2609 #endif  // V8_ENABLE_WEBASSEMBLY
2610 }
2611 
2612 bool Isolate::AreWasmExceptionsEnabled(Handle<Context> context) {
2613 #if V8_ENABLE_WEBASSEMBLY
2614   if (wasm_exceptions_enabled_callback()) {
2615     v8::Local<v8::Context> api_context = v8::Utils::ToLocal(context);
2616     return wasm_exceptions_enabled_callback()(api_context);
2617   }
2618   return FLAG_experimental_wasm_eh;
2619 #else
2620   return false;
2621 #endif  // V8_ENABLE_WEBASSEMBLY
2622 }
2623 
2624 bool Isolate::IsWasmDynamicTieringEnabled() {
2625 #if V8_ENABLE_WEBASSEMBLY
2626   if (wasm_dynamic_tiering_enabled_callback()) {
2627     HandleScope handle_scope(this);
2628     v8::Local<v8::Context> api_context =
2629         v8::Utils::ToLocal(handle(context(), this));
2630     return wasm_dynamic_tiering_enabled_callback()(api_context);
2631   }
2632   return FLAG_wasm_dynamic_tiering;
2633 #else
2634   return false;
2635 #endif  // V8_ENABLE_WEBASSEMBLY
2636 }
2637 
2638 Handle<Context> Isolate::GetIncumbentContext() {
2639   JavaScriptFrameIterator it(this);
2640 
2641   // 1st candidate: most-recently-entered author function's context
2642   // if it's newer than the last Context::BackupIncumbentScope entry.
2643   //
2644   // NOTE: This code assumes that the stack grows downward.
2645   Address top_backup_incumbent =
2646       top_backup_incumbent_scope()
2647           ? top_backup_incumbent_scope()->JSStackComparableAddressPrivate()
2648           : 0;
2649   if (!it.done() &&
2650       (!top_backup_incumbent || it.frame()->sp() < top_backup_incumbent)) {
2651     Context context = Context::cast(it.frame()->context());
2652     return Handle<Context>(context.native_context(), this);
2653   }
2654 
  // 2nd candidate: the incumbent context of the last
  // Context::BackupIncumbentScope entry, if any.
2656   if (top_backup_incumbent_scope()) {
2657     return Utils::OpenHandle(
2658         *top_backup_incumbent_scope()->backup_incumbent_context_);
2659   }
2660 
  // Last candidate: the entered context or microtask context.
  // Given that no other author function is running, there can be no
  // cross-context function running either, so the incumbent realm must match
  // the entry realm.
2665   v8::Local<v8::Context> entered_context =
2666       reinterpret_cast<v8::Isolate*>(this)->GetEnteredOrMicrotaskContext();
2667   return Utils::OpenHandle(*entered_context);
2668 }
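
// Embedders influence the 2nd candidate above through the public
// v8::Context::BackupIncumbentScope API; a minimal sketch:
//   v8::Context::BackupIncumbentScope incumbent_scope(incumbent_context);
//   function->Call(current_context, receiver, argc, argv);
// While the scope is active, calls that enter V8 without pushing an
// author-function frame report |incumbent_context| as the incumbent.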
2669 
2670 char* Isolate::ArchiveThread(char* to) {
2671   MemCopy(to, reinterpret_cast<char*>(thread_local_top()),
2672           sizeof(ThreadLocalTop));
2673   return to + sizeof(ThreadLocalTop);
2674 }
2675 
2676 char* Isolate::RestoreThread(char* from) {
2677   MemCopy(reinterpret_cast<char*>(thread_local_top()), from,
2678           sizeof(ThreadLocalTop));
2679   DCHECK(context().is_null() || context().IsContext());
2680   return from + sizeof(ThreadLocalTop);
2681 }
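
// Archive/RestoreThread copy the per-thread state (ThreadLocalTop) out of and
// back into the isolate when the ThreadManager migrates it across threads; a
// minimal sketch of the embedder pattern that triggers this (via v8::Locker):
//   v8::Locker locker(isolate);
//   v8::Isolate::Scope isolate_scope(isolate);
//   // ... run JS; when another thread takes the Locker, this thread's
//   // ThreadLocalTop snapshot is archived and later restored.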
2682 
2683 void Isolate::ReleaseSharedPtrs() {
2684   base::MutexGuard lock(&managed_ptr_destructors_mutex_);
2685   while (managed_ptr_destructors_head_) {
2686     ManagedPtrDestructor* l = managed_ptr_destructors_head_;
2687     ManagedPtrDestructor* n = nullptr;
2688     managed_ptr_destructors_head_ = nullptr;
2689     for (; l != nullptr; l = n) {
2690       l->destructor_(l->shared_ptr_ptr_);
2691       n = l->next_;
2692       delete l;
2693     }
2694   }
2695 }
2696 
2697 bool Isolate::IsBuiltinTableHandleLocation(Address* handle_location) {
2698   FullObjectSlot location(handle_location);
2699   FullObjectSlot first_root(builtin_table());
2700   FullObjectSlot last_root(builtin_table() + Builtins::kBuiltinCount);
2701   if (location >= last_root) return false;
2702   if (location < first_root) return false;
2703   return true;
2704 }
2705 
2706 void Isolate::RegisterManagedPtrDestructor(ManagedPtrDestructor* destructor) {
2707   base::MutexGuard lock(&managed_ptr_destructors_mutex_);
2708   DCHECK_NULL(destructor->prev_);
2709   DCHECK_NULL(destructor->next_);
2710   if (managed_ptr_destructors_head_) {
2711     managed_ptr_destructors_head_->prev_ = destructor;
2712   }
2713   destructor->next_ = managed_ptr_destructors_head_;
2714   managed_ptr_destructors_head_ = destructor;
2715 }
2716 
2717 void Isolate::UnregisterManagedPtrDestructor(ManagedPtrDestructor* destructor) {
2718   base::MutexGuard lock(&managed_ptr_destructors_mutex_);
2719   if (destructor->prev_) {
2720     destructor->prev_->next_ = destructor->next_;
2721   } else {
2722     DCHECK_EQ(destructor, managed_ptr_destructors_head_);
2723     managed_ptr_destructors_head_ = destructor->next_;
2724   }
2725   if (destructor->next_) destructor->next_->prev_ = destructor->prev_;
2726   destructor->prev_ = nullptr;
2727   destructor->next_ = nullptr;
2728 }
2729 
2730 #if V8_ENABLE_WEBASSEMBLY
2731 void Isolate::AddSharedWasmMemory(Handle<WasmMemoryObject> memory_object) {
2732   HandleScope scope(this);
2733   Handle<WeakArrayList> shared_wasm_memories =
2734       factory()->shared_wasm_memories();
2735   shared_wasm_memories = WeakArrayList::AddToEnd(
2736       this, shared_wasm_memories, MaybeObjectHandle::Weak(memory_object));
2737   heap()->set_shared_wasm_memories(*shared_wasm_memories);
2738 }
2739 #endif  // V8_ENABLE_WEBASSEMBLY
2740 
2741 Isolate::PerIsolateThreadData::~PerIsolateThreadData() {
2742 #if defined(USE_SIMULATOR)
2743   delete simulator_;
2744 #endif
2745 }
2746 
2747 Isolate::PerIsolateThreadData* Isolate::ThreadDataTable::Lookup(
2748     ThreadId thread_id) {
2749   auto t = table_.find(thread_id);
2750   if (t == table_.end()) return nullptr;
2751   return t->second;
2752 }
2753 
2754 void Isolate::ThreadDataTable::Insert(Isolate::PerIsolateThreadData* data) {
2755   bool inserted = table_.insert(std::make_pair(data->thread_id_, data)).second;
2756   CHECK(inserted);
2757 }
2758 
2759 void Isolate::ThreadDataTable::Remove(PerIsolateThreadData* data) {
2760   table_.erase(data->thread_id_);
2761   delete data;
2762 }
2763 
2764 void Isolate::ThreadDataTable::RemoveAllThreads() {
2765   for (auto& x : table_) {
2766     delete x.second;
2767   }
2768   table_.clear();
2769 }
2770 
2771 class TracingAccountingAllocator : public AccountingAllocator {
2772  public:
2773   explicit TracingAccountingAllocator(Isolate* isolate) : isolate_(isolate) {}
2774   ~TracingAccountingAllocator() = default;
2775 
2776  protected:
2777   void TraceAllocateSegmentImpl(v8::internal::Segment* segment) override {
2778     base::MutexGuard lock(&mutex_);
2779     UpdateMemoryTrafficAndReportMemoryUsage(segment->total_size());
2780   }
2781 
2782   void TraceZoneCreationImpl(const Zone* zone) override {
2783     base::MutexGuard lock(&mutex_);
2784     active_zones_.insert(zone);
2785     nesting_depth_++;
2786   }
2787 
2788   void TraceZoneDestructionImpl(const Zone* zone) override {
2789     base::MutexGuard lock(&mutex_);
2790 #ifdef V8_ENABLE_PRECISE_ZONE_STATS
2791     if (FLAG_trace_zone_type_stats) {
2792       type_stats_.MergeWith(zone->type_stats());
2793     }
2794 #endif
2795     UpdateMemoryTrafficAndReportMemoryUsage(zone->segment_bytes_allocated());
2796     active_zones_.erase(zone);
2797     nesting_depth_--;
2798 
2799 #ifdef V8_ENABLE_PRECISE_ZONE_STATS
2800     if (FLAG_trace_zone_type_stats && active_zones_.empty()) {
2801       type_stats_.Dump();
2802     }
2803 #endif
2804   }
2805 
2806  private:
2807   void UpdateMemoryTrafficAndReportMemoryUsage(size_t memory_traffic_delta) {
2808     if (!FLAG_trace_zone_stats &&
2809         !(TracingFlags::zone_stats.load(std::memory_order_relaxed) &
2810           v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING)) {
2811       // Don't print anything if the zone tracing was enabled only because of
2812       // FLAG_trace_zone_type_stats.
2813       return;
2814     }
2815 
2816     memory_traffic_since_last_report_ += memory_traffic_delta;
2817     if (memory_traffic_since_last_report_ < FLAG_zone_stats_tolerance) return;
2818     memory_traffic_since_last_report_ = 0;
2819 
2820     Dump(buffer_, true);
2821 
2822     {
2823       std::string trace_str = buffer_.str();
2824 
2825       if (FLAG_trace_zone_stats) {
2826         PrintF(
2827             "{"
2828             "\"type\": \"v8-zone-trace\", "
2829             "\"stats\": %s"
2830             "}\n",
2831             trace_str.c_str());
2832       }
2833       if (V8_UNLIKELY(
2834               TracingFlags::zone_stats.load(std::memory_order_relaxed) &
2835               v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING)) {
2836         TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("v8.zone_stats"),
2837                              "V8.Zone_Stats", TRACE_EVENT_SCOPE_THREAD, "stats",
2838                              TRACE_STR_COPY(trace_str.c_str()));
2839       }
2840     }
2841 
2842     // Clear the buffer.
2843     buffer_.str(std::string());
2844   }
2845 
2846   void Dump(std::ostringstream& out, bool dump_details) {
2847     // Note: Neither isolate nor zones are locked, so be careful with accesses
2848     // as the allocator is potentially used on a concurrent thread.
2849     double time = isolate_->time_millis_since_init();
2850     out << "{"
2851         << "\"isolate\": \"" << reinterpret_cast<void*>(isolate_) << "\", "
2852         << "\"time\": " << time << ", ";
2853     size_t total_segment_bytes_allocated = 0;
2854     size_t total_zone_allocation_size = 0;
2855     size_t total_zone_freed_size = 0;
2856 
2857     if (dump_details) {
2858       // Print detailed zone stats if memory usage changes direction.
2859       out << "\"zones\": [";
2860       bool first = true;
2861       for (const Zone* zone : active_zones_) {
2862         size_t zone_segment_bytes_allocated = zone->segment_bytes_allocated();
2863         size_t zone_allocation_size = zone->allocation_size_for_tracing();
2864         size_t freed_size = zone->freed_size_for_tracing();
2865         if (first) {
2866           first = false;
2867         } else {
2868           out << ", ";
2869         }
2870         out << "{"
2871             << "\"name\": \"" << zone->name() << "\", "
2872             << "\"allocated\": " << zone_segment_bytes_allocated << ", "
2873             << "\"used\": " << zone_allocation_size << ", "
2874             << "\"freed\": " << freed_size << "}";
2875         total_segment_bytes_allocated += zone_segment_bytes_allocated;
2876         total_zone_allocation_size += zone_allocation_size;
2877         total_zone_freed_size += freed_size;
2878       }
2879       out << "], ";
2880     } else {
2881       // Just calculate total allocated/used memory values.
2882       for (const Zone* zone : active_zones_) {
2883         total_segment_bytes_allocated += zone->segment_bytes_allocated();
2884         total_zone_allocation_size += zone->allocation_size_for_tracing();
2885         total_zone_freed_size += zone->freed_size_for_tracing();
2886       }
2887     }
2888     out << "\"allocated\": " << total_segment_bytes_allocated << ", "
2889         << "\"used\": " << total_zone_allocation_size << ", "
2890         << "\"freed\": " << total_zone_freed_size << "}";
2891   }
2892 
2893   Isolate* const isolate_;
2894   std::atomic<size_t> nesting_depth_{0};
2895 
2896   base::Mutex mutex_;
2897   std::unordered_set<const Zone*> active_zones_;
2898 #ifdef V8_ENABLE_PRECISE_ZONE_STATS
2899   TypeStats type_stats_;
2900 #endif
2901   std::ostringstream buffer_;
2902   // This value is increased on both allocations and deallocations.
2903   size_t memory_traffic_since_last_report_ = 0;
2904 };
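
// With --trace-zone-stats, UpdateMemoryTrafficAndReportMemoryUsage above
// emits newline-delimited JSON records of the following shape (illustrative
// values only):
//   {"type": "v8-zone-trace", "stats": {"isolate": "0x...", "time": 42.5,
//    "zones": [{"name": "compiler-zone", "allocated": 32768, "used": 1024,
//    "freed": 0}], "allocated": 32768, "used": 1024, "freed": 0}}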
2905 
2906 #ifdef DEBUG
2907 std::atomic<size_t> Isolate::non_disposed_isolates_;
2908 #endif  // DEBUG
2909 
2910 // static
2911 Isolate* Isolate::New() { return Isolate::Allocate(false); }
2912 
2913 // static
2914 Isolate* Isolate::NewShared(const v8::Isolate::CreateParams& params) {
2915   DCHECK(ReadOnlyHeap::IsReadOnlySpaceShared());
2916   Isolate* isolate = Isolate::Allocate(true);
2917   v8::Isolate::Initialize(reinterpret_cast<v8::Isolate*>(isolate), params);
2918   return isolate;
2919 }
2920 
2921 // static
2922 Isolate* Isolate::Allocate(bool is_shared) {
2923   // IsolateAllocator allocates the memory for the Isolate object according to
2924   // the given allocation mode.
2925   std::unique_ptr<IsolateAllocator> isolate_allocator =
2926       std::make_unique<IsolateAllocator>();
2927   // Construct Isolate object in the allocated memory.
2928   void* isolate_ptr = isolate_allocator->isolate_memory();
2929   Isolate* isolate =
2930       new (isolate_ptr) Isolate(std::move(isolate_allocator), is_shared);
2931 #ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
2932   DCHECK(IsAligned(isolate->isolate_root(), kPtrComprCageBaseAlignment));
2933   DCHECK_EQ(isolate->isolate_root(), isolate->cage_base());
2934 #endif
2935 
2936 #ifdef DEBUG
2937   non_disposed_isolates_++;
2938 #endif  // DEBUG
2939 
2940   return isolate;
2941 }
2942 
2943 // static
2944 void Isolate::Delete(Isolate* isolate) {
2945   DCHECK_NOT_NULL(isolate);
2946   // Temporarily set this isolate as current so that various parts of
2947   // the isolate can access it in their destructors without having a
2948   // direct pointer. We don't use Enter/Exit here to avoid
2949   // initializing the thread data.
2950   PerIsolateThreadData* saved_data = isolate->CurrentPerIsolateThreadData();
2951   DCHECK_EQ(true, isolate_key_created_.load(std::memory_order_relaxed));
2952   Isolate* saved_isolate = reinterpret_cast<Isolate*>(
2953       base::Thread::GetThreadLocal(isolate->isolate_key_));
2954   SetIsolateThreadLocals(isolate, nullptr);
2955   isolate->set_thread_id(ThreadId::Current());
2956 
2957   isolate->Deinit();
2958 
2959 #ifdef DEBUG
2960   non_disposed_isolates_--;
2961 #endif  // DEBUG
2962 
  // Take ownership of the IsolateAllocator to ensure the Isolate memory will
  // be available during the Isolate destructor call.
2965   std::unique_ptr<IsolateAllocator> isolate_allocator =
2966       std::move(isolate->isolate_allocator_);
2967   isolate->~Isolate();
2968   // Now free the memory owned by the allocator.
2969   isolate_allocator.reset();
2970 
2971   // Restore the previous current isolate.
2972   SetIsolateThreadLocals(saved_isolate, saved_data);
2973 }
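
// Isolate::Delete is reached from the public v8::Isolate::Dispose(); a
// typical embedder shutdown sketch:
//   isolate->Dispose();  // isolate must not be entered by any thread here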
2974 
2975 void Isolate::SetUpFromReadOnlyArtifacts(
2976     std::shared_ptr<ReadOnlyArtifacts> artifacts, ReadOnlyHeap* ro_heap) {
2977   if (ReadOnlyHeap::IsReadOnlySpaceShared()) {
2978     DCHECK_NOT_NULL(artifacts);
2979     artifacts_ = artifacts;
2980   } else {
2981     DCHECK_NULL(artifacts);
2982   }
2983   DCHECK_NOT_NULL(ro_heap);
2984   DCHECK_IMPLIES(read_only_heap_ != nullptr, read_only_heap_ == ro_heap);
2985   read_only_heap_ = ro_heap;
2986   heap_.SetUpFromReadOnlyHeap(read_only_heap_);
2987 }
2988 
2989 v8::PageAllocator* Isolate::page_allocator() const {
2990   return isolate_allocator_->page_allocator();
2991 }
2992 
2993 Isolate::Isolate(std::unique_ptr<i::IsolateAllocator> isolate_allocator,
2994                  bool is_shared)
2995     : isolate_data_(this, isolate_allocator->GetPtrComprCageBase()),
2996       isolate_allocator_(std::move(isolate_allocator)),
2997       id_(isolate_counter.fetch_add(1, std::memory_order_relaxed)),
2998       allocator_(new TracingAccountingAllocator(this)),
2999       builtins_(this),
3000 #if defined(DEBUG) || defined(VERIFY_HEAP)
3001       num_active_deserializers_(0),
3002 #endif
3003       rail_mode_(PERFORMANCE_ANIMATION),
3004       code_event_dispatcher_(new CodeEventDispatcher()),
3005       detailed_source_positions_for_profiling_(FLAG_detailed_line_info),
3006       persistent_handles_list_(new PersistentHandlesList()),
3007       jitless_(FLAG_jitless),
3008 #if V8_SFI_HAS_UNIQUE_ID
3009       next_unique_sfi_id_(0),
3010 #endif
3011       next_module_async_evaluating_ordinal_(
3012           SourceTextModule::kFirstAsyncEvaluatingOrdinal),
3013       cancelable_task_manager_(new CancelableTaskManager()),
3014       is_shared_(is_shared) {
3015   TRACE_ISOLATE(constructor);
3016   CheckIsolateLayout();
3017 
3018   // ThreadManager is initialized early to support locking an isolate
3019   // before it is entered.
3020   thread_manager_ = new ThreadManager(this);
3021 
3022   handle_scope_data_.Initialize();
3023 
3024   // When pointer compression is on with a per-Isolate cage, allocation in the
3025   // shared Isolate can point into the per-Isolate RO heap as the offsets are
3026   // constant across Isolates.
3027   //
3028   // When pointer compression is on with a shared cage or when pointer
3029   // compression is off, a shared RO heap is required. Otherwise a shared
3030   // allocation requested by a client Isolate could point into the client
  // Isolate's RO space (e.g. an RO map) whose pages get unmapped when it is
3032   // disposed.
3033   CHECK_IMPLIES(is_shared_, COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL ||
3034                                 V8_SHARED_RO_HEAP_BOOL);
3035 
3036 #define ISOLATE_INIT_EXECUTE(type, name, initial_value) \
3037   name##_ = (initial_value);
3038   ISOLATE_INIT_LIST(ISOLATE_INIT_EXECUTE)
3039 #undef ISOLATE_INIT_EXECUTE
3040 
3041 #define ISOLATE_INIT_ARRAY_EXECUTE(type, name, length) \
3042   memset(name##_, 0, sizeof(type) * length);
3043   ISOLATE_INIT_ARRAY_LIST(ISOLATE_INIT_ARRAY_EXECUTE)
3044 #undef ISOLATE_INIT_ARRAY_EXECUTE
3045 
3046   InitializeLoggingAndCounters();
3047   debug_ = new Debug(this);
3048 
3049   InitializeDefaultEmbeddedBlob();
3050 
3051   MicrotaskQueue::SetUpDefaultMicrotaskQueue(this);
3052 }
3053 
3054 void Isolate::CheckIsolateLayout() {
3055   CHECK_EQ(OFFSET_OF(Isolate, isolate_data_), 0);
3056   CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.embedder_data_)),
3057            Internals::kIsolateEmbedderDataOffset);
3058   CHECK_EQ(static_cast<int>(
3059                OFFSET_OF(Isolate, isolate_data_.fast_c_call_caller_fp_)),
3060            Internals::kIsolateFastCCallCallerFpOffset);
3061   CHECK_EQ(static_cast<int>(
3062                OFFSET_OF(Isolate, isolate_data_.fast_c_call_caller_pc_)),
3063            Internals::kIsolateFastCCallCallerPcOffset);
3064   CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.cage_base_)),
3065            Internals::kIsolateCageBaseOffset);
3066   CHECK_EQ(static_cast<int>(
3067                OFFSET_OF(Isolate, isolate_data_.long_task_stats_counter_)),
3068            Internals::kIsolateLongTaskStatsCounterOffset);
3069   CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.stack_guard_)),
3070            Internals::kIsolateStackGuardOffset);
3071   CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.roots_table_)),
3072            Internals::kIsolateRootsOffset);
3073 
3074   STATIC_ASSERT(Internals::kStackGuardSize == sizeof(StackGuard));
3075   STATIC_ASSERT(Internals::kBuiltinTier0TableSize ==
3076                 Builtins::kBuiltinTier0Count * kSystemPointerSize);
3077   STATIC_ASSERT(Internals::kBuiltinTier0EntryTableSize ==
3078                 Builtins::kBuiltinTier0Count * kSystemPointerSize);
3079 
3080 #ifdef V8_HEAP_SANDBOX
3081   CHECK_EQ(static_cast<int>(OFFSET_OF(ExternalPointerTable, buffer_)),
3082            Internals::kExternalPointerTableBufferOffset);
3083   CHECK_EQ(static_cast<int>(OFFSET_OF(ExternalPointerTable, length_)),
3084            Internals::kExternalPointerTableLengthOffset);
3085   CHECK_EQ(static_cast<int>(OFFSET_OF(ExternalPointerTable, capacity_)),
3086            Internals::kExternalPointerTableCapacityOffset);
3087 #endif
3088 }
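
// The offsets verified above are mirrored as constants in
// include/v8-internal.h (v8::internal::Internals) so that inline API fast
// paths can read Isolate fields directly without calling into V8.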
3089 
3090 void Isolate::ClearSerializerData() {
3091   delete external_reference_map_;
3092   external_reference_map_ = nullptr;
3093 }
3094 
3095 bool Isolate::LogObjectRelocation() {
3096   return FLAG_verify_predictable || logger()->is_logging() || is_profiling() ||
3097          heap()->isolate()->logger()->is_listening_to_code_events() ||
3098          (heap_profiler() != nullptr &&
3099           heap_profiler()->is_tracking_object_moves()) ||
3100          heap()->has_heap_object_allocation_tracker();
3101 }
3102 
3103 void Isolate::Deinit() {
3104   TRACE_ISOLATE(deinit);
3105 
3106   tracing_cpu_profiler_.reset();
3107   if (FLAG_stress_sampling_allocation_profiler > 0) {
3108     heap_profiler()->StopSamplingHeapProfiler();
3109   }
3110 
3111   metrics_recorder_->NotifyIsolateDisposal();
3112   recorder_context_id_map_.clear();
3113 
3114 #if defined(V8_OS_WIN64)
3115   if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
3116       heap()->memory_allocator() && RequiresCodeRange() &&
3117       heap()->code_range()->AtomicDecrementUnwindInfoUseCount() == 1) {
3118     const base::AddressRegion& code_region = heap()->code_region();
3119     void* start = reinterpret_cast<void*>(code_region.begin());
3120     win64_unwindinfo::UnregisterNonABICompliantCodeRange(start);
3121   }
3122 #endif  // V8_OS_WIN64
3123 
3124   FutexEmulation::IsolateDeinit(this);
3125 
3126   debug()->Unload();
3127 
3128 #if V8_ENABLE_WEBASSEMBLY
3129   wasm::GetWasmEngine()->DeleteCompileJobsOnIsolate(this);
3130 
3131   BackingStore::RemoveSharedWasmMemoryObjects(this);
3132 #endif  // V8_ENABLE_WEBASSEMBLY
3133 
3134   if (concurrent_recompilation_enabled()) {
3135     optimizing_compile_dispatcher_->Stop();
3136     delete optimizing_compile_dispatcher_;
3137     optimizing_compile_dispatcher_ = nullptr;
3138   }
3139 
3140   // All client isolates should already be detached.
3141   DCHECK_NULL(client_isolate_head_);
3142 
3143   if (FLAG_print_deopt_stress) {
3144     PrintF(stdout, "=== Stress deopt counter: %u\n", stress_deopt_count_);
3145   }
3146 
3147   // We must stop the logger before we tear down other components.
3148   sampler::Sampler* sampler = logger_->sampler();
3149   if (sampler && sampler->IsActive()) sampler->Stop();
3150 
3151   FreeThreadResources();
3152   logger_->StopProfilerThread();
3153 
3154   // We start with the heap tear down so that releasing managed objects does
3155   // not cause a GC.
3156   heap_.StartTearDown();
3157 
3158   // This stops cancelable tasks (i.e. concurrent marking tasks).
3159   // Stop concurrent tasks before destroying resources since they might still
3160   // use those.
3161   cancelable_task_manager()->CancelAndWait();
3162 
3163   ReleaseSharedPtrs();
3164 
3165   string_table_.reset();
3166   builtins_.TearDown();
3167   bootstrapper_->TearDown();
3168 
3169   if (runtime_profiler_ != nullptr) {
3170     delete runtime_profiler_;
3171     runtime_profiler_ = nullptr;
3172   }
3173 
3174   delete heap_profiler_;
3175   heap_profiler_ = nullptr;
3176 
3177   compiler_dispatcher_->AbortAll();
3178   delete compiler_dispatcher_;
3179   compiler_dispatcher_ = nullptr;
3180 
3181   delete baseline_batch_compiler_;
3182   baseline_batch_compiler_ = nullptr;
3183 
3184   // After all concurrent tasks are stopped, we know for sure that stats aren't
3185   // updated anymore.
3186   DumpAndResetStats();
3187 
3188   main_thread_local_isolate_->heap()->FreeLinearAllocationArea();
3189 
3190   if (shared_isolate_) {
3191     DetachFromSharedIsolate();
3192   }
3193 
3194   heap_.TearDown();
3195 
3196   main_thread_local_isolate_.reset();
3197 
3198   FILE* logfile = logger_->TearDownAndGetLogFile();
3199   if (logfile != nullptr) base::Fclose(logfile);
3200 
3201 #if V8_ENABLE_WEBASSEMBLY
3202   wasm::GetWasmEngine()->RemoveIsolate(this);
3203 #endif  // V8_ENABLE_WEBASSEMBLY
3204 
3205   TearDownEmbeddedBlob();
3206 
3207   delete interpreter_;
3208   interpreter_ = nullptr;
3209 
3210   delete ast_string_constants_;
3211   ast_string_constants_ = nullptr;
3212 
3213   code_event_dispatcher_.reset();
3214 
3215   delete root_index_map_;
3216   root_index_map_ = nullptr;
3217 
3218   delete compiler_zone_;
3219   compiler_zone_ = nullptr;
3220   compiler_cache_ = nullptr;
3221 
3222   SetCodePages(nullptr);
3223 
3224   ClearSerializerData();
3225 
3226   {
3227     base::MutexGuard lock_guard(&thread_data_table_mutex_);
3228     thread_data_table_.RemoveAllThreads();
3229   }
3230 }
3231 
3232 void Isolate::SetIsolateThreadLocals(Isolate* isolate,
3233                                      PerIsolateThreadData* data) {
3234   base::Thread::SetThreadLocal(isolate_key_, isolate);
3235   base::Thread::SetThreadLocal(per_isolate_thread_data_key_, data);
3236 }
3237 
3238 Isolate::~Isolate() {
3239   TRACE_ISOLATE(destructor);
3240 
3241   // The entry stack must be empty when we get here.
3242   DCHECK(entry_stack_ == nullptr || entry_stack_->previous_item == nullptr);
3243 
3244   delete entry_stack_;
3245   entry_stack_ = nullptr;
3246 
3247   delete date_cache_;
3248   date_cache_ = nullptr;
3249 
3250   delete regexp_stack_;
3251   regexp_stack_ = nullptr;
3252 
3253   delete descriptor_lookup_cache_;
3254   descriptor_lookup_cache_ = nullptr;
3255 
3256   delete load_stub_cache_;
3257   load_stub_cache_ = nullptr;
3258   delete store_stub_cache_;
3259   store_stub_cache_ = nullptr;
3260 
3261   delete materialized_object_store_;
3262   materialized_object_store_ = nullptr;
3263 
3264   delete logger_;
3265   logger_ = nullptr;
3266 
3267   delete handle_scope_implementer_;
3268   handle_scope_implementer_ = nullptr;
3269 
3270   delete code_tracer();
3271   set_code_tracer(nullptr);
3272 
3273   delete compilation_cache_;
3274   compilation_cache_ = nullptr;
3275   delete bootstrapper_;
3276   bootstrapper_ = nullptr;
3277   delete inner_pointer_to_code_cache_;
3278   inner_pointer_to_code_cache_ = nullptr;
3279 
3280   delete thread_manager_;
3281   thread_manager_ = nullptr;
3282 
3283   bigint_processor_->Destroy();
3284 
3285   delete global_handles_;
3286   global_handles_ = nullptr;
3287   delete eternal_handles_;
3288   eternal_handles_ = nullptr;
3289 
3290   delete string_stream_debug_object_cache_;
3291   string_stream_debug_object_cache_ = nullptr;
3292 
3293   delete random_number_generator_;
3294   random_number_generator_ = nullptr;
3295 
3296   delete fuzzer_rng_;
3297   fuzzer_rng_ = nullptr;
3298 
3299   delete debug_;
3300   debug_ = nullptr;
3301 
3302   delete cancelable_task_manager_;
3303   cancelable_task_manager_ = nullptr;
3304 
3305   delete allocator_;
3306   allocator_ = nullptr;
3307 
3308   // Assert that |default_microtask_queue_| is the last MicrotaskQueue instance.
3309   DCHECK_IMPLIES(default_microtask_queue_,
3310                  default_microtask_queue_ == default_microtask_queue_->next());
3311   delete default_microtask_queue_;
3312   default_microtask_queue_ = nullptr;
3313 
3314   // The ReadOnlyHeap should not be destroyed when sharing without pointer
3315   // compression as the object itself is shared.
3316   if (read_only_heap_->IsOwnedByIsolate()) {
3317     delete read_only_heap_;
3318     read_only_heap_ = nullptr;
3319   }
3320 }
3321 
3322 void Isolate::InitializeThreadLocal() {
3323   thread_local_top()->Initialize(this);
3324   clear_pending_exception();
3325   clear_pending_message();
3326   clear_scheduled_exception();
3327 }
3328 
3329 void Isolate::SetTerminationOnExternalTryCatch() {
3330   if (try_catch_handler() == nullptr) return;
3331   try_catch_handler()->can_continue_ = false;
3332   try_catch_handler()->has_terminated_ = true;
3333   try_catch_handler()->exception_ =
3334       reinterpret_cast<void*>(ReadOnlyRoots(heap()).null_value().ptr());
3335 }
3336 
3337 bool Isolate::PropagatePendingExceptionToExternalTryCatch() {
3338   Object exception = pending_exception();
3339 
3340   if (IsJavaScriptHandlerOnTop(exception)) {
3341     thread_local_top()->external_caught_exception_ = false;
3342     return false;
3343   }
3344 
3345   if (!IsExternalHandlerOnTop(exception)) {
3346     thread_local_top()->external_caught_exception_ = false;
3347     return true;
3348   }
3349 
3350   thread_local_top()->external_caught_exception_ = true;
3351   if (!is_catchable_by_javascript(exception)) {
3352     SetTerminationOnExternalTryCatch();
3353   } else {
3354     v8::TryCatch* handler = try_catch_handler();
3355     DCHECK(pending_message().IsJSMessageObject() ||
3356            pending_message().IsTheHole(this));
3357     handler->can_continue_ = true;
3358     handler->has_terminated_ = false;
3359     handler->exception_ = reinterpret_cast<void*>(pending_exception().ptr());
3360     // Propagate to the external try-catch only if we got an actual message.
3361     if (!has_pending_message()) return true;
3362     handler->message_obj_ = reinterpret_cast<void*>(pending_message().ptr());
3363   }
3364   return true;
3365 }
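
// This is the machinery behind the embedder-visible v8::TryCatch pattern; a
// minimal sketch (assuming `script` and `context` are already set up):
//   v8::TryCatch try_catch(isolate);
//   v8::MaybeLocal<v8::Value> result = script->Run(context);
//   if (try_catch.HasCaught()) {
//     v8::Local<v8::Message> message = try_catch.Message();
//     // ... report the exception via the message object ...
//   }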
3366 
3367 bool Isolate::InitializeCounters() {
3368   if (async_counters_) return false;
3369   async_counters_ = std::make_shared<Counters>(this);
3370   return true;
3371 }
3372 
3373 void Isolate::InitializeLoggingAndCounters() {
3374   if (logger_ == nullptr) {
3375     logger_ = new Logger(this);
3376   }
3377   InitializeCounters();
3378 }
3379 
3380 namespace {
3381 
3382 void CreateOffHeapTrampolines(Isolate* isolate) {
3383   DCHECK_NOT_NULL(isolate->embedded_blob_code());
3384   DCHECK_NE(0, isolate->embedded_blob_code_size());
3385   DCHECK_NOT_NULL(isolate->embedded_blob_data());
3386   DCHECK_NE(0, isolate->embedded_blob_data_size());
3387 
3388   HandleScope scope(isolate);
3389   Builtins* builtins = isolate->builtins();
3390 
3391   EmbeddedData d = EmbeddedData::FromBlob(isolate);
3392 
3393   STATIC_ASSERT(Builtins::kAllBuiltinsAreIsolateIndependent);
3394   for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLast;
3395        ++builtin) {
3396     Address instruction_start = d.InstructionStartOfBuiltin(builtin);
3397     Handle<Code> trampoline = isolate->factory()->NewOffHeapTrampolineFor(
3398         builtins->code_handle(builtin), instruction_start);
3399 
3400     // From this point onwards, the old builtin code object is unreachable and
3401     // will be collected by the next GC.
3402     builtins->set_code(builtin, *trampoline);
3403   }
3404 }
3405 
3406 #ifdef DEBUG
3407 bool IsolateIsCompatibleWithEmbeddedBlob(Isolate* isolate) {
3408   EmbeddedData d = EmbeddedData::FromBlob(isolate);
3409   return (d.IsolateHash() == isolate->HashIsolateForEmbeddedBlob());
3410 }
3411 #endif  // DEBUG
3412 
3413 }  // namespace
3414 
3415 void Isolate::InitializeDefaultEmbeddedBlob() {
3416   const uint8_t* code = DefaultEmbeddedBlobCode();
3417   uint32_t code_size = DefaultEmbeddedBlobCodeSize();
3418   const uint8_t* data = DefaultEmbeddedBlobData();
3419   uint32_t data_size = DefaultEmbeddedBlobDataSize();
3420 
3421   if (StickyEmbeddedBlobCode() != nullptr) {
3422     base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
3423     // Check again now that we hold the lock.
3424     if (StickyEmbeddedBlobCode() != nullptr) {
3425       code = StickyEmbeddedBlobCode();
3426       code_size = StickyEmbeddedBlobCodeSize();
3427       data = StickyEmbeddedBlobData();
3428       data_size = StickyEmbeddedBlobDataSize();
3429       current_embedded_blob_refs_++;
3430     }
3431   }
3432 
3433   if (code == nullptr) {
3434     CHECK_EQ(0, code_size);
3435   } else {
3436     SetEmbeddedBlob(code, code_size, data, data_size);
3437   }
3438 }
3439 
3440 void Isolate::CreateAndSetEmbeddedBlob() {
3441   base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
3442 
3443   PrepareBuiltinSourcePositionMap();
3444 
3445   PrepareBuiltinLabelInfoMap();
3446 
3447   // If a sticky blob has been set, we reuse it.
3448   if (StickyEmbeddedBlobCode() != nullptr) {
3449     CHECK_EQ(embedded_blob_code(), StickyEmbeddedBlobCode());
3450     CHECK_EQ(embedded_blob_data(), StickyEmbeddedBlobData());
3451     CHECK_EQ(CurrentEmbeddedBlobCode(), StickyEmbeddedBlobCode());
3452     CHECK_EQ(CurrentEmbeddedBlobData(), StickyEmbeddedBlobData());
3453   } else {
3454     // Create and set a new embedded blob.
3455     uint8_t* code;
3456     uint32_t code_size;
3457     uint8_t* data;
3458     uint32_t data_size;
3459     InstructionStream::CreateOffHeapInstructionStream(this, &code, &code_size,
3460                                                       &data, &data_size);
3461 
3462     CHECK_EQ(0, current_embedded_blob_refs_);
3463     const uint8_t* const_code = const_cast<const uint8_t*>(code);
3464     const uint8_t* const_data = const_cast<const uint8_t*>(data);
3465     SetEmbeddedBlob(const_code, code_size, const_data, data_size);
3466     current_embedded_blob_refs_++;
3467 
3468     SetStickyEmbeddedBlob(code, code_size, data, data_size);
3469   }
3470 
3471   MaybeRemapEmbeddedBuiltinsIntoCodeRange();
3472 
3473   CreateOffHeapTrampolines(this);
3474 }
3475 
3476 void Isolate::MaybeRemapEmbeddedBuiltinsIntoCodeRange() {
3477   if (!is_short_builtin_calls_enabled() || !RequiresCodeRange()) return;
3478 
3479   CHECK_NOT_NULL(embedded_blob_code_);
3480   CHECK_NE(embedded_blob_code_size_, 0);
3481 
3482   DCHECK_NOT_NULL(heap_.code_range_);
3483   embedded_blob_code_ = heap_.code_range_->RemapEmbeddedBuiltins(
3484       this, embedded_blob_code_, embedded_blob_code_size_);
3485   CHECK_NOT_NULL(embedded_blob_code_);
3486   // The un-embedded code blob is already a part of the registered code range
3487   // so it's not necessary to register it again.
3488 }
3489 
3490 void Isolate::TearDownEmbeddedBlob() {
3491   // Nothing to do in case the blob is embedded into the binary or unset.
3492   if (StickyEmbeddedBlobCode() == nullptr) return;
3493 
3494   if (!is_short_builtin_calls_enabled()) {
3495     CHECK_EQ(embedded_blob_code(), StickyEmbeddedBlobCode());
3496     CHECK_EQ(embedded_blob_data(), StickyEmbeddedBlobData());
3497   }
3498   CHECK_EQ(CurrentEmbeddedBlobCode(), StickyEmbeddedBlobCode());
3499   CHECK_EQ(CurrentEmbeddedBlobData(), StickyEmbeddedBlobData());
3500 
3501   base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
3502   current_embedded_blob_refs_--;
3503   if (current_embedded_blob_refs_ == 0 && enable_embedded_blob_refcounting_) {
3504     // We own the embedded blob and are the last holder. Free it.
3505     InstructionStream::FreeOffHeapInstructionStream(
3506         const_cast<uint8_t*>(CurrentEmbeddedBlobCode()),
3507         embedded_blob_code_size(),
3508         const_cast<uint8_t*>(CurrentEmbeddedBlobData()),
3509         embedded_blob_data_size());
3510     ClearEmbeddedBlob();
3511   }
3512 }
3513 
3514 bool Isolate::InitWithoutSnapshot() { return Init(nullptr, nullptr, false); }
3515 
3516 bool Isolate::InitWithSnapshot(SnapshotData* startup_snapshot_data,
3517                                SnapshotData* read_only_snapshot_data,
3518                                bool can_rehash) {
3519   DCHECK_NOT_NULL(startup_snapshot_data);
3520   DCHECK_NOT_NULL(read_only_snapshot_data);
3521   return Init(startup_snapshot_data, read_only_snapshot_data, can_rehash);
3522 }
3523 
3524 static std::string AddressToString(uintptr_t address) {
3525   std::stringstream stream_address;
3526   stream_address << "0x" << std::hex << address;
3527   return stream_address.str();
3528 }
3529 
3530 void Isolate::AddCrashKeysForIsolateAndHeapPointers() {
3531   DCHECK_NOT_NULL(add_crash_key_callback_);
3532 
3533   const uintptr_t isolate_address = reinterpret_cast<uintptr_t>(this);
3534   add_crash_key_callback_(v8::CrashKeyId::kIsolateAddress,
3535                           AddressToString(isolate_address));
3536 
3537   const uintptr_t ro_space_firstpage_address =
3538       heap()->read_only_space()->FirstPageAddress();
3539   add_crash_key_callback_(v8::CrashKeyId::kReadonlySpaceFirstPageAddress,
3540                           AddressToString(ro_space_firstpage_address));
3541   const uintptr_t map_space_firstpage_address =
3542       heap()->map_space()->FirstPageAddress();
3543   add_crash_key_callback_(v8::CrashKeyId::kMapSpaceFirstPageAddress,
3544                           AddressToString(map_space_firstpage_address));
3545   const uintptr_t code_space_firstpage_address =
3546       heap()->code_space()->FirstPageAddress();
3547   add_crash_key_callback_(v8::CrashKeyId::kCodeSpaceFirstPageAddress,
3548                           AddressToString(code_space_firstpage_address));
3549 }
3550 
3551 void Isolate::InitializeCodeRanges() {
3552   DCHECK_NULL(GetCodePages());
3553   MemoryRange embedded_range{
3554       reinterpret_cast<const void*>(embedded_blob_code()),
3555       embedded_blob_code_size()};
3556   code_pages_buffer1_.push_back(embedded_range);
3557   SetCodePages(&code_pages_buffer1_);
3558 }
3559 
3560 namespace {
3561 
// This global map records the number of stack loads/stores per
// optimized/wasm function.
3564 using MapOfLoadsAndStoresPerFunction =
3565     std::map<std::string /* function_name */,
3566              std::pair<uint64_t /* loads */, uint64_t /* stores */>>;
3567 MapOfLoadsAndStoresPerFunction* stack_access_count_map = nullptr;
3568 
3569 class BigIntPlatform : public bigint::Platform {
3570  public:
3571   explicit BigIntPlatform(Isolate* isolate) : isolate_(isolate) {}
3572   ~BigIntPlatform() override = default;
3573 
3574   bool InterruptRequested() override {
3575     StackLimitCheck interrupt_check(isolate_);
3576     return (interrupt_check.InterruptRequested() &&
3577             isolate_->stack_guard()->HasTerminationRequest());
3578   }
3579 
3580  private:
3581   Isolate* isolate_;
3582 };
3583 }  // namespace
3584 
3585 bool Isolate::Init(SnapshotData* startup_snapshot_data,
3586                    SnapshotData* read_only_snapshot_data, bool can_rehash) {
3587   TRACE_ISOLATE(init);
3588   const bool create_heap_objects = (read_only_snapshot_data == nullptr);
3589   // We either have both or neither.
3590   DCHECK_EQ(create_heap_objects, startup_snapshot_data == nullptr);
3591 
3592   base::ElapsedTimer timer;
3593   if (create_heap_objects && FLAG_profile_deserialization) timer.Start();
3594 
3595   time_millis_at_init_ = heap_.MonotonicallyIncreasingTimeInMs();
3596 
3597   stress_deopt_count_ = FLAG_deopt_every_n_times;
3598   force_slow_path_ = FLAG_force_slow_path;
3599 
3600   has_fatal_error_ = false;
3601 
3602   // The initialization process does not handle memory exhaustion.
3603   AlwaysAllocateScope always_allocate(heap());
3604 
3605 #define ASSIGN_ELEMENT(CamelName, hacker_name)                  \
3606   isolate_addresses_[IsolateAddressId::k##CamelName##Address] = \
3607       reinterpret_cast<Address>(hacker_name##_address());
3608   FOR_EACH_ISOLATE_ADDRESS_NAME(ASSIGN_ELEMENT)
3609 #undef ASSIGN_ELEMENT
3610 
3611   // We need to initialize code_pages_ before any on-heap code is allocated to
3612   // make sure we record all code allocations.
3613   InitializeCodeRanges();
3614 
3615   compilation_cache_ = new CompilationCache(this);
3616   descriptor_lookup_cache_ = new DescriptorLookupCache();
3617   inner_pointer_to_code_cache_ = new InnerPointerToCodeCache(this);
3618   global_handles_ = new GlobalHandles(this);
3619   eternal_handles_ = new EternalHandles();
3620   bootstrapper_ = new Bootstrapper(this);
3621   handle_scope_implementer_ = new HandleScopeImplementer(this);
3622   load_stub_cache_ = new StubCache(this);
3623   store_stub_cache_ = new StubCache(this);
3624   materialized_object_store_ = new MaterializedObjectStore(this);
3625   regexp_stack_ = new RegExpStack();
3626   date_cache_ = new DateCache();
3627   heap_profiler_ = new HeapProfiler(heap());
3628   interpreter_ = new interpreter::Interpreter(this);
3629   string_table_.reset(new StringTable(this));
3630   bigint_processor_ = bigint::Processor::New(new BigIntPlatform(this));
3631 
3632   compiler_dispatcher_ = new LazyCompileDispatcher(
3633       this, V8::GetCurrentPlatform(), FLAG_stack_size);
3634   baseline_batch_compiler_ = new baseline::BaselineBatchCompiler(this);
3635 
3636   // Enable logging before setting up the heap
3637   logger_->SetUp(this);
3638 
3639   metrics_recorder_ = std::make_shared<metrics::Recorder>();
3640 
3641   {
3642     // Ensure that the thread has a valid stack guard.  The v8::Locker object
3643     // will ensure this too, but we don't have to use lockers if we are only
3644     // using one thread.
3645     ExecutionAccess lock(this);
3646     stack_guard()->InitThread(lock);
3647   }
3648 
3649   // SetUp the object heap.
3650   DCHECK(!heap_.HasBeenSetUp());
3651   heap_.SetUp();
3652   ReadOnlyHeap::SetUp(this, read_only_snapshot_data, can_rehash);
3653   heap_.SetUpSpaces();
3654 
3655   if (V8_SHORT_BUILTIN_CALLS_BOOL && FLAG_short_builtin_calls) {
    // Check if the system has more than 4GB of physical memory by comparing
    // the old space size with the respective threshold value.
3658     //
3659     // Additionally, enable if there is already a process-wide CodeRange that
3660     // has re-embedded builtins.
3661     is_short_builtin_calls_enabled_ = (heap_.MaxOldGenerationSize() >=
3662                                        kShortBuiltinCallsOldSpaceSizeThreshold);
3663     if (COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL) {
3664       std::shared_ptr<CodeRange> code_range =
3665           CodeRange::GetProcessWideCodeRange();
3666       if (code_range && code_range->embedded_blob_code_copy() != nullptr) {
3667         is_short_builtin_calls_enabled_ = true;
3668       }
3669     }
3670   }
3671 
3672   // Create LocalIsolate/LocalHeap for the main thread and set state to Running.
3673   main_thread_local_isolate_.reset(new LocalIsolate(this, ThreadKind::kMain));
3674   main_thread_local_heap()->Unpark();
3675 
3676   heap_.InitializeMainThreadLocalHeap(main_thread_local_heap());
3677 
3678   isolate_data_.external_reference_table()->Init(this);
3679 
3680 #if V8_ENABLE_WEBASSEMBLY
3681   wasm::GetWasmEngine()->AddIsolate(this);
3682 #endif  // V8_ENABLE_WEBASSEMBLY
3683 
3684   if (setup_delegate_ == nullptr) {
3685     setup_delegate_ = new SetupIsolateDelegate(create_heap_objects);
3686   }
3687 
3688   if (!FLAG_inline_new) heap_.DisableInlineAllocation();
3689 
3690   if (!setup_delegate_->SetupHeap(&heap_)) {
3691     V8::FatalProcessOutOfMemory(this, "heap object creation");
3692   }
3693 
3694   if (create_heap_objects) {
3695     // Terminate the startup object cache so we can iterate.
3696     startup_object_cache_.push_back(ReadOnlyRoots(this).undefined_value());
3697   }
3698 
3699   InitializeThreadLocal();
3700 
3701   // Profiler has to be created after ThreadLocal is initialized
3702   // because it makes use of interrupts.
3703   tracing_cpu_profiler_.reset(new TracingCpuProfilerImpl(this));
3704 
3705   bootstrapper_->Initialize(create_heap_objects);
3706 
3707   if (create_heap_objects) {
3708     builtins_constants_table_builder_ = new BuiltinsConstantsTableBuilder(this);
3709 
3710     setup_delegate_->SetupBuiltins(this);
3711 
3712 #ifndef V8_TARGET_ARCH_ARM
3713     // Store the interpreter entry trampoline on the root list. It is used as a
3714     // template for further copies that may later be created to help profile
3715     // interpreted code.
3716     // We currently cannot do this on arm due to RELATIVE_CODE_TARGETs
3717     // assuming that all possible Code targets may be addressed with an int24
3718     // offset, effectively limiting code space size to 32MB. We can guarantee
3719     // this at mksnapshot-time, but not at runtime.
3720     // See also: https://crbug.com/v8/8713.
3721     heap_.SetInterpreterEntryTrampolineForProfiling(
3722         heap_.builtin(Builtin::kInterpreterEntryTrampoline));
3723 #endif
3724 
3725     builtins_constants_table_builder_->Finalize();
3726     delete builtins_constants_table_builder_;
3727     builtins_constants_table_builder_ = nullptr;
3728 
3729     CreateAndSetEmbeddedBlob();
3730   } else {
3731     setup_delegate_->SetupBuiltins(this);
3732     MaybeRemapEmbeddedBuiltinsIntoCodeRange();
3733   }
3734 
3735   // Initialize custom memcopy and memmove functions (must happen after
3736   // embedded blob setup).
3737   init_memcopy_functions();
3738 
3739   if (FLAG_log_internal_timer_events) {
3740     set_event_logger(Logger::DefaultEventLoggerSentinel);
3741   }
3742 
3743   if (FLAG_trace_turbo || FLAG_trace_turbo_graph || FLAG_turbo_profiling) {
3744     PrintF("Concurrent recompilation has been disabled for tracing.\n");
3745   } else if (OptimizingCompileDispatcher::Enabled()) {
3746     optimizing_compile_dispatcher_ = new OptimizingCompileDispatcher(this);
3747   }
3748 
3749   // Initialize runtime profiler before deserialization, because collections may
3750   // occur, clearing/updating ICs.
3751   runtime_profiler_ = new RuntimeProfiler(this);
3752 
3753   // If we are deserializing, read the state into the now-empty heap.
3754   {
3755     CodeSpaceMemoryModificationScope modification_scope(heap());
3756 
3757     if (create_heap_objects) {
3758       heap_.read_only_space()->ClearStringPaddingIfNeeded();
3759       read_only_heap_->OnCreateHeapObjectsComplete(this);
3760     } else {
3761       StartupDeserializer startup_deserializer(this, startup_snapshot_data,
3762                                                can_rehash);
3763       startup_deserializer.DeserializeIntoIsolate();
3764     }
3765     load_stub_cache_->Initialize();
3766     store_stub_cache_->Initialize();
3767     interpreter_->Initialize();
3768     heap_.NotifyDeserializationComplete();
3769   }
3770 
3771 #ifdef VERIFY_HEAP
3772   if (FLAG_verify_heap) {
3773     heap_.VerifyReadOnlyHeap();
3774   }
3775 #endif
3776 
3777   delete setup_delegate_;
3778   setup_delegate_ = nullptr;
3779 
3780   Builtins::InitializeIsolateDataTables(this);
3781   Builtins::EmitCodeCreateEvents(this);
3782 
3783 #ifdef DEBUG
3784   // Verify that the current heap state (usually deserialized from the snapshot)
3785   // is compatible with the embedded blob. If this DCHECK fails, we've likely
3786   // loaded a snapshot generated by a different V8 version or build-time
3787   // configuration.
3788   if (!IsolateIsCompatibleWithEmbeddedBlob(this)) {
3789     FATAL(
3790         "The Isolate is incompatible with the embedded blob. This is usually "
3791         "caused by incorrect usage of mksnapshot. When generating custom "
3792         "snapshots, embedders must ensure they pass the same flags as during "
3793         "the V8 build process (e.g.: --turbo-instruction-scheduling).");
3794   }
3795 #endif  // DEBUG
3796 
3797 #ifndef V8_TARGET_ARCH_ARM
  // The interpreter entry trampoline (IET) for profiling should always be a
  // full on-heap Code object.
3799   DCHECK(!Code::cast(heap_.interpreter_entry_trampoline_for_profiling())
3800               .is_off_heap_trampoline());
3801 #endif  // V8_TARGET_ARCH_ARM
3802 
3803   if (FLAG_print_builtin_code) builtins()->PrintBuiltinCode();
3804   if (FLAG_print_builtin_size) builtins()->PrintBuiltinSize();
3805 
3806   // Finish initialization of ThreadLocal after deserialization is done.
3807   clear_pending_exception();
3808   clear_pending_message();
3809   clear_scheduled_exception();
3810 
3811   // Quiet the heap NaN if needed on target platform.
  if (!create_heap_objects) {
    Assembler::QuietNaN(ReadOnlyRoots(this).nan_value());
  }
3814 
3815   if (FLAG_trace_turbo) {
3816     // Create an empty file.
3817     std::ofstream(GetTurboCfgFileName(this).c_str(), std::ios_base::trunc);
3818   }
3819 
3820   {
3821     HandleScope scope(this);
3822     ast_string_constants_ = new AstStringConstants(this, HashSeed(this));
3823   }
3824 
3825   initialized_from_snapshot_ = !create_heap_objects;
3826 
3827   if (FLAG_stress_sampling_allocation_profiler > 0) {
3828     uint64_t sample_interval = FLAG_stress_sampling_allocation_profiler;
3829     int stack_depth = 128;
3830     v8::HeapProfiler::SamplingFlags sampling_flags =
3831         v8::HeapProfiler::SamplingFlags::kSamplingForceGC;
3832     heap_profiler()->StartSamplingHeapProfiler(sample_interval, stack_depth,
3833                                                sampling_flags);
3834   }
3835 
3836 #if defined(V8_OS_WIN64)
3837   if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
3838       heap()->code_range()->AtomicIncrementUnwindInfoUseCount() == 0) {
3839     const base::AddressRegion& code_region = heap()->code_region();
3840     void* start = reinterpret_cast<void*>(code_region.begin());
3841     size_t size_in_bytes = code_region.size();
3842     win64_unwindinfo::RegisterNonABICompliantCodeRange(start, size_in_bytes);
3843   }
3844 #endif  // V8_OS_WIN64
3845 
3846   if (create_heap_objects && FLAG_profile_deserialization) {
3847     double ms = timer.Elapsed().InMillisecondsF();
3848     PrintF("[Initializing isolate from scratch took %0.3f ms]\n", ms);
3849   }
3850 
3851   initialized_ = true;
3852 
3853   return true;
3854 }
3855 
3856 void Isolate::Enter() {
3857   Isolate* current_isolate = nullptr;
3858   PerIsolateThreadData* current_data = CurrentPerIsolateThreadData();
3859   if (current_data != nullptr) {
3860     current_isolate = current_data->isolate_;
3861     DCHECK_NOT_NULL(current_isolate);
3862     if (current_isolate == this) {
3863       DCHECK(Current() == this);
3864       DCHECK_NOT_NULL(entry_stack_);
3865       DCHECK(entry_stack_->previous_thread_data == nullptr ||
3866              entry_stack_->previous_thread_data->thread_id() ==
3867                  ThreadId::Current());
3868       // Same thread re-enters the isolate, no need to re-init anything.
3869       entry_stack_->entry_count++;
3870       return;
3871     }
3872   }
3873 
3874   PerIsolateThreadData* data = FindOrAllocatePerThreadDataForThisThread();
3875   DCHECK_NOT_NULL(data);
3876   DCHECK(data->isolate_ == this);
3877 
3878   EntryStackItem* item =
3879       new EntryStackItem(current_data, current_isolate, entry_stack_);
3880   entry_stack_ = item;
3881 
3882   SetIsolateThreadLocals(this, data);
3883 
3884   // In case it's the first time some thread enters the isolate.
3885   set_thread_id(data->thread_id());
3886 }
3887 
3888 void Isolate::Exit() {
3889   DCHECK_NOT_NULL(entry_stack_);
3890   DCHECK(entry_stack_->previous_thread_data == nullptr ||
3891          entry_stack_->previous_thread_data->thread_id() ==
3892              ThreadId::Current());
3893 
3894   if (--entry_stack_->entry_count > 0) return;
3895 
3896   DCHECK_NOT_NULL(CurrentPerIsolateThreadData());
3897   DCHECK(CurrentPerIsolateThreadData()->isolate_ == this);
3898 
3899   // Pop the stack.
3900   EntryStackItem* item = entry_stack_;
3901   entry_stack_ = item->previous_item;
3902 
3903   PerIsolateThreadData* previous_thread_data = item->previous_thread_data;
3904   Isolate* previous_isolate = item->previous_isolate;
3905 
3906   delete item;
3907 
3908   // Reinit the current thread for the isolate it was running before this one.
3909   SetIsolateThreadLocals(previous_isolate, previous_thread_data);
3910 }
3911 
3912 std::unique_ptr<PersistentHandles> Isolate::NewPersistentHandles() {
3913   return std::make_unique<PersistentHandles>(this);
3914 }
3915 
3916 void Isolate::DumpAndResetStats() {
3917   if (FLAG_trace_turbo_stack_accesses) {
3918     StdoutStream os;
3919     uint64_t total_loads = 0;
3920     uint64_t total_stores = 0;
3921     os << "=== Stack access counters === " << std::endl;
3922     if (!stack_access_count_map) {
      os << "No stack accesses in optimized/wasm functions found."
         << std::endl;
3924     } else {
3925       DCHECK_NOT_NULL(stack_access_count_map);
3926       os << "Number of optimized/wasm stack-access functions: "
3927          << stack_access_count_map->size() << std::endl;
3928       for (auto it = stack_access_count_map->cbegin();
3929            it != stack_access_count_map->cend(); it++) {
3930         std::string function_name((*it).first);
3931         std::pair<uint64_t, uint64_t> per_func_count = (*it).second;
3932         os << "Name: " << function_name << ", Loads: " << per_func_count.first
3933            << ", Stores: " << per_func_count.second << std::endl;
3934         total_loads += per_func_count.first;
3935         total_stores += per_func_count.second;
3936       }
3937       os << "Total Loads: " << total_loads << ", Total Stores: " << total_stores
3938          << std::endl;
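      // Note: the map is dropped rather than deleted, presumably because
      // generated code may still hold raw addresses into its entries (see
      // load_from_stack_count_address and store_to_stack_count_address).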
3939       stack_access_count_map = nullptr;
3940     }
3941   }
3942   if (turbo_statistics() != nullptr) {
3943     DCHECK(FLAG_turbo_stats || FLAG_turbo_stats_nvp);
3944     StdoutStream os;
3945     if (FLAG_turbo_stats) {
3946       AsPrintableStatistics ps = {*turbo_statistics(), false};
3947       os << ps << std::endl;
3948     }
3949     if (FLAG_turbo_stats_nvp) {
3950       AsPrintableStatistics ps = {*turbo_statistics(), true};
3951       os << ps << std::endl;
3952     }
3953     delete turbo_statistics_;
3954     turbo_statistics_ = nullptr;
3955   }
3956 #if V8_ENABLE_WEBASSEMBLY
  // TODO(7424): There is no public API for the {WasmEngine} yet. So for now
  // we just dump and reset the engine's statistics together with the Isolate.
3959   if (FLAG_turbo_stats_wasm) {
3960     wasm::GetWasmEngine()->DumpAndResetTurboStatistics();
3961   }
3962 #endif  // V8_ENABLE_WEBASSEMBLY
3963 #if V8_RUNTIME_CALL_STATS
3964   if (V8_UNLIKELY(TracingFlags::runtime_stats.load(std::memory_order_relaxed) ==
3965                   v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE)) {
3966     counters()->worker_thread_runtime_call_stats()->AddToMainTable(
3967         counters()->runtime_call_stats());
3968     counters()->runtime_call_stats()->Print();
3969     counters()->runtime_call_stats()->Reset();
3970   }
3971 #endif  // V8_RUNTIME_CALL_STATS
3972   if (BasicBlockProfiler::Get()->HasData(this)) {
3973     StdoutStream out;
3974     BasicBlockProfiler::Get()->Print(out, this);
3975     BasicBlockProfiler::Get()->ResetCounts(this);
3976   }
3977 }
3978 
3979 void Isolate::AbortConcurrentOptimization(BlockingBehavior behavior) {
3980   if (concurrent_recompilation_enabled()) {
3981     DisallowGarbageCollection no_recursive_gc;
3982     optimizing_compile_dispatcher()->Flush(behavior);
3983   }
3984 }
3985 
3986 CompilationStatistics* Isolate::GetTurboStatistics() {
3987   if (turbo_statistics() == nullptr)
3988     set_turbo_statistics(new CompilationStatistics());
3989   return turbo_statistics();
3990 }
3991 
3992 CodeTracer* Isolate::GetCodeTracer() {
3993   if (code_tracer() == nullptr) set_code_tracer(new CodeTracer(id()));
3994   return code_tracer();
3995 }
3996 
3997 bool Isolate::use_optimizer() {
3998   return FLAG_opt && !serializer_enabled_ && CpuFeatures::SupportsOptimizer() &&
3999          !is_precise_count_code_coverage();
4000 }
4001 
4002 void Isolate::IncreaseTotalRegexpCodeGenerated(Handle<HeapObject> code) {
4003   DCHECK(code->IsCode() || code->IsByteArray());
4004   total_regexp_code_generated_ += code->Size();
4005 }
4006 
4007 bool Isolate::NeedsDetailedOptimizedCodeLineInfo() const {
4008   return NeedsSourcePositionsForProfiling() ||
4009          detailed_source_positions_for_profiling();
4010 }
4011 
4012 bool Isolate::NeedsSourcePositionsForProfiling() const {
4013   return
4014       // Static conditions.
4015       FLAG_trace_deopt || FLAG_trace_turbo || FLAG_trace_turbo_graph ||
4016       FLAG_turbo_profiling || FLAG_perf_prof || FLAG_log_maps || FLAG_log_ic ||
4017       // Dynamic conditions; changing any of these conditions triggers source
4018       // position collection for the entire heap
4019       // (CollectSourcePositionsForAllBytecodeArrays).
4020       is_profiling() || debug_->is_active() || logger_->is_logging();
4021 }
4022 
4023 void Isolate::SetFeedbackVectorsForProfilingTools(Object value) {
4024   DCHECK(value.IsUndefined(this) || value.IsArrayList());
4025   heap()->set_feedback_vectors_for_profiling_tools(value);
4026 }
4027 
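// Lazily builds the list of feedback vectors used by profiling tools: walk
// the heap once, collect the feedback vectors of all functions that are
// subject to debugging, and anchor them in an ArrayList reachable from the
// roots so they are kept alive across GCs.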
4028 void Isolate::MaybeInitializeVectorListFromHeap() {
4029   if (!heap()->feedback_vectors_for_profiling_tools().IsUndefined(this)) {
4030     // Already initialized, return early.
4031     DCHECK(heap()->feedback_vectors_for_profiling_tools().IsArrayList());
4032     return;
4033   }
4034 
4035   // Collect existing feedback vectors.
4036   std::vector<Handle<FeedbackVector>> vectors;
4037 
4038   {
4039     HeapObjectIterator heap_iterator(heap());
4040     for (HeapObject current_obj = heap_iterator.Next(); !current_obj.is_null();
4041          current_obj = heap_iterator.Next()) {
4042       if (!current_obj.IsFeedbackVector()) continue;
4043 
4044       FeedbackVector vector = FeedbackVector::cast(current_obj);
4045       SharedFunctionInfo shared = vector.shared_function_info();
4046 
4047       // No need to preserve the feedback vector for non-user-visible functions.
4048       if (!shared.IsSubjectToDebugging()) continue;
4049 
4050       vectors.emplace_back(vector, this);
4051     }
4052   }
4053 
4054   // Add collected feedback vectors to the root list lest we lose them to GC.
4055   Handle<ArrayList> list =
4056       ArrayList::New(this, static_cast<int>(vectors.size()));
4057   for (const auto& vector : vectors) list = ArrayList::Add(this, list, vector);
4058   SetFeedbackVectorsForProfilingTools(*list);
4059 }
4060 
4061 void Isolate::set_date_cache(DateCache* date_cache) {
4062   if (date_cache != date_cache_) {
4063     delete date_cache_;
4064   }
4065   date_cache_ = date_cache;
4066 }
4067 
4068 Isolate::KnownPrototype Isolate::IsArrayOrObjectOrStringPrototype(
4069     Object object) {
4070   Object context = heap()->native_contexts_list();
4071   while (!context.IsUndefined(this)) {
4072     Context current_context = Context::cast(context);
4073     if (current_context.initial_object_prototype() == object) {
4074       return KnownPrototype::kObject;
4075     } else if (current_context.initial_array_prototype() == object) {
4076       return KnownPrototype::kArray;
4077     } else if (current_context.initial_string_prototype() == object) {
4078       return KnownPrototype::kString;
4079     }
4080     context = current_context.next_context_link();
4081   }
4082   return KnownPrototype::kNone;
4083 }
4084 
4085 bool Isolate::IsInAnyContext(Object object, uint32_t index) {
4086   DisallowGarbageCollection no_gc;
4087   Object context = heap()->native_contexts_list();
4088   while (!context.IsUndefined(this)) {
4089     Context current_context = Context::cast(context);
4090     if (current_context.get(index) == object) {
4091       return true;
4092     }
4093     context = current_context.next_context_link();
4094   }
4095   return false;
4096 }
4097 
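// Invalidates the NoElements protector when an element is set on one of the
// initial Array/Object/String prototypes. Optimized code may depend on this
// protector to assume those prototypes have no elements, so adding one
// invalidates the protector (which in turn deoptimizes dependent code).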
4098 void Isolate::UpdateNoElementsProtectorOnSetElement(Handle<JSObject> object) {
4099   DisallowGarbageCollection no_gc;
4100   if (!object->map().is_prototype_map()) return;
4101   if (!Protectors::IsNoElementsIntact(this)) return;
4102   KnownPrototype obj_type = IsArrayOrObjectOrStringPrototype(*object);
4103   if (obj_type == KnownPrototype::kNone) return;
4104   if (obj_type == KnownPrototype::kObject) {
4105     this->CountUsage(v8::Isolate::kObjectPrototypeHasElements);
4106   } else if (obj_type == KnownPrototype::kArray) {
4107     this->CountUsage(v8::Isolate::kArrayPrototypeHasElements);
4108   }
4109   Protectors::InvalidateNoElements(this);
4110 }
4111 
4112 static base::RandomNumberGenerator* ensure_rng_exists(
4113     base::RandomNumberGenerator** rng, int seed) {
4114   if (*rng == nullptr) {
4115     if (seed != 0) {
4116       *rng = new base::RandomNumberGenerator(seed);
4117     } else {
4118       *rng = new base::RandomNumberGenerator();
4119     }
4120   }
4121   return *rng;
4122 }
4123 
4124 base::RandomNumberGenerator* Isolate::random_number_generator() {
  // TODO(bmeurer): Initialized lazily because it depends on flags; this can
  // be fixed once the default isolate cleanup is done.
4127   return ensure_rng_exists(&random_number_generator_, FLAG_random_seed);
4128 }
4129 
4130 base::RandomNumberGenerator* Isolate::fuzzer_rng() {
4131   if (fuzzer_rng_ == nullptr) {
4132     int64_t seed = FLAG_fuzzer_random_seed;
4133     if (seed == 0) {
4134       seed = random_number_generator()->initial_seed();
4135     }
4136 
4137     fuzzer_rng_ = new base::RandomNumberGenerator(seed);
4138   }
4139 
4140   return fuzzer_rng_;
4141 }
4142 
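// Returns a non-zero identity hash within |mask|. Retries up to 30 times if
// the masked random value is zero, and falls back to 1 if every attempt
// produced zero, since zero is not a valid identity hash here.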
4143 int Isolate::GenerateIdentityHash(uint32_t mask) {
4144   int hash;
4145   int attempts = 0;
4146   do {
4147     hash = random_number_generator()->NextInt() & mask;
4148   } while (hash == 0 && attempts++ < 30);
4149   return hash != 0 ? hash : 1;
4150 }
4151 
4152 Code Isolate::FindCodeObject(Address a) {
4153   return heap()->GcSafeFindCodeForInnerPointer(a);
4154 }
4155 
4156 #ifdef DEBUG
4157 #define ISOLATE_FIELD_OFFSET(type, name, ignored) \
4158   const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_);
4159 ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
4160 ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
4161 #undef ISOLATE_FIELD_OFFSET
4162 #endif
4163 
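// Looks up |name| in the symbol registry identified by |dictionary_index|,
// creating and registering a new (possibly private) symbol on a miss. For
// RootIndex::kPublicSymbolTable this gives the registry semantics of
// JavaScript's Symbol.for(); a hedged JS-level sketch:
//
//   Symbol.for("x") === Symbol.for("x")  // true: same table entry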
4164 Handle<Symbol> Isolate::SymbolFor(RootIndex dictionary_index,
4165                                   Handle<String> name, bool private_symbol) {
4166   Handle<String> key = factory()->InternalizeString(name);
4167   Handle<NameDictionary> dictionary =
4168       Handle<NameDictionary>::cast(root_handle(dictionary_index));
4169   InternalIndex entry = dictionary->FindEntry(this, key);
4170   Handle<Symbol> symbol;
4171   if (entry.is_not_found()) {
4172     symbol =
4173         private_symbol ? factory()->NewPrivateSymbol() : factory()->NewSymbol();
4174     symbol->set_description(*key);
4175     dictionary = NameDictionary::Add(this, dictionary, key, symbol,
4176                                      PropertyDetails::Empty(), &entry);
4177     switch (dictionary_index) {
4178       case RootIndex::kPublicSymbolTable:
4179         symbol->set_is_in_public_symbol_table(true);
4180         heap()->set_public_symbol_table(*dictionary);
4181         break;
4182       case RootIndex::kApiSymbolTable:
4183         heap()->set_api_symbol_table(*dictionary);
4184         break;
4185       case RootIndex::kApiPrivateSymbolTable:
4186         heap()->set_api_private_symbol_table(*dictionary);
4187         break;
4188       default:
4189         UNREACHABLE();
4190     }
4191   } else {
4192     symbol = Handle<Symbol>(Symbol::cast(dictionary->ValueAt(entry)), this);
4193   }
4194   return symbol;
4195 }
4196 
4197 void Isolate::AddBeforeCallEnteredCallback(BeforeCallEnteredCallback callback) {
4198   auto pos = std::find(before_call_entered_callbacks_.begin(),
4199                        before_call_entered_callbacks_.end(), callback);
4200   if (pos != before_call_entered_callbacks_.end()) return;
4201   before_call_entered_callbacks_.push_back(callback);
4202 }
4203 
4204 void Isolate::RemoveBeforeCallEnteredCallback(
4205     BeforeCallEnteredCallback callback) {
4206   auto pos = std::find(before_call_entered_callbacks_.begin(),
4207                        before_call_entered_callbacks_.end(), callback);
4208   if (pos == before_call_entered_callbacks_.end()) return;
4209   before_call_entered_callbacks_.erase(pos);
4210 }
4211 
4212 void Isolate::AddCallCompletedCallback(CallCompletedCallback callback) {
4213   auto pos = std::find(call_completed_callbacks_.begin(),
4214                        call_completed_callbacks_.end(), callback);
4215   if (pos != call_completed_callbacks_.end()) return;
4216   call_completed_callbacks_.push_back(callback);
4217 }
4218 
4219 void Isolate::RemoveCallCompletedCallback(CallCompletedCallback callback) {
4220   auto pos = std::find(call_completed_callbacks_.begin(),
4221                        call_completed_callbacks_.end(), callback);
4222   if (pos == call_completed_callbacks_.end()) return;
4223   call_completed_callbacks_.erase(pos);
4224 }
4225 
4226 void Isolate::FireCallCompletedCallbackInternal(
4227     MicrotaskQueue* microtask_queue) {
4228   DCHECK(thread_local_top()->CallDepthIsZero());
4229 
4230   bool perform_checkpoint =
4231       microtask_queue &&
4232       microtask_queue->microtasks_policy() == v8::MicrotasksPolicy::kAuto;
4233 
4234   v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this);
4235   if (perform_checkpoint) microtask_queue->PerformCheckpoint(isolate);
4236 
4237   if (call_completed_callbacks_.empty()) return;
4238   // Fire callbacks.  Increase call depth to prevent recursive callbacks.
4239   v8::Isolate::SuppressMicrotaskExecutionScope suppress(isolate);
4240   std::vector<CallCompletedCallback> callbacks(call_completed_callbacks_);
4241   for (auto& callback : callbacks) {
4242     callback(reinterpret_cast<v8::Isolate*>(this));
4243   }
4244 }
4245 
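// The PromiseHook protector guards optimized code that assumes no promise
// hooks need to be run; once any hook source becomes active we invalidate it
// so that dependent code deoptimizes and takes the hook-dispatching path.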
4246 void Isolate::UpdatePromiseHookProtector() {
4247   if (Protectors::IsPromiseHookIntact(this)) {
4248     HandleScope scope(this);
4249     Protectors::InvalidatePromiseHook(this);
4250   }
4251 }
4252 
4253 void Isolate::PromiseHookStateUpdated() {
4254   promise_hook_flags_ =
4255     (promise_hook_flags_ & PromiseHookFields::HasContextPromiseHook::kMask) |
4256     PromiseHookFields::HasIsolatePromiseHook::encode(promise_hook_) |
4257     PromiseHookFields::HasAsyncEventDelegate::encode(async_event_delegate_) |
4258     PromiseHookFields::IsDebugActive::encode(debug()->is_active());
4259 
4260   if (promise_hook_flags_ != 0) {
4261     UpdatePromiseHookProtector();
4262   }
4263 }
4264 
4265 namespace {
4266 
4267 MaybeHandle<JSPromise> NewRejectedPromise(Isolate* isolate,
4268                                           v8::Local<v8::Context> api_context,
4269                                           Handle<Object> exception) {
4270   v8::Local<v8::Promise::Resolver> resolver;
4271   ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
4272       isolate, resolver, v8::Promise::Resolver::New(api_context),
4273       MaybeHandle<JSPromise>());
4274 
4275   RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
4276       isolate, resolver->Reject(api_context, v8::Utils::ToLocal(exception)),
4277       MaybeHandle<JSPromise>());
4278 
4279   v8::Local<v8::Promise> promise = resolver->GetPromise();
4280   return v8::Utils::OpenHandle(*promise);
4281 }
4282 
4283 }  // namespace
4284 
4285 MaybeHandle<JSPromise> Isolate::RunHostImportModuleDynamicallyCallback(
4286     Handle<Script> referrer, Handle<Object> specifier,
4287     MaybeHandle<Object> maybe_import_assertions_argument) {
4288   v8::Local<v8::Context> api_context =
4289       v8::Utils::ToLocal(Handle<Context>(native_context()));
4290   if (host_import_module_dynamically_with_import_assertions_callback_ ==
4291       nullptr) {
4292     Handle<Object> exception =
4293         factory()->NewError(error_function(), MessageTemplate::kUnsupported);
4294     return NewRejectedPromise(this, api_context, exception);
4295   }
4296 
4297   Handle<String> specifier_str;
4298   MaybeHandle<String> maybe_specifier = Object::ToString(this, specifier);
4299   if (!maybe_specifier.ToHandle(&specifier_str)) {
4300     Handle<Object> exception(pending_exception(), this);
4301     clear_pending_exception();
4302     return NewRejectedPromise(this, api_context, exception);
4303   }
4304   DCHECK(!has_pending_exception());
4305 
4306   v8::Local<v8::Promise> promise;
4307   Handle<FixedArray> import_assertions_array;
4308   if (GetImportAssertionsFromArgument(maybe_import_assertions_argument)
4309           .ToHandle(&import_assertions_array)) {
4310     ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
4311         this, promise,
4312         host_import_module_dynamically_with_import_assertions_callback_(
4313             api_context, v8::Utils::ScriptOrModuleToLocal(referrer),
4314             v8::Utils::ToLocal(specifier_str),
4315             ToApiHandle<v8::FixedArray>(import_assertions_array)),
4316         MaybeHandle<JSPromise>());
4317     return v8::Utils::OpenHandle(*promise);
4318   } else {
4319     Handle<Object> exception(pending_exception(), this);
4320     clear_pending_exception();
4321     return NewRejectedPromise(this, api_context, exception);
4322   }
4323 }
4324 
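// Extracts import assertions from the second argument of dynamic import().
// The result is a flattened FixedArray [key1, value1, key2, value2, ...]; an
// empty array means no assertions were provided. On validation failure an
// exception is thrown on the isolate and an empty MaybeHandle is returned.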
4325 MaybeHandle<FixedArray> Isolate::GetImportAssertionsFromArgument(
4326     MaybeHandle<Object> maybe_import_assertions_argument) {
4327   Handle<FixedArray> import_assertions_array = factory()->empty_fixed_array();
4328   Handle<Object> import_assertions_argument;
4329   if (!maybe_import_assertions_argument.ToHandle(&import_assertions_argument) ||
4330       import_assertions_argument->IsUndefined()) {
4331     return import_assertions_array;
4332   }
4333 
4334   // The parser shouldn't have allowed the second argument to import() if
4335   // the flag wasn't enabled.
4336   DCHECK(FLAG_harmony_import_assertions);
4337 
4338   if (!import_assertions_argument->IsJSReceiver()) {
4339     this->Throw(
4340         *factory()->NewTypeError(MessageTemplate::kNonObjectImportArgument));
4341     return MaybeHandle<FixedArray>();
4342   }
4343 
4344   Handle<JSReceiver> import_assertions_argument_receiver =
4345       Handle<JSReceiver>::cast(import_assertions_argument);
4346   Handle<Name> key = factory()->assert_string();
4347 
4348   Handle<Object> import_assertions_object;
4349   if (!JSReceiver::GetProperty(this, import_assertions_argument_receiver, key)
4350            .ToHandle(&import_assertions_object)) {
4351     // This can happen if the property has a getter function that throws
4352     // an error.
4353     return MaybeHandle<FixedArray>();
4354   }
4355 
4356   // If there is no 'assert' option in the options bag, it's not an error. Just
4357   // do the import() as if no assertions were provided.
4358   if (import_assertions_object->IsUndefined()) return import_assertions_array;
4359 
4360   if (!import_assertions_object->IsJSReceiver()) {
4361     this->Throw(
4362         *factory()->NewTypeError(MessageTemplate::kNonObjectAssertOption));
4363     return MaybeHandle<FixedArray>();
4364   }
4365 
4366   Handle<JSReceiver> import_assertions_object_receiver =
4367       Handle<JSReceiver>::cast(import_assertions_object);
4368 
4369   Handle<FixedArray> assertion_keys;
4370   if (!KeyAccumulator::GetKeys(import_assertions_object_receiver,
4371                                KeyCollectionMode::kOwnOnly, ENUMERABLE_STRINGS,
4372                                GetKeysConversion::kConvertToString)
4373            .ToHandle(&assertion_keys)) {
4374     // This happens if the assertions object is a Proxy whose ownKeys() or
4375     // getOwnPropertyDescriptor() trap throws.
4376     return MaybeHandle<FixedArray>();
4377   }
4378 
4379   // The assertions will be passed to the host in the form: [key1,
4380   // value1, key2, value2, ...].
4381   constexpr size_t kAssertionEntrySizeForDynamicImport = 2;
4382   import_assertions_array = factory()->NewFixedArray(static_cast<int>(
4383       assertion_keys->length() * kAssertionEntrySizeForDynamicImport));
4384   for (int i = 0; i < assertion_keys->length(); i++) {
4385     Handle<String> assertion_key(String::cast(assertion_keys->get(i)), this);
4386     Handle<Object> assertion_value;
4387     if (!JSReceiver::GetProperty(this, import_assertions_object_receiver,
4388                                  assertion_key)
4389              .ToHandle(&assertion_value)) {
4390       // This can happen if the property has a getter function that throws
4391       // an error.
4392       return MaybeHandle<FixedArray>();
4393     }
4394 
4395     if (!assertion_value->IsString()) {
4396       this->Throw(*factory()->NewTypeError(
4397           MessageTemplate::kNonStringImportAssertionValue));
4398       return MaybeHandle<FixedArray>();
4399     }
4400 
4401     import_assertions_array->set((i * kAssertionEntrySizeForDynamicImport),
4402                                  *assertion_key);
4403     import_assertions_array->set((i * kAssertionEntrySizeForDynamicImport) + 1,
4404                                  *assertion_value);
4405   }
4406 
4407   return import_assertions_array;
4408 }
4409 
4410 void Isolate::ClearKeptObjects() { heap()->ClearKeptObjects(); }
4411 
4412 void Isolate::SetHostImportModuleDynamicallyCallback(
4413     HostImportModuleDynamicallyWithImportAssertionsCallback callback) {
4414   host_import_module_dynamically_with_import_assertions_callback_ = callback;
4415 }
4416 
4417 MaybeHandle<JSObject> Isolate::RunHostInitializeImportMetaObjectCallback(
4418     Handle<SourceTextModule> module) {
4419   CHECK(module->import_meta(kAcquireLoad).IsTheHole(this));
4420   Handle<JSObject> import_meta = factory()->NewJSObjectWithNullProto();
4421   if (host_initialize_import_meta_object_callback_ != nullptr) {
4422     v8::Local<v8::Context> api_context =
4423         v8::Utils::ToLocal(Handle<Context>(native_context()));
4424     host_initialize_import_meta_object_callback_(
4425         api_context, Utils::ToLocal(Handle<Module>::cast(module)),
4426         v8::Local<v8::Object>::Cast(v8::Utils::ToLocal(import_meta)));
4427     if (has_scheduled_exception()) {
4428       PromoteScheduledException();
4429       return {};
4430     }
4431   }
4432   return import_meta;
4433 }
4434 
4435 void Isolate::SetHostInitializeImportMetaObjectCallback(
4436     HostInitializeImportMetaObjectCallback callback) {
4437   host_initialize_import_meta_object_callback_ = callback;
4438 }
4439 
4440 MaybeHandle<Object> Isolate::RunPrepareStackTraceCallback(
4441     Handle<Context> context, Handle<JSObject> error, Handle<JSArray> sites) {
4442   v8::Local<v8::Context> api_context = Utils::ToLocal(context);
4443 
4444   v8::Local<v8::Value> stack;
4445   ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
4446       this, stack,
4447       prepare_stack_trace_callback_(api_context, Utils::ToLocal(error),
4448                                     Utils::ToLocal(sites)),
4449       MaybeHandle<Object>());
4450   return Utils::OpenHandle(*stack);
4451 }
4452 
4453 int Isolate::LookupOrAddExternallyCompiledFilename(const char* filename) {
4454   if (embedded_file_writer_ != nullptr) {
4455     return embedded_file_writer_->LookupOrAddExternallyCompiledFilename(
4456         filename);
4457   }
4458   return 0;
4459 }
4460 
4461 const char* Isolate::GetExternallyCompiledFilename(int index) const {
4462   if (embedded_file_writer_ != nullptr) {
4463     return embedded_file_writer_->GetExternallyCompiledFilename(index);
4464   }
4465   return "";
4466 }
4467 
4468 int Isolate::GetExternallyCompiledFilenameCount() const {
4469   if (embedded_file_writer_ != nullptr) {
4470     return embedded_file_writer_->GetExternallyCompiledFilenameCount();
4471   }
4472   return 0;
4473 }
4474 
4475 void Isolate::PrepareBuiltinSourcePositionMap() {
4476   if (embedded_file_writer_ != nullptr) {
4477     return embedded_file_writer_->PrepareBuiltinSourcePositionMap(
4478         this->builtins());
4479   }
4480 }
4481 
4482 void Isolate::PrepareBuiltinLabelInfoMap() {
4483   if (embedded_file_writer_ != nullptr) {
4484     embedded_file_writer_->PrepareBuiltinLabelInfoMap(
4485         heap()->construct_stub_create_deopt_pc_offset().value(),
4486         heap()->construct_stub_invoke_deopt_pc_offset().value());
4487   }
4488 }
4489 
4490 #if defined(V8_OS_WIN64)
4491 void Isolate::SetBuiltinUnwindData(
4492     Builtin builtin,
4493     const win64_unwindinfo::BuiltinUnwindInfo& unwinding_info) {
4494   if (embedded_file_writer_ != nullptr) {
4495     embedded_file_writer_->SetBuiltinUnwindData(builtin, unwinding_info);
4496   }
4497 }
4498 #endif  // V8_OS_WIN64
4499 
4500 void Isolate::SetPrepareStackTraceCallback(PrepareStackTraceCallback callback) {
4501   prepare_stack_trace_callback_ = callback;
4502 }
4503 
4504 bool Isolate::HasPrepareStackTraceCallback() const {
4505   return prepare_stack_trace_callback_ != nullptr;
4506 }
4507 
4508 void Isolate::SetAddCrashKeyCallback(AddCrashKeyCallback callback) {
4509   add_crash_key_callback_ = callback;
4510 
4511   // Log the initial set of data.
4512   AddCrashKeysForIsolateAndHeapPointers();
4513 }
4514 
4515 void Isolate::SetAtomicsWaitCallback(v8::Isolate::AtomicsWaitCallback callback,
4516                                      void* data) {
4517   atomics_wait_callback_ = callback;
4518   atomics_wait_callback_data_ = data;
4519 }
4520 
4521 void Isolate::RunAtomicsWaitCallback(v8::Isolate::AtomicsWaitEvent event,
4522                                      Handle<JSArrayBuffer> array_buffer,
4523                                      size_t offset_in_bytes, int64_t value,
4524                                      double timeout_in_ms,
4525                                      AtomicsWaitWakeHandle* stop_handle) {
4526   DCHECK(array_buffer->is_shared());
4527   if (atomics_wait_callback_ == nullptr) return;
4528   HandleScope handle_scope(this);
4529   atomics_wait_callback_(
4530       event, v8::Utils::ToLocalShared(array_buffer), offset_in_bytes, value,
4531       timeout_in_ms,
4532       reinterpret_cast<v8::Isolate::AtomicsWaitWakeHandle*>(stop_handle),
4533       atomics_wait_callback_data_);
4534 }
4535 
4536 void Isolate::SetPromiseHook(PromiseHook hook) {
4537   promise_hook_ = hook;
4538   PromiseHookStateUpdated();
4539 }
4540 
4541 void Isolate::RunAllPromiseHooks(PromiseHookType type,
4542                                  Handle<JSPromise> promise,
4543                                  Handle<Object> parent) {
4544   if (HasContextPromiseHooks()) {
4545     native_context()->RunPromiseHook(type, promise, parent);
4546   }
4547   if (HasIsolatePromiseHooks() || HasAsyncEventDelegate()) {
4548     RunPromiseHook(type, promise, parent);
4549   }
4550 }
4551 
4552 void Isolate::RunPromiseHook(PromiseHookType type, Handle<JSPromise> promise,
4553                              Handle<Object> parent) {
4554   RunPromiseHookForAsyncEventDelegate(type, promise);
4555   if (!HasIsolatePromiseHooks()) return;
4556   DCHECK(promise_hook_ != nullptr);
4557   promise_hook_(type, v8::Utils::PromiseToLocal(promise),
4558                 v8::Utils::ToLocal(parent));
4559 }
4560 
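// Reports promise lifecycle events to the async event delegate (the
// debugger's view of async tasks). kBefore/kAfter only report promises that
// already have an async task id. For kInit we walk the JS stack to the first
// user-visible frame: a task id is assigned and an event reported only if
// that frame was reached directly via a Promise.prototype.then/catch/finally
// builtin, so promises chained internally (e.g. by Promise.all) stay silent.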
4561 void Isolate::RunPromiseHookForAsyncEventDelegate(PromiseHookType type,
4562                                                   Handle<JSPromise> promise) {
4563   if (!HasAsyncEventDelegate()) return;
4564   DCHECK(async_event_delegate_ != nullptr);
4565   switch (type) {
4566     case PromiseHookType::kResolve:
4567       return;
4568     case PromiseHookType::kBefore:
4569       if (!promise->async_task_id()) return;
4570       async_event_delegate_->AsyncEventOccurred(
4571           debug::kDebugWillHandle, promise->async_task_id(), false);
4572       break;
4573     case PromiseHookType::kAfter:
4574       if (!promise->async_task_id()) return;
4575       async_event_delegate_->AsyncEventOccurred(
4576           debug::kDebugDidHandle, promise->async_task_id(), false);
4577       break;
4578     case PromiseHookType::kInit:
4579       debug::DebugAsyncActionType action_type = debug::kDebugPromiseThen;
4580       bool last_frame_was_promise_builtin = false;
4581       JavaScriptFrameIterator it(this);
4582       while (!it.done()) {
4583         std::vector<Handle<SharedFunctionInfo>> infos;
4584         it.frame()->GetFunctions(&infos);
4585         for (size_t i = 1; i <= infos.size(); ++i) {
4586           Handle<SharedFunctionInfo> info = infos[infos.size() - i];
4587           if (info->IsUserJavaScript()) {
            // We should not report PromiseThen and PromiseCatch when they are
            // called indirectly, e.g. Promise.all calls Promise.then
            // internally.
4590             if (last_frame_was_promise_builtin) {
4591               if (!promise->async_task_id()) {
4592                 promise->set_async_task_id(++async_task_count_);
4593               }
4594               async_event_delegate_->AsyncEventOccurred(
4595                   action_type, promise->async_task_id(),
4596                   debug()->IsBlackboxed(info));
4597             }
4598             return;
4599           }
4600           last_frame_was_promise_builtin = false;
4601           if (info->HasBuiltinId()) {
4602             if (info->builtin_id() == Builtin::kPromisePrototypeThen) {
4603               action_type = debug::kDebugPromiseThen;
4604               last_frame_was_promise_builtin = true;
4605             } else if (info->builtin_id() == Builtin::kPromisePrototypeCatch) {
4606               action_type = debug::kDebugPromiseCatch;
4607               last_frame_was_promise_builtin = true;
4608             } else if (info->builtin_id() ==
4609                        Builtin::kPromisePrototypeFinally) {
4610               action_type = debug::kDebugPromiseFinally;
4611               last_frame_was_promise_builtin = true;
4612             }
4613           }
4614         }
4615         it.Advance();
4616       }
4617   }
4618 }
4619 
4620 void Isolate::OnAsyncFunctionStateChanged(Handle<JSPromise> promise,
4621                                           debug::DebugAsyncActionType event) {
4622   if (!async_event_delegate_) return;
4623   if (!promise->async_task_id()) {
4624     promise->set_async_task_id(++async_task_count_);
4625   }
4626   async_event_delegate_->AsyncEventOccurred(event, promise->async_task_id(),
4627                                             false);
4628 }
4629 
4630 void Isolate::SetPromiseRejectCallback(PromiseRejectCallback callback) {
4631   promise_reject_callback_ = callback;
4632 }
4633 
4634 void Isolate::ReportPromiseReject(Handle<JSPromise> promise,
4635                                   Handle<Object> value,
4636                                   v8::PromiseRejectEvent event) {
4637   if (promise_reject_callback_ == nullptr) return;
4638   promise_reject_callback_(v8::PromiseRejectMessage(
4639       v8::Utils::PromiseToLocal(promise), event, v8::Utils::ToLocal(value)));
4640 }
4641 
4642 void Isolate::SetUseCounterCallback(v8::Isolate::UseCounterCallback callback) {
4643   DCHECK(!use_counter_callback_);
4644   use_counter_callback_ = callback;
4645 }
4646 
4647 void Isolate::CountUsage(v8::Isolate::UseCounterFeature feature) {
4648   // The counter callback
4649   // - may cause the embedder to call into V8, which is not generally possible
4650   //   during GC.
4651   // - requires a current native context, which may not always exist.
4652   // TODO(jgruber): Consider either removing the native context requirement in
4653   // blink, or passing it to the callback explicitly.
4654   if (heap_.gc_state() == Heap::NOT_IN_GC && !context().is_null()) {
4655     DCHECK(context().IsContext());
4656     DCHECK(context().native_context().IsNativeContext());
4657     if (use_counter_callback_) {
4658       HandleScope handle_scope(this);
4659       use_counter_callback_(reinterpret_cast<v8::Isolate*>(this), feature);
4660     }
4661   } else {
4662     heap_.IncrementDeferredCount(feature);
4663   }
4664 }
4665 
4666 int Isolate::GetNextScriptId() { return heap()->NextScriptId(); }
4667 
4668 // static
4669 std::string Isolate::GetTurboCfgFileName(Isolate* isolate) {
4670   if (FLAG_trace_turbo_cfg_file == nullptr) {
4671     std::ostringstream os;
4672     os << "turbo-" << base::OS::GetCurrentProcessId() << "-";
4673     if (isolate != nullptr) {
4674       os << isolate->id();
4675     } else {
4676       os << "any";
4677     }
4678     os << ".cfg";
4679     return os.str();
4680   } else {
4681     return FLAG_trace_turbo_cfg_file;
4682   }
4683 }
4684 
// Heap::detached_contexts tracks detached contexts as pairs
// (number of GCs since the context was detached, the context).
4687 void Isolate::AddDetachedContext(Handle<Context> context) {
4688   HandleScope scope(this);
4689   Handle<WeakArrayList> detached_contexts = factory()->detached_contexts();
4690   detached_contexts = WeakArrayList::AddToEnd(
4691       this, detached_contexts, MaybeObjectHandle(Smi::zero(), this),
4692       MaybeObjectHandle::Weak(context));
4693   heap()->set_detached_contexts(*detached_contexts);
4694 }
4695 
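// Called after GC: increments the survival count of every detached context
// that is still weakly reachable, compacts the list in place to drop entries
// whose context has been collected, and clears the unused tail.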
4696 void Isolate::CheckDetachedContextsAfterGC() {
4697   HandleScope scope(this);
4698   Handle<WeakArrayList> detached_contexts = factory()->detached_contexts();
4699   int length = detached_contexts->length();
4700   if (length == 0) return;
4701   int new_length = 0;
4702   for (int i = 0; i < length; i += 2) {
4703     int mark_sweeps = detached_contexts->Get(i).ToSmi().value();
4704     MaybeObject context = detached_contexts->Get(i + 1);
4705     DCHECK(context->IsWeakOrCleared());
4706     if (!context->IsCleared()) {
4707       detached_contexts->Set(
4708           new_length, MaybeObject::FromSmi(Smi::FromInt(mark_sweeps + 1)));
4709       detached_contexts->Set(new_length + 1, context);
4710       new_length += 2;
4711     }
4712   }
4713   detached_contexts->set_length(new_length);
4714   while (new_length < length) {
4715     detached_contexts->Set(new_length, MaybeObject::FromSmi(Smi::zero()));
4716     ++new_length;
4717   }
4718 
4719   if (FLAG_trace_detached_contexts) {
4720     PrintF("%d detached contexts are collected out of %d\n",
4721            length - new_length, length);
4722     for (int i = 0; i < new_length; i += 2) {
4723       int mark_sweeps = detached_contexts->Get(i).ToSmi().value();
4724       MaybeObject context = detached_contexts->Get(i + 1);
4725       DCHECK(context->IsWeakOrCleared());
4726       if (mark_sweeps > 3) {
        PrintF("detached context %p survived %d GCs (leak?)\n",
4728                reinterpret_cast<void*>(context.ptr()), mark_sweeps);
4729       }
4730     }
4731   }
4732 }
4733 
4734 void Isolate::DetachGlobal(Handle<Context> env) {
4735   counters()->errors_thrown_per_context()->AddSample(
4736       env->native_context().GetErrorsThrown());
4737 
4738   ReadOnlyRoots roots(this);
4739   Handle<JSGlobalProxy> global_proxy(env->global_proxy(), this);
4740   global_proxy->set_native_context(roots.null_value());
4741   // NOTE: Turbofan's JSNativeContextSpecialization depends on DetachGlobal
4742   // causing a map change.
4743   JSObject::ForceSetPrototype(this, global_proxy, factory()->null_value());
4744   global_proxy->map().set_constructor_or_back_pointer(roots.null_value(),
4745                                                       kRelaxedStore);
4746   if (FLAG_track_detached_contexts) AddDetachedContext(env);
4747   DCHECK(global_proxy->IsDetached());
4748 
4749   env->native_context().set_microtask_queue(this, nullptr);
4750 }
4751 
4752 double Isolate::LoadStartTimeMs() {
4753   base::MutexGuard guard(&rail_mutex_);
4754   return load_start_time_ms_;
4755 }
4756 
4757 void Isolate::UpdateLoadStartTime() {
4758   base::MutexGuard guard(&rail_mutex_);
4759   load_start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
4760 }
4761 
4762 void Isolate::SetRAILMode(RAILMode rail_mode) {
4763   RAILMode old_rail_mode = rail_mode_.load();
4764   if (old_rail_mode != PERFORMANCE_LOAD && rail_mode == PERFORMANCE_LOAD) {
4765     base::MutexGuard guard(&rail_mutex_);
4766     load_start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
4767   }
4768   rail_mode_.store(rail_mode);
4769   if (old_rail_mode == PERFORMANCE_LOAD && rail_mode != PERFORMANCE_LOAD) {
4770     heap()->incremental_marking()->incremental_marking_job()->ScheduleTask(
4771         heap());
4772   }
4773   if (FLAG_trace_rail) {
4774     PrintIsolate(this, "RAIL mode: %s\n", RAILModeName(rail_mode));
4775   }
4776 }
4777 
4778 void Isolate::IsolateInBackgroundNotification() {
4779   is_isolate_in_background_ = true;
4780   heap()->ActivateMemoryReducerIfNeeded();
4781 }
4782 
4783 void Isolate::IsolateInForegroundNotification() {
4784   is_isolate_in_background_ = false;
4785 }
4786 
4787 void Isolate::PrintWithTimestamp(const char* format, ...) {
4788   base::OS::Print("[%d:%p] %8.0f ms: ", base::OS::GetCurrentProcessId(),
4789                   static_cast<void*>(this), time_millis_since_init());
4790   va_list arguments;
4791   va_start(arguments, format);
4792   base::OS::VPrint(format, arguments);
4793   va_end(arguments);
4794 }
4795 
4796 void Isolate::SetIdle(bool is_idle) {
4797   StateTag state = current_vm_state();
4798   if (js_entry_sp() != kNullAddress) return;
4799   DCHECK(state == EXTERNAL || state == IDLE);
4800   if (is_idle) {
4801     set_current_vm_state(IDLE);
4802   } else if (state == IDLE) {
4803     set_current_vm_state(EXTERNAL);
4804   }
4805 }
4806 
4807 void Isolate::CollectSourcePositionsForAllBytecodeArrays() {
4808   if (!initialized_) return;
4809 
4810   HandleScope scope(this);
4811   std::vector<Handle<SharedFunctionInfo>> sfis;
4812   {
4813     DisallowGarbageCollection no_gc;
4814     HeapObjectIterator iterator(heap());
4815     for (HeapObject obj = iterator.Next(); !obj.is_null();
4816          obj = iterator.Next()) {
4817       if (!obj.IsSharedFunctionInfo()) continue;
4818       SharedFunctionInfo sfi = SharedFunctionInfo::cast(obj);
4819       if (!sfi.CanCollectSourcePosition(this)) continue;
4820       sfis.push_back(Handle<SharedFunctionInfo>(sfi, this));
4821     }
4822   }
4823   for (auto sfi : sfis) {
4824     SharedFunctionInfo::EnsureSourcePositionsAvailable(this, sfi);
4825   }
4826 }
4827 
4828 #ifdef V8_INTL_SUPPORT
4829 
4830 namespace {
4831 
4832 std::string GetStringFromLocales(Isolate* isolate, Handle<Object> locales) {
4833   if (locales->IsUndefined(isolate)) return "";
4834   return std::string(String::cast(*locales).ToCString().get());
4835 }
4836 
4837 bool StringEqualsLocales(Isolate* isolate, const std::string& str,
4838                          Handle<Object> locales) {
  if (locales->IsUndefined(isolate)) return str.empty();
4840   return Handle<String>::cast(locales)->IsEqualTo(
4841       base::VectorOf(str.c_str(), str.length()));
4842 }
4843 
4844 }  // namespace
4845 
4846 icu::UMemory* Isolate::get_cached_icu_object(ICUObjectCacheType cache_type,
4847                                              Handle<Object> locales) {
4848   const ICUObjectCacheEntry& entry =
4849       icu_object_cache_[static_cast<int>(cache_type)];
4850   return StringEqualsLocales(this, entry.locales, locales) ? entry.obj.get()
4851                                                            : nullptr;
4852 }
4853 
4854 void Isolate::set_icu_object_in_cache(ICUObjectCacheType cache_type,
4855                                       Handle<Object> locales,
4856                                       std::shared_ptr<icu::UMemory> obj) {
4857   icu_object_cache_[static_cast<int>(cache_type)] = {
4858       GetStringFromLocales(this, locales), std::move(obj)};
4859 }
4860 
4861 void Isolate::clear_cached_icu_object(ICUObjectCacheType cache_type) {
4862   icu_object_cache_[static_cast<int>(cache_type)] = ICUObjectCacheEntry{};
4863 }
4864 
4865 void Isolate::clear_cached_icu_objects() {
4866   for (int i = 0; i < kICUObjectCacheTypeCount; i++) {
4867     clear_cached_icu_object(static_cast<ICUObjectCacheType>(i));
4868   }
4869 }
4870 
4871 #endif  // V8_INTL_SUPPORT
4872 
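// Returns true if the current stack position leaves less than |gap| bytes
// before the real JS stack limit, i.e. executing more JS would overflow. A
// hedged usage sketch (kGap is illustrative, not a real constant):
//
//   StackLimitCheck check(isolate);
//   if (check.JsHasOverflowed(kGap)) return isolate->StackOverflow();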
4873 bool StackLimitCheck::JsHasOverflowed(uintptr_t gap) const {
4874   StackGuard* stack_guard = isolate_->stack_guard();
4875 #ifdef USE_SIMULATOR
4876   // The simulator uses a separate JS stack.
4877   Address jssp_address = Simulator::current(isolate_)->get_sp();
4878   uintptr_t jssp = static_cast<uintptr_t>(jssp_address);
4879   if (jssp - gap < stack_guard->real_jslimit()) return true;
4880 #endif  // USE_SIMULATOR
4881   return GetCurrentStackPosition() - gap < stack_guard->real_climit();
4882 }
4883 
4884 SaveContext::SaveContext(Isolate* isolate) : isolate_(isolate) {
4885   if (!isolate->context().is_null()) {
4886     context_ = Handle<Context>(isolate->context(), isolate);
4887   }
4888 
4889   c_entry_fp_ = isolate->c_entry_fp(isolate->thread_local_top());
4890 }
4891 
4892 SaveContext::~SaveContext() {
4893   isolate_->set_context(context_.is_null() ? Context() : *context_);
4894 }
4895 
4896 bool SaveContext::IsBelowFrame(CommonFrame* frame) {
4897   return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
4898 }
4899 
4900 SaveAndSwitchContext::SaveAndSwitchContext(Isolate* isolate,
4901                                            Context new_context)
4902     : SaveContext(isolate) {
4903   isolate->set_context(new_context);
4904 }
4905 
4906 #ifdef DEBUG
4907 AssertNoContextChange::AssertNoContextChange(Isolate* isolate)
4908     : isolate_(isolate), context_(isolate->context(), isolate) {}
4909 
4910 namespace {
4911 
4912 bool Overlapping(const MemoryRange& a, const MemoryRange& b) {
4913   uintptr_t a1 = reinterpret_cast<uintptr_t>(a.start);
4914   uintptr_t a2 = a1 + a.length_in_bytes;
4915   uintptr_t b1 = reinterpret_cast<uintptr_t>(b.start);
4916   uintptr_t b2 = b1 + b.length_in_bytes;
  // Either b1 or b2 is in the [a1, a2) range. Note that a range [b1, b2)
  // that strictly contains [a1, a2) is not detected by this check.
4918   return (a1 <= b1 && b1 < a2) || (a1 <= b2 && b2 < a2);
4919 }
4920 
4921 }  // anonymous namespace
4922 
4923 #endif  // DEBUG
4924 
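// The list of code pages is double-buffered: the updated list is built in
// whichever of the two buffers is currently inactive and then published with
// a single pointer swap (SetCodePages), so lock-free readers always observe
// a fully constructed, sorted vector.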
4925 void Isolate::AddCodeMemoryRange(MemoryRange range) {
4926   std::vector<MemoryRange>* old_code_pages = GetCodePages();
4927   DCHECK_NOT_NULL(old_code_pages);
4928 #ifdef DEBUG
4929   auto overlapping = [range](const MemoryRange& a) {
4930     return Overlapping(range, a);
4931   };
4932   DCHECK_EQ(old_code_pages->end(),
4933             std::find_if(old_code_pages->begin(), old_code_pages->end(),
4934                          overlapping));
4935 #endif
4936 
4937   std::vector<MemoryRange>* new_code_pages;
4938   if (old_code_pages == &code_pages_buffer1_) {
4939     new_code_pages = &code_pages_buffer2_;
4940   } else {
4941     new_code_pages = &code_pages_buffer1_;
4942   }
4943 
  // Copy all existing data from the old vector to the new vector and insert
  // the new page.
4946   new_code_pages->clear();
4947   new_code_pages->reserve(old_code_pages->size() + 1);
4948   std::merge(old_code_pages->begin(), old_code_pages->end(), &range, &range + 1,
4949              std::back_inserter(*new_code_pages),
4950              [](const MemoryRange& a, const MemoryRange& b) {
4951                return a.start < b.start;
4952              });
4953 
4954   // Atomically switch out the pointer
4955   SetCodePages(new_code_pages);
4956 }
4957 
4958 // |chunk| is either a Page or an executable LargePage.
4959 void Isolate::AddCodeMemoryChunk(MemoryChunk* chunk) {
4960   // We only keep track of individual code pages/allocations if we are on arm32,
4961   // because on x64 and arm64 we have a code range which makes this unnecessary.
4962 #if !defined(V8_TARGET_ARCH_ARM)
4963   return;
4964 #else
4965   void* new_page_start = reinterpret_cast<void*>(chunk->area_start());
4966   size_t new_page_size = chunk->area_size();
4967 
4968   MemoryRange new_range{new_page_start, new_page_size};
4969 
4970   AddCodeMemoryRange(new_range);
4971 #endif  // !defined(V8_TARGET_ARCH_ARM)
4972 }
4973 
4974 void Isolate::AddCodeRange(Address begin, size_t length_in_bytes) {
4975   AddCodeMemoryRange(
4976       MemoryRange{reinterpret_cast<void*>(begin), length_in_bytes});
4977 }
4978 
4979 bool Isolate::RequiresCodeRange() const {
4980   return kPlatformRequiresCodeRange && !jitless_;
4981 }
4982 
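// Lazily assigns an isolate-unique Smi id to |context| for the metrics
// recorder and remembers a weak handle to the context under that id.
// RemoveContextIdCallback erases the map entry once the context dies, so the
// mapping never keeps a context alive.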
4983 v8::metrics::Recorder::ContextId Isolate::GetOrRegisterRecorderContextId(
4984     Handle<NativeContext> context) {
4985   if (serializer_enabled_) return v8::metrics::Recorder::ContextId::Empty();
4986   i::Object id = context->recorder_context_id();
4987   if (id.IsNullOrUndefined()) {
4988     CHECK_LT(last_recorder_context_id_, i::Smi::kMaxValue);
4989     context->set_recorder_context_id(
4990         i::Smi::FromIntptr(++last_recorder_context_id_));
4991     v8::HandleScope handle_scope(reinterpret_cast<v8::Isolate*>(this));
4992     auto result = recorder_context_id_map_.emplace(
4993         std::piecewise_construct,
4994         std::forward_as_tuple(last_recorder_context_id_),
4995         std::forward_as_tuple(reinterpret_cast<v8::Isolate*>(this),
4996                               ToApiHandle<v8::Context>(context)));
4997     result.first->second.SetWeak(
4998         reinterpret_cast<void*>(last_recorder_context_id_),
4999         RemoveContextIdCallback, v8::WeakCallbackType::kParameter);
5000     return v8::metrics::Recorder::ContextId(last_recorder_context_id_);
5001   } else {
5002     DCHECK(id.IsSmi());
5003     return v8::metrics::Recorder::ContextId(
5004         static_cast<uintptr_t>(i::Smi::ToInt(id)));
5005   }
5006 }
5007 
5008 MaybeLocal<v8::Context> Isolate::GetContextFromRecorderContextId(
5009     v8::metrics::Recorder::ContextId id) {
5010   auto result = recorder_context_id_map_.find(id.id_);
5011   if (result == recorder_context_id_map_.end() || result->second.IsEmpty())
5012     return MaybeLocal<v8::Context>();
5013   return result->second.Get(reinterpret_cast<v8::Isolate*>(this));
5014 }
5015 
5016 void Isolate::UpdateLongTaskStats() {
5017   if (last_long_task_stats_counter_ != isolate_data_.long_task_stats_counter_) {
5018     last_long_task_stats_counter_ = isolate_data_.long_task_stats_counter_;
5019     long_task_stats_ = v8::metrics::LongTaskStats{};
5020   }
5021 }
5022 
5023 v8::metrics::LongTaskStats* Isolate::GetCurrentLongTaskStats() {
5024   UpdateLongTaskStats();
5025   return &long_task_stats_;
5026 }
5027 
5028 void Isolate::RemoveContextIdCallback(const v8::WeakCallbackInfo<void>& data) {
5029   Isolate* isolate = reinterpret_cast<Isolate*>(data.GetIsolate());
5030   uintptr_t context_id = reinterpret_cast<uintptr_t>(data.GetParameter());
5031   isolate->recorder_context_id_map_.erase(context_id);
5032 }
5033 
5034 LocalHeap* Isolate::main_thread_local_heap() {
5035   return main_thread_local_isolate()->heap();
5036 }
5037 
5038 LocalHeap* Isolate::CurrentLocalHeap() {
5039   LocalHeap* local_heap = LocalHeap::Current();
5040   return local_heap ? local_heap : main_thread_local_heap();
5041 }
5042 
5043 // |chunk| is either a Page or an executable LargePage.
5044 void Isolate::RemoveCodeMemoryChunk(MemoryChunk* chunk) {
5045   // We only keep track of individual code pages/allocations if we are on arm32,
5046   // because on x64 and arm64 we have a code range which makes this unnecessary.
5047 #if !defined(V8_TARGET_ARCH_ARM)
5048   return;
5049 #else
5050   void* removed_page_start = reinterpret_cast<void*>(chunk->area_start());
5051   std::vector<MemoryRange>* old_code_pages = GetCodePages();
5052   DCHECK_NOT_NULL(old_code_pages);
5053 
5054   std::vector<MemoryRange>* new_code_pages;
5055   if (old_code_pages == &code_pages_buffer1_) {
5056     new_code_pages = &code_pages_buffer2_;
5057   } else {
5058     new_code_pages = &code_pages_buffer1_;
5059   }
5060 
5061   // Copy all existing data from the old vector to the new vector except the
5062   // removed page.
5063   new_code_pages->clear();
5064   new_code_pages->reserve(old_code_pages->size() - 1);
5065   std::remove_copy_if(old_code_pages->begin(), old_code_pages->end(),
5066                       std::back_inserter(*new_code_pages),
5067                       [removed_page_start](const MemoryRange& range) {
5068                         return range.start == removed_page_start;
5069                       });
5070   DCHECK_EQ(old_code_pages->size(), new_code_pages->size() + 1);
5071   // Atomically switch out the pointer
5072   SetCodePages(new_code_pages);
5073 #endif  // !defined(V8_TARGET_ARCH_ARM)
5074 }
5075 
5076 #undef TRACE_ISOLATE
5077 
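// The two accessors below hand out stable addresses of per-function counter
// slots in a lazily allocated map keyed by function name; presumably these
// addresses are embedded into instrumented code so that stack loads/stores
// can be counted directly (the counters are printed by DumpAndResetStats).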
5078 // static
5079 Address Isolate::load_from_stack_count_address(const char* function_name) {
5080   DCHECK_NOT_NULL(function_name);
5081   if (!stack_access_count_map) {
5082     stack_access_count_map = new MapOfLoadsAndStoresPerFunction{};
5083   }
5084   auto& map = *stack_access_count_map;
5085   std::string name(function_name);
  // It is safe to return the address of std::map values: only iterators and
  // references to erased elements are invalidated.
5088   return reinterpret_cast<Address>(&map[name].first);
5089 }
5090 
5091 // static
5092 Address Isolate::store_to_stack_count_address(const char* function_name) {
5093   DCHECK_NOT_NULL(function_name);
5094   if (!stack_access_count_map) {
5095     stack_access_count_map = new MapOfLoadsAndStoresPerFunction{};
5096   }
5097   auto& map = *stack_access_count_map;
5098   std::string name(function_name);
  // It is safe to return the address of std::map values: only iterators and
  // references to erased elements are invalidated.
5101   return reinterpret_cast<Address>(&map[name].second);
5102 }
5103 
5104 void Isolate::AttachToSharedIsolate(Isolate* shared) {
5105   DCHECK(shared->is_shared());
5106   DCHECK_NULL(shared_isolate_);
5107   shared->AppendAsClientIsolate(this);
5108   shared_isolate_ = shared;
5109   heap()->InitSharedSpaces();
5110 }
5111 
5112 void Isolate::DetachFromSharedIsolate() {
5113   DCHECK_NOT_NULL(shared_isolate_);
5114   shared_isolate_->RemoveAsClientIsolate(this);
5115   shared_isolate_ = nullptr;
5116   heap()->DeinitSharedSpaces();
5117 }
5118 
5119 void Isolate::AppendAsClientIsolate(Isolate* client) {
5120   base::MutexGuard guard(&client_isolate_mutex_);
5121 
5122   DCHECK_NULL(client->prev_client_isolate_);
5123   DCHECK_NULL(client->next_client_isolate_);
5124   DCHECK_NE(client_isolate_head_, client);
5125 
5126   if (client_isolate_head_) {
5127     client_isolate_head_->prev_client_isolate_ = client;
5128   }
5129 
5130   client->prev_client_isolate_ = nullptr;
5131   client->next_client_isolate_ = client_isolate_head_;
5132 
5133   client_isolate_head_ = client;
5134 }
5135 
5136 void Isolate::RemoveAsClientIsolate(Isolate* client) {
5137   base::MutexGuard guard(&client_isolate_mutex_);
5138 
5139   if (client->next_client_isolate_) {
5140     client->next_client_isolate_->prev_client_isolate_ =
5141         client->prev_client_isolate_;
5142   }
5143 
5144   if (client->prev_client_isolate_) {
5145     client->prev_client_isolate_->next_client_isolate_ =
5146         client->next_client_isolate_;
5147   } else {
5148     DCHECK_EQ(client_isolate_head_, client);
5149     client_isolate_head_ = client->next_client_isolate_;
5150   }
5151 }
5152 
5153 }  // namespace internal
5154 }  // namespace v8
5155