1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/isolate.h"
6
7 #include <stdlib.h>
8
9 #include <atomic>
10 #include <fstream> // NOLINT(readability/streams)
11 #include <sstream>
12 #include <unordered_map>
13
14 #include "src/api.h"
15 #include "src/assembler-inl.h"
16 #include "src/ast/ast-value-factory.h"
17 #include "src/ast/context-slot-cache.h"
18 #include "src/base/adapters.h"
19 #include "src/base/hashmap.h"
20 #include "src/base/platform/platform.h"
21 #include "src/base/sys-info.h"
22 #include "src/base/utils/random-number-generator.h"
23 #include "src/basic-block-profiler.h"
24 #include "src/bootstrapper.h"
25 #include "src/builtins/constants-table-builder.h"
26 #include "src/cancelable-task.h"
27 #include "src/code-stubs.h"
28 #include "src/compilation-cache.h"
29 #include "src/compilation-statistics.h"
30 #include "src/compiler-dispatcher/compiler-dispatcher.h"
31 #include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
32 #include "src/debug/debug.h"
33 #include "src/deoptimizer.h"
34 #include "src/elements.h"
35 #include "src/frames-inl.h"
36 #include "src/ic/stub-cache.h"
37 #include "src/instruction-stream.h"
38 #include "src/interpreter/interpreter.h"
39 #include "src/isolate-inl.h"
40 #include "src/libsampler/sampler.h"
41 #include "src/log.h"
42 #include "src/messages.h"
43 #include "src/objects/frame-array-inl.h"
44 #include "src/objects/hash-table-inl.h"
45 #include "src/objects/promise-inl.h"
46 #include "src/profiler/cpu-profiler.h"
47 #include "src/profiler/tracing-cpu-profiler.h"
48 #include "src/prototype.h"
49 #include "src/regexp/regexp-stack.h"
50 #include "src/runtime-profiler.h"
51 #include "src/setup-isolate.h"
52 #include "src/simulator.h"
53 #include "src/snapshot/startup-deserializer.h"
54 #include "src/tracing/tracing-category-observer.h"
55 #include "src/trap-handler/trap-handler.h"
56 #include "src/unicode-cache.h"
57 #include "src/v8.h"
58 #include "src/version.h"
59 #include "src/visitors.h"
60 #include "src/vm-state-inl.h"
61 #include "src/wasm/wasm-code-manager.h"
62 #include "src/wasm/wasm-engine.h"
63 #include "src/wasm/wasm-objects.h"
64 #include "src/zone/accounting-allocator.h"
65
66 namespace v8 {
67 namespace internal {
68
// Monotonically increasing counter used to hand out process-unique thread
// ids; see ThreadId::AllocateThreadId().
base::Atomic32 ThreadId::highest_thread_id_ = 0;
70
71 #ifdef V8_EMBEDDED_BUILTINS
72 extern const uint8_t* DefaultEmbeddedBlob();
73 extern uint32_t DefaultEmbeddedBlobSize();
74
75 #ifdef V8_MULTI_SNAPSHOTS
76 extern const uint8_t* TrustedEmbeddedBlob();
77 extern uint32_t TrustedEmbeddedBlobSize();
78 #endif
79
namespace {
// These variables provide access to the current embedded blob without requiring
// an isolate instance. This is needed e.g. by Code::InstructionStart, which may
// not have access to an isolate but still needs to access the embedded blob.
// The variables are initialized by each isolate in Init(). Writes and reads are
// relaxed since we can guarantee that the current thread has initialized these
// variables before accessing them. Different threads may race, but this is fine
// since they all attempt to set the same values of the blob pointer and size.
// Until the first isolate runs Init(), these stay nullptr/0.

std::atomic<const uint8_t*> current_embedded_blob_(nullptr);
std::atomic<uint32_t> current_embedded_blob_size_(0);
}  // namespace
92
// Installs the embedded-builtins blob on this isolate and publishes it to the
// process-wide atomics so code without an isolate handle can still reach it
// (see the comment above current_embedded_blob_).
void Isolate::SetEmbeddedBlob(const uint8_t* blob, uint32_t blob_size) {
  embedded_blob_ = blob;
  embedded_blob_size_ = blob_size;
  // Relaxed is sufficient: every isolate stores the same pointer/size pair,
  // so racing writers are benign.
  current_embedded_blob_.store(blob, std::memory_order_relaxed);
  current_embedded_blob_size_.store(blob_size, std::memory_order_relaxed);

#ifdef DEBUG
  if (blob != nullptr) {
    // Verify that the contents of the embedded blob are unchanged from
    // serialization-time, just to ensure the compiler isn't messing with us.
    EmbeddedData d = EmbeddedData::FromBlob();
    CHECK_EQ(d.Hash(), d.CreateHash());
  }
#endif  // DEBUG
}
108
// Accessors for this isolate's copy of the embedded blob pointer/size.
const uint8_t* Isolate::embedded_blob() const { return embedded_blob_; }
uint32_t Isolate::embedded_blob_size() const { return embedded_blob_size_; }
111
112 // static
CurrentEmbeddedBlob()113 const uint8_t* Isolate::CurrentEmbeddedBlob() {
114 return current_embedded_blob_.load(std::memory_order::memory_order_relaxed);
115 }
116
117 // static
CurrentEmbeddedBlobSize()118 uint32_t Isolate::CurrentEmbeddedBlobSize() {
119 return current_embedded_blob_size_.load(
120 std::memory_order::memory_order_relaxed);
121 }
122 #endif // V8_EMBEDDED_BUILTINS
123
AllocateThreadId()124 int ThreadId::AllocateThreadId() {
125 int new_id = base::Relaxed_AtomicIncrement(&highest_thread_id_, 1);
126 return new_id;
127 }
128
129
GetCurrentThreadId()130 int ThreadId::GetCurrentThreadId() {
131 int thread_id = base::Thread::GetThreadLocalInt(Isolate::thread_id_key_);
132 if (thread_id == 0) {
133 thread_id = AllocateThreadId();
134 base::Thread::SetThreadLocalInt(Isolate::thread_id_key_, thread_id);
135 }
136 return thread_id;
137 }
138
139
// Resets all fields to their empty state; thread/isolate-dependent fields are
// filled in later by Initialize().
ThreadLocalTop::ThreadLocalTop() {
  InitializeInternal();
}
143
144
// Resets every field to its empty/null value. Shared by the constructor and
// Initialize(); intentionally does not touch isolate_.
void ThreadLocalTop::InitializeInternal() {
  c_entry_fp_ = 0;
  c_function_ = 0;
  handler_ = 0;
#ifdef USE_SIMULATOR
  simulator_ = nullptr;
#endif
  js_entry_sp_ = kNullAddress;
  external_callback_scope_ = nullptr;
  current_vm_state_ = EXTERNAL;
  try_catch_handler_ = nullptr;
  context_ = nullptr;
  thread_id_ = ThreadId::Invalid();
  external_caught_exception_ = false;
  failed_access_check_callback_ = nullptr;
  save_context_ = nullptr;
  promise_on_stack_ = nullptr;

  // These members are re-initialized later after deserialization
  // is complete.
  pending_exception_ = nullptr;
  wasm_caught_exception_ = nullptr;
  rethrowing_message_ = false;
  pending_message_obj_ = nullptr;
  scheduled_exception_ = nullptr;
}
171
172
// Full per-thread setup: resets all fields, then fills in the state that
// depends on the current thread (and, under the simulator, on isolate_).
void ThreadLocalTop::Initialize() {
  InitializeInternal();
#ifdef USE_SIMULATOR
  simulator_ = Simulator::current(isolate_);
#endif
  thread_id_ = ThreadId::Current();
}
180
181
// Drops references held by this thread-local state before it is freed.
void ThreadLocalTop::Free() {
  wasm_caught_exception_ = nullptr;
  // Match unmatched PopPromise calls.
  while (promise_on_stack_) isolate_->PopPromise();
}
187
188
// Process-wide thread-local-storage keys; created in
// Isolate::InitializeOncePerProcess().
base::Thread::LocalStorageKey Isolate::isolate_key_;
base::Thread::LocalStorageKey Isolate::thread_id_key_;
base::Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_;
base::Atomic32 Isolate::isolate_counter_ = 0;
#if DEBUG
// Debug-only flag: set once isolate_key_ has been created, so uses before
// initialization can be caught.
base::Atomic32 Isolate::isolate_key_created_ = 0;
#endif
196
// Returns the calling thread's PerIsolateThreadData for this isolate,
// allocating and registering a new one if the thread has none yet.
Isolate::PerIsolateThreadData*
Isolate::FindOrAllocatePerThreadDataForThisThread() {
  ThreadId thread_id = ThreadId::Current();
  PerIsolateThreadData* per_thread = nullptr;
  {
    // The table is shared by all threads of this isolate; guard both the
    // lookup and the insert with the table mutex.
    base::LockGuard<base::Mutex> lock_guard(&thread_data_table_mutex_);
    per_thread = thread_data_table_.Lookup(thread_id);
    if (per_thread == nullptr) {
      per_thread = new PerIsolateThreadData(this, thread_id);
      thread_data_table_.Insert(per_thread);
    }
    DCHECK(thread_data_table_.Lookup(thread_id) == per_thread);
  }
  return per_thread;
}
212
213
// Removes the calling thread's PerIsolateThreadData from the table, if
// present. A thread that never allocated an id (TLS int still 0) is a no-op.
void Isolate::DiscardPerThreadDataForThisThread() {
  int thread_id_int = base::Thread::GetThreadLocalInt(Isolate::thread_id_key_);
  if (thread_id_int) {
    ThreadId thread_id = ThreadId(thread_id_int);
    // The discarding thread must not be the one currently holding the thread
    // manager's mutex.
    DCHECK(!thread_manager_->mutex_owner_.Equals(thread_id));
    base::LockGuard<base::Mutex> lock_guard(&thread_data_table_mutex_);
    PerIsolateThreadData* per_thread = thread_data_table_.Lookup(thread_id);
    if (per_thread) {
      // Only entries without archived thread state may be discarded.
      DCHECK(!per_thread->thread_state_);
      thread_data_table_.Remove(per_thread);
    }
  }
}
227
228
FindPerThreadDataForThisThread()229 Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThisThread() {
230 ThreadId thread_id = ThreadId::Current();
231 return FindPerThreadDataForThread(thread_id);
232 }
233
234
FindPerThreadDataForThread(ThreadId thread_id)235 Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThread(
236 ThreadId thread_id) {
237 PerIsolateThreadData* per_thread = nullptr;
238 {
239 base::LockGuard<base::Mutex> lock_guard(&thread_data_table_mutex_);
240 per_thread = thread_data_table_.Lookup(thread_id);
241 }
242 return per_thread;
243 }
244
245
// Creates the process-wide thread-local-storage keys. Must run once before
// any isolate is used.
void Isolate::InitializeOncePerProcess() {
  isolate_key_ = base::Thread::CreateThreadLocalKey();
#if DEBUG
  // Record (debug only) that isolate_key_ now exists.
  base::Relaxed_Store(&isolate_key_created_, 1);
#endif
  thread_id_key_ = base::Thread::CreateThreadLocalKey();
  per_isolate_thread_data_key_ = base::Thread::CreateThreadLocalKey();
}
254
// Returns the pre-recorded isolate-internal address identified by {id}.
Address Isolate::get_address_from_id(IsolateAddressId id) {
  return isolate_addresses_[id];
}
258
Iterate(RootVisitor * v,char * thread_storage)259 char* Isolate::Iterate(RootVisitor* v, char* thread_storage) {
260 ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(thread_storage);
261 Iterate(v, thread);
262 return thread_storage + sizeof(ThreadLocalTop);
263 }
264
265
IterateThread(ThreadVisitor * v,char * t)266 void Isolate::IterateThread(ThreadVisitor* v, char* t) {
267 ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(t);
268 v->VisitThread(this, thread);
269 }
270
// Visits all GC roots reachable from {thread}'s ThreadLocalTop: pending and
// scheduled exceptions, the current context, every active v8::TryCatch, and
// the pointers on the native execution stack.
void Isolate::Iterate(RootVisitor* v, ThreadLocalTop* thread) {
  // Visit the roots from the top for a given thread.
  v->VisitRootPointer(Root::kTop, nullptr, &thread->pending_exception_);
  v->VisitRootPointer(Root::kTop, nullptr, &thread->wasm_caught_exception_);
  v->VisitRootPointer(Root::kTop, nullptr, &thread->pending_message_obj_);
  v->VisitRootPointer(Root::kTop, nullptr,
                      bit_cast<Object**>(&(thread->context_)));
  v->VisitRootPointer(Root::kTop, nullptr, &thread->scheduled_exception_);

  // Each live TryCatch block may hold an exception and a message object.
  for (v8::TryCatch* block = thread->try_catch_handler(); block != nullptr;
       block = block->next_) {
    v->VisitRootPointer(Root::kTop, nullptr,
                        bit_cast<Object**>(&(block->exception_)));
    v->VisitRootPointer(Root::kTop, nullptr,
                        bit_cast<Object**>(&(block->message_obj_)));
  }

  // Iterate over pointers on native execution stack.
  for (StackFrameIterator it(this, thread); !it.done(); it.Advance()) {
    it.frame()->Iterate(v);
  }
}
293
Iterate(RootVisitor * v)294 void Isolate::Iterate(RootVisitor* v) {
295 ThreadLocalTop* current_t = thread_local_top();
296 Iterate(v, current_t);
297 }
298
// Visits every handle held by the linked list of DeferredHandles (one entry
// per job queued for concurrent recompilation).
void Isolate::IterateDeferredHandles(RootVisitor* visitor) {
  for (DeferredHandles* deferred = deferred_handles_head_; deferred != nullptr;
       deferred = deferred->next_) {
    deferred->Iterate(visitor);
  }
}
305
306
#ifdef DEBUG
// Returns true if {handle} lies inside one of the handle blocks owned by a
// DeferredHandles instance (debug-only sanity check).
bool Isolate::IsDeferredHandle(Object** handle) {
  // Each DeferredHandles instance keeps the handles to one job in the
  // concurrent recompilation queue, containing a list of blocks. Each block
  // contains kHandleBlockSize handles except for the first block, which may
  // not be fully filled.
  // We iterate through all the blocks to see whether the argument handle
  // belongs to one of the blocks. If so, it is deferred.
  for (DeferredHandles* deferred = deferred_handles_head_; deferred != nullptr;
       deferred = deferred->next_) {
    std::vector<Object**>* blocks = &deferred->blocks_;
    for (size_t i = 0; i < blocks->size(); i++) {
      // Block 0 may be partially filled, so its limit is recorded separately.
      Object** block_limit = (i == 0) ? deferred->first_block_limit_
                                      : blocks->at(i) + kHandleBlockSize;
      if (blocks->at(i) <= handle && handle < block_limit) return true;
    }
  }
  return false;
}
#endif  // DEBUG
327
328
// Pushes {that} as the innermost v8::TryCatch handler for the current thread.
void Isolate::RegisterTryCatchHandler(v8::TryCatch* that) {
  thread_local_top()->set_try_catch_handler(that);
}
332
333
// Pops {that} — which must be the innermost handler — restoring its outer
// handler as the new top of the TryCatch chain.
void Isolate::UnregisterTryCatchHandler(v8::TryCatch* that) {
  DCHECK(thread_local_top()->try_catch_handler() == that);
  thread_local_top()->set_try_catch_handler(that->next_);
}
338
339
// Produces a printable dump of the current stack. stack_trace_nesting_level_
// guards against re-entrancy: a first re-entry reports a double fault and
// dumps the partially built message; any deeper re-entry aborts.
Handle<String> Isolate::StackTraceString() {
  if (stack_trace_nesting_level_ == 0) {
    stack_trace_nesting_level_++;
    HeapStringAllocator allocator;
    StringStream::ClearMentionedObjectCache(this);
    StringStream accumulator(&allocator);
    // Publish the in-progress message so a nested fault can still dump it.
    incomplete_message_ = &accumulator;
    PrintStack(&accumulator);
    Handle<String> stack_trace = accumulator.ToString(this);
    incomplete_message_ = nullptr;
    stack_trace_nesting_level_ = 0;
    return stack_trace;
  } else if (stack_trace_nesting_level_ == 1) {
    stack_trace_nesting_level_++;
    base::OS::PrintError(
        "\n\nAttempt to print stack while printing stack (double fault)\n");
    base::OS::PrintError(
        "If you are lucky you may find a partial stack dump on stdout.\n\n");
    incomplete_message_->OutputToStdOut();
    return factory()->empty_string();
  } else {
    base::OS::Abort();
    // Unreachable
    return factory()->empty_string();
  }
}
366
// Prints a StackTraceFailureMessage carrying the four caller-supplied
// pointers (for later crash-dump analysis) and aborts the process.
void Isolate::PushStackTraceAndDie(void* ptr1, void* ptr2, void* ptr3,
                                   void* ptr4) {
  StackTraceFailureMessage message(this, ptr1, ptr2, ptr3, ptr4);
  message.Print();
  base::OS::Abort();
}
373
// Writes the captured pointers and JS stack trace to stderr. Printing the
// object's own address forces it to be stack-allocated.
void StackTraceFailureMessage::Print() volatile {
  // Print the details of this failure message object, including its own address
  // to force stack allocation.
  base::OS::PrintError(
      "Stacktrace:\n ptr1=%p\n ptr2=%p\n ptr3=%p\n ptr4=%p\n "
      "failure_message_object=%p\n%s",
      ptr1_, ptr2_, ptr3_, ptr4_, this, &js_stack_trace_[0]);
}
382
// Captures a concise JS stack trace and the most recent code objects into
// this message object so they end up in a crash minidump.
StackTraceFailureMessage::StackTraceFailureMessage(Isolate* isolate, void* ptr1,
                                                   void* ptr2, void* ptr3,
                                                   void* ptr4) {
  isolate_ = isolate;
  ptr1_ = ptr1;
  ptr2_ = ptr2;
  ptr3_ = ptr3;
  ptr4_ = ptr4;
  // Write a stacktrace into the {js_stack_trace_} buffer.
  const size_t buffer_length = arraysize(js_stack_trace_);
  memset(&js_stack_trace_, 0, buffer_length);
  // Reserve the final byte for the NUL terminator.
  FixedStringAllocator fixed(&js_stack_trace_[0], buffer_length - 1);
  StringStream accumulator(&fixed, StringStream::kPrintObjectConcise);
  isolate->PrintStack(&accumulator, Isolate::kPrintStackVerbose);
  // Keeping a reference to the last code objects to increase likelihood that
  // they get included in the minidump.
  const size_t code_objects_length = arraysize(code_objects_);
  size_t i = 0;
  StackFrameIterator it(isolate);
  for (; !it.done() && i < code_objects_length; it.Advance()) {
    code_objects_[i++] = it.frame()->unchecked_code();
  }
}
406
407 namespace {
408
// Accumulates a FrameArray for Error.stack-style stack traces. Applies the
// requested frame-skipping mode and filters out frames that must not be
// visible (hidden builtins, foreign security contexts), stopping once {limit}
// frames have been collected.
class FrameArrayBuilder {
 public:
  FrameArrayBuilder(Isolate* isolate, FrameSkipMode mode, int limit,
                    Handle<Object> caller)
      : isolate_(isolate), mode_(mode), limit_(limit), caller_(caller) {
    switch (mode_) {
      case SKIP_FIRST:
        skip_next_frame_ = true;
        break;
      case SKIP_UNTIL_SEEN:
        DCHECK(caller_->IsJSFunction());
        skip_next_frame_ = true;
        break;
      case SKIP_NONE:
        skip_next_frame_ = false;
        break;
    }

    // Start small; the Append* helpers grow the array as needed.
    elements_ = isolate->factory()->NewFrameArray(Min(limit, 10));
  }

  // Appends all summarized (possibly inlined) frames of a standard frame,
  // innermost first.
  void AppendStandardFrame(StandardFrame* frame) {
    std::vector<FrameSummary> frames;
    frame->Summarize(&frames);
    // A standard frame may include many summarized frames (due to inlining).
    for (size_t i = frames.size(); i != 0 && !full(); i--) {
      const auto& summ = frames[i - 1];
      if (summ.IsJavaScript()) {
        //====================================================================
        // Handle a JavaScript frame.
        //====================================================================
        const auto& summary = summ.AsJavaScript();

        // Filter out internal frames that we do not want to show.
        if (!IsVisibleInStackTrace(summary.function())) continue;

        Handle<AbstractCode> abstract_code = summary.abstract_code();
        const int offset = summary.code_offset();

        bool is_constructor = summary.is_constructor();
        // Help CallSite::IsConstructor correctly detect hand-written
        // construct stubs.
        if (abstract_code->IsCode() &&
            Code::cast(*abstract_code)->is_construct_stub()) {
          is_constructor = true;
        }

        int flags = 0;
        Handle<JSFunction> function = summary.function();
        if (IsStrictFrame(function)) flags |= FrameArray::kIsStrict;
        if (is_constructor) flags |= FrameArray::kIsConstructor;

        elements_ = FrameArray::AppendJSFrame(
            elements_, TheHoleToUndefined(isolate_, summary.receiver()),
            function, abstract_code, offset, flags);
      } else if (summ.IsWasmCompiled()) {
        //====================================================================
        // Handle a WASM compiled frame.
        //====================================================================
        const auto& summary = summ.AsWasmCompiled();
        if (summary.code()->kind() != wasm::WasmCode::kFunction) {
          continue;
        }
        Handle<WasmInstanceObject> instance = summary.wasm_instance();
        int flags = 0;
        if (instance->module_object()->shared()->is_asm_js()) {
          flags |= FrameArray::kIsAsmJsWasmFrame;
          if (WasmCompiledFrame::cast(frame)->at_to_number_conversion()) {
            flags |= FrameArray::kAsmJsAtNumberConversion;
          }
        } else {
          flags |= FrameArray::kIsWasmFrame;
        }

        elements_ = FrameArray::AppendWasmFrame(
            elements_, instance, summary.function_index(), summary.code(),
            summary.code_offset(), flags);
      } else if (summ.IsWasmInterpreted()) {
        //====================================================================
        // Handle a WASM interpreted frame.
        //====================================================================
        const auto& summary = summ.AsWasmInterpreted();
        Handle<WasmInstanceObject> instance = summary.wasm_instance();
        int flags = FrameArray::kIsWasmInterpretedFrame;
        DCHECK(!instance->module_object()->shared()->is_asm_js());
        elements_ = FrameArray::AppendWasmFrame(elements_, instance,
                                                summary.function_index(), {},
                                                summary.byte_offset(), flags);
      }
    }
  }

  // Appends the single JS frame represented by a builtin exit frame, if it
  // passes the visibility filter.
  void AppendBuiltinExitFrame(BuiltinExitFrame* exit_frame) {
    Handle<JSFunction> function = handle(exit_frame->function(), isolate_);

    // Filter out internal frames that we do not want to show.
    if (!IsVisibleInStackTrace(function)) return;

    Handle<Object> receiver(exit_frame->receiver(), isolate_);
    Handle<Code> code(exit_frame->LookupCode(), isolate_);
    const int offset =
        static_cast<int>(exit_frame->pc() - code->InstructionStart());

    int flags = 0;
    if (IsStrictFrame(function)) flags |= FrameArray::kIsStrict;
    if (exit_frame->IsConstructor()) flags |= FrameArray::kIsConstructor;

    elements_ = FrameArray::AppendJSFrame(elements_, receiver, function,
                                          Handle<AbstractCode>::cast(code),
                                          offset, flags);
  }

  // True once the frame limit has been reached.
  bool full() { return elements_->FrameCount() >= limit_; }

  // Returns the collected frames, trimmed to their actual count.
  Handle<FrameArray> GetElements() {
    elements_->ShrinkToFit();
    return elements_;
  }

 private:
  // Poison stack frames below the first strict mode frame.
  // The stack trace API should not expose receivers and function
  // objects on frames deeper than the top-most one with a strict mode
  // function.
  bool IsStrictFrame(Handle<JSFunction> function) {
    if (!encountered_strict_function_) {
      encountered_strict_function_ =
          is_strict(function->shared()->language_mode());
    }
    // Once set, stays set for all deeper frames (sticky).
    return encountered_strict_function_;
  }

  // Determines whether the given stack frame should be displayed in a stack
  // trace.
  bool IsVisibleInStackTrace(Handle<JSFunction> function) {
    return ShouldIncludeFrame(function) && IsNotHidden(function) &&
           IsInSameSecurityContext(function);
  }

  // This mechanism excludes a number of uninteresting frames from the stack
  // trace. This can be the first frame (which will be a builtin-exit frame
  // for the error constructor builtin) or every frame until encountering a
  // user-specified function.
  bool ShouldIncludeFrame(Handle<JSFunction> function) {
    switch (mode_) {
      case SKIP_NONE:
        return true;
      case SKIP_FIRST:
        if (!skip_next_frame_) return true;
        skip_next_frame_ = false;
        return false;
      case SKIP_UNTIL_SEEN:
        if (skip_next_frame_ && (*function == *caller_)) {
          skip_next_frame_ = false;
          return false;
        }
        return !skip_next_frame_;
    }
    UNREACHABLE();
  }

  bool IsNotHidden(Handle<JSFunction> function) {
    // Functions defined not in user scripts are not visible unless directly
    // exposed, in which case the native flag is set.
    // The --builtins-in-stack-traces command line flag allows including
    // internal call sites in the stack trace for debugging purposes.
    if (!FLAG_builtins_in_stack_traces &&
        !function->shared()->IsUserJavaScript()) {
      return function->shared()->native();
    }
    return true;
  }

  bool IsInSameSecurityContext(Handle<JSFunction> function) {
    return isolate_->context()->HasSameSecurityTokenAs(function->context());
  }

  // TODO(jgruber): Fix all cases in which frames give us a hole value (e.g.
  // the receiver in RegExp constructor frames).
  Handle<Object> TheHoleToUndefined(Isolate* isolate, Handle<Object> in) {
    return (in->IsTheHole(isolate))
               ? Handle<Object>::cast(isolate->factory()->undefined_value())
               : in;
  }

  Isolate* isolate_;
  const FrameSkipMode mode_;
  int limit_;
  const Handle<Object> caller_;
  bool skip_next_frame_ = true;
  bool encountered_strict_function_ = false;
  Handle<FrameArray> elements_;
};
602
GetStackTraceLimit(Isolate * isolate,int * result)603 bool GetStackTraceLimit(Isolate* isolate, int* result) {
604 Handle<JSObject> error = isolate->error_function();
605
606 Handle<String> key = isolate->factory()->stackTraceLimit_string();
607 Handle<Object> stack_trace_limit = JSReceiver::GetDataProperty(error, key);
608 if (!stack_trace_limit->IsNumber()) return false;
609
610 // Ensure that limit is not negative.
611 *result = Max(FastD2IChecked(stack_trace_limit->Number()), 0);
612
613 if (*result != FLAG_stack_trace_limit) {
614 isolate->CountUsage(v8::Isolate::kErrorStackTraceLimit);
615 }
616
617 return true;
618 }
619
// Function callback that unconditionally reports "no extension".
bool NoExtension(const v8::FunctionCallbackInfo<v8::Value>&) { return false; }
621 } // namespace
622
// Builds the structured (FrameArray-backed) stack trace used for Error.stack
// formatting. Returns undefined when Error.stackTraceLimit is missing or not
// a number (see GetStackTraceLimit).
Handle<Object> Isolate::CaptureSimpleStackTrace(Handle<JSReceiver> error_object,
                                                FrameSkipMode mode,
                                                Handle<Object> caller) {
  DisallowJavascriptExecution no_js(this);

  int limit;
  if (!GetStackTraceLimit(this, &limit)) return factory()->undefined_value();

  FrameArrayBuilder builder(this, mode, limit, caller);

  for (StackFrameIterator iter(this); !iter.done() && !builder.full();
       iter.Advance()) {
    StackFrame* frame = iter.frame();

    switch (frame->type()) {
      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION:
      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
      case StackFrame::OPTIMIZED:
      case StackFrame::INTERPRETED:
      case StackFrame::BUILTIN:
        builder.AppendStandardFrame(JavaScriptFrame::cast(frame));
        break;
      case StackFrame::BUILTIN_EXIT:
        // BuiltinExitFrames are not standard frames, so they do not have
        // Summarize(). However, they may have one JS frame worth showing.
        builder.AppendBuiltinExitFrame(BuiltinExitFrame::cast(frame));
        break;
      case StackFrame::WASM_COMPILED:
        builder.AppendStandardFrame(WasmCompiledFrame::cast(frame));
        break;
      case StackFrame::WASM_INTERPRETER_ENTRY:
        builder.AppendStandardFrame(WasmInterpreterEntryFrame::cast(frame));
        break;

      default:
        // All other frame types carry no JS-visible information.
        break;
    }
  }

  // TODO(yangguo): Queue this structured stack trace for preprocessing on GC.
  return factory()->NewJSArrayWithElements(builder.GetElements());
}
665
// If capturing for uncaught exceptions is enabled, attaches a detailed stack
// trace (FixedArray of StackFrameInfo) to {error_object} under the
// detailed-stack-trace symbol. Propagates exceptions from the property store.
MaybeHandle<JSReceiver> Isolate::CaptureAndSetDetailedStackTrace(
    Handle<JSReceiver> error_object) {
  if (capture_stack_trace_for_uncaught_exceptions_) {
    // Capture stack trace for a detailed exception message.
    Handle<Name> key = factory()->detailed_stack_trace_symbol();
    Handle<FixedArray> stack_trace = CaptureCurrentStackTrace(
        stack_trace_for_uncaught_exceptions_frame_limit_,
        stack_trace_for_uncaught_exceptions_options_);
    RETURN_ON_EXCEPTION(this,
                        JSReceiver::SetProperty(error_object, key, stack_trace,
                                                LanguageMode::kStrict),
                        JSReceiver);
  }
  return error_object;
}
681
// Attaches the simple (FrameArray-backed) stack trace to {error_object} under
// the stack-trace symbol, for later Error.stack string formatting.
MaybeHandle<JSReceiver> Isolate::CaptureAndSetSimpleStackTrace(
    Handle<JSReceiver> error_object, FrameSkipMode mode,
    Handle<Object> caller) {
  // Capture stack trace for simple stack trace string formatting.
  Handle<Name> key = factory()->stack_trace_symbol();
  Handle<Object> stack_trace =
      CaptureSimpleStackTrace(error_object, mode, caller);
  RETURN_ON_EXCEPTION(this,
                      JSReceiver::SetProperty(error_object, key, stack_trace,
                                              LanguageMode::kStrict),
                      JSReceiver);
  return error_object;
}
695
GetDetailedStackTrace(Handle<JSObject> error_object)696 Handle<FixedArray> Isolate::GetDetailedStackTrace(
697 Handle<JSObject> error_object) {
698 Handle<Name> key_detailed = factory()->detailed_stack_trace_symbol();
699 Handle<Object> stack_trace =
700 JSReceiver::GetDataProperty(error_object, key_detailed);
701 if (stack_trace->IsFixedArray()) return Handle<FixedArray>::cast(stack_trace);
702 return Handle<FixedArray>();
703 }
704
// Returns an abstract program counter for the topmost JS frame and fills in
// *line/*column (1-based where a Script is available; -1 when unknown). For
// interpreted frames, the returned address points into the bytecode array
// rather than at machine code.
Address Isolate::GetAbstractPC(int* line, int* column) {
  JavaScriptFrameIterator it(this);

  if (it.done()) {
    // No JS frame on the stack.
    *line = -1;
    *column = -1;
    return kNullAddress;
  }
  JavaScriptFrame* frame = it.frame();
  DCHECK(!frame->is_builtin());
  int position = frame->position();

  Object* maybe_script = frame->function()->shared()->script();
  if (maybe_script->IsScript()) {
    Handle<Script> script(Script::cast(maybe_script), this);
    Script::PositionInfo info;
    Script::GetPositionInfo(script, position, &info, Script::WITH_OFFSET);
    *line = info.line + 1;
    *column = info.column + 1;
  } else {
    // No script: report the raw source position as the line.
    *line = position;
    *column = -1;
  }

  if (frame->is_interpreted()) {
    InterpretedFrame* iframe = static_cast<InterpretedFrame*>(frame);
    // Address of the first bytecode: untag the array pointer and skip the
    // BytecodeArray header.
    Address bytecode_start =
        reinterpret_cast<Address>(iframe->GetBytecodeArray()) - kHeapObjectTag +
        BytecodeArray::kHeaderSize;
    return bytecode_start + iframe->GetBytecodeOffset();
  }

  return frame->pc();
}
739
740 class CaptureStackTraceHelper {
741 public:
CaptureStackTraceHelper(Isolate * isolate)742 explicit CaptureStackTraceHelper(Isolate* isolate) : isolate_(isolate) {}
743
NewStackFrameObject(FrameSummary & summ)744 Handle<StackFrameInfo> NewStackFrameObject(FrameSummary& summ) {
745 if (summ.IsJavaScript()) return NewStackFrameObject(summ.AsJavaScript());
746 if (summ.IsWasm()) return NewStackFrameObject(summ.AsWasm());
747 UNREACHABLE();
748 }
749
NewStackFrameObject(const FrameSummary::JavaScriptFrameSummary & summ)750 Handle<StackFrameInfo> NewStackFrameObject(
751 const FrameSummary::JavaScriptFrameSummary& summ) {
752 int code_offset;
753 Handle<ByteArray> source_position_table;
754 Handle<Object> maybe_cache;
755 Handle<SimpleNumberDictionary> cache;
756 if (!FLAG_optimize_for_size) {
757 code_offset = summ.code_offset();
758 source_position_table =
759 handle(summ.abstract_code()->source_position_table(), isolate_);
760 maybe_cache = handle(summ.abstract_code()->stack_frame_cache(), isolate_);
761 if (maybe_cache->IsSimpleNumberDictionary()) {
762 cache = Handle<SimpleNumberDictionary>::cast(maybe_cache);
763 } else {
764 cache = SimpleNumberDictionary::New(isolate_, 1);
765 }
766 int entry = cache->FindEntry(code_offset);
767 if (entry != NumberDictionary::kNotFound) {
768 Handle<StackFrameInfo> frame(
769 StackFrameInfo::cast(cache->ValueAt(entry)));
770 DCHECK(frame->function_name()->IsString());
771 Handle<String> function_name = summ.FunctionName();
772 if (function_name->Equals(String::cast(frame->function_name()))) {
773 return frame;
774 }
775 }
776 }
777
778 Handle<StackFrameInfo> frame = factory()->NewStackFrameInfo();
779 Handle<Script> script = Handle<Script>::cast(summ.script());
780 Script::PositionInfo info;
781 bool valid_pos = Script::GetPositionInfo(script, summ.SourcePosition(),
782 &info, Script::WITH_OFFSET);
783 if (valid_pos) {
784 frame->set_line_number(info.line + 1);
785 frame->set_column_number(info.column + 1);
786 }
787 frame->set_script_id(script->id());
788 frame->set_script_name(script->name());
789 frame->set_script_name_or_source_url(script->GetNameOrSourceURL());
790 frame->set_is_eval(script->compilation_type() ==
791 Script::COMPILATION_TYPE_EVAL);
792 Handle<String> function_name = summ.FunctionName();
793 frame->set_function_name(*function_name);
794 frame->set_is_constructor(summ.is_constructor());
795 frame->set_is_wasm(false);
796 if (!FLAG_optimize_for_size) {
797 auto new_cache = SimpleNumberDictionary::Set(cache, code_offset, frame);
798 if (*new_cache != *cache || !maybe_cache->IsNumberDictionary()) {
799 AbstractCode::SetStackFrameCache(summ.abstract_code(), new_cache);
800 }
801 }
802 frame->set_id(next_id());
803 return frame;
804 }
805
NewStackFrameObject(const FrameSummary::WasmFrameSummary & summ)806 Handle<StackFrameInfo> NewStackFrameObject(
807 const FrameSummary::WasmFrameSummary& summ) {
808 Handle<StackFrameInfo> info = factory()->NewStackFrameInfo();
809
810 Handle<WasmSharedModuleData> shared(
811 summ.wasm_instance()->module_object()->shared(), isolate_);
812 Handle<String> name = WasmSharedModuleData::GetFunctionName(
813 isolate_, shared, summ.function_index());
814 info->set_function_name(*name);
815 // Encode the function index as line number (1-based).
816 info->set_line_number(summ.function_index() + 1);
817 // Encode the byte offset as column (1-based).
818 int position = summ.byte_offset();
819 // Make position 1-based.
820 if (position >= 0) ++position;
821 info->set_column_number(position);
822 info->set_script_id(summ.script()->id());
823 info->set_is_wasm(true);
824 info->set_id(next_id());
825 return info;
826 }
827
828 private:
factory()829 inline Factory* factory() { return isolate_->factory(); }
830
next_id() const831 int next_id() const {
832 int id = isolate_->last_stack_frame_info_id() + 1;
833 isolate_->set_last_stack_frame_info_id(id);
834 return id;
835 }
836
837 Isolate* isolate_;
838 };
839
CaptureCurrentStackTrace(int frame_limit,StackTrace::StackTraceOptions options)840 Handle<FixedArray> Isolate::CaptureCurrentStackTrace(
841 int frame_limit, StackTrace::StackTraceOptions options) {
842 DisallowJavascriptExecution no_js(this);
843 CaptureStackTraceHelper helper(this);
844
845 // Ensure no negative values.
846 int limit = Max(frame_limit, 0);
847 Handle<FixedArray> stack_trace_elems = factory()->NewFixedArray(limit);
848
849 int frames_seen = 0;
850 for (StackTraceFrameIterator it(this); !it.done() && (frames_seen < limit);
851 it.Advance()) {
852 StandardFrame* frame = it.frame();
853 // Set initial size to the maximum inlining level + 1 for the outermost
854 // function.
855 std::vector<FrameSummary> frames;
856 frame->Summarize(&frames);
857 for (size_t i = frames.size(); i != 0 && frames_seen < limit; i--) {
858 FrameSummary& frame = frames[i - 1];
859 if (!frame.is_subject_to_debugging()) continue;
860 // Filter frames from other security contexts.
861 if (!(options & StackTrace::kExposeFramesAcrossSecurityOrigins) &&
862 !this->context()->HasSameSecurityTokenAs(*frame.native_context()))
863 continue;
864 Handle<StackFrameInfo> new_frame_obj = helper.NewStackFrameObject(frame);
865 stack_trace_elems->set(frames_seen, *new_frame_obj);
866 frames_seen++;
867 }
868 }
869 stack_trace_elems->Shrink(frames_seen);
870 return stack_trace_elems;
871 }
872
873
// Prints the current stack to |out|. A nesting-level counter guards against
// runaway recursion: if printing the stack itself triggers another stack
// print (a "double fault"), only the partial output accumulated so far is
// dumped, and further nested attempts print nothing.
void Isolate::PrintStack(FILE* out, PrintStackMode mode) {
  if (stack_trace_nesting_level_ == 0) {
    stack_trace_nesting_level_++;
    StringStream::ClearMentionedObjectCache(this);
    HeapStringAllocator allocator;
    StringStream accumulator(&allocator);
    // Keep a pointer to the in-progress output so a nested call can still
    // dump whatever was accumulated before the fault.
    incomplete_message_ = &accumulator;
    PrintStack(&accumulator, mode);
    accumulator.OutputToFile(out);
    InitializeLoggingAndCounters();
    accumulator.Log(this);
    incomplete_message_ = nullptr;
    stack_trace_nesting_level_ = 0;
  } else if (stack_trace_nesting_level_ == 1) {
    stack_trace_nesting_level_++;
    base::OS::PrintError(
        "\n\nAttempt to print stack while printing stack (double fault)\n");
    base::OS::PrintError(
        "If you are lucky you may find a partial stack dump on stdout.\n\n");
    incomplete_message_->OutputToFile(out);
  }
}
896
897
PrintFrames(Isolate * isolate,StringStream * accumulator,StackFrame::PrintMode mode)898 static void PrintFrames(Isolate* isolate,
899 StringStream* accumulator,
900 StackFrame::PrintMode mode) {
901 StackFrameIterator it(isolate);
902 for (int i = 0; !it.done(); it.Advance()) {
903 it.frame()->Print(accumulator, mode, i++);
904 }
905 }
906
// Renders the stack into |accumulator|: always an overview section, plus a
// per-frame details section and the mentioned-object cache when |mode| is
// kPrintStackVerbose. Prints nothing if no JS frames are on the stack.
void Isolate::PrintStack(StringStream* accumulator, PrintStackMode mode) {
  // The MentionedObjectCache is not GC-proof at the moment.
  DisallowHeapAllocation no_gc;
  HandleScope scope(this);
  DCHECK(accumulator->IsMentionedObjectCacheClear(this));

  // Avoid printing anything if there are no frames.
  if (c_entry_fp(thread_local_top()) == 0) return;

  accumulator->Add(
      "\n==== JS stack trace =========================================\n\n");
  PrintFrames(this, accumulator, StackFrame::OVERVIEW);
  if (mode == kPrintStackVerbose) {
    accumulator->Add(
        "\n==== Details ================================================\n\n");
    PrintFrames(this, accumulator, StackFrame::DETAILS);
    accumulator->PrintMentionedObjectCache(this);
  }
  accumulator->Add("=====================\n\n");
}
927
928
// Installs the embedder callback invoked when an access check fails; consumed
// by ReportFailedAccessCheck below. Stored per-thread in thread_local_top.
void Isolate::SetFailedAccessCheckCallback(
    v8::FailedAccessCheckCallback callback) {
  thread_local_top()->failed_access_check_callback_ = callback;
}
933
934
// Reports a failed access check on |receiver| to the embedder's callback.
// If no callback is installed (or no AccessCheckInfo is found), schedules a
// kNoAccess TypeError instead.
void Isolate::ReportFailedAccessCheck(Handle<JSObject> receiver) {
  if (!thread_local_top()->failed_access_check_callback_) {
    return ScheduleThrow(*factory()->NewTypeError(MessageTemplate::kNoAccess));
  }

  DCHECK(receiver->IsAccessCheckNeeded());
  DCHECK(context());

  // Get the data object from access check info.
  HandleScope scope(this);
  Handle<Object> data;
  { DisallowHeapAllocation no_gc;
    // Raw AccessCheckInfo pointer is only valid while allocation is
    // disallowed; |data| is re-wrapped in a handle before leaving the scope.
    AccessCheckInfo* access_check_info = AccessCheckInfo::Get(this, receiver);
    if (!access_check_info) {
      AllowHeapAllocation doesnt_matter_anymore;
      return ScheduleThrow(
          *factory()->NewTypeError(MessageTemplate::kNoAccess));
    }
    data = handle(access_check_info->data(), this);
  }

  // Leaving JavaScript.
  VMState<EXTERNAL> state(this);
  thread_local_top()->failed_access_check_callback_(
      v8::Utils::ToLocal(receiver), v8::ACCESS_HAS, v8::Utils::ToLocal(data));
}
961
962
// Decides whether code running in |accessing_context| may access |receiver|.
// Fast path: compare native contexts / security tokens directly (no
// allocation). Slow path: defer to the embedder's AccessCheckCallback from
// the receiver's AccessCheckInfo; absence of that info denies access.
bool Isolate::MayAccess(Handle<Context> accessing_context,
                        Handle<JSObject> receiver) {
  DCHECK(receiver->IsJSGlobalProxy() || receiver->IsAccessCheckNeeded());

  // Check for compatibility between the security tokens in the
  // current lexical context and the accessed object.

  // During bootstrapping, callback functions are not enabled yet.
  if (bootstrapper()->IsActive()) return true;
  {
    DisallowHeapAllocation no_gc;

    if (receiver->IsJSGlobalProxy()) {
      Object* receiver_context =
          JSGlobalProxy::cast(*receiver)->native_context();
      // A global proxy detached from its context has no valid context object;
      // deny access.
      if (!receiver_context->IsContext()) return false;

      // Get the native context of current top context.
      // avoid using Isolate::native_context() because it uses Handle.
      Context* native_context =
          accessing_context->global_object()->native_context();
      if (receiver_context == native_context) return true;

      // Matching security tokens grant access even across contexts.
      if (Context::cast(receiver_context)->security_token() ==
          native_context->security_token())
        return true;
    }
  }

  // Slow path: consult the embedder-provided access check callback.
  HandleScope scope(this);
  Handle<Object> data;
  v8::AccessCheckCallback callback = nullptr;
  { DisallowHeapAllocation no_gc;
    AccessCheckInfo* access_check_info = AccessCheckInfo::Get(this, receiver);
    // No AccessCheckInfo (and no token match above) means access is denied.
    if (!access_check_info) return false;
    Object* fun_obj = access_check_info->callback();
    callback = v8::ToCData<v8::AccessCheckCallback>(fun_obj);
    data = handle(access_check_info->data(), this);
  }

  LOG(this, ApiSecurityCheck());

  {
    // Leaving JavaScript.
    VMState<EXTERNAL> state(this);
    return callback(v8::Utils::ToLocal(accessing_context),
                    v8::Utils::ToLocal(receiver), v8::Utils::ToLocal(data));
  }
}
1012
1013
// Constructs and throws the RangeError for stack overflow. JS execution is
// disallowed while building the error, since we are by definition close to
// the stack limit. Returns the exception sentinel.
Object* Isolate::StackOverflow() {
  if (FLAG_abort_on_stack_or_string_length_overflow) {
    FATAL("Aborting on stack overflow");
  }

  DisallowJavascriptExecution no_js(this);
  HandleScope scope(this);

  Handle<JSFunction> fun = range_error_function();
  Handle<Object> msg = factory()->NewStringFromAsciiChecked(
      MessageTemplate::TemplateString(MessageTemplate::kStackOverflow));
  Handle<Object> no_caller;
  Handle<Object> exception;
  // Propagates any failure during error construction to the caller.
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      this, exception,
      ErrorUtils::Construct(this, fun, fun, msg, SKIP_NONE, no_caller, true));

  Throw(*exception, nullptr);

#ifdef VERIFY_HEAP
  // Under stress-compaction testing, force a GC here to shake out bugs.
  if (FLAG_verify_heap && FLAG_stress_compaction) {
    heap()->CollectAllGarbage(Heap::kNoGCFlags,
                              GarbageCollectionReason::kTesting);
  }
#endif  // VERIFY_HEAP

  return heap()->exception();
}
1042
1043
// Throws the special termination exception, which is not catchable by
// JavaScript (see is_catchable_by_javascript users elsewhere in this file).
Object* Isolate::TerminateExecution() {
  return Throw(heap_.termination_exception(), nullptr);
}
1047
1048
// Undoes a previous TerminateExecution() request: resets the TryCatch
// terminated flag and clears the termination exception from both the pending
// and the scheduled exception slots (each checked independently).
void Isolate::CancelTerminateExecution() {
  if (try_catch_handler()) {
    try_catch_handler()->has_terminated_ = false;
  }
  if (has_pending_exception() &&
      pending_exception() == heap_.termination_exception()) {
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
  }
  if (has_scheduled_exception() &&
      scheduled_exception() == heap_.termination_exception()) {
    thread_local_top()->external_caught_exception_ = false;
    clear_scheduled_exception();
  }
}
1064
1065
// Queues an API-level interrupt (callback + opaque data) and asks the stack
// guard to deliver it. The queue is protected by the execution access lock.
void Isolate::RequestInterrupt(InterruptCallback callback, void* data) {
  ExecutionAccess access(this);
  api_interrupts_queue_.push(InterruptEntry(callback, data));
  stack_guard()->RequestApiInterrupt();
}
1071
1072
// Drains the API interrupt queue, invoking each queued callback. The
// execution access lock is held only while popping from the queue — never
// across the callback invocation itself, which runs in EXTERNAL VM state.
void Isolate::InvokeApiInterruptCallbacks() {
  RuntimeCallTimerScope runtimeTimer(
      this, RuntimeCallCounterId::kInvokeApiInterruptCallbacks);
  // Note: callback below should be called outside of execution access lock.
  while (true) {
    InterruptEntry entry;
    {
      ExecutionAccess access(this);
      if (api_interrupts_queue_.empty()) return;
      entry = api_interrupts_queue_.front();
      api_interrupts_queue_.pop();
    }
    VMState<EXTERNAL> state(this);
    HandleScope handle_scope(this);
    // entry.first is the callback; entry.second its opaque data argument.
    entry.first(reinterpret_cast<v8::Isolate*>(this), entry.second);
  }
}
1090
1091
// Reports an exception thrown while the bootstrapper is active. Normal
// message-object machinery is unavailable during bootstrapping, so this
// prints directly to stderr, including the failing line number (and under
// OBJECT_PRINT the full script source) when a script location is known.
void ReportBootstrappingException(Handle<Object> exception,
                                  MessageLocation* location) {
  base::OS::PrintError("Exception thrown during bootstrapping\n");
  if (location == nullptr || location->script().is_null()) return;
  // We are bootstrapping and caught an error where the location is set
  // and we have a script for the location.
  // In this case we could have an extension (or an internal error
  // somewhere) and we print out the line number at which the error occurred
  // to the console for easier debugging.
  int line_number =
      location->script()->GetLineNumber(location->start_pos()) + 1;
  if (exception->IsString() && location->script()->name()->IsString()) {
    base::OS::PrintError(
        "Extension or internal compilation error: %s in %s at line %d.\n",
        String::cast(*exception)->ToCString().get(),
        String::cast(location->script()->name())->ToCString().get(),
        line_number);
  } else if (location->script()->name()->IsString()) {
    base::OS::PrintError(
        "Extension or internal compilation error in %s at line %d.\n",
        String::cast(location->script()->name())->ToCString().get(),
        line_number);
  } else if (exception->IsString()) {
    base::OS::PrintError("Extension or internal compilation error: %s.\n",
                         String::cast(*exception)->ToCString().get());
  } else {
    base::OS::PrintError("Extension or internal compilation error.\n");
  }
#ifdef OBJECT_PRINT
  // Since comments and empty lines have been stripped from the source of
  // builtins, print the actual source here so that line numbers match.
  if (location->script()->source()->IsString()) {
    Handle<String> src(String::cast(location->script()->source()));
    PrintF("Failing script:");
    int len = src->length();
    if (len == 0) {
      PrintF(" <not available>\n");
    } else {
      PrintF("\n");
      int line_number = 1;
      PrintF("%5d: ", line_number);
      for (int i = 0; i < len; i++) {
        uint16_t character = src->Get(i);
        PrintF("%c", character);
        // Print a line-number prefix after each newline; the i < len - 2
        // bound skips the prefix after a trailing newline (NOTE(review):
        // this also skips it for a newline at position len - 2 — confirm
        // that off-by-one is intended).
        if (character == '\n' && i < len - 2) {
          PrintF("%5d: ", ++line_number);
        }
      }
      PrintF("\n");
    }
  }
#endif
}
1145
// Whether wasm code may catch |exception|: it must be catchable by
// JavaScript and be a JSError. The final check probes for the wasm
// runtime-id property; IsJust() is true whenever the lookup completed
// without throwing — i.e. Just(false) (property absent) also yields true
// here. NOTE(review): confirm that is intended rather than FromJust().
bool Isolate::is_catchable_by_wasm(Object* exception) {
  if (!is_catchable_by_javascript(exception) || !exception->IsJSError())
    return false;
  HandleScope scope(this);
  Handle<Object> exception_handle(exception, this);
  return JSReceiver::HasProperty(Handle<JSReceiver>::cast(exception_handle),
                                 factory()->InternalizeUtf8String(
                                     wasm::WasmException::kRuntimeIdStr))
      .IsJust();
}
1156
// Throws |exception| with an optional source |location|: optionally dumps
// debug output, notifies the debugger, creates a message object when
// required, may abort on uncaught exceptions (flag-controlled), and finally
// records the exception as pending. Returns the exception sentinel that
// signals "pending exception" to callers.
Object* Isolate::Throw(Object* exception, MessageLocation* location) {
  DCHECK(!has_pending_exception());

  HandleScope scope(this);
  Handle<Object> exception_handle(exception, this);

  // Debug-aid: dump every thrown exception with location and stack.
  if (FLAG_print_all_exceptions) {
    printf("=========================================================\n");
    printf("Exception thrown:\n");
    if (location) {
      Handle<Script> script = location->script();
      Handle<Object> name(script->GetNameOrSourceURL(), this);
      printf("at ");
      if (name->IsString() && String::cast(*name)->length() > 0)
        String::cast(*name)->PrintOn(stdout);
      else
        printf("<anonymous>");
// Script::GetLineNumber and Script::GetColumnNumber can allocate on the heap
// to initialize the line_ends array, so be careful when calling them.
#ifdef DEBUG
      if (AllowHeapAllocation::IsAllowed()) {
#else
      // In release builds we cannot query the allocation guard, so always
      // take the non-allocating branch.
      if ((false)) {
#endif
        printf(", %d:%d - %d:%d\n",
               Script::GetLineNumber(script, location->start_pos()) + 1,
               Script::GetColumnNumber(script, location->start_pos()),
               Script::GetLineNumber(script, location->end_pos()) + 1,
               Script::GetColumnNumber(script, location->end_pos()));
      } else {
        printf(", line %d\n",
               script->GetLineNumber(location->start_pos()) + 1);
      }
    }
    exception->Print();
    printf("Stack Trace:\n");
    PrintStack(stdout);
    printf("=========================================================\n");
  }

  // Determine whether a message needs to be created for the given exception
  // depending on the following criteria:
  // 1) External v8::TryCatch missing: Always create a message because any
  //    JavaScript handler for a finally-block might re-throw to top-level.
  // 2) External v8::TryCatch exists: Only create a message if the handler
  //    captures messages or is verbose (which reports despite the catch).
  // 3) ReThrow from v8::TryCatch: The message from a previous throw still
  //    exists and we preserve it instead of creating a new message.
  bool requires_message = try_catch_handler() == nullptr ||
                          try_catch_handler()->is_verbose_ ||
                          try_catch_handler()->capture_message_;
  bool rethrowing_message = thread_local_top()->rethrowing_message_;

  thread_local_top()->rethrowing_message_ = false;

  // Notify debugger of exception.
  if (is_catchable_by_javascript(exception)) {
    debug()->OnThrow(exception_handle);
  }

  // Generate the message if required.
  if (requires_message && !rethrowing_message) {
    MessageLocation computed_location;
    // If no location was specified we try to use a computed one instead.
    if (location == nullptr && ComputeLocation(&computed_location)) {
      location = &computed_location;
    }

    if (bootstrapper()->IsActive()) {
      // It's not safe to try to make message objects or collect stack traces
      // while the bootstrapper is active since the infrastructure may not have
      // been properly initialized.
      ReportBootstrappingException(exception_handle, location);
    } else {
      Handle<Object> message_obj = CreateMessage(exception_handle, location);
      thread_local_top()->pending_message_obj_ = *message_obj;

      // For any exception not caught by JavaScript, even when an external
      // handler is present:
      // If the abort-on-uncaught-exception flag is specified, and if the
      // embedder didn't specify a custom uncaught exception callback,
      // or if the custom callback determined that V8 should abort, then
      // abort.
      if (FLAG_abort_on_uncaught_exception) {
        CatchType prediction = PredictExceptionCatcher();
        if ((prediction == NOT_CAUGHT || prediction == CAUGHT_BY_EXTERNAL) &&
            (!abort_on_uncaught_exception_callback_ ||
             abort_on_uncaught_exception_callback_(
                 reinterpret_cast<v8::Isolate*>(this)))) {
          // Prevent endless recursion.
          FLAG_abort_on_uncaught_exception = false;
          // This flag is intended for use by JavaScript developers, so
          // print a user-friendly stack trace (not an internal one).
          PrintF(stderr, "%s\n\nFROM\n",
                 MessageHandler::GetLocalizedMessage(this, message_obj).get());
          PrintCurrentStackTrace(stderr);
          base::OS::Abort();
        }
      }
    }
  }

  // Set the exception being thrown.
  set_pending_exception(*exception_handle);
  return heap()->exception();
}
1262
1263
// Re-throws an already-reported exception. Unlike Throw(), no message object
// is created and the debugger is not notified again.
Object* Isolate::ReThrow(Object* exception) {
  DCHECK(!has_pending_exception());

  // Set the exception being re-thrown.
  set_pending_exception(exception);
  return heap()->exception();
}
1271
1272
// Unwinds the stack to find the nearest handler for the pending exception.
// For each frame type that can catch, records the resume state (context,
// entrypoint, constant pool, sp, fp) in thread-local storage for the CEntry
// stub to consume, clears the pending exception, and returns it. Termination
// exceptions (not catchable by JS/wasm) unwind all the way to the top ENTRY
// handler, which always exists — hence the UNREACHABLE() at the end.
Object* Isolate::UnwindAndFindHandler() {
  Object* exception = pending_exception();

  // Helper that stores the resume state for CEntry and clears the pending
  // exception before handing it back to the caller.
  auto FoundHandler = [&](Context* context, Address instruction_start,
                          intptr_t handler_offset,
                          Address constant_pool_address, Address handler_sp,
                          Address handler_fp) {
    // Store information to be consumed by the CEntry.
    thread_local_top()->pending_handler_context_ = context;
    thread_local_top()->pending_handler_entrypoint_ =
        instruction_start + handler_offset;
    thread_local_top()->pending_handler_constant_pool_ = constant_pool_address;
    thread_local_top()->pending_handler_fp_ = handler_fp;
    thread_local_top()->pending_handler_sp_ = handler_sp;

    // Return and clear pending exception.
    clear_pending_exception();
    return exception;
  };

  // Special handling of termination exceptions, uncatchable by JavaScript and
  // Wasm code, we unwind the handlers until the top ENTRY handler is found.
  bool catchable_by_js = is_catchable_by_javascript(exception);

  // Compute handler and stack unwinding information by performing a full walk
  // over the stack and dispatching according to the frame type.
  for (StackFrameIterator iter(this);; iter.Advance()) {
    // Handler must exist.
    DCHECK(!iter.done());

    StackFrame* frame = iter.frame();

    switch (frame->type()) {
      case StackFrame::ENTRY:
      case StackFrame::CONSTRUCT_ENTRY: {
        // For JSEntryStub frames we always have a handler.
        StackHandler* handler = frame->top_handler();

        // Restore the next handler.
        thread_local_top()->handler_ = handler->next()->address();

        // Gather information from the handler.
        Code* code = frame->LookupCode();
        HandlerTable table(code);
        return FoundHandler(nullptr, code->InstructionStart(),
                            table.LookupReturn(0), code->constant_pool(),
                            handler->address() + StackHandlerConstants::kSize,
                            0);
      }

      case StackFrame::WASM_COMPILED: {
        if (trap_handler::IsThreadInWasm()) {
          trap_handler::ClearThreadInWasm();
        }

        // Wasm catching is gated on the experimental exception-handling flag
        // and on the exception carrying the wasm runtime-id property.
        if (!FLAG_experimental_wasm_eh || !is_catchable_by_wasm(exception)) {
          break;
        }
        int stack_slots = 0;  // Will contain stack slot count of frame.
        WasmCompiledFrame* wasm_frame = static_cast<WasmCompiledFrame*>(frame);
        int offset = wasm_frame->LookupExceptionHandlerInTable(&stack_slots);
        if (offset < 0) break;
        // Compute the stack pointer from the frame pointer. This ensures that
        // argument slots on the stack are dropped as returning would.
        Address return_sp = frame->fp() +
                            StandardFrameConstants::kFixedFrameSizeAboveFp -
                            stack_slots * kPointerSize;

        // This is going to be handled by Wasm, so we need to set the TLS flag
        // again.
        trap_handler::SetThreadInWasm();

        set_wasm_caught_exception(exception);
        wasm::WasmCode* wasm_code =
            wasm_engine()->code_manager()->LookupCode(frame->pc());
        return FoundHandler(nullptr, wasm_code->instruction_start(), offset,
                            wasm_code->constant_pool(), return_sp, frame->fp());
      }

      case StackFrame::OPTIMIZED: {
        // For optimized frames we perform a lookup in the handler table.
        if (!catchable_by_js) break;
        OptimizedFrame* js_frame = static_cast<OptimizedFrame*>(frame);
        int stack_slots = 0;  // Will contain stack slot count of frame.
        int offset =
            js_frame->LookupExceptionHandlerInTable(&stack_slots, nullptr);
        if (offset < 0) break;
        // Compute the stack pointer from the frame pointer. This ensures
        // that argument slots on the stack are dropped as returning would.
        Address return_sp = frame->fp() +
                            StandardFrameConstants::kFixedFrameSizeAboveFp -
                            stack_slots * kPointerSize;

        // Gather information from the frame.
        Code* code = frame->LookupCode();

        // TODO(bmeurer): Turbofanned BUILTIN frames appear as OPTIMIZED,
        // but do not have a code kind of OPTIMIZED_FUNCTION.
        if (code->kind() == Code::OPTIMIZED_FUNCTION &&
            code->marked_for_deoptimization()) {
          // If the target code is lazy deoptimized, we jump to the original
          // return address, but we make a note that we are throwing, so
          // that the deoptimizer can do the right thing.
          offset = static_cast<int>(frame->pc() - code->entry());
          set_deoptimizer_lazy_throw(true);
        }

        return FoundHandler(nullptr, code->InstructionStart(), offset,
                            code->constant_pool(), return_sp, frame->fp());
      }

      case StackFrame::STUB: {
        // Some stubs are able to handle exceptions.
        if (!catchable_by_js) break;
        StubFrame* stub_frame = static_cast<StubFrame*>(frame);
        Code* code = stub_frame->LookupCode();
        // Only turbofanned builtins with a handler table can catch here.
        if (!code->IsCode() || code->kind() != Code::BUILTIN ||
            !code->handler_table_offset() || !code->is_turbofanned()) {
          break;
        }

        int stack_slots = 0;  // Will contain stack slot count of frame.
        int offset = stub_frame->LookupExceptionHandlerInTable(&stack_slots);
        if (offset < 0) break;

        // Compute the stack pointer from the frame pointer. This ensures
        // that argument slots on the stack are dropped as returning would.
        Address return_sp = frame->fp() +
                            StandardFrameConstants::kFixedFrameSizeAboveFp -
                            stack_slots * kPointerSize;

        return FoundHandler(nullptr, code->InstructionStart(), offset,
                            code->constant_pool(), return_sp, frame->fp());
      }

      case StackFrame::INTERPRETED: {
        // For interpreted frame we perform a range lookup in the handler
        // table.
        if (!catchable_by_js) break;
        InterpretedFrame* js_frame = static_cast<InterpretedFrame*>(frame);
        int register_slots = InterpreterFrameConstants::RegisterStackSlotCount(
            js_frame->GetBytecodeArray()->register_count());
        int context_reg = 0;  // Will contain register index holding context.
        int offset =
            js_frame->LookupExceptionHandlerInTable(&context_reg, nullptr);
        if (offset < 0) break;
        // Compute the stack pointer from the frame pointer. This ensures that
        // argument slots on the stack are dropped as returning would.
        // Note: This is only needed for interpreted frames that have been
        //       materialized by the deoptimizer. If there is a handler frame
        //       in between then {frame->sp()} would already be correct.
        Address return_sp = frame->fp() -
                            InterpreterFrameConstants::kFixedFrameSizeFromFp -
                            register_slots * kPointerSize;

        // Patch the bytecode offset in the interpreted frame to reflect the
        // position of the exception handler. The special builtin below will
        // take care of continuing to dispatch at that position. Also restore
        // the correct context for the handler from the interpreter register.
        Context* context =
            Context::cast(js_frame->ReadInterpreterRegister(context_reg));
        js_frame->PatchBytecodeOffset(static_cast<int>(offset));

        Code* code =
            builtins()->builtin(Builtins::kInterpreterEnterBytecodeDispatch);
        return FoundHandler(context, code->InstructionStart(), 0,
                            code->constant_pool(), return_sp, frame->fp());
      }

      case StackFrame::BUILTIN:
        // For builtin frames we are guaranteed not to find a handler.
        if (catchable_by_js) {
          CHECK_EQ(-1,
                   JavaScriptFrame::cast(frame)->LookupExceptionHandlerInTable(
                       nullptr, nullptr));
        }
        break;

      case StackFrame::WASM_INTERPRETER_ENTRY: {
        if (trap_handler::IsThreadInWasm()) {
          trap_handler::ClearThreadInWasm();
        }
        WasmInterpreterEntryFrame* interpreter_frame =
            WasmInterpreterEntryFrame::cast(frame);
        // TODO(wasm): Implement try-catch in the interpreter.
        interpreter_frame->debug_info()->Unwind(frame->fp());
      } break;

      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: {
        // Builtin continuation frames with catch can handle exceptions.
        if (!catchable_by_js) break;
        JavaScriptBuiltinContinuationWithCatchFrame* js_frame =
            JavaScriptBuiltinContinuationWithCatchFrame::cast(frame);
        js_frame->SetException(exception);

        // Reconstruct the stack pointer from the frame pointer.
        Address return_sp = js_frame->fp() - js_frame->GetSPToFPDelta();
        Code* code = js_frame->LookupCode();
        return FoundHandler(nullptr, code->InstructionStart(), 0,
                            code->constant_pool(), return_sp, frame->fp());
      } break;

      default:
        // All other types can not handle exception.
        break;
    }

    if (frame->is_optimized()) {
      // Remove per-frame stored materialized objects.
      bool removed = materialized_object_store_->Remove(frame->fp());
      USE(removed);
      // If there were any materialized objects, the code should be
      // marked for deopt.
      DCHECK_IMPLIES(removed, frame->LookupCode()->marked_for_deoptimization());
    }
  }

  UNREACHABLE();
}
1491
namespace {
// Predicts whether an exception thrown with |frame| on top would be caught
// by that frame, and how. For optimized frames the optimized handler table
// carries no prediction info, so the (possibly inlined) summarized frames'
// unoptimized handler tables are consulted instead, innermost first.
HandlerTable::CatchPrediction PredictException(JavaScriptFrame* frame) {
  HandlerTable::CatchPrediction prediction;
  if (frame->is_optimized()) {
    if (frame->LookupExceptionHandlerInTable(nullptr, nullptr) > 0) {
      // This optimized frame will catch. Its handler table does not include
      // exception prediction, and we need to use the corresponding handler
      // tables on the unoptimized code objects.
      std::vector<FrameSummary> summaries;
      frame->Summarize(&summaries);
      for (size_t i = summaries.size(); i != 0; i--) {
        const FrameSummary& summary = summaries[i - 1];
        Handle<AbstractCode> code = summary.AsJavaScript().abstract_code();
        if (code->IsCode() && code->kind() == AbstractCode::BUILTIN) {
          prediction = code->GetCode()->GetBuiltinCatchPrediction();
          if (prediction == HandlerTable::UNCAUGHT) continue;
          return prediction;
        }

        // Must have been constructed from a bytecode array.
        CHECK_EQ(AbstractCode::INTERPRETED_FUNCTION, code->kind());
        int code_offset = summary.code_offset();
        HandlerTable table(code->GetBytecodeArray());
        int index = table.LookupRange(code_offset, nullptr, &prediction);
        if (index <= 0) continue;
        if (prediction == HandlerTable::UNCAUGHT) continue;
        return prediction;
      }
    }
  } else if (frame->LookupExceptionHandlerInTable(nullptr, &prediction) > 0) {
    return prediction;
  }
  return HandlerTable::UNCAUGHT;
}

// Maps a handler-table catch prediction onto the Isolate-level CatchType
// enum used by PredictExceptionCatcher.
Isolate::CatchType ToCatchType(HandlerTable::CatchPrediction prediction) {
  switch (prediction) {
    case HandlerTable::UNCAUGHT:
      return Isolate::NOT_CAUGHT;
    case HandlerTable::CAUGHT:
      return Isolate::CAUGHT_BY_JAVASCRIPT;
    case HandlerTable::PROMISE:
      return Isolate::CAUGHT_BY_PROMISE;
    case HandlerTable::DESUGARING:
      return Isolate::CAUGHT_BY_DESUGARING;
    case HandlerTable::ASYNC_AWAIT:
      return Isolate::CAUGHT_BY_ASYNC_AWAIT;
    default:
      UNREACHABLE();
  }
}
}  // anonymous namespace
1544
// Predicts what would catch an exception thrown right now, by walking the
// whole stack and checking each frame's handler table (without actually
// unwinding). Used e.g. by Throw() for --abort-on-uncaught-exception.
Isolate::CatchType Isolate::PredictExceptionCatcher() {
  Address external_handler = thread_local_top()->try_catch_handler_address();
  if (IsExternalHandlerOnTop(nullptr)) return CAUGHT_BY_EXTERNAL;

  // Search for an exception handler by performing a full walk over the stack.
  for (StackFrameIterator iter(this); !iter.done(); iter.Advance()) {
    StackFrame* frame = iter.frame();

    switch (frame->type()) {
      case StackFrame::ENTRY:
      case StackFrame::CONSTRUCT_ENTRY: {
        Address entry_handler = frame->top_handler()->next()->address();
        // The exception has been externally caught if and only if there is an
        // external handler which is on top of the top-most JS_ENTRY handler.
        // Verbose TryCatch handlers do not count as catching here.
        if (external_handler != kNullAddress &&
            !try_catch_handler()->is_verbose_) {
          if (entry_handler == kNullAddress ||
              entry_handler > external_handler) {
            return CAUGHT_BY_EXTERNAL;
          }
        }
      } break;

      // For JavaScript frames we perform a lookup in the handler table.
      case StackFrame::OPTIMIZED:
      case StackFrame::INTERPRETED:
      case StackFrame::BUILTIN: {
        JavaScriptFrame* js_frame = JavaScriptFrame::cast(frame);
        Isolate::CatchType prediction = ToCatchType(PredictException(js_frame));
        if (prediction == NOT_CAUGHT) break;
        return prediction;
      } break;

      case StackFrame::STUB: {
        Handle<Code> code(frame->LookupCode());
        // Mirrors the STUB case in UnwindAndFindHandler: only turbofanned
        // builtins with a handler table can catch.
        if (!code->IsCode() || code->kind() != Code::BUILTIN ||
            !code->handler_table_offset() || !code->is_turbofanned()) {
          break;
        }

        CatchType prediction = ToCatchType(code->GetBuiltinCatchPrediction());
        if (prediction != NOT_CAUGHT) return prediction;
      } break;

      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: {
        Handle<Code> code(frame->LookupCode());
        CatchType prediction = ToCatchType(code->GetBuiltinCatchPrediction());
        if (prediction != NOT_CAUGHT) return prediction;
      } break;

      default:
        // All other types can not handle exception.
        break;
    }
  }

  // Handler not found.
  return NOT_CAUGHT;
}
1604
// Throws the pre-allocated "illegal access" exception; optionally dumps the
// stack first when --stack-trace-on-illegal is enabled.
Object* Isolate::ThrowIllegalOperation() {
  if (FLAG_stack_trace_on_illegal) PrintStack(stdout);
  return Throw(heap()->illegal_access_string());
}
1609
1610
// Throws |exception| immediately (so uncaught-error reporting and TryCatch
// propagation happen now) and then moves it from the pending slot to the
// scheduled slot to be re-thrown later via PromoteScheduledException.
void Isolate::ScheduleThrow(Object* exception) {
  // When scheduling a throw we first throw the exception to get the
  // error reporting if it is uncaught before rescheduling it.
  Throw(exception);
  PropagatePendingExceptionToExternalTryCatch();
  if (has_pending_exception()) {
    thread_local_top()->scheduled_exception_ = pending_exception();
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
  }
}
1622
1623
// Restores the message captured by |handler| back into the pending-message
// slot, so message reporting survives a TryCatch re-throw. Only valid for
// the active, rethrowing, message-capturing TryCatch (see DCHECKs).
void Isolate::RestorePendingMessageFromTryCatch(v8::TryCatch* handler) {
  DCHECK(handler == try_catch_handler());
  DCHECK(handler->HasCaught());
  DCHECK(handler->rethrow_);
  DCHECK(handler->capture_message_);
  Object* message = reinterpret_cast<Object*>(handler->message_obj_);
  DCHECK(message->IsJSMessageObject() || message->IsTheHole(this));
  thread_local_top()->pending_message_obj_ = message;
}
1633
1634
// Drops the scheduled exception (and its pending message) if it is the one
// recorded in |handler|. Termination exceptions are never cancelled this way
// (asserted below).
void Isolate::CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler) {
  DCHECK(has_scheduled_exception());
  if (scheduled_exception() == handler->exception_) {
    DCHECK(scheduled_exception() != heap()->termination_exception());
    clear_scheduled_exception();
  }
  if (thread_local_top_.pending_message_obj_ == handler->message_obj_) {
    clear_pending_message();
  }
}
1645
1646
// Moves the scheduled exception back into the pending slot via ReThrow (so
// no duplicate message/reporting occurs) and returns the exception sentinel.
Object* Isolate::PromoteScheduledException() {
  Object* thrown = scheduled_exception();
  clear_scheduled_exception();
  // Re-throw the exception to avoid getting repeated error reporting.
  return ReThrow(thrown);
}
1653
1654
// Prints a user-facing stack trace of the current JS stack to |out|, one
// line per JavaScript frame (non-JS frames are skipped; empty formatted
// lines are suppressed).
void Isolate::PrintCurrentStackTrace(FILE* out) {
  for (StackTraceFrameIterator it(this); !it.done(); it.Advance()) {
    if (!it.is_javascript()) continue;

    HandleScope scope(this);
    JavaScriptFrame* frame = it.javascript_frame();

    Handle<Object> receiver(frame->receiver(), this);
    Handle<JSFunction> function(frame->function(), this);
    Handle<AbstractCode> code;
    int offset;
    if (frame->is_interpreted()) {
      // Interpreted frames: the position is a bytecode offset into the
      // frame's bytecode array.
      InterpretedFrame* interpreted_frame = InterpretedFrame::cast(frame);
      code = handle(AbstractCode::cast(interpreted_frame->GetBytecodeArray()),
                    this);
      offset = interpreted_frame->GetBytecodeOffset();
    } else {
      // Compiled frames: the position is the pc offset into the code object.
      code = handle(AbstractCode::cast(frame->LookupCode()), this);
      offset = static_cast<int>(frame->pc() - code->InstructionStart());
    }

    JSStackFrame site(this, receiver, function, code, offset);
    Handle<String> line = site.ToString().ToHandleChecked();
    if (line->length() > 0) {
      line->PrintOn(out);
      PrintF(out, "\n");
    }
  }
}
1684
// Computes a message location from the top-most stack frame. Returns false
// when there is no frame or the frame's script has no source available.
bool Isolate::ComputeLocation(MessageLocation* target) {
  StackTraceFrameIterator it(this);
  if (it.done()) return false;
  StandardFrame* frame = it.frame();
  // Compute the location from the function and the relocation info of the
  // baseline code. For optimized code this will use the deoptimization
  // information to get canonical location information.
  std::vector<FrameSummary> frames;
  frame->Summarize(&frames);
  // The last summary corresponds to the top-most (possibly inlined) frame.
  FrameSummary& summary = frames.back();
  int pos = summary.SourcePosition();
  Handle<SharedFunctionInfo> shared;
  Handle<Object> script = summary.script();
  if (!script->IsScript() ||
      (Script::cast(*script)->source()->IsUndefined(this))) {
    return false;
  }

  if (summary.IsJavaScript()) {
    shared = handle(summary.AsJavaScript().function()->shared());
  }
  // The reported location covers one character starting at |pos|.
  *target = MessageLocation(Handle<Script>::cast(script), pos, pos + 1, shared);
  return true;
}
1709
1710 bool Isolate::ComputeLocationFromException(MessageLocation* target,
1711 Handle<Object> exception) {
1712 if (!exception->IsJSObject()) return false;
1713
1714 Handle<Name> start_pos_symbol = factory()->error_start_pos_symbol();
1715 Handle<Object> start_pos = JSReceiver::GetDataProperty(
1716 Handle<JSObject>::cast(exception), start_pos_symbol);
1717 if (!start_pos->IsSmi()) return false;
1718 int start_pos_value = Handle<Smi>::cast(start_pos)->value();
1719
1720 Handle<Name> end_pos_symbol = factory()->error_end_pos_symbol();
1721 Handle<Object> end_pos = JSReceiver::GetDataProperty(
1722 Handle<JSObject>::cast(exception), end_pos_symbol);
1723 if (!end_pos->IsSmi()) return false;
1724 int end_pos_value = Handle<Smi>::cast(end_pos)->value();
1725
1726 Handle<Name> script_symbol = factory()->error_script_symbol();
1727 Handle<Object> script = JSReceiver::GetDataProperty(
1728 Handle<JSObject>::cast(exception), script_symbol);
1729 if (!script->IsScript()) return false;
1730
1731 Handle<Script> cast_script(Script::cast(*script));
1732 *target = MessageLocation(cast_script, start_pos_value, end_pos_value);
1733 return true;
1734 }
1735
1736
// Computes the message location from the simple stack trace attached to
// |exception| under the stack trace symbol. Uses the first wasm frame or the
// first debuggable JavaScript frame with available script source.
bool Isolate::ComputeLocationFromStackTrace(MessageLocation* target,
                                            Handle<Object> exception) {
  if (!exception->IsJSObject()) return false;
  Handle<Name> key = factory()->stack_trace_symbol();
  Handle<Object> property =
      JSReceiver::GetDataProperty(Handle<JSObject>::cast(exception), key);
  if (!property->IsJSArray()) return false;
  Handle<JSArray> simple_stack_trace = Handle<JSArray>::cast(property);

  Handle<FrameArray> elements(FrameArray::cast(simple_stack_trace->elements()));

  const int frame_count = elements->FrameCount();
  for (int i = 0; i < frame_count; i++) {
    if (elements->IsWasmFrame(i) || elements->IsAsmJsWasmFrame(i)) {
      // Wasm frame: translate the code offset into a source position within
      // the wasm module's script.
      Handle<WasmInstanceObject> instance(elements->WasmInstance(i));
      uint32_t func_index =
          static_cast<uint32_t>(elements->WasmFunctionIndex(i)->value());
      int code_offset = elements->Offset(i)->value();

      // TODO(titzer): store a reference to the code object in FrameArray;
      // a second lookup here could lead to inconsistency.
      int byte_offset =
          FrameSummary::WasmCompiledFrameSummary::GetWasmSourcePosition(
              instance->compiled_module()->GetNativeModule()->code(func_index),
              code_offset);

      // asm.js frames paused at a ToNumber conversion report a distinct
      // source position.
      bool is_at_number_conversion =
          elements->IsAsmJsWasmFrame(i) &&
          elements->Flags(i)->value() & FrameArray::kAsmJsAtNumberConversion;
      int pos = WasmSharedModuleData::GetSourcePosition(
          handle(instance->module_object()->shared(), this), func_index,
          byte_offset, is_at_number_conversion);
      Handle<Script> script(instance->module_object()->shared()->script());

      *target = MessageLocation(script, pos, pos + 1);
      return true;
    }

    // JavaScript frame: skip functions that are not subject to debugging.
    Handle<JSFunction> fun = handle(elements->Function(i), this);
    if (!fun->shared()->IsSubjectToDebugging()) continue;

    Object* script = fun->shared()->script();
    if (script->IsScript() &&
        !(Script::cast(script)->source()->IsUndefined(this))) {
      AbstractCode* abstract_code = elements->Code(i);
      const int code_offset = elements->Offset(i)->value();
      const int pos = abstract_code->SourcePosition(code_offset);

      Handle<Script> casted_script(Script::cast(script));
      *target = MessageLocation(casted_script, pos, pos + 1);
      return true;
    }
  }
  return false;
}
1792
1793
// Creates a JSMessageObject describing an uncaught |exception|. If |location|
// is nullptr, a location is computed from the exception object, its attached
// stack trace, or the current stack — in that order.
Handle<JSMessageObject> Isolate::CreateMessage(Handle<Object> exception,
                                               MessageLocation* location) {
  Handle<FixedArray> stack_trace_object;
  if (capture_stack_trace_for_uncaught_exceptions_) {
    if (exception->IsJSError()) {
      // We fetch the stack trace that corresponds to this error object.
      // If the lookup fails, the exception is probably not a valid Error
      // object. In that case, we fall through and capture the stack trace
      // at this throw site.
      stack_trace_object =
          GetDetailedStackTrace(Handle<JSObject>::cast(exception));
    }
    if (stack_trace_object.is_null()) {
      // Not an error object, we capture stack and location at throw site.
      stack_trace_object = CaptureCurrentStackTrace(
          stack_trace_for_uncaught_exceptions_frame_limit_,
          stack_trace_for_uncaught_exceptions_options_);
    }
  }
  MessageLocation computed_location;
  if (location == nullptr &&
      (ComputeLocationFromException(&computed_location, exception) ||
       ComputeLocationFromStackTrace(&computed_location, exception) ||
       ComputeLocation(&computed_location))) {
    location = &computed_location;
  }

  return MessageHandler::MakeMessageObject(
      this, MessageTemplate::kUncaughtException, location, exception,
      stack_trace_object);
}
1825
1826
1827 bool Isolate::IsJavaScriptHandlerOnTop(Object* exception) {
1828 DCHECK_NE(heap()->the_hole_value(), exception);
1829
1830 // For uncatchable exceptions, the JavaScript handler cannot be on top.
1831 if (!is_catchable_by_javascript(exception)) return false;
1832
1833 // Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
1834 Address entry_handler = Isolate::handler(thread_local_top());
1835 if (entry_handler == kNullAddress) return false;
1836
1837 // Get the address of the external handler so we can compare the address to
1838 // determine which one is closer to the top of the stack.
1839 Address external_handler = thread_local_top()->try_catch_handler_address();
1840 if (external_handler == kNullAddress) return true;
1841
1842 // The exception has been externally caught if and only if there is an
1843 // external handler which is on top of the top-most JS_ENTRY handler.
1844 //
1845 // Note, that finally clauses would re-throw an exception unless it's aborted
1846 // by jumps in control flow (like return, break, etc.) and we'll have another
1847 // chance to set proper v8::TryCatch later.
1848 return (entry_handler < external_handler);
1849 }
1850
1851
1852 bool Isolate::IsExternalHandlerOnTop(Object* exception) {
1853 DCHECK_NE(heap()->the_hole_value(), exception);
1854
1855 // Get the address of the external handler so we can compare the address to
1856 // determine which one is closer to the top of the stack.
1857 Address external_handler = thread_local_top()->try_catch_handler_address();
1858 if (external_handler == kNullAddress) return false;
1859
1860 // For uncatchable exceptions, the external handler is always on top.
1861 if (!is_catchable_by_javascript(exception)) return true;
1862
1863 // Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
1864 Address entry_handler = Isolate::handler(thread_local_top());
1865 if (entry_handler == kNullAddress) return true;
1866
1867 // The exception has been externally caught if and only if there is an
1868 // external handler which is on top of the top-most JS_ENTRY handler.
1869 //
1870 // Note, that finally clauses would re-throw an exception unless it's aborted
1871 // by jumps in control flow (like return, break, etc.) and we'll have another
1872 // chance to set proper v8::TryCatch later.
1873 return (entry_handler > external_handler);
1874 }
1875
// Reports the pending message to the message handlers and clears it.
// |report_externally| indicates that the exception was propagated to an
// external v8::TryCatch, in which case reporting follows its verbosity.
void Isolate::ReportPendingMessagesImpl(bool report_externally) {
  Object* exception = pending_exception();

  // Clear the pending message object early to avoid endless recursion.
  Object* message_obj = thread_local_top_.pending_message_obj_;
  clear_pending_message();

  // For uncatchable exceptions we do nothing. If needed, the exception and the
  // message have already been propagated to v8::TryCatch.
  if (!is_catchable_by_javascript(exception)) return;

  // Determine whether the message needs to be reported to all message handlers
  // depending on whether an external v8::TryCatch or an internal JavaScript
  // handler is on top.
  bool should_report_exception;
  if (report_externally) {
    // Only report the exception if the external handler is verbose.
    should_report_exception = try_catch_handler()->is_verbose_;
  } else {
    // Report the exception if it isn't caught by JavaScript code.
    should_report_exception = !IsJavaScriptHandlerOnTop(exception);
  }

  // Actually report the pending message to all message handlers.
  if (!message_obj->IsTheHole(this) && should_report_exception) {
    HandleScope scope(this);
    Handle<JSMessageObject> message(JSMessageObject::cast(message_obj), this);
    Handle<JSValue> script_wrapper(JSValue::cast(message->script()), this);
    Handle<Script> script(Script::cast(script_wrapper->value()), this);
    int start_pos = message->start_position();
    int end_pos = message->end_position();
    MessageLocation location(script, start_pos, end_pos);
    MessageHandler::ReportMessage(this, &location, message);
  }
}
1911
// Reports the message for the pending exception, after first attempting to
// propagate the exception to an external v8::TryCatch handler.
void Isolate::ReportPendingMessages() {
  DCHECK(AllowExceptions::IsAllowed(this));

  // The embedder might run script in response to an exception.
  AllowJavascriptExecutionDebugOnly allow_script(this);

  Object* exception = pending_exception();

  // Try to propagate the exception to an external v8::TryCatch handler. If
  // propagation was unsuccessful, then we will get another chance at reporting
  // the pending message if the exception is re-thrown.
  bool has_been_propagated = PropagatePendingExceptionToExternalTryCatch();
  if (!has_been_propagated) return;

  ReportPendingMessagesImpl(IsExternalHandlerOnTop(exception));
}
1928
// Variant of ReportPendingMessages used when the pending exception is being
// re-raised from JavaScript. Unlike the regular path, the handler comparisons
// here skip the top-most JS_ENTRY handler (via StackHandler::next()) before
// comparing it against the external v8::TryCatch address.
void Isolate::ReportPendingMessagesFromJavaScript() {
  DCHECK(AllowExceptions::IsAllowed(this));

  auto IsHandledByJavaScript = [=]() {
    // In this situation, the exception is always a non-terminating exception.

    // Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
    Address entry_handler = Isolate::handler(thread_local_top());
    DCHECK_NE(entry_handler, kNullAddress);
    // Skip the top-most handler; compare its successor instead.
    entry_handler =
        reinterpret_cast<StackHandler*>(entry_handler)->next()->address();

    // Get the address of the external handler so we can compare the address to
    // determine which one is closer to the top of the stack.
    Address external_handler = thread_local_top()->try_catch_handler_address();
    if (external_handler == kNullAddress) return true;

    return (entry_handler < external_handler);
  };

  auto IsHandledExternally = [=]() {
    Address external_handler = thread_local_top()->try_catch_handler_address();
    if (external_handler == kNullAddress) return false;

    // Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
    Address entry_handler = Isolate::handler(thread_local_top());
    DCHECK_NE(entry_handler, kNullAddress);
    // Skip the top-most handler; compare its successor instead.
    entry_handler =
        reinterpret_cast<StackHandler*>(entry_handler)->next()->address();
    return (entry_handler > external_handler);
  };

  // Mirrors PropagatePendingExceptionToExternalTryCatch for this re-throw
  // path: returns false when a JavaScript handler will take the exception,
  // true when reporting should proceed.
  auto PropagateToExternalHandler = [=]() {
    if (IsHandledByJavaScript()) {
      thread_local_top_.external_caught_exception_ = false;
      return false;
    }

    if (!IsHandledExternally()) {
      thread_local_top_.external_caught_exception_ = false;
      return true;
    }

    thread_local_top_.external_caught_exception_ = true;
    v8::TryCatch* handler = try_catch_handler();
    DCHECK(thread_local_top_.pending_message_obj_->IsJSMessageObject() ||
           thread_local_top_.pending_message_obj_->IsTheHole(this));
    handler->can_continue_ = true;
    handler->has_terminated_ = false;
    handler->exception_ = pending_exception();
    // Propagate to the external try-catch only if we got an actual message.
    if (thread_local_top_.pending_message_obj_->IsTheHole(this)) return true;

    handler->message_obj_ = thread_local_top_.pending_message_obj_;
    return true;
  };

  // Try to propagate to an external v8::TryCatch handler.
  if (!PropagateToExternalHandler()) return;

  ReportPendingMessagesImpl(true);
}
1991
1992 MessageLocation Isolate::GetMessageLocation() {
1993 DCHECK(has_pending_exception());
1994
1995 if (thread_local_top_.pending_exception_ != heap()->termination_exception() &&
1996 !thread_local_top_.pending_message_obj_->IsTheHole(this)) {
1997 Handle<JSMessageObject> message_obj(
1998 JSMessageObject::cast(thread_local_top_.pending_message_obj_), this);
1999 Handle<JSValue> script_wrapper(JSValue::cast(message_obj->script()), this);
2000 Handle<Script> script(Script::cast(script_wrapper->value()), this);
2001 int start_pos = message_obj->start_position();
2002 int end_pos = message_obj->end_position();
2003 return MessageLocation(script, start_pos, end_pos);
2004 }
2005
2006 return MessageLocation();
2007 }
2008
2009
// Decides the fate of the pending exception when unwinding to the API:
// clears it (returns false) or moves it to the scheduled slot (returns true).
// |is_bottom_call| marks the outermost API invocation, where an exception
// must not be rescheduled.
bool Isolate::OptionalRescheduleException(bool is_bottom_call) {
  DCHECK(has_pending_exception());
  PropagatePendingExceptionToExternalTryCatch();

  bool is_termination_exception =
      pending_exception() == heap_.termination_exception();

  // Do not reschedule the exception if this is the bottom call.
  bool clear_exception = is_bottom_call;

  if (is_termination_exception) {
    if (is_bottom_call) {
      thread_local_top()->external_caught_exception_ = false;
      clear_pending_exception();
      return false;
    }
  } else if (thread_local_top()->external_caught_exception_) {
    // If the exception is externally caught, clear it if there are no
    // JavaScript frames on the way to the C++ frame that has the
    // external handler.
    DCHECK_NE(thread_local_top()->try_catch_handler_address(), kNullAddress);
    Address external_handler_address =
        thread_local_top()->try_catch_handler_address();
    JavaScriptFrameIterator it(this);
    if (it.done() || (it.frame()->sp() > external_handler_address)) {
      clear_exception = true;
    }
  }

  // Clear the exception if needed.
  if (clear_exception) {
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
    return false;
  }

  // Reschedule the exception.
  thread_local_top()->scheduled_exception_ = pending_exception();
  clear_pending_exception();
  return true;
}
2051
2052 void Isolate::PushPromise(Handle<JSObject> promise) {
2053 ThreadLocalTop* tltop = thread_local_top();
2054 PromiseOnStack* prev = tltop->promise_on_stack_;
2055 Handle<JSObject> global_promise = global_handles()->Create(*promise);
2056 tltop->promise_on_stack_ = new PromiseOnStack(global_promise, prev);
2057 }
2058
2059
2060 void Isolate::PopPromise() {
2061 ThreadLocalTop* tltop = thread_local_top();
2062 if (tltop->promise_on_stack_ == nullptr) return;
2063 PromiseOnStack* prev = tltop->promise_on_stack_->prev();
2064 Handle<Object> global_promise = tltop->promise_on_stack_->promise();
2065 delete tltop->promise_on_stack_;
2066 tltop->promise_on_stack_ = prev;
2067 global_handles()->Destroy(global_promise.location());
2068 }
2069
2070 namespace {
2071 bool InternalPromiseHasUserDefinedRejectHandler(Isolate* isolate,
2072 Handle<JSPromise> promise);
2073
2074 bool PromiseHandlerCheck(Isolate* isolate, Handle<JSReceiver> handler,
2075 Handle<JSReceiver> deferred_promise) {
2076 // Recurse to the forwarding Promise, if any. This may be due to
2077 // - await reaction forwarding to the throwaway Promise, which has
2078 // a dependency edge to the outer Promise.
2079 // - PromiseIdResolveHandler forwarding to the output of .then
2080 // - Promise.all/Promise.race forwarding to a throwaway Promise, which
2081 // has a dependency edge to the generated outer Promise.
2082 // Otherwise, this is a real reject handler for the Promise.
2083 Handle<Symbol> key = isolate->factory()->promise_forwarding_handler_symbol();
2084 Handle<Object> forwarding_handler = JSReceiver::GetDataProperty(handler, key);
2085 if (forwarding_handler->IsUndefined(isolate)) {
2086 return true;
2087 }
2088
2089 if (!deferred_promise->IsJSPromise()) {
2090 return true;
2091 }
2092
2093 return InternalPromiseHasUserDefinedRejectHandler(
2094 isolate, Handle<JSPromise>::cast(deferred_promise));
2095 }
2096
2097 bool InternalPromiseHasUserDefinedRejectHandler(Isolate* isolate,
2098 Handle<JSPromise> promise) {
2099 // If this promise was marked as being handled by a catch block
2100 // in an async function, then it has a user-defined reject handler.
2101 if (promise->handled_hint()) return true;
2102
2103 // If this Promise is subsumed by another Promise (a Promise resolved
2104 // with another Promise, or an intermediate, hidden, throwaway Promise
2105 // within async/await), then recurse on the outer Promise.
2106 // In this case, the dependency is one possible way that the Promise
2107 // could be resolved, so it does not subsume the other following cases.
2108 Handle<Symbol> key = isolate->factory()->promise_handled_by_symbol();
2109 Handle<Object> outer_promise_obj = JSObject::GetDataProperty(promise, key);
2110 if (outer_promise_obj->IsJSPromise() &&
2111 InternalPromiseHasUserDefinedRejectHandler(
2112 isolate, Handle<JSPromise>::cast(outer_promise_obj))) {
2113 return true;
2114 }
2115
2116 if (promise->status() == Promise::kPending) {
2117 for (Handle<Object> current(promise->reactions(), isolate);
2118 !current->IsSmi();) {
2119 Handle<PromiseReaction> reaction = Handle<PromiseReaction>::cast(current);
2120 Handle<HeapObject> promise_or_capability(
2121 reaction->promise_or_capability(), isolate);
2122 Handle<JSPromise> promise = Handle<JSPromise>::cast(
2123 promise_or_capability->IsJSPromise()
2124 ? promise_or_capability
2125 : handle(Handle<PromiseCapability>::cast(promise_or_capability)
2126 ->promise(),
2127 isolate));
2128 if (reaction->reject_handler()->IsUndefined(isolate)) {
2129 if (InternalPromiseHasUserDefinedRejectHandler(isolate, promise)) {
2130 return true;
2131 }
2132 } else {
2133 Handle<JSReceiver> current_handler(
2134 JSReceiver::cast(reaction->reject_handler()), isolate);
2135 if (PromiseHandlerCheck(isolate, current_handler, promise)) {
2136 return true;
2137 }
2138 }
2139 current = handle(reaction->next(), isolate);
2140 }
2141 }
2142
2143 return false;
2144 }
2145
2146 } // namespace
2147
2148 bool Isolate::PromiseHasUserDefinedRejectHandler(Handle<Object> promise) {
2149 if (!promise->IsJSPromise()) return false;
2150 return InternalPromiseHasUserDefinedRejectHandler(
2151 this, Handle<JSPromise>::cast(promise));
2152 }
2153
// Walks the stack on throw to find the promise (if any) whose rejection the
// current exception corresponds to, based on each frame's catch prediction.
// Returns undefined when the exception is not caught or is caught externally.
Handle<Object> Isolate::GetPromiseOnStackOnThrow() {
  Handle<Object> undefined = factory()->undefined_value();
  ThreadLocalTop* tltop = thread_local_top();
  if (tltop->promise_on_stack_ == nullptr) return undefined;
  // Find the top-most try-catch or try-finally handler.
  CatchType prediction = PredictExceptionCatcher();
  if (prediction == NOT_CAUGHT || prediction == CAUGHT_BY_EXTERNAL) {
    return undefined;
  }
  Handle<Object> retval = undefined;
  PromiseOnStack* promise_on_stack = tltop->promise_on_stack_;
  for (StackFrameIterator it(this); !it.done(); it.Advance()) {
    StackFrame* frame = it.frame();
    HandlerTable::CatchPrediction catch_prediction;
    if (frame->is_java_script()) {
      catch_prediction = PredictException(JavaScriptFrame::cast(frame));
    } else if (frame->type() == StackFrame::STUB) {
      // Only turbofanned builtin stubs with a handler table participate in
      // catch prediction; skip everything else.
      Code* code = frame->LookupCode();
      if (!code->IsCode() || code->kind() != Code::BUILTIN ||
          !code->handler_table_offset() || !code->is_turbofanned()) {
        continue;
      }
      catch_prediction = code->GetBuiltinCatchPrediction();
    } else {
      continue;
    }

    switch (catch_prediction) {
      case HandlerTable::UNCAUGHT:
        continue;
      case HandlerTable::CAUGHT:
      case HandlerTable::DESUGARING:
        if (retval->IsJSPromise()) {
          // Caught the result of an inner async/await invocation.
          // Mark the inner promise as caught in the "synchronous case" so
          // that Debug::OnException will see. In the synchronous case,
          // namely in the code in an async function before the first
          // await, the function which has this exception event has not yet
          // returned, so the generated Promise has not yet been marked
          // by AsyncFunctionAwaitCaught with promiseHandledHintSymbol.
          Handle<JSPromise>::cast(retval)->set_handled_hint(true);
        }
        return retval;
      case HandlerTable::PROMISE:
        // The exception will reject the promise on top of the promise stack.
        return promise_on_stack
                   ? Handle<Object>::cast(promise_on_stack->promise())
                   : undefined;
      case HandlerTable::ASYNC_AWAIT: {
        // If in the initial portion of async/await, continue the loop to pop up
        // successive async/await stack frames until an asynchronous one with
        // dependents is found, or a non-async stack frame is encountered, in
        // order to handle the synchronous async/await catch prediction case:
        // assume that async function calls are awaited.
        if (!promise_on_stack) return retval;
        retval = promise_on_stack->promise();
        if (PromiseHasUserDefinedRejectHandler(retval)) {
          return retval;
        }
        promise_on_stack = promise_on_stack->prev();
        continue;
      }
    }
  }
  return retval;
}
2219
2220
2221 void Isolate::SetCaptureStackTraceForUncaughtExceptions(
2222 bool capture,
2223 int frame_limit,
2224 StackTrace::StackTraceOptions options) {
2225 capture_stack_trace_for_uncaught_exceptions_ = capture;
2226 stack_trace_for_uncaught_exceptions_frame_limit_ = frame_limit;
2227 stack_trace_for_uncaught_exceptions_options_ = options;
2228 }
2229
2230
// Installs the embedder callback consulted when deciding whether an uncaught
// exception should abort the process.
void Isolate::SetAbortOnUncaughtExceptionCallback(
    v8::Isolate::AbortOnUncaughtExceptionCallback callback) {
  abort_on_uncaught_exception_callback_ = callback;
}
2235
2236 namespace {
2237 void AdvanceWhileDebugContext(JavaScriptFrameIterator& it, Debug* debug) {
2238 if (!debug->in_debug_scope()) return;
2239
2240 while (!it.done()) {
2241 Context* context = Context::cast(it.frame()->context());
2242 if (context->native_context() == *debug->debug_context()) {
2243 it.Advance();
2244 } else {
2245 break;
2246 }
2247 }
2248 }
2249 } // namespace
2250
2251 Handle<Context> Isolate::GetCallingNativeContext() {
2252 JavaScriptFrameIterator it(this);
2253 AdvanceWhileDebugContext(it, debug_);
2254 if (it.done()) return Handle<Context>::null();
2255 JavaScriptFrame* frame = it.frame();
2256 Context* context = Context::cast(frame->context());
2257 return Handle<Context>(context->native_context(), this);
2258 }
2259
// Computes the HTML-spec "incumbent" context: the context of the most recent
// author function, falling back to a saved BackupIncumbentScope entry, and
// finally to the entered context.
Handle<Context> Isolate::GetIncumbentContext() {
  JavaScriptFrameIterator it(this);
  AdvanceWhileDebugContext(it, debug_);

  // 1st candidate: most-recently-entered author function's context
  // if it's newer than the last Context::BackupIncumbentScope entry.
  if (!it.done() &&
      static_cast<const void*>(it.frame()) >
          static_cast<const void*>(top_backup_incumbent_scope())) {
    Context* context = Context::cast(it.frame()->context());
    return Handle<Context>(context->native_context(), this);
  }

  // 2nd candidate: the last Context::Scope's incumbent context if any.
  if (top_backup_incumbent_scope()) {
    return Utils::OpenHandle(
        *top_backup_incumbent_scope()->backup_incumbent_context_);
  }

  // Last candidate: the entered context.
  // Given that there is no other author function is running, there must be
  // no cross-context function running, then the incumbent realm must match
  // the entry realm.
  v8::Local<v8::Context> entered_context =
      reinterpret_cast<v8::Isolate*>(this)->GetEnteredContext();
  return Utils::OpenHandle(*entered_context);
}
2287
// Saves this thread's per-isolate state into the archive buffer |to| and
// resets the live thread-local state. Returns the pointer just past the
// written region.
char* Isolate::ArchiveThread(char* to) {
  MemCopy(to, reinterpret_cast<char*>(thread_local_top()),
          sizeof(ThreadLocalTop));
  InitializeThreadLocal();
  // Exceptions and messages belong to the archived state; clear the live
  // copies so the next thread starts clean.
  clear_pending_exception();
  clear_pending_message();
  clear_scheduled_exception();
  return to + sizeof(ThreadLocalTop);
}
2297
2298
// Restores per-isolate thread state previously saved by ArchiveThread from
// the buffer |from|. Returns the pointer just past the consumed region.
char* Isolate::RestoreThread(char* from) {
  MemCopy(reinterpret_cast<char*>(thread_local_top()), from,
          sizeof(ThreadLocalTop));
  // This might be just paranoia, but it seems to be needed in case a
  // thread_local_top_ is restored on a separate OS thread.
#ifdef USE_SIMULATOR
  thread_local_top()->simulator_ = Simulator::current(this);
#endif
  DCHECK(context() == nullptr || context()->IsContext());
  return from + sizeof(ThreadLocalTop);
}
2310
// The thread data table maps ThreadIds to per-thread data for this isolate.
Isolate::ThreadDataTable::ThreadDataTable() : table_() {}

Isolate::ThreadDataTable::~ThreadDataTable() {}
2314
// Runs and deletes all registered managed-pointer destructors. The list head
// is detached before iterating; the outer loop re-checks it so that
// destructors which register new managed pointers are also processed.
void Isolate::ReleaseSharedPtrs() {
  while (managed_ptr_destructors_head_) {
    ManagedPtrDestructor* l = managed_ptr_destructors_head_;
    ManagedPtrDestructor* n = nullptr;
    managed_ptr_destructors_head_ = nullptr;
    for (; l != nullptr; l = n) {
      l->destructor_(l->shared_ptr_ptr_);
      // Read the next pointer before deleting the node.
      n = l->next_;
      delete l;
    }
  }
}
2327
2328 void Isolate::RegisterManagedPtrDestructor(ManagedPtrDestructor* destructor) {
2329 DCHECK_NULL(destructor->prev_);
2330 DCHECK_NULL(destructor->next_);
2331 if (managed_ptr_destructors_head_) {
2332 managed_ptr_destructors_head_->prev_ = destructor;
2333 }
2334 destructor->next_ = managed_ptr_destructors_head_;
2335 managed_ptr_destructors_head_ = destructor;
2336 }
2337
// Unlinks |destructor| from the doubly-linked destructor list. Does not run
// or delete it.
void Isolate::UnregisterManagedPtrDestructor(ManagedPtrDestructor* destructor) {
  if (destructor->prev_) {
    destructor->prev_->next_ = destructor->next_;
  } else {
    // No predecessor means this node must be the list head.
    DCHECK_EQ(destructor, managed_ptr_destructors_head_);
    managed_ptr_destructors_head_ = destructor->next_;
  }
  if (destructor->next_) destructor->next_->prev_ = destructor->prev_;
  destructor->prev_ = nullptr;
  destructor->next_ = nullptr;
}
2349
Isolate::PerIsolateThreadData::~PerIsolateThreadData() {
#if defined(USE_SIMULATOR)
  // In simulator builds each thread owns its simulator instance.
  delete simulator_;
#endif
}
2355
2356 Isolate::PerIsolateThreadData* Isolate::ThreadDataTable::Lookup(
2357 ThreadId thread_id) {
2358 auto t = table_.find(thread_id);
2359 if (t == table_.end()) return nullptr;
2360 return t->second;
2361 }
2362
2363
2364 void Isolate::ThreadDataTable::Insert(Isolate::PerIsolateThreadData* data) {
2365 bool inserted = table_.insert(std::make_pair(data->thread_id_, data)).second;
2366 CHECK(inserted);
2367 }
2368
2369
// Removes |data| from the table and deletes it; callers must not use the
// pointer afterwards.
void Isolate::ThreadDataTable::Remove(PerIsolateThreadData* data) {
  table_.erase(data->thread_id_);
  delete data;
}
2374
2375 void Isolate::ThreadDataTable::RemoveAllThreads() {
2376 for (auto& x : table_) {
2377 delete x.second;
2378 }
2379 table_.clear();
2380 }
2381
2382
#ifdef DEBUG
// Prints an isolate lifecycle event tagged with |tag| when --trace-isolates
// is set; expands to nothing in release builds.
#define TRACE_ISOLATE(tag)                    \
  do {                                        \
    if (FLAG_trace_isolates) {                \
      PrintF("Isolate %p (id %d)" #tag "\n",  \
             reinterpret_cast<void*>(this), id()); \
    }                                         \
  } while (false)
#else
#define TRACE_ISOLATE(tag)
#endif
2394
2395 class VerboseAccountingAllocator : public AccountingAllocator {
2396 public:
2397 VerboseAccountingAllocator(Heap* heap, size_t allocation_sample_bytes,
2398 size_t pool_sample_bytes)
2399 : heap_(heap),
2400 last_memory_usage_(0),
2401 last_pool_size_(0),
2402 nesting_deepth_(0),
2403 allocation_sample_bytes_(allocation_sample_bytes),
2404 pool_sample_bytes_(pool_sample_bytes) {}
2405
2406 v8::internal::Segment* GetSegment(size_t size) override {
2407 v8::internal::Segment* memory = AccountingAllocator::GetSegment(size);
2408 if (memory) {
2409 size_t malloced_current = GetCurrentMemoryUsage();
2410 size_t pooled_current = GetCurrentPoolSize();
2411
2412 if (last_memory_usage_.Value() + allocation_sample_bytes_ <
2413 malloced_current ||
2414 last_pool_size_.Value() + pool_sample_bytes_ < pooled_current) {
2415 PrintMemoryJSON(malloced_current, pooled_current);
2416 last_memory_usage_.SetValue(malloced_current);
2417 last_pool_size_.SetValue(pooled_current);
2418 }
2419 }
2420 return memory;
2421 }
2422
2423 void ReturnSegment(v8::internal::Segment* memory) override {
2424 AccountingAllocator::ReturnSegment(memory);
2425 size_t malloced_current = GetCurrentMemoryUsage();
2426 size_t pooled_current = GetCurrentPoolSize();
2427
2428 if (malloced_current + allocation_sample_bytes_ <
2429 last_memory_usage_.Value() ||
2430 pooled_current + pool_sample_bytes_ < last_pool_size_.Value()) {
2431 PrintMemoryJSON(malloced_current, pooled_current);
2432 last_memory_usage_.SetValue(malloced_current);
2433 last_pool_size_.SetValue(pooled_current);
2434 }
2435 }
2436
2437 void ZoneCreation(const Zone* zone) override {
2438 PrintZoneModificationSample(zone, "zonecreation");
2439 nesting_deepth_.Increment(1);
2440 }
2441
2442 void ZoneDestruction(const Zone* zone) override {
2443 nesting_deepth_.Decrement(1);
2444 PrintZoneModificationSample(zone, "zonedestruction");
2445 }
2446
2447 private:
2448 void PrintZoneModificationSample(const Zone* zone, const char* type) {
2449 PrintF(
2450 "{"
2451 "\"type\": \"%s\", "
2452 "\"isolate\": \"%p\", "
2453 "\"time\": %f, "
2454 "\"ptr\": \"%p\", "
2455 "\"name\": \"%s\", "
2456 "\"size\": %" PRIuS
2457 ","
2458 "\"nesting\": %" PRIuS "}\n",
2459 type, reinterpret_cast<void*>(heap_->isolate()),
2460 heap_->isolate()->time_millis_since_init(),
2461 reinterpret_cast<const void*>(zone), zone->name(),
2462 zone->allocation_size(), nesting_deepth_.Value());
2463 }
2464
2465 void PrintMemoryJSON(size_t malloced, size_t pooled) {
2466 // Note: Neither isolate, nor heap is locked, so be careful with accesses
2467 // as the allocator is potentially used on a concurrent thread.
2468 double time = heap_->isolate()->time_millis_since_init();
2469 PrintF(
2470 "{"
2471 "\"type\": \"zone\", "
2472 "\"isolate\": \"%p\", "
2473 "\"time\": %f, "
2474 "\"allocated\": %" PRIuS
2475 ","
2476 "\"pooled\": %" PRIuS "}\n",
2477 reinterpret_cast<void*>(heap_->isolate()), time, malloced, pooled);
2478 }
2479
2480 Heap* heap_;
2481 base::AtomicNumber<size_t> last_memory_usage_;
2482 base::AtomicNumber<size_t> last_pool_size_;
2483 base::AtomicNumber<size_t> nesting_deepth_;
2484 size_t allocation_sample_bytes_, pool_sample_bytes_;
2485 };
2486
#ifdef DEBUG
// Debug-only counter of isolates created but not yet disposed.
base::AtomicNumber<size_t> Isolate::non_disposed_isolates_;
#endif  // DEBUG
2490
// Constructs an isolate with all subsystem pointers null/zeroed; heavy-weight
// components are created later in Init(). Member order in this list follows
// declaration order in the class.
Isolate::Isolate()
    : embedder_data_(),
      entry_stack_(nullptr),
      stack_trace_nesting_level_(0),
      incomplete_message_(nullptr),
      bootstrapper_(nullptr),
      runtime_profiler_(nullptr),
      compilation_cache_(nullptr),
      logger_(nullptr),
      load_stub_cache_(nullptr),
      store_stub_cache_(nullptr),
      deoptimizer_data_(nullptr),
      deoptimizer_lazy_throw_(false),
      materialized_object_store_(nullptr),
      capture_stack_trace_for_uncaught_exceptions_(false),
      stack_trace_for_uncaught_exceptions_frame_limit_(0),
      stack_trace_for_uncaught_exceptions_options_(StackTrace::kOverview),
      context_slot_cache_(nullptr),
      descriptor_lookup_cache_(nullptr),
      handle_scope_implementer_(nullptr),
      unicode_cache_(nullptr),
      // With --trace-zone-stats the sampling/logging allocator is used;
      // otherwise the plain accounting allocator.
      allocator_(FLAG_trace_zone_stats ? new VerboseAccountingAllocator(
                                             &heap_, 256 * KB, 128 * KB)
                                       : new AccountingAllocator()),
      inner_pointer_to_code_cache_(nullptr),
      global_handles_(nullptr),
      eternal_handles_(nullptr),
      thread_manager_(nullptr),
      setup_delegate_(nullptr),
      regexp_stack_(nullptr),
      date_cache_(nullptr),
      call_descriptor_data_(nullptr),
      // TODO(bmeurer) Initialized lazily because it depends on flags; can
      // be fixed once the default isolate cleanup is done.
      random_number_generator_(nullptr),
      fuzzer_rng_(nullptr),
      rail_mode_(PERFORMANCE_ANIMATION),
      promise_hook_or_debug_is_active_(false),
      promise_hook_(nullptr),
      load_start_time_ms_(0),
      serializer_enabled_(false),
      has_fatal_error_(false),
      initialized_from_snapshot_(false),
      is_tail_call_elimination_enabled_(true),
      is_isolate_in_background_(false),
      cpu_profiler_(nullptr),
      heap_profiler_(nullptr),
      code_event_dispatcher_(new CodeEventDispatcher()),
      function_entry_hook_(nullptr),
      deferred_handles_head_(nullptr),
      optimizing_compile_dispatcher_(nullptr),
      stress_deopt_count_(0),
      force_slow_path_(false),
      next_optimization_id_(0),
#if V8_SFI_HAS_UNIQUE_ID
      next_unique_sfi_id_(0),
#endif
      is_running_microtasks_(false),
      use_counter_callback_(nullptr),
      basic_block_profiler_(nullptr),
      cancelable_task_manager_(new CancelableTaskManager()),
      abort_on_uncaught_exception_callback_(nullptr),
      total_regexp_code_generated_(0) {
  // Assign a process-unique id to this isolate.
  id_ = base::Relaxed_AtomicIncrement(&isolate_counter_, 1);
  TRACE_ISOLATE(constructor);

  memset(isolate_addresses_, 0,
         sizeof(isolate_addresses_[0]) * (kIsolateAddressCount + 1));

  // Wire up back-pointers so sub-objects can reach the isolate.
  heap_.isolate_ = this;
  stack_guard_.isolate_ = this;

  // ThreadManager is initialized early to support locking an isolate
  // before it is entered.
  thread_manager_ = new ThreadManager();
  thread_manager_->isolate_ = this;

#ifdef DEBUG
  non_disposed_isolates_.Increment(1);
#endif  // DEBUG

  handle_scope_data_.Initialize();

// Initialize all ISOLATE_INIT_LIST scalar members to their declared values.
#define ISOLATE_INIT_EXECUTE(type, name, initial_value) \
  name##_ = (initial_value);
  ISOLATE_INIT_LIST(ISOLATE_INIT_EXECUTE)
#undef ISOLATE_INIT_EXECUTE

// Zero-fill all ISOLATE_INIT_ARRAY_LIST array members.
#define ISOLATE_INIT_ARRAY_EXECUTE(type, name, length) \
  memset(name##_, 0, sizeof(type) * length);
  ISOLATE_INIT_ARRAY_LIST(ISOLATE_INIT_ARRAY_EXECUTE)
#undef ISOLATE_INIT_ARRAY_EXECUTE

  InitializeLoggingAndCounters();
  debug_ = new Debug(this);

  tracing_cpu_profiler_.reset(new TracingCpuProfilerImpl(this));

  init_memcopy_functions(this);
}
2591
2592
// Shuts the isolate down and destroys it. After the call, |this| is freed;
// the previously-current isolate/thread-data are restored on this thread.
void Isolate::TearDown() {
  TRACE_ISOLATE(tear_down);

  tracing_cpu_profiler_.reset();
  if (FLAG_stress_sampling_allocation_profiler > 0) {
    heap_profiler()->StopSamplingHeapProfiler();
  }

  // Temporarily set this isolate as current so that various parts of
  // the isolate can access it in their destructors without having a
  // direct pointer. We don't use Enter/Exit here to avoid
  // initializing the thread data.
  PerIsolateThreadData* saved_data = CurrentPerIsolateThreadData();
  DCHECK_EQ(base::Relaxed_Load(&isolate_key_created_), 1);
  Isolate* saved_isolate =
      reinterpret_cast<Isolate*>(base::Thread::GetThreadLocal(isolate_key_));
  SetIsolateThreadLocals(this, nullptr);

  Deinit();

  {
    base::LockGuard<base::Mutex> lock_guard(&thread_data_table_mutex_);
    thread_data_table_.RemoveAllThreads();
  }

#ifdef DEBUG
  non_disposed_isolates_.Decrement(1);
#endif  // DEBUG

  // |this| is destroyed here; only the stack-local saved values may be
  // touched afterwards.
  delete this;

  // Restore the previous current isolate.
  SetIsolateThreadLocals(saved_isolate, saved_data);
}
2627
2628
2629 void Isolate::ClearSerializerData() {
2630 delete external_reference_map_;
2631 external_reference_map_ = nullptr;
2632 }
2633
2634
// Releases all isolate-owned components that must be torn down before the
// object itself is destroyed. The order of operations here is significant:
// background work is stopped first, then the heap, then dependent caches.
void Isolate::Deinit() {
  TRACE_ISOLATE(deinit);

  debug()->Unload();

  // Stop concurrent recompilation before anything it may reference goes away.
  if (concurrent_recompilation_enabled()) {
    optimizing_compile_dispatcher_->Stop();
    delete optimizing_compile_dispatcher_;
    optimizing_compile_dispatcher_ = nullptr;
  }

  wasm_engine()->TearDown();

  // Finish outstanding background heap work before tearing anything down.
  heap_.mark_compact_collector()->EnsureSweepingCompleted();
  heap_.memory_allocator()->unmapper()->EnsureUnmappingCompleted();

  DumpAndResetStats();

  if (FLAG_print_deopt_stress) {
    PrintF(stdout, "=== Stress deopt counter: %u\n", stress_deopt_count_);
  }

  if (cpu_profiler_) {
    cpu_profiler_->DeleteAllProfiles();
  }

  // We must stop the logger before we tear down other components.
  sampler::Sampler* sampler = logger_->sampler();
  if (sampler && sampler->IsActive()) sampler->Stop();

  FreeThreadResources();

  // We start with the heap tear down so that releasing managed objects does
  // not cause a GC.
  heap_.StartTearDown();

  ReleaseSharedPtrs();

  delete deoptimizer_data_;
  deoptimizer_data_ = nullptr;
  builtins_.TearDown();
  bootstrapper_->TearDown();

  if (runtime_profiler_ != nullptr) {
    delete runtime_profiler_;
    runtime_profiler_ = nullptr;
  }

  delete basic_block_profiler_;
  basic_block_profiler_ = nullptr;

  delete heap_profiler_;
  heap_profiler_ = nullptr;

  compiler_dispatcher_->AbortAll(BlockingBehavior::kBlock);
  delete compiler_dispatcher_;
  compiler_dispatcher_ = nullptr;

  // Cancel remaining cancelable tasks before the heap disappears.
  cancelable_task_manager()->CancelAndWait();

  heap_.TearDown();
  logger_->TearDown();

#ifdef V8_EMBEDDED_BUILTINS
  if (DefaultEmbeddedBlob() == nullptr && embedded_blob() != nullptr) {
    // We own the embedded blob. Free it.
    uint8_t* data = const_cast<uint8_t*>(embedded_blob_);
    InstructionStream::FreeOffHeapInstructionStream(data, embedded_blob_size_);
  }
#endif

  delete interpreter_;
  interpreter_ = nullptr;

  delete ast_string_constants_;
  ast_string_constants_ = nullptr;

  delete cpu_profiler_;
  cpu_profiler_ = nullptr;

  code_event_dispatcher_.reset();

  delete root_index_map_;
  root_index_map_ = nullptr;

  ClearSerializerData();
}
2722
2723
// Updates both TLS slots for the calling thread: the current isolate and its
// per-thread data. Either argument may be null.
void Isolate::SetIsolateThreadLocals(Isolate* isolate,
                                     PerIsolateThreadData* data) {
  base::Thread::SetThreadLocal(isolate_key_, isolate);
  base::Thread::SetThreadLocal(per_isolate_thread_data_key_, data);
}
2729
2730
// Destroys the remaining isolate-owned objects. Deinit() must already have
// run (it is called from TearDown() before |delete this|); the deletion
// order here is significant for objects that reference each other.
Isolate::~Isolate() {
  TRACE_ISOLATE(destructor);

  // The entry stack must be empty when we get here.
  DCHECK(entry_stack_ == nullptr || entry_stack_->previous_item == nullptr);

  delete entry_stack_;
  entry_stack_ = nullptr;

  delete unicode_cache_;
  unicode_cache_ = nullptr;

  delete date_cache_;
  date_cache_ = nullptr;

  // Allocated with new[] in Init(), hence delete[].
  delete[] call_descriptor_data_;
  call_descriptor_data_ = nullptr;

  delete regexp_stack_;
  regexp_stack_ = nullptr;

  delete descriptor_lookup_cache_;
  descriptor_lookup_cache_ = nullptr;
  delete context_slot_cache_;
  context_slot_cache_ = nullptr;

  delete load_stub_cache_;
  load_stub_cache_ = nullptr;
  delete store_stub_cache_;
  store_stub_cache_ = nullptr;

  delete materialized_object_store_;
  materialized_object_store_ = nullptr;

  delete logger_;
  logger_ = nullptr;

  delete handle_scope_implementer_;
  handle_scope_implementer_ = nullptr;

  delete code_tracer();
  set_code_tracer(nullptr);

  delete compilation_cache_;
  compilation_cache_ = nullptr;
  delete bootstrapper_;
  bootstrapper_ = nullptr;
  delete inner_pointer_to_code_cache_;
  inner_pointer_to_code_cache_ = nullptr;

  delete thread_manager_;
  thread_manager_ = nullptr;

  delete global_handles_;
  global_handles_ = nullptr;
  delete eternal_handles_;
  eternal_handles_ = nullptr;

  delete string_stream_debug_object_cache_;
  string_stream_debug_object_cache_ = nullptr;

  delete random_number_generator_;
  random_number_generator_ = nullptr;

  delete fuzzer_rng_;
  fuzzer_rng_ = nullptr;

  delete debug_;
  debug_ = nullptr;

  delete cancelable_task_manager_;
  cancelable_task_manager_ = nullptr;

  // The allocator is deleted last since other components may still own
  // zones backed by it up to this point.
  delete allocator_;
  allocator_ = nullptr;
}
2807
2808
// Prepares this isolate's ThreadLocalTop for use on the current thread.
void Isolate::InitializeThreadLocal() {
  thread_local_top_.isolate_ = this;
  thread_local_top_.Initialize();
}
2813
// Marks the innermost external v8::TryCatch handler (if any) as having
// caught a termination: not continuable, terminated, null exception.
void Isolate::SetTerminationOnExternalTryCatch() {
  if (try_catch_handler() == nullptr) return;
  try_catch_handler()->can_continue_ = false;
  try_catch_handler()->has_terminated_ = true;
  try_catch_handler()->exception_ = heap()->null_value();
}
2820
// Copies the pending exception (and its message, if any) into the innermost
// external v8::TryCatch handler. Returns true when the exception is
// considered externally caught, i.e. no JavaScript handler sits on top.
bool Isolate::PropagatePendingExceptionToExternalTryCatch() {
  Object* exception = pending_exception();

  // A JavaScript try/catch will handle the exception first.
  if (IsJavaScriptHandlerOnTop(exception)) {
    thread_local_top_.external_caught_exception_ = false;
    return false;
  }

  // No external handler either: nothing to propagate to.
  if (!IsExternalHandlerOnTop(exception)) {
    thread_local_top_.external_caught_exception_ = false;
    return true;
  }

  thread_local_top_.external_caught_exception_ = true;
  if (!is_catchable_by_javascript(exception)) {
    // Terminations get dedicated handling on the TryCatch object.
    SetTerminationOnExternalTryCatch();
  } else {
    v8::TryCatch* handler = try_catch_handler();
    DCHECK(thread_local_top_.pending_message_obj_->IsJSMessageObject() ||
           thread_local_top_.pending_message_obj_->IsTheHole(this));
    handler->can_continue_ = true;
    handler->has_terminated_ = false;
    handler->exception_ = pending_exception();
    // Propagate to the external try-catch only if we got an actual message.
    if (thread_local_top_.pending_message_obj_->IsTheHole(this)) return true;

    handler->message_obj_ = thread_local_top_.pending_message_obj_;
  }
  return true;
}
2851
2852 bool Isolate::InitializeCounters() {
2853 if (async_counters_) return false;
2854 async_counters_ = std::make_shared<Counters>(this);
2855 return true;
2856 }
2857
2858 void Isolate::InitializeLoggingAndCounters() {
2859 if (logger_ == nullptr) {
2860 logger_ = new Logger(this);
2861 }
2862 InitializeCounters();
2863 }
2864
namespace {
// Prints one "<kind> Builtin, <name>, <size>" line per builtin to stdout.
// Used by --print-builtin-size (see Init()).
void PrintBuiltinSizes(Isolate* isolate) {
  Builtins* builtins = isolate->builtins();
  for (int i = 0; i < Builtins::builtin_count; i++) {
    const char* name = builtins->name(i);
    const char* kind = Builtins::KindNameOf(i);
    Code* code = builtins->builtin(i);
    PrintF(stdout, "%s Builtin, %s, %d\n", kind, name, code->InstructionSize());
  }
}

#ifdef V8_EMBEDDED_BUILTINS
// Replaces each isolate-independent builtin in the builtins table with a
// trampoline that jumps to its off-heap (embedded blob) code.
void CreateOffHeapTrampolines(Isolate* isolate) {
  DCHECK(isolate->serializer_enabled());
  DCHECK_NOT_NULL(isolate->embedded_blob());
  DCHECK_NE(0, isolate->embedded_blob_size());

  HandleScope scope(isolate);
  Builtins* builtins = isolate->builtins();

  EmbeddedData d = EmbeddedData::FromBlob();

  CodeSpaceMemoryModificationScope code_allocation(isolate->heap());
  for (int i = 0; i < Builtins::builtin_count; i++) {
    if (!Builtins::IsIsolateIndependent(i)) continue;

    Address instruction_start = d.InstructionStartOfBuiltin(i);
    Handle<Code> trampoline = isolate->factory()->NewOffHeapTrampolineFor(
        builtins->builtin_handle(i), instruction_start);

    // Note that references to the old, on-heap code objects may still exist on
    // the heap. This is fine for the sake of serialization, as serialization
    // will replace all of them with a builtin reference which is later
    // deserialized to point to the object within the builtins table.
    //
    // From this point onwards, some builtin code objects may be unreachable and
    // thus collected by the GC.
    builtins->set_builtin(i, *trampoline);

    if (isolate->logger()->is_listening_to_code_events() ||
        isolate->is_profiling()) {
      isolate->logger()->LogCodeObject(*trampoline);
    }
  }
}
#endif  // V8_EMBEDDED_BUILTINS
}  // namespace
2912
#ifdef V8_EMBEDDED_BUILTINS
// Builds the off-heap instruction stream for this (serializing) isolate,
// installs it as the embedded blob, and redirects isolate-independent
// builtins to it via trampolines.
void Isolate::PrepareEmbeddedBlobForSerialization() {
  // When preparing the embedded blob, ensure it doesn't exist yet.
  DCHECK_NULL(embedded_blob());
  DCHECK_NULL(DefaultEmbeddedBlob());
  DCHECK(serializer_enabled());

  // The isolate takes ownership of this pointer into an executable mmap'd
  // area. We muck around with const-casts because the standard use-case in
  // shipping builds is for embedded_blob_ to point into a read-only
  // .text-embedded section.
  uint8_t* data;
  uint32_t size;
  InstructionStream::CreateOffHeapInstructionStream(this, &data, &size);
  SetEmbeddedBlob(const_cast<const uint8_t*>(data), size);
  CreateOffHeapTrampolines(this);
}
#endif  // V8_EMBEDDED_BUILTINS
2931
// Fully initializes the isolate. |des| is null when bootstrapping from
// scratch; otherwise the heap state is read from the startup snapshot.
// Returns false on unrecoverable failure (e.g. heap setup OOM).
bool Isolate::Init(StartupDeserializer* des) {
  TRACE_ISOLATE(init);

  base::ElapsedTimer timer;
  if (des == nullptr && FLAG_profile_deserialization) timer.Start();

  time_millis_at_init_ = heap_.MonotonicallyIncreasingTimeInMs();

  stress_deopt_count_ = FLAG_deopt_every_n_times;
  force_slow_path_ = FLAG_force_slow_path;

  has_fatal_error_ = false;

  if (function_entry_hook() != nullptr) {
    // When function entry hooking is in effect, we have to create the code
    // stubs from scratch to get entry hooks, rather than loading the previously
    // generated stubs from disk.
    // If this assert fires, the initialization path has regressed.
    DCHECK_NULL(des);
  }

  // The initialization process does not handle memory exhaustion.
  AlwaysAllocateScope always_allocate(this);

  // Safe after setting Heap::isolate_, and initializing StackGuard
  heap_.SetStackLimits();

// Record the address of each named isolate field for external reference.
#define ASSIGN_ELEMENT(CamelName, hacker_name)                  \
  isolate_addresses_[IsolateAddressId::k##CamelName##Address] = \
      reinterpret_cast<Address>(hacker_name##_address());
  FOR_EACH_ISOLATE_ADDRESS_NAME(ASSIGN_ELEMENT)
#undef ASSIGN_ELEMENT

  // Allocate the per-isolate subsystems that Init() owns.
  compilation_cache_ = new CompilationCache(this);
  context_slot_cache_ = new ContextSlotCache();
  descriptor_lookup_cache_ = new DescriptorLookupCache();
  unicode_cache_ = new UnicodeCache();
  inner_pointer_to_code_cache_ = new InnerPointerToCodeCache(this);
  global_handles_ = new GlobalHandles(this);
  eternal_handles_ = new EternalHandles();
  bootstrapper_ = new Bootstrapper(this);
  handle_scope_implementer_ = new HandleScopeImplementer(this);
  load_stub_cache_ = new StubCache(this);
  store_stub_cache_ = new StubCache(this);
  materialized_object_store_ = new MaterializedObjectStore(this);
  regexp_stack_ = new RegExpStack();
  regexp_stack_->isolate_ = this;
  date_cache_ = new DateCache();
  call_descriptor_data_ =
      new CallInterfaceDescriptorData[CallDescriptors::NUMBER_OF_DESCRIPTORS];
  heap_profiler_ = new HeapProfiler(heap());
  interpreter_ = new interpreter::Interpreter(this);
  compiler_dispatcher_ =
      new CompilerDispatcher(this, V8::GetCurrentPlatform(), FLAG_stack_size);

#ifdef V8_EMBEDDED_BUILTINS
#ifdef V8_MULTI_SNAPSHOTS
  if (FLAG_untrusted_code_mitigations) {
    SetEmbeddedBlob(DefaultEmbeddedBlob(), DefaultEmbeddedBlobSize());
  } else {
    SetEmbeddedBlob(TrustedEmbeddedBlob(), TrustedEmbeddedBlobSize());
  }
#else
  SetEmbeddedBlob(DefaultEmbeddedBlob(), DefaultEmbeddedBlobSize());
#endif
#endif

  // Enable logging before setting up the heap
  logger_->SetUp(this);

  {  // NOLINT
    // Ensure that the thread has a valid stack guard. The v8::Locker object
    // will ensure this too, but we don't have to use lockers if we are only
    // using one thread.
    ExecutionAccess lock(this);
    stack_guard_.InitThread(lock);
  }

  // SetUp the object heap.
  DCHECK(!heap_.HasBeenSetUp());
  if (!heap_.SetUp()) {
    V8::FatalProcessOutOfMemory(this, "heap setup");
    return false;
  }

  // Setup the wasm engine. Currently, there's one per Isolate.
  wasm_engine_.reset(new wasm::WasmEngine(
      std::unique_ptr<wasm::WasmCodeManager>(new wasm::WasmCodeManager(
          reinterpret_cast<v8::Isolate*>(this), kMaxWasmCodeMemory))));
  wasm_engine_->memory_tracker()->SetAllocationResultHistogram(
      counters()->wasm_memory_allocation_result());
  wasm_engine_->memory_tracker()->SetAddressSpaceUsageHistogram(
      counters()->wasm_address_space_usage_mb());
  wasm_engine_->code_manager()->SetModuleCodeSizeHistogram(
      counters()->wasm_module_code_size_mb());

// Initialize the interface descriptors ahead of time.
#define INTERFACE_DESCRIPTOR(Name, ...) \
  { Name##Descriptor(this); }
  INTERFACE_DESCRIPTOR_LIST(INTERFACE_DESCRIPTOR)
#undef INTERFACE_DESCRIPTOR

  deoptimizer_data_ = new DeoptimizerData(heap());

  const bool create_heap_objects = (des == nullptr);
  if (setup_delegate_ == nullptr) {
    setup_delegate_ = new SetupIsolateDelegate(create_heap_objects);
  }

  if (!setup_delegate_->SetupHeap(&heap_)) {
    V8::FatalProcessOutOfMemory(this, "heap object creation");
    return false;
  }

  if (create_heap_objects) {
    // Terminate the partial snapshot cache so we can iterate.
    partial_snapshot_cache_.push_back(heap_.undefined_value());
  }

  InitializeThreadLocal();

  bootstrapper_->Initialize(create_heap_objects);

#ifdef V8_EMBEDDED_BUILTINS
  // When building a snapshot, collect builtin constants into a table that
  // is finalized right after the builtins are set up.
  if (create_heap_objects && serializer_enabled()) {
    builtins_constants_table_builder_ = new BuiltinsConstantsTableBuilder(this);
  }
#endif
  setup_delegate_->SetupBuiltins(this);
#ifdef V8_EMBEDDED_BUILTINS
  if (create_heap_objects && serializer_enabled()) {
    builtins_constants_table_builder_->Finalize();
    delete builtins_constants_table_builder_;
    builtins_constants_table_builder_ = nullptr;
  }
#endif  // V8_EMBEDDED_BUILTINS

  if (create_heap_objects) heap_.CreateFixedStubs();

  if (FLAG_log_internal_timer_events) {
    set_event_logger(Logger::DefaultEventLoggerSentinel);
  }

  if (FLAG_trace_turbo || FLAG_trace_turbo_graph) {
    PrintF("Concurrent recompilation has been disabled for tracing.\n");
  } else if (OptimizingCompileDispatcher::Enabled()) {
    optimizing_compile_dispatcher_ = new OptimizingCompileDispatcher(this);
  }

  // Initialize runtime profiler before deserialization, because collections may
  // occur, clearing/updating ICs.
  runtime_profiler_ = new RuntimeProfiler(this);

  // If we are deserializing, read the state into the now-empty heap.
  {
    AlwaysAllocateScope always_allocate(this);
    CodeSpaceMemoryModificationScope modification_scope(&heap_);

    if (!create_heap_objects) des->DeserializeInto(this);
    load_stub_cache_->Initialize();
    store_stub_cache_->Initialize();
    setup_delegate_->SetupInterpreter(interpreter_);

    heap_.NotifyDeserializationComplete();
  }
  delete setup_delegate_;
  setup_delegate_ = nullptr;

  if (FLAG_print_builtin_size) PrintBuiltinSizes(this);

  // Finish initialization of ThreadLocal after deserialization is done.
  clear_pending_exception();
  clear_pending_message();
  clear_scheduled_exception();

  // Deserializing may put strange things in the root array's copy of the
  // stack guard.
  heap_.SetStackLimits();

  // Quiet the heap NaN if needed on target platform.
  if (!create_heap_objects) Assembler::QuietNaN(heap_.nan_value());

  if (FLAG_trace_turbo) {
    // Create an empty file.
    std::ofstream(GetTurboCfgFileName().c_str(), std::ios_base::trunc);
  }

  // Verify that field offsets exposed through the public Internals class
  // stay in sync with the actual layout.
  CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, embedder_data_)),
           Internals::kIsolateEmbedderDataOffset);
  CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.roots_)),
           Internals::kIsolateRootsOffset);
  CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.external_memory_)),
           Internals::kExternalMemoryOffset);
  CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.external_memory_limit_)),
           Internals::kExternalMemoryLimitOffset);
  CHECK_EQ(static_cast<int>(
               OFFSET_OF(Isolate, heap_.external_memory_at_last_mark_compact_)),
           Internals::kExternalMemoryAtLastMarkCompactOffset);
  CHECK_EQ(
      static_cast<int>(OFFSET_OF(Isolate, heap_.external_reference_table_)),
      Internals::kIsolateRootsOffset +
          Heap::kRootsExternalReferenceTableOffset);

  {
    HandleScope scope(this);
    ast_string_constants_ = new AstStringConstants(this, heap()->HashSeed());
  }

  initialized_from_snapshot_ = (des != nullptr);

  if (!FLAG_inline_new) heap_.DisableInlineAllocation();

  if (FLAG_stress_sampling_allocation_profiler > 0) {
    uint64_t sample_interval = FLAG_stress_sampling_allocation_profiler;
    int stack_depth = 128;
    v8::HeapProfiler::SamplingFlags sampling_flags =
        v8::HeapProfiler::SamplingFlags::kSamplingForceGC;
    heap_profiler()->StartSamplingHeapProfiler(sample_interval, stack_depth,
                                               sampling_flags);
  }

  if (des == nullptr && FLAG_profile_deserialization) {
    double ms = timer.Elapsed().InMillisecondsF();
    PrintF("[Initializing isolate from scratch took %0.3f ms]\n", ms);
  }

  return true;
}
3160
3161
// Makes this isolate current on the calling thread, pushing the previously
// current isolate (if any) onto the entry stack so Exit() can restore it.
// Re-entry by the same thread only bumps the entry count.
void Isolate::Enter() {
  Isolate* current_isolate = nullptr;
  PerIsolateThreadData* current_data = CurrentPerIsolateThreadData();
  if (current_data != nullptr) {
    current_isolate = current_data->isolate_;
    DCHECK_NOT_NULL(current_isolate);
    if (current_isolate == this) {
      DCHECK(Current() == this);
      DCHECK_NOT_NULL(entry_stack_);
      DCHECK(entry_stack_->previous_thread_data == nullptr ||
             entry_stack_->previous_thread_data->thread_id().Equals(
                 ThreadId::Current()));
      // Same thread re-enters the isolate, no need to re-init anything.
      entry_stack_->entry_count++;
      return;
    }
  }

  PerIsolateThreadData* data = FindOrAllocatePerThreadDataForThisThread();
  DCHECK_NOT_NULL(data);
  DCHECK(data->isolate_ == this);

  // Save the previous isolate/data so Exit() can restore them.
  EntryStackItem* item = new EntryStackItem(current_data,
                                            current_isolate,
                                            entry_stack_);
  entry_stack_ = item;

  SetIsolateThreadLocals(this, data);

  // In case it's the first time some thread enters the isolate.
  set_thread_id(data->thread_id());
}
3194
3195
// Undoes one Enter(). Only when the outermost entry for this thread is
// popped does the thread's previous isolate become current again.
void Isolate::Exit() {
  DCHECK_NOT_NULL(entry_stack_);
  DCHECK(entry_stack_->previous_thread_data == nullptr ||
         entry_stack_->previous_thread_data->thread_id().Equals(
             ThreadId::Current()));

  // Nested entries just decrement the count.
  if (--entry_stack_->entry_count > 0) return;

  DCHECK_NOT_NULL(CurrentPerIsolateThreadData());
  DCHECK(CurrentPerIsolateThreadData()->isolate_ == this);

  // Pop the stack.
  EntryStackItem* item = entry_stack_;
  entry_stack_ = item->previous_item;

  PerIsolateThreadData* previous_thread_data = item->previous_thread_data;
  Isolate* previous_isolate = item->previous_isolate;

  delete item;

  // Reinit the current thread for the isolate it was running before this one.
  SetIsolateThreadLocals(previous_isolate, previous_thread_data);
}
3219
3220
3221 void Isolate::LinkDeferredHandles(DeferredHandles* deferred) {
3222 deferred->next_ = deferred_handles_head_;
3223 if (deferred_handles_head_ != nullptr) {
3224 deferred_handles_head_->previous_ = deferred;
3225 }
3226 deferred_handles_head_ = deferred;
3227 }
3228
3229
3230 void Isolate::UnlinkDeferredHandles(DeferredHandles* deferred) {
3231 #ifdef DEBUG
3232 // In debug mode assert that the linked list is well-formed.
3233 DeferredHandles* deferred_iterator = deferred;
3234 while (deferred_iterator->previous_ != nullptr) {
3235 deferred_iterator = deferred_iterator->previous_;
3236 }
3237 DCHECK(deferred_handles_head_ == deferred_iterator);
3238 #endif
3239 if (deferred_handles_head_ == deferred) {
3240 deferred_handles_head_ = deferred_handles_head_->next_;
3241 }
3242 if (deferred->next_ != nullptr) {
3243 deferred->next_->previous_ = deferred->previous_;
3244 }
3245 if (deferred->previous_ != nullptr) {
3246 deferred->previous_->next_ = deferred->next_;
3247 }
3248 }
3249
// Prints accumulated TurboFan compilation statistics (when collected) and
// natively-enabled runtime call stats, then resets both.
void Isolate::DumpAndResetStats() {
  if (turbo_statistics() != nullptr) {
    DCHECK(FLAG_turbo_stats || FLAG_turbo_stats_nvp);

    OFStream os(stdout);
    if (FLAG_turbo_stats) {
      // Human-readable table form.
      AsPrintableStatistics ps = {*turbo_statistics(), false};
      os << ps << std::endl;
    }
    if (FLAG_turbo_stats_nvp) {
      // Machine-readable name-value-pair form.
      AsPrintableStatistics ps = {*turbo_statistics(), true};
      os << ps << std::endl;
    }
  }
  delete turbo_statistics_;
  turbo_statistics_ = nullptr;
  // Runtime call stats are printed here only when enabled natively (as
  // opposed to being driven by the tracing subsystem).
  if (V8_UNLIKELY(FLAG_runtime_stats ==
                  v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE)) {
    counters()->runtime_call_stats()->Print();
    counters()->runtime_call_stats()->Reset();
  }
}
3272
3273 void Isolate::AbortConcurrentOptimization(BlockingBehavior behavior) {
3274 if (concurrent_recompilation_enabled()) {
3275 DisallowHeapAllocation no_recursive_gc;
3276 optimizing_compile_dispatcher()->Flush(behavior);
3277 }
3278 }
3279
3280 CompilationStatistics* Isolate::GetTurboStatistics() {
3281 if (turbo_statistics() == nullptr)
3282 set_turbo_statistics(new CompilationStatistics());
3283 return turbo_statistics();
3284 }
3285
3286
3287 CodeTracer* Isolate::GetCodeTracer() {
3288 if (code_tracer() == nullptr) set_code_tracer(new CodeTracer(id()));
3289 return code_tracer();
3290 }
3291
3292 bool Isolate::use_optimizer() {
3293 return FLAG_opt && !serializer_enabled_ && CpuFeatures::SupportsOptimizer() &&
3294 !is_precise_count_code_coverage() && !is_block_count_code_coverage();
3295 }
3296
3297 bool Isolate::NeedsDetailedOptimizedCodeLineInfo() const {
3298 return NeedsSourcePositionsForProfiling() || FLAG_detailed_line_info;
3299 }
3300
// True when any active tracing/profiling/debugging feature requires source
// positions to be collected for generated code.
bool Isolate::NeedsSourcePositionsForProfiling() const {
  return FLAG_trace_deopt || FLAG_trace_turbo || FLAG_trace_turbo_graph ||
         FLAG_turbo_profiling || FLAG_perf_prof || is_profiling() ||
         debug_->is_active() || logger_->is_logging() || FLAG_trace_maps;
}
3306
// Installs |value| (undefined, or an ArrayList of feedback vectors) as the
// heap root consulted by profiling/coverage tooling.
void Isolate::SetFeedbackVectorsForProfilingTools(Object* value) {
  DCHECK(value->IsUndefined(this) || value->IsArrayList());
  heap()->set_feedback_vectors_for_profiling_tools(value);
}
3311
// Builds the feedback-vector list for profiling tools by scanning the heap,
// unless it has already been initialized.
void Isolate::MaybeInitializeVectorListFromHeap() {
  if (!heap()->feedback_vectors_for_profiling_tools()->IsUndefined(this)) {
    // Already initialized, return early.
    DCHECK(heap()->feedback_vectors_for_profiling_tools()->IsArrayList());
    return;
  }

  // Collect existing feedback vectors.
  std::vector<Handle<FeedbackVector>> vectors;

  {
    HeapIterator heap_iterator(heap());
    while (HeapObject* current_obj = heap_iterator.next()) {
      if (!current_obj->IsFeedbackVector()) continue;

      FeedbackVector* vector = FeedbackVector::cast(current_obj);
      SharedFunctionInfo* shared = vector->shared_function_info();

      // No need to preserve the feedback vector for non-user-visible functions.
      if (!shared->IsSubjectToDebugging()) continue;

      vectors.emplace_back(vector, this);
    }
  }

  // Add collected feedback vectors to the root list lest we lose them to GC.
  Handle<ArrayList> list =
      ArrayList::New(this, static_cast<int>(vectors.size()));
  for (const auto& vector : vectors) list = ArrayList::Add(list, vector);
  SetFeedbackVectorsForProfilingTools(*list);
}
3343
3344 bool Isolate::IsArrayOrObjectOrStringPrototype(Object* object) {
3345 Object* context = heap()->native_contexts_list();
3346 while (!context->IsUndefined(this)) {
3347 Context* current_context = Context::cast(context);
3348 if (current_context->initial_object_prototype() == object ||
3349 current_context->initial_array_prototype() == object ||
3350 current_context->initial_string_prototype() == object) {
3351 return true;
3352 }
3353 context = current_context->next_context_link();
3354 }
3355 return false;
3356 }
3357
3358 bool Isolate::IsInAnyContext(Object* object, uint32_t index) {
3359 DisallowHeapAllocation no_gc;
3360 Object* context = heap()->native_contexts_list();
3361 while (!context->IsUndefined(this)) {
3362 Context* current_context = Context::cast(context);
3363 if (current_context->get(index) == object) {
3364 return true;
3365 }
3366 context = current_context->next_context_link();
3367 }
3368 return false;
3369 }
3370
// Returns whether the "no elements" protector cell still reports valid.
// In debug builds, additionally cross-checks the cell against the actual
// state of the Array/Object/String prototypes of |context|'s native context.
bool Isolate::IsNoElementsProtectorIntact(Context* context) {
  PropertyCell* no_elements_cell = heap()->no_elements_protector();
  bool cell_reports_intact =
      no_elements_cell->value()->IsSmi() &&
      Smi::ToInt(no_elements_cell->value()) == kProtectorValid;

#ifdef DEBUG
  Context* native_context = context->native_context();

  Map* root_array_map =
      native_context->GetInitialJSArrayMap(GetInitialFastElementsKind());
  JSObject* initial_array_proto = JSObject::cast(
      native_context->get(Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
  JSObject* initial_object_proto = JSObject::cast(
      native_context->get(Context::INITIAL_OBJECT_PROTOTYPE_INDEX));
  JSObject* initial_string_proto = JSObject::cast(
      native_context->get(Context::INITIAL_STRING_PROTOTYPE_INDEX));

  if (root_array_map == nullptr ||
      initial_array_proto == initial_object_proto) {
    // We are in the bootstrapping process, and the entire check sequence
    // shouldn't be performed.
    return cell_reports_intact;
  }

  // Check that the array prototype hasn't been altered WRT empty elements.
  if (root_array_map->prototype() != initial_array_proto) {
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }

  FixedArrayBase* elements = initial_array_proto->elements();
  if (elements != heap()->empty_fixed_array() &&
      elements != heap()->empty_slow_element_dictionary()) {
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }

  // Check that the Object.prototype hasn't been altered WRT empty elements.
  elements = initial_object_proto->elements();
  if (elements != heap()->empty_fixed_array() &&
      elements != heap()->empty_slow_element_dictionary()) {
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }

  // Check that the Array.prototype has the Object.prototype as its
  // [[Prototype]] and that the Object.prototype has a null [[Prototype]].
  PrototypeIterator iter(this, initial_array_proto);
  if (iter.IsAtEnd() || iter.GetCurrent() != initial_object_proto) {
    DCHECK_EQ(false, cell_reports_intact);
    DCHECK(!has_pending_exception());
    return cell_reports_intact;
  }
  iter.Advance();
  if (!iter.IsAtEnd()) {
    DCHECK_EQ(false, cell_reports_intact);
    DCHECK(!has_pending_exception());
    return cell_reports_intact;
  }
  DCHECK(!has_pending_exception());

  // Check that the String.prototype hasn't been altered WRT empty elements.
  elements = initial_string_proto->elements();
  if (elements != heap()->empty_fixed_array() &&
      elements != heap()->empty_slow_element_dictionary()) {
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }

  // Check that the String.prototype has the Object.prototype
  // as its [[Prototype]] still.
  if (initial_string_proto->map()->prototype() != initial_object_proto) {
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }
#endif

  return cell_reports_intact;
}
3451
// Convenience overload: checks the no-elements protector against the
// isolate's current context.
bool Isolate::IsNoElementsProtectorIntact() {
  return Isolate::IsNoElementsProtectorIntact(context());
}
3455
// Returns true while no @@isConcatSpreadable property has been set on any
// object visible to user code.  The release-mode answer comes solely from
// the is_concat_spreadable protector cell; the DEBUG block cross-checks
// the cell against an actual property lookup on Array.prototype.
bool Isolate::IsIsConcatSpreadableLookupChainIntact() {
  Cell* is_concat_spreadable_cell = heap()->is_concat_spreadable_protector();
  bool is_is_concat_spreadable_set =
      Smi::ToInt(is_concat_spreadable_cell->value()) == kProtectorInvalid;
#ifdef DEBUG
  Map* root_array_map =
      raw_native_context()->GetInitialJSArrayMap(GetInitialFastElementsKind());
  if (root_array_map == nullptr) {
    // Ignore the value of is_concat_spreadable during bootstrap.
    return !is_is_concat_spreadable_set;
  }
  Handle<Object> array_prototype(array_function()->prototype(), this);
  Handle<Symbol> key = factory()->is_concat_spreadable_symbol();
  Handle<Object> value;
  LookupIterator it(array_prototype, key);
  if (it.IsFound() && !JSReceiver::GetDataProperty(&it)->IsUndefined(this)) {
    // TODO(cbruni): Currently we do not revert if we unset the
    // @@isConcatSpreadable property on Array.prototype or Object.prototype
    // hence the reverse implication doesn't hold.
    DCHECK(is_is_concat_spreadable_set);
    return false;
  }
#endif  // DEBUG

  return !is_is_concat_spreadable_set;
}
3482
// Per-receiver variant: in addition to the global protector, requires that
// |receiver| has no JSProxy anywhere on its prototype chain (a proxy could
// intercept the @@isConcatSpreadable lookup).
bool Isolate::IsIsConcatSpreadableLookupChainIntact(JSReceiver* receiver) {
  if (!IsIsConcatSpreadableLookupChainIntact()) return false;
  return !receiver->HasProxyInPrototype(this);
}
3487
// Returns true while the promise-hook protector is valid.  The DCHECK
// documents the invariant that the protector can only be intact while
// neither a promise hook nor the debugger is active.
bool Isolate::IsPromiseHookProtectorIntact() {
  PropertyCell* promise_hook_cell = heap()->promise_hook_protector();
  bool is_promise_hook_protector_intact =
      Smi::ToInt(promise_hook_cell->value()) == kProtectorValid;
  DCHECK_IMPLIES(is_promise_hook_protector_intact,
                 !promise_hook_or_debug_is_active_);
  return is_promise_hook_protector_intact;
}
3496
3497 bool Isolate::IsPromiseResolveLookupChainIntact() {
3498 Cell* promise_resolve_cell = heap()->promise_resolve_protector();
3499 bool is_promise_resolve_protector_intact =
3500 Smi::ToInt(promise_resolve_cell->value()) == kProtectorValid;
3501 return is_promise_resolve_protector_intact;
3502 }
3503
3504 bool Isolate::IsPromiseThenLookupChainIntact() {
3505 PropertyCell* promise_then_cell = heap()->promise_then_protector();
3506 bool is_promise_then_protector_intact =
3507 Smi::ToInt(promise_then_cell->value()) == kProtectorValid;
3508 return is_promise_then_protector_intact;
3509 }
3510
// Per-receiver variant of the then-protector check: |receiver| must be a
// JSPromise whose map prototype is the initial Promise.prototype of some
// context, and the global protector must also be intact.
bool Isolate::IsPromiseThenLookupChainIntact(Handle<JSReceiver> receiver) {
  DisallowHeapAllocation no_gc;
  if (!receiver->IsJSPromise()) return false;
  if (!IsInAnyContext(receiver->map()->prototype(),
                      Context::PROMISE_PROTOTYPE_INDEX)) {
    return false;
  }
  return IsPromiseThenLookupChainIntact();
}
3520
// Invalidates the no-elements protector when an element is set on one of
// the initial Array/Object/String prototypes.  Cheap early-outs keep the
// common case (non-prototype objects, already-invalid protector) fast.
void Isolate::UpdateNoElementsProtectorOnSetElement(Handle<JSObject> object) {
  DisallowHeapAllocation no_gc;
  if (!object->map()->is_prototype_map()) return;
  if (!IsNoElementsProtectorIntact()) return;
  if (!IsArrayOrObjectOrStringPrototype(*object)) return;
  PropertyCell::SetValueWithInvalidation(
      factory()->no_elements_protector(),
      handle(Smi::FromInt(kProtectorInvalid), this));
}
3530
// Marks the @@isConcatSpreadable protector invalid.  This protector is a
// plain Cell, so a direct set_value suffices (no SetValueWithInvalidation).
void Isolate::InvalidateIsConcatSpreadableProtector() {
  DCHECK(factory()->is_concat_spreadable_protector()->value()->IsSmi());
  DCHECK(IsIsConcatSpreadableLookupChainIntact());
  factory()->is_concat_spreadable_protector()->set_value(
      Smi::FromInt(kProtectorInvalid));
  DCHECK(!IsIsConcatSpreadableLookupChainIntact());
}
3538
// Marks the Array-constructor protector invalid via a direct Cell write.
void Isolate::InvalidateArrayConstructorProtector() {
  DCHECK(factory()->array_constructor_protector()->value()->IsSmi());
  DCHECK(IsArrayConstructorIntact());
  factory()->array_constructor_protector()->set_value(
      Smi::FromInt(kProtectorInvalid));
  DCHECK(!IsArrayConstructorIntact());
}
3546
// Marks the Array @@species protector invalid via a direct Cell write.
void Isolate::InvalidateArraySpeciesProtector() {
  DCHECK(factory()->array_species_protector()->value()->IsSmi());
  DCHECK(IsArraySpeciesLookupChainIntact());
  factory()->array_species_protector()->set_value(
      Smi::FromInt(kProtectorInvalid));
  DCHECK(!IsArraySpeciesLookupChainIntact());
}
3554
// Marks the TypedArray @@species protector invalid via a direct Cell write.
void Isolate::InvalidateTypedArraySpeciesProtector() {
  DCHECK(factory()->typed_array_species_protector()->value()->IsSmi());
  DCHECK(IsTypedArraySpeciesLookupChainIntact());
  factory()->typed_array_species_protector()->set_value(
      Smi::FromInt(kProtectorInvalid));
  DCHECK(!IsTypedArraySpeciesLookupChainIntact());
}
3562
// Marks the Promise @@species protector invalid via a direct Cell write.
void Isolate::InvalidatePromiseSpeciesProtector() {
  DCHECK(factory()->promise_species_protector()->value()->IsSmi());
  DCHECK(IsPromiseSpeciesLookupChainIntact());
  factory()->promise_species_protector()->set_value(
      Smi::FromInt(kProtectorInvalid));
  DCHECK(!IsPromiseSpeciesLookupChainIntact());
}
3570
// Marks the string-length-overflow protector invalid via a direct Cell
// write.
void Isolate::InvalidateStringLengthOverflowProtector() {
  DCHECK(factory()->string_length_protector()->value()->IsSmi());
  DCHECK(IsStringLengthOverflowIntact());
  factory()->string_length_protector()->set_value(
      Smi::FromInt(kProtectorInvalid));
  DCHECK(!IsStringLengthOverflowIntact());
}
3578
// Marks the array-iterator protector invalid.  This protector is a
// PropertyCell, so it goes through SetValueWithInvalidation, which also
// deoptimizes dependent code.
void Isolate::InvalidateArrayIteratorProtector() {
  DCHECK(factory()->array_iterator_protector()->value()->IsSmi());
  DCHECK(IsArrayIteratorLookupChainIntact());
  PropertyCell::SetValueWithInvalidation(
      factory()->array_iterator_protector(),
      handle(Smi::FromInt(kProtectorInvalid), this));
  DCHECK(!IsArrayIteratorLookupChainIntact());
}
3587
// Marks the array-buffer-neutering protector (a PropertyCell) invalid with
// dependent-code invalidation.
void Isolate::InvalidateArrayBufferNeuteringProtector() {
  DCHECK(factory()->array_buffer_neutering_protector()->value()->IsSmi());
  DCHECK(IsArrayBufferNeuteringIntact());
  PropertyCell::SetValueWithInvalidation(
      factory()->array_buffer_neutering_protector(),
      handle(Smi::FromInt(kProtectorInvalid), this));
  DCHECK(!IsArrayBufferNeuteringIntact());
}
3596
// Marks the promise-hook protector (a PropertyCell) invalid with
// dependent-code invalidation.
void Isolate::InvalidatePromiseHookProtector() {
  DCHECK(factory()->promise_hook_protector()->value()->IsSmi());
  DCHECK(IsPromiseHookProtectorIntact());
  PropertyCell::SetValueWithInvalidation(
      factory()->promise_hook_protector(),
      handle(Smi::FromInt(kProtectorInvalid), this));
  DCHECK(!IsPromiseHookProtectorIntact());
}
3605
// Marks the Promise.resolve protector invalid via a direct Cell write.
void Isolate::InvalidatePromiseResolveProtector() {
  DCHECK(factory()->promise_resolve_protector()->value()->IsSmi());
  DCHECK(IsPromiseResolveLookupChainIntact());
  factory()->promise_resolve_protector()->set_value(
      Smi::FromInt(kProtectorInvalid));
  DCHECK(!IsPromiseResolveLookupChainIntact());
}
3613
// Marks the Promise.prototype.then protector (a PropertyCell) invalid with
// dependent-code invalidation.
void Isolate::InvalidatePromiseThenProtector() {
  DCHECK(factory()->promise_then_protector()->value()->IsSmi());
  DCHECK(IsPromiseThenLookupChainIntact());
  PropertyCell::SetValueWithInvalidation(
      factory()->promise_then_protector(),
      handle(Smi::FromInt(kProtectorInvalid), this));
  DCHECK(!IsPromiseThenLookupChainIntact());
}
3622
// Returns true if |array| is the initial Array.prototype of any context.
bool Isolate::IsAnyInitialArrayPrototype(Handle<JSArray> array) {
  DisallowHeapAllocation no_gc;
  return IsInAnyContext(*array, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
}
3627
3628
// Returns the call-descriptor data slot for |index|; the index must be a
// valid CallDescriptors enum value.
CallInterfaceDescriptorData* Isolate::call_descriptor_data(int index) {
  DCHECK(0 <= index && index < CallDescriptors::NUMBER_OF_DESCRIPTORS);
  return &call_descriptor_data_[index];
}
3633
3634 static base::RandomNumberGenerator* ensure_rng_exists(
3635 base::RandomNumberGenerator** rng, int seed) {
3636 if (*rng == nullptr) {
3637 if (seed != 0) {
3638 *rng = new base::RandomNumberGenerator(seed);
3639 } else {
3640 *rng = new base::RandomNumberGenerator();
3641 }
3642 }
3643 return *rng;
3644 }
3645
// Returns the isolate's lazily-created RNG, seeded from --random-seed when
// that flag is non-zero.
base::RandomNumberGenerator* Isolate::random_number_generator() {
  return ensure_rng_exists(&random_number_generator_, FLAG_random_seed);
}
3649
// Returns the isolate's lazily-created fuzzer RNG.  Seeded from
// --fuzzer-random-seed when set, otherwise from the main RNG's initial
// seed.
base::RandomNumberGenerator* Isolate::fuzzer_rng() {
  if (fuzzer_rng_ == nullptr) {
    int64_t seed = FLAG_fuzzer_random_seed;
    if (seed == 0) {
      seed = random_number_generator()->initial_seed();
    }

    fuzzer_rng_ = new base::RandomNumberGenerator(seed);
  }

  return fuzzer_rng_;
}
3662
3663 int Isolate::GenerateIdentityHash(uint32_t mask) {
3664 int hash;
3665 int attempts = 0;
3666 do {
3667 hash = random_number_generator()->NextInt() & mask;
3668 } while (hash == 0 && attempts++ < 30);
3669 return hash != 0 ? hash : 1;
3670 }
3671
// Returns the Code object containing address |a|, via the heap's GC-safe
// inner-pointer lookup.
Code* Isolate::FindCodeObject(Address a) {
  return heap()->GcSafeFindCodeForInnerPointer(a);
}
3675
3676
#ifdef DEBUG
// Debug builds only: for every Isolate field declared via the
// ISOLATE_INIT lists, emit a static constant holding the field's byte
// offset within Isolate.
#define ISOLATE_FIELD_OFFSET(type, name, ignored) \
  const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_);
ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
#undef ISOLATE_FIELD_OFFSET
#endif
3684
// Returns the symbol registered under |name| in the heap's symbol table at
// |dictionary_index|, creating and registering a fresh (optionally
// private) symbol on first use.  |dictionary_index| must be one of the
// three symbol-table roots handled in the switch below; the grown
// dictionary is written back to the matching heap root.
Handle<Symbol> Isolate::SymbolFor(Heap::RootListIndex dictionary_index,
                                  Handle<String> name, bool private_symbol) {
  Handle<String> key = factory()->InternalizeString(name);
  Handle<NameDictionary> dictionary =
      Handle<NameDictionary>::cast(heap()->root_handle(dictionary_index));
  int entry = dictionary->FindEntry(key);
  Handle<Symbol> symbol;
  if (entry == NameDictionary::kNotFound) {
    // Not registered yet: create, name, and insert the new symbol.
    symbol =
        private_symbol ? factory()->NewPrivateSymbol() : factory()->NewSymbol();
    symbol->set_name(*key);
    dictionary = NameDictionary::Add(dictionary, key, symbol,
                                     PropertyDetails::Empty(), &entry);
    switch (dictionary_index) {
      case Heap::kPublicSymbolTableRootIndex:
        // Symbols in the public table are visible via Symbol.for().
        symbol->set_is_public(true);
        heap()->set_public_symbol_table(*dictionary);
        break;
      case Heap::kApiSymbolTableRootIndex:
        heap()->set_api_symbol_table(*dictionary);
        break;
      case Heap::kApiPrivateSymbolTableRootIndex:
        heap()->set_api_private_symbol_table(*dictionary);
        break;
      default:
        UNREACHABLE();
    }
  } else {
    symbol = Handle<Symbol>(Symbol::cast(dictionary->ValueAt(entry)));
  }
  return symbol;
}
3717
3718 void Isolate::AddBeforeCallEnteredCallback(BeforeCallEnteredCallback callback) {
3719 auto pos = std::find(before_call_entered_callbacks_.begin(),
3720 before_call_entered_callbacks_.end(), callback);
3721 if (pos != before_call_entered_callbacks_.end()) return;
3722 before_call_entered_callbacks_.push_back(callback);
3723 }
3724
3725 void Isolate::RemoveBeforeCallEnteredCallback(
3726 BeforeCallEnteredCallback callback) {
3727 auto pos = std::find(before_call_entered_callbacks_.begin(),
3728 before_call_entered_callbacks_.end(), callback);
3729 if (pos == before_call_entered_callbacks_.end()) return;
3730 before_call_entered_callbacks_.erase(pos);
3731 }
3732
3733 void Isolate::AddCallCompletedCallback(CallCompletedCallback callback) {
3734 auto pos = std::find(call_completed_callbacks_.begin(),
3735 call_completed_callbacks_.end(), callback);
3736 if (pos != call_completed_callbacks_.end()) return;
3737 call_completed_callbacks_.push_back(callback);
3738 }
3739
3740 void Isolate::RemoveCallCompletedCallback(CallCompletedCallback callback) {
3741 auto pos = std::find(call_completed_callbacks_.begin(),
3742 call_completed_callbacks_.end(), callback);
3743 if (pos == call_completed_callbacks_.end()) return;
3744 call_completed_callbacks_.erase(pos);
3745 }
3746
3747 void Isolate::AddMicrotasksCompletedCallback(
3748 MicrotasksCompletedCallback callback) {
3749 auto pos = std::find(microtasks_completed_callbacks_.begin(),
3750 microtasks_completed_callbacks_.end(), callback);
3751 if (pos != microtasks_completed_callbacks_.end()) return;
3752 microtasks_completed_callbacks_.push_back(callback);
3753 }
3754
3755 void Isolate::RemoveMicrotasksCompletedCallback(
3756 MicrotasksCompletedCallback callback) {
3757 auto pos = std::find(microtasks_completed_callbacks_.begin(),
3758 microtasks_completed_callbacks_.end(), callback);
3759 if (pos == microtasks_completed_callbacks_.end()) return;
3760 microtasks_completed_callbacks_.erase(pos);
3761 }
3762
// Runs end-of-API-call work once the outermost call returns: drains the
// microtask queue (when the policy is kAuto, microtasks are pending, and
// they are not suppressed), then notifies all CallCompletedCallbacks.
void Isolate::FireCallCompletedCallback() {
  if (!handle_scope_implementer()->CallDepthIsZero()) return;

  bool run_microtasks =
      pending_microtask_count() &&
      !handle_scope_implementer()->HasMicrotasksSuppressions() &&
      handle_scope_implementer()->microtasks_policy() ==
          v8::MicrotasksPolicy::kAuto;

  if (run_microtasks) RunMicrotasks();

  if (call_completed_callbacks_.empty()) return;
  // Fire callbacks. Increase call depth to prevent recursive callbacks.
  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this);
  v8::Isolate::SuppressMicrotaskExecutionScope suppress(isolate);
  // Iterate over a copy so callbacks may (un)register callbacks without
  // invalidating the iteration.
  std::vector<CallCompletedCallback> callbacks(call_completed_callbacks_);
  for (auto& callback : callbacks) {
    callback(reinterpret_cast<v8::Isolate*>(this));
  }
}
3783
// Recomputes the combined promise-hook/debugger-active flag and, on the
// transition to active while the protector is still intact, invalidates
// the promise-hook protector.
void Isolate::DebugStateUpdated() {
  bool promise_hook_or_debug_is_active = promise_hook_ || debug()->is_active();
  if (promise_hook_or_debug_is_active && IsPromiseHookProtectorIntact()) {
    InvalidatePromiseHookProtector();
  }
  promise_hook_or_debug_is_active_ = promise_hook_or_debug_is_active;
}
3791
namespace {

// Creates a promise in |api_context| that is already rejected with
// |exception|.  Returns an empty MaybeHandle if creating the resolver or
// performing the rejection hits a scheduled exception.
MaybeHandle<JSPromise> NewRejectedPromise(Isolate* isolate,
                                          v8::Local<v8::Context> api_context,
                                          Handle<Object> exception) {
  v8::Local<v8::Promise::Resolver> resolver;
  ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
      isolate, resolver, v8::Promise::Resolver::New(api_context),
      MaybeHandle<JSPromise>());

  RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
      isolate, resolver->Reject(api_context, v8::Utils::ToLocal(exception)),
      MaybeHandle<JSPromise>());

  v8::Local<v8::Promise> promise = resolver->GetPromise();
  return v8::Utils::OpenHandle(*promise);
}

}  // namespace
3811
// Implements dynamic import(): stringifies |specifier| and hands it,
// together with the |referrer| script, to the embedder's callback,
// returning the resulting promise.  Without an installed callback, or if
// stringification throws, returns a promise rejected with the
// corresponding error instead of propagating an exception.
MaybeHandle<JSPromise> Isolate::RunHostImportModuleDynamicallyCallback(
    Handle<Script> referrer, Handle<Object> specifier) {
  v8::Local<v8::Context> api_context = v8::Utils::ToLocal(native_context());

  if (host_import_module_dynamically_callback_ == nullptr) {
    Handle<Object> exception =
        factory()->NewError(error_function(), MessageTemplate::kUnsupported);
    return NewRejectedPromise(this, api_context, exception);
  }

  Handle<String> specifier_str;
  MaybeHandle<String> maybe_specifier = Object::ToString(this, specifier);
  if (!maybe_specifier.ToHandle(&specifier_str)) {
    // ToString threw: convert the pending exception into a rejection.
    Handle<Object> exception(pending_exception(), this);
    clear_pending_exception();

    return NewRejectedPromise(this, api_context, exception);
  }
  DCHECK(!has_pending_exception());

  v8::Local<v8::Promise> promise;
  ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
      this, promise,
      host_import_module_dynamically_callback_(
          api_context, v8::Utils::ScriptOrModuleToLocal(referrer),
          v8::Utils::ToLocal(specifier_str)),
      MaybeHandle<JSPromise>());
  return v8::Utils::OpenHandle(*promise);
}
3841
// Installs the embedder's dynamic-import callback.
void Isolate::SetHostImportModuleDynamicallyCallback(
    HostImportModuleDynamicallyCallback callback) {
  host_import_module_dynamically_callback_ = callback;
}
3846
// Returns |module|'s import.meta object, creating it on first access: a
// null-prototype object, handed to the embedder callback (if any) for
// population, then cached on the module.
Handle<JSObject> Isolate::RunHostInitializeImportMetaObjectCallback(
    Handle<Module> module) {
  Handle<Object> host_meta(module->import_meta(), this);
  if (host_meta->IsTheHole(this)) {
    host_meta = factory()->NewJSObjectWithNullProto();
    if (host_initialize_import_meta_object_callback_ != nullptr) {
      v8::Local<v8::Context> api_context = v8::Utils::ToLocal(native_context());
      host_initialize_import_meta_object_callback_(
          api_context, Utils::ToLocal(module),
          v8::Local<v8::Object>::Cast(v8::Utils::ToLocal(host_meta)));
    }
    module->set_import_meta(*host_meta);
  }
  return Handle<JSObject>::cast(host_meta);
}
3862
// Installs the embedder's import.meta initialization callback.
void Isolate::SetHostInitializeImportMetaObjectCallback(
    HostInitializeImportMetaObjectCallback callback) {
  host_initialize_import_meta_object_callback_ = callback;
}
3867
// Installs |hook| and refreshes the debug/promise-hook state, which may
// invalidate the promise-hook protector.
void Isolate::SetPromiseHook(PromiseHook hook) {
  promise_hook_ = hook;
  DebugStateUpdated();
}
3872
// Notifies the debugger and, if installed, the embedder's promise hook of
// a promise lifecycle event.
void Isolate::RunPromiseHook(PromiseHookType type, Handle<JSPromise> promise,
                             Handle<Object> parent) {
  if (debug()->is_active()) debug()->RunPromiseHook(type, promise, parent);
  if (promise_hook_ == nullptr) return;
  promise_hook_(type, v8::Utils::PromiseToLocal(promise),
                v8::Utils::ToLocal(parent));
}
3880
// Installs the embedder's promise-rejection callback.
void Isolate::SetPromiseRejectCallback(PromiseRejectCallback callback) {
  promise_reject_callback_ = callback;
}
3884
// Forwards a promise rejection event to the embedder's callback (if any),
// attaching a detailed stack trace for JSObject values except for
// handler-added-after-reject events.
void Isolate::ReportPromiseReject(Handle<JSPromise> promise,
                                  Handle<Object> value,
                                  v8::PromiseRejectEvent event) {
  if (promise_reject_callback_ == nullptr) return;
  Handle<FixedArray> stack_trace;
  if (event != v8::kPromiseHandlerAddedAfterReject && value->IsJSObject()) {
    stack_trace = GetDetailedStackTrace(Handle<JSObject>::cast(value));
  }
  promise_reject_callback_(v8::PromiseRejectMessage(
      v8::Utils::PromiseToLocal(promise), event, v8::Utils::ToLocal(value),
      v8::Utils::StackTraceToLocal(stack_trace)));
}
3897
// Appends |microtask| to the heap-allocated microtask queue, growing the
// backing FixedArray by max(current length, 8) slots when it is full.
void Isolate::EnqueueMicrotask(Handle<Microtask> microtask) {
  Handle<FixedArray> queue(heap()->microtask_queue(), this);
  int num_tasks = pending_microtask_count();
  DCHECK_LE(num_tasks, queue->length());
  if (num_tasks == queue->length()) {
    queue = factory()->CopyFixedArrayAndGrow(queue, std::max(num_tasks, 8));
    heap()->set_microtask_queue(*queue);
  }
  DCHECK_LE(8, queue->length());
  DCHECK_LT(num_tasks, queue->length());
  DCHECK(queue->get(num_tasks)->IsUndefined(this));
  queue->set(num_tasks, *microtask);
  set_pending_microtask_count(num_tasks + 1);
}
3912
3913
// Drains the microtask queue.  Execution runs with microtasks suppressed
// so callbacks cannot re-enter the drain; when execution was terminated
// (both result and exception empty), the queue is reset and the
// termination is propagated to an external TryCatch.  Always fires the
// microtasks-completed callbacks at the end.
void Isolate::RunMicrotasks() {
  // Increase call depth to prevent recursive callbacks.
  v8::Isolate::SuppressMicrotaskExecutionScope suppress(
      reinterpret_cast<v8::Isolate*>(this));
  if (pending_microtask_count()) {
    is_running_microtasks_ = true;
    TRACE_EVENT0("v8.execute", "RunMicrotasks");
    TRACE_EVENT_CALL_STATS_SCOPED(this, "v8", "V8.RunMicrotasks");

    HandleScope scope(this);
    MaybeHandle<Object> maybe_exception;
    MaybeHandle<Object> maybe_result = Execution::RunMicrotasks(
        this, Execution::MessageHandling::kReport, &maybe_exception);
    // If execution is terminating, bail out, clean up, and propagate to
    // TryCatch scope.
    if (maybe_result.is_null() && maybe_exception.is_null()) {
      heap()->set_microtask_queue(heap()->empty_fixed_array());
      set_pending_microtask_count(0);
      handle_scope_implementer()->LeaveMicrotaskContext();
      SetTerminationOnExternalTryCatch();
    }
    CHECK_EQ(0, pending_microtask_count());
    CHECK_EQ(0, heap()->microtask_queue()->length());
    is_running_microtasks_ = false;
  }
  FireMicrotasksCompletedCallback();
}
3941
// Installs the embedder's use-counter callback; may only be set once.
void Isolate::SetUseCounterCallback(v8::Isolate::UseCounterCallback callback) {
  DCHECK(!use_counter_callback_);
  use_counter_callback_ = callback;
}
3946
3947
// Reports |feature| to the embedder's use-counter callback, or defers the
// count if currently inside a GC.
void Isolate::CountUsage(v8::Isolate::UseCounterFeature feature) {
  // The counter callback may cause the embedder to call into V8, which is not
  // generally possible during GC.
  if (heap_.gc_state() == Heap::NOT_IN_GC) {
    if (use_counter_callback_) {
      HandleScope handle_scope(this);
      use_counter_callback_(reinterpret_cast<v8::Isolate*>(this), feature);
    }
  } else {
    heap_.IncrementDeferredCount(feature);
  }
}
3960
3961
// Lazily allocates and returns the isolate's BasicBlockProfiler.
BasicBlockProfiler* Isolate::GetOrCreateBasicBlockProfiler() {
  if (basic_block_profiler_ == nullptr) {
    basic_block_profiler_ = new BasicBlockProfiler();
  }
  return basic_block_profiler_;
}
3968
3969
3970 std::string Isolate::GetTurboCfgFileName() {
3971 if (FLAG_trace_turbo_cfg_file == nullptr) {
3972 std::ostringstream os;
3973 os << "turbo-" << base::OS::GetCurrentProcessId() << "-" << id() << ".cfg";
3974 return os.str();
3975 } else {
3976 return FLAG_trace_turbo_cfg_file;
3977 }
3978 }
3979
3980 // Heap::detached_contexts tracks detached contexts as pairs
3981 // (number of GC since the context was detached, the context).
// Records |context| in heap()->detached_contexts as a (GC count, WeakCell)
// pair with an initial count of zero.
void Isolate::AddDetachedContext(Handle<Context> context) {
  HandleScope scope(this);
  Handle<WeakCell> cell = factory()->NewWeakCell(context);
  // Grow by one (count, context) pair.
  Handle<FixedArray> detached_contexts =
      factory()->CopyFixedArrayAndGrow(factory()->detached_contexts(), 2);
  int new_length = detached_contexts->length();
  detached_contexts->set(new_length - 2, Smi::kZero);
  detached_contexts->set(new_length - 1, *cell);
  heap()->set_detached_contexts(*detached_contexts);
}
3992
3993
// Ages the detached-contexts list after a GC: entries whose WeakCell was
// cleared are dropped, survivors get their GC count incremented, and the
// array is compacted in place and trimmed (or replaced by the empty array
// when nothing survives).
void Isolate::CheckDetachedContextsAfterGC() {
  HandleScope scope(this);
  Handle<FixedArray> detached_contexts = factory()->detached_contexts();
  int length = detached_contexts->length();
  if (length == 0) return;
  int new_length = 0;
  for (int i = 0; i < length; i += 2) {
    int mark_sweeps = Smi::ToInt(detached_contexts->get(i));
    DCHECK(detached_contexts->get(i + 1)->IsWeakCell());
    WeakCell* cell = WeakCell::cast(detached_contexts->get(i + 1));
    if (!cell->cleared()) {
      // Survivor: compact it toward the front with an incremented count.
      detached_contexts->set(new_length, Smi::FromInt(mark_sweeps + 1));
      detached_contexts->set(new_length + 1, cell);
      new_length += 2;
    }
    counters()->detached_context_age_in_gc()->AddSample(mark_sweeps + 1);
  }
  if (FLAG_trace_detached_contexts) {
    PrintF("%d detached contexts are collected out of %d\n",
           length - new_length, length);
    for (int i = 0; i < new_length; i += 2) {
      int mark_sweeps = Smi::ToInt(detached_contexts->get(i));
      DCHECK(detached_contexts->get(i + 1)->IsWeakCell());
      WeakCell* cell = WeakCell::cast(detached_contexts->get(i + 1));
      if (mark_sweeps > 3) {
        // NOTE(review): the "\n" before " survived" splits this trace line
        // in two — looks unintentional; confirm before relying on the
        // output format.
        PrintF("detached context %p\n survived %d GCs (leak?)\n",
               static_cast<void*>(cell->value()), mark_sweeps);
      }
    }
  }
  if (new_length == 0) {
    heap()->set_detached_contexts(heap()->empty_fixed_array());
  } else if (new_length < length) {
    heap()->RightTrimFixedArray(*detached_contexts, length - new_length);
  }
}
4030
// Returns the time (in ms) recorded when PERFORMANCE_LOAD RAIL mode was
// last entered; access is guarded by rail_mutex_.
double Isolate::LoadStartTimeMs() {
  base::LockGuard<base::Mutex> guard(&rail_mutex_);
  return load_start_time_ms_;
}
4035
// Switches the RAIL performance mode.  Entering PERFORMANCE_LOAD records
// the load start time (under rail_mutex_); leaving it schedules an
// incremental-marking job.
void Isolate::SetRAILMode(RAILMode rail_mode) {
  RAILMode old_rail_mode = rail_mode_.Value();
  if (old_rail_mode != PERFORMANCE_LOAD && rail_mode == PERFORMANCE_LOAD) {
    base::LockGuard<base::Mutex> guard(&rail_mutex_);
    load_start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
  }
  rail_mode_.SetValue(rail_mode);
  if (old_rail_mode == PERFORMANCE_LOAD && rail_mode != PERFORMANCE_LOAD) {
    heap()->incremental_marking()->incremental_marking_job()->ScheduleTask(
        heap());
  }
  if (FLAG_trace_rail) {
    PrintIsolate(this, "RAIL mode: %s\n", RAILModeName(rail_mode));
  }
}
4051
// Marks the isolate as backgrounded and lets the heap activate its memory
// reducer if conditions warrant.
void Isolate::IsolateInBackgroundNotification() {
  is_isolate_in_background_ = true;
  heap()->ActivateMemoryReducerIfNeeded();
}
4056
// Clears the backgrounded flag.
void Isolate::IsolateInForegroundNotification() {
  is_isolate_in_background_ = false;
}
4060
// printf-style output prefixed with the process id, the isolate pointer,
// and milliseconds since isolate initialization.
void Isolate::PrintWithTimestamp(const char* format, ...) {
  base::OS::Print("[%d:%p] %8.0f ms: ", base::OS::GetCurrentProcessId(),
                  static_cast<void*>(this), time_millis_since_init());
  va_list arguments;
  va_start(arguments, format);
  base::OS::VPrint(format, arguments);
  va_end(arguments);
}
4069
// Tracks EXTERNAL <-> IDLE VM-state transitions for the profiler.  Only
// acts while profiling, and not while JS is on the stack (non-null
// js_entry_sp).
void Isolate::SetIdle(bool is_idle) {
  if (!is_profiling()) return;
  StateTag state = current_vm_state();
  DCHECK(state == EXTERNAL || state == IDLE);
  if (js_entry_sp() != kNullAddress) return;
  if (is_idle) {
    set_current_vm_state(IDLE);
  } else if (state == IDLE) {
    set_current_vm_state(EXTERNAL);
  }
}
4081
// Lazily creates and returns the isolate's CpuProfiler.
CpuProfiler* Isolate::EnsureCpuProfiler() {
  if (!cpu_profiler_) {
    cpu_profiler_ = new CpuProfiler(this);
  }
  return cpu_profiler_;
}
4088
// Returns true when the current stack position is within |gap| bytes of
// the real C stack limit; under the simulator, the separate JS stack is
// checked against its own limit as well.
bool StackLimitCheck::JsHasOverflowed(uintptr_t gap) const {
  StackGuard* stack_guard = isolate_->stack_guard();
#ifdef USE_SIMULATOR
  // The simulator uses a separate JS stack.
  Address jssp_address = Simulator::current(isolate_)->get_sp();
  uintptr_t jssp = static_cast<uintptr_t>(jssp_address);
  if (jssp - gap < stack_guard->real_jslimit()) return true;
#endif  // USE_SIMULATOR
  return GetCurrentStackPosition() - gap < stack_guard->real_climit();
}
4099
// Captures the isolate's current context (if any) and C entry frame
// pointer, and links this scope as the isolate's innermost SaveContext.
SaveContext::SaveContext(Isolate* isolate)
    : isolate_(isolate), prev_(isolate->save_context()) {
  if (isolate->context() != nullptr) {
    context_ = Handle<Context>(isolate->context());
  }
  isolate->set_save_context(this);

  c_entry_fp_ = isolate->c_entry_fp(isolate->thread_local_top());
}
4109
// Restores the saved context (nullptr if none was set) and unlinks this
// scope from the isolate.
SaveContext::~SaveContext() {
  isolate_->set_context(context_.is_null() ? nullptr : *context_);
  isolate_->set_save_context(prev_);
}
4114
// Returns true when the saved C entry frame pointer lies above |frame|'s
// stack pointer, or when no C entry frame was active at save time.
bool SaveContext::IsBelowFrame(StandardFrame* frame) {
  return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
}
4118
#ifdef DEBUG
// Debug helper: snapshots the isolate's context at construction so the
// scope can verify it is unchanged later.
AssertNoContextChange::AssertNoContextChange(Isolate* isolate)
    : isolate_(isolate), context_(isolate->context(), isolate) {}
#endif  // DEBUG
4123
// Decides whether |flag| should be intercepted (postponed) instead of
// delivered.  Walks the scope chain from innermost outward: hitting a
// kRunInterrupts scope for the flag first lets the interrupt through;
// otherwise the outermost matching postpone scope records the flag and the
// interrupt is intercepted.
bool InterruptsScope::Intercept(StackGuard::InterruptFlag flag) {
  InterruptsScope* last_postpone_scope = nullptr;
  for (InterruptsScope* current = this; current; current = current->prev_) {
    // We only consider scopes related to passed flag.
    if (!(current->intercept_mask_ & flag)) continue;
    if (current->mode_ == kRunInterrupts) {
      // If innermost scope is kRunInterrupts scope, prevent interrupt from
      // being intercepted.
      break;
    } else {
      DCHECK_EQ(current->mode_, kPostponeInterrupts);
      last_postpone_scope = current;
    }
  }
  // If there is no postpone scope for passed flag then we should not intercept.
  if (!last_postpone_scope) return false;
  last_postpone_scope->intercepted_flags_ |= flag;
  return true;
}
4143 } // namespace internal
4144 } // namespace v8
4145