1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/execution/isolate.h"
6
7#include <stdlib.h>
8
9#include <atomic>
10#include <cstdint>
11#include <fstream>
12#include <memory>
13#include <sstream>
14#include <string>
15#include <unordered_map>
16#include <utility>
17
18#include "include/v8-template.h"
19#include "src/api/api-inl.h"
20#include "src/ast/ast-value-factory.h"
21#include "src/ast/scopes.h"
22#include "src/base/hashmap.h"
23#include "src/base/logging.h"
24#include "src/base/platform/mutex.h"
25#include "src/base/platform/platform.h"
26#include "src/base/sys-info.h"
27#include "src/base/utils/random-number-generator.h"
28#include "src/baseline/baseline-batch-compiler.h"
29#include "src/bigint/bigint.h"
30#include "src/builtins/builtins-promise.h"
31#include "src/builtins/constants-table-builder.h"
32#include "src/codegen/assembler-inl.h"
33#include "src/codegen/compilation-cache.h"
34#include "src/codegen/flush-instruction-cache.h"
35#include "src/common/assert-scope.h"
36#include "src/common/ptr-compr-inl.h"
37#include "src/compiler-dispatcher/lazy-compile-dispatcher.h"
38#include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
39#include "src/date/date.h"
40#include "src/debug/debug-frames.h"
41#if V8_ENABLE_WEBASSEMBLY
42#include "src/debug/debug-wasm-objects.h"
43#endif  // V8_ENABLE_WEBASSEMBLY
44#include "src/debug/debug.h"
45#include "src/deoptimizer/deoptimizer.h"
46#include "src/deoptimizer/materialized-object-store.h"
47#include "src/diagnostics/basic-block-profiler.h"
48#include "src/diagnostics/compilation-statistics.h"
49#include "src/execution/frames-inl.h"
50#include "src/execution/frames.h"
51#include "src/execution/isolate-inl.h"
52#include "src/execution/local-isolate.h"
53#include "src/execution/messages.h"
54#include "src/execution/microtask-queue.h"
55#include "src/execution/protectors-inl.h"
56#include "src/execution/simulator.h"
57#include "src/execution/tiering-manager.h"
58#include "src/execution/v8threads.h"
59#include "src/execution/vm-state-inl.h"
60#include "src/handles/global-handles-inl.h"
61#include "src/handles/persistent-handles.h"
62#include "src/heap/heap-inl.h"
63#include "src/heap/heap.h"
64#include "src/heap/local-heap.h"
65#include "src/heap/parked-scope.h"
66#include "src/heap/read-only-heap.h"
67#include "src/heap/safepoint.h"
68#include "src/ic/stub-cache.h"
69#include "src/init/bootstrapper.h"
70#include "src/init/setup-isolate.h"
71#include "src/init/v8.h"
72#include "src/interpreter/interpreter.h"
73#include "src/libsampler/sampler.h"
74#include "src/logging/counters.h"
75#include "src/logging/log.h"
76#include "src/logging/metrics.h"
77#include "src/logging/runtime-call-stats-scope.h"
78#include "src/numbers/hash-seed-inl.h"
79#include "src/objects/backing-store.h"
80#include "src/objects/call-site-info-inl.h"
81#include "src/objects/elements.h"
82#include "src/objects/feedback-vector.h"
83#include "src/objects/hash-table-inl.h"
84#include "src/objects/js-array-buffer-inl.h"
85#include "src/objects/js-array-inl.h"
86#include "src/objects/js-generator-inl.h"
87#include "src/objects/js-weak-refs-inl.h"
88#include "src/objects/managed-inl.h"
89#include "src/objects/module-inl.h"
90#include "src/objects/promise-inl.h"
91#include "src/objects/prototype.h"
92#include "src/objects/slots.h"
93#include "src/objects/smi.h"
94#include "src/objects/source-text-module-inl.h"
95#include "src/objects/visitors.h"
96#include "src/profiler/heap-profiler.h"
97#include "src/profiler/tracing-cpu-profiler.h"
98#include "src/regexp/regexp-stack.h"
99#include "src/snapshot/embedded/embedded-data-inl.h"
100#include "src/snapshot/embedded/embedded-file-writer-interface.h"
101#include "src/snapshot/read-only-deserializer.h"
102#include "src/snapshot/shared-heap-deserializer.h"
103#include "src/snapshot/startup-deserializer.h"
104#include "src/strings/string-builder-inl.h"
105#include "src/strings/string-stream.h"
106#include "src/tasks/cancelable-task.h"
107#include "src/tracing/tracing-category-observer.h"
108#include "src/utils/address-map.h"
109#include "src/utils/ostreams.h"
110#include "src/utils/version.h"
111#include "src/zone/accounting-allocator.h"
112#include "src/zone/type-stats.h"
113#ifdef V8_INTL_SUPPORT
114#include "src/objects/intl-objects.h"
115#include "unicode/locid.h"
116#include "unicode/uobject.h"
117#endif  // V8_INTL_SUPPORT
118
119#if V8_ENABLE_MAGLEV
120#include "src/maglev/maglev-concurrent-dispatcher.h"
121#endif  // V8_ENABLE_MAGLEV
122
123#if V8_ENABLE_WEBASSEMBLY
124#include "src/trap-handler/trap-handler.h"
125#include "src/wasm/wasm-code-manager.h"
126#include "src/wasm/wasm-engine.h"
127#include "src/wasm/wasm-module.h"
128#include "src/wasm/wasm-objects.h"
129#endif  // V8_ENABLE_WEBASSEMBLY
130
131#if defined(V8_OS_WIN64)
132#include "src/diagnostics/unwinding-info-win64.h"
133#endif  // V8_OS_WIN64
134
135#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
136#include "src/base/platform/wrappers.h"
137#include "src/heap/conservative-stack-visitor.h"
138#endif
139
140#if USE_SIMULATOR
141#include "src/execution/simulator-base.h"
142#endif
143
144extern "C" const uint8_t* v8_Default_embedded_blob_code_;
145extern "C" uint32_t v8_Default_embedded_blob_code_size_;
146extern "C" const uint8_t* v8_Default_embedded_blob_data_;
147extern "C" uint32_t v8_Default_embedded_blob_data_size_;
148
149namespace v8 {
150namespace internal {
151
152#ifdef DEBUG
153#define TRACE_ISOLATE(tag)                                                  \
154  do {                                                                      \
155    if (FLAG_trace_isolates) {                                              \
156      PrintF("Isolate %p (id %d)" #tag "\n", reinterpret_cast<void*>(this), \
157             id());                                                         \
158    }                                                                       \
159  } while (false)
160#else
161#define TRACE_ISOLATE(tag)
162#endif
163
164const uint8_t* DefaultEmbeddedBlobCode() {
165  return v8_Default_embedded_blob_code_;
166}
167uint32_t DefaultEmbeddedBlobCodeSize() {
168  return v8_Default_embedded_blob_code_size_;
169}
170const uint8_t* DefaultEmbeddedBlobData() {
171  return v8_Default_embedded_blob_data_;
172}
173uint32_t DefaultEmbeddedBlobDataSize() {
174  return v8_Default_embedded_blob_data_size_;
175}
176
177namespace {
178// These variables provide access to the current embedded blob without requiring
179// an isolate instance. This is needed e.g. by Code::InstructionStart, which may
180// not have access to an isolate but still needs to access the embedded blob.
181// The variables are initialized by each isolate in Init(). Writes and reads are
182// relaxed since we can guarantee that the current thread has initialized these
183// variables before accessing them. Different threads may race, but this is fine
184// since they all attempt to set the same values of the blob pointer and size.
185
186std::atomic<const uint8_t*> current_embedded_blob_code_(nullptr);
187std::atomic<uint32_t> current_embedded_blob_code_size_(0);
188std::atomic<const uint8_t*> current_embedded_blob_data_(nullptr);
189std::atomic<uint32_t> current_embedded_blob_data_size_(0);
190
191// The various workflows around embedded snapshots are fairly complex. We need
192// to support plain old snapshot builds, nosnap builds, and the requirements of
193// subtly different serialization tests. There's two related knobs to twiddle:
194//
195// - The default embedded blob may be overridden by setting the sticky embedded
196// blob. This is set automatically whenever we create a new embedded blob.
197//
198// - Lifecycle management can be either manual or set to refcounting.
199//
200// A few situations to demonstrate their use:
201//
202// - A plain old snapshot build neither overrides the default blob nor
203// refcounts.
204//
205// - mksnapshot sets the sticky blob and manually frees the embedded
206// blob once done.
207//
208// - Most serializer tests do the same.
209//
210// - Nosnapshot builds set the sticky blob and enable refcounting.
211
212// This mutex protects access to the following variables:
213// - sticky_embedded_blob_code_
214// - sticky_embedded_blob_code_size_
215// - sticky_embedded_blob_data_
216// - sticky_embedded_blob_data_size_
217// - enable_embedded_blob_refcounting_
218// - current_embedded_blob_refs_
219base::LazyMutex current_embedded_blob_refcount_mutex_ = LAZY_MUTEX_INITIALIZER;
220
221const uint8_t* sticky_embedded_blob_code_ = nullptr;
222uint32_t sticky_embedded_blob_code_size_ = 0;
223const uint8_t* sticky_embedded_blob_data_ = nullptr;
224uint32_t sticky_embedded_blob_data_size_ = 0;
225
226bool enable_embedded_blob_refcounting_ = true;
227int current_embedded_blob_refs_ = 0;
228
229const uint8_t* StickyEmbeddedBlobCode() { return sticky_embedded_blob_code_; }
230uint32_t StickyEmbeddedBlobCodeSize() {
231  return sticky_embedded_blob_code_size_;
232}
233const uint8_t* StickyEmbeddedBlobData() { return sticky_embedded_blob_data_; }
234uint32_t StickyEmbeddedBlobDataSize() {
235  return sticky_embedded_blob_data_size_;
236}
237
238void SetStickyEmbeddedBlob(const uint8_t* code, uint32_t code_size,
239                           const uint8_t* data, uint32_t data_size) {
240  sticky_embedded_blob_code_ = code;
241  sticky_embedded_blob_code_size_ = code_size;
242  sticky_embedded_blob_data_ = data;
243  sticky_embedded_blob_data_size_ = data_size;
244}
245
246}  // namespace
247
248void DisableEmbeddedBlobRefcounting() {
249  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
250  enable_embedded_blob_refcounting_ = false;
251}
252
253void FreeCurrentEmbeddedBlob() {
254  CHECK(!enable_embedded_blob_refcounting_);
255  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
256
257  if (StickyEmbeddedBlobCode() == nullptr) return;
258
259  CHECK_EQ(StickyEmbeddedBlobCode(), Isolate::CurrentEmbeddedBlobCode());
260  CHECK_EQ(StickyEmbeddedBlobData(), Isolate::CurrentEmbeddedBlobData());
261
262  OffHeapInstructionStream::FreeOffHeapOffHeapInstructionStream(
263      const_cast<uint8_t*>(Isolate::CurrentEmbeddedBlobCode()),
264      Isolate::CurrentEmbeddedBlobCodeSize(),
265      const_cast<uint8_t*>(Isolate::CurrentEmbeddedBlobData()),
266      Isolate::CurrentEmbeddedBlobDataSize());
267
268  current_embedded_blob_code_.store(nullptr, std::memory_order_relaxed);
269  current_embedded_blob_code_size_.store(0, std::memory_order_relaxed);
270  current_embedded_blob_data_.store(nullptr, std::memory_order_relaxed);
271  current_embedded_blob_data_size_.store(0, std::memory_order_relaxed);
272  sticky_embedded_blob_code_ = nullptr;
273  sticky_embedded_blob_code_size_ = 0;
274  sticky_embedded_blob_data_ = nullptr;
275  sticky_embedded_blob_data_size_ = 0;
276}
277
278// static
279bool Isolate::CurrentEmbeddedBlobIsBinaryEmbedded() {
280  // In some situations, we must be able to rely on the embedded blob being
281  // immortal immovable. This is the case if the blob is binary-embedded.
282  // See blob lifecycle controls above for descriptions of when the current
283  // embedded blob may change (e.g. in tests or mksnapshot). If the blob is
284  // binary-embedded, it is immortal immovable.
285  const uint8_t* code =
286      current_embedded_blob_code_.load(std::memory_order_relaxed);
287  if (code == nullptr) return false;
288  return code == DefaultEmbeddedBlobCode();
289}
290
291void Isolate::SetEmbeddedBlob(const uint8_t* code, uint32_t code_size,
292                              const uint8_t* data, uint32_t data_size) {
293  CHECK_NOT_NULL(code);
294  CHECK_NOT_NULL(data);
295
296  embedded_blob_code_ = code;
297  embedded_blob_code_size_ = code_size;
298  embedded_blob_data_ = data;
299  embedded_blob_data_size_ = data_size;
300  current_embedded_blob_code_.store(code, std::memory_order_relaxed);
301  current_embedded_blob_code_size_.store(code_size, std::memory_order_relaxed);
302  current_embedded_blob_data_.store(data, std::memory_order_relaxed);
303  current_embedded_blob_data_size_.store(data_size, std::memory_order_relaxed);
304
305#ifdef DEBUG
306  // Verify that the contents of the embedded blob are unchanged from
307  // serialization-time, just to ensure the compiler isn't messing with us.
308  EmbeddedData d = EmbeddedData::FromBlob();
309  if (d.EmbeddedBlobDataHash() != d.CreateEmbeddedBlobDataHash()) {
310    FATAL(
311        "Embedded blob data section checksum verification failed. This "
312        "indicates that the embedded blob has been modified since compilation "
313        "time.");
314  }
315  if (FLAG_text_is_readable) {
316    if (d.EmbeddedBlobCodeHash() != d.CreateEmbeddedBlobCodeHash()) {
317      FATAL(
318          "Embedded blob code section checksum verification failed. This "
319          "indicates that the embedded blob has been modified since "
320          "compilation time. A common cause is a debugging breakpoint set "
321          "within builtin code.");
322    }
323  }
324#endif  // DEBUG
325}
326
327void Isolate::ClearEmbeddedBlob() {
328  CHECK(enable_embedded_blob_refcounting_);
329  CHECK_EQ(embedded_blob_code_, CurrentEmbeddedBlobCode());
330  CHECK_EQ(embedded_blob_code_, StickyEmbeddedBlobCode());
331  CHECK_EQ(embedded_blob_data_, CurrentEmbeddedBlobData());
332  CHECK_EQ(embedded_blob_data_, StickyEmbeddedBlobData());
333
334  embedded_blob_code_ = nullptr;
335  embedded_blob_code_size_ = 0;
336  embedded_blob_data_ = nullptr;
337  embedded_blob_data_size_ = 0;
338  current_embedded_blob_code_.store(nullptr, std::memory_order_relaxed);
339  current_embedded_blob_code_size_.store(0, std::memory_order_relaxed);
340  current_embedded_blob_data_.store(nullptr, std::memory_order_relaxed);
341  current_embedded_blob_data_size_.store(0, std::memory_order_relaxed);
342  sticky_embedded_blob_code_ = nullptr;
343  sticky_embedded_blob_code_size_ = 0;
344  sticky_embedded_blob_data_ = nullptr;
345  sticky_embedded_blob_data_size_ = 0;
346}
347
348const uint8_t* Isolate::embedded_blob_code() const {
349  return embedded_blob_code_;
350}
351uint32_t Isolate::embedded_blob_code_size() const {
352  return embedded_blob_code_size_;
353}
354const uint8_t* Isolate::embedded_blob_data() const {
355  return embedded_blob_data_;
356}
357uint32_t Isolate::embedded_blob_data_size() const {
358  return embedded_blob_data_size_;
359}
360
361// static
362const uint8_t* Isolate::CurrentEmbeddedBlobCode() {
363  return current_embedded_blob_code_.load(std::memory_order_relaxed);
364}
365
366// static
367uint32_t Isolate::CurrentEmbeddedBlobCodeSize() {
368  return current_embedded_blob_code_size_.load(std::memory_order_relaxed);
369}
370
371// static
372const uint8_t* Isolate::CurrentEmbeddedBlobData() {
373  return current_embedded_blob_data_.load(std::memory_order_relaxed);
374}
375
376// static
377uint32_t Isolate::CurrentEmbeddedBlobDataSize() {
378  return current_embedded_blob_data_size_.load(std::memory_order_relaxed);
379}
380
381// static
382base::AddressRegion Isolate::GetShortBuiltinsCallRegion() {
383  // Update calculations below if the assert fails.
384  STATIC_ASSERT(kMaxPCRelativeCodeRangeInMB <= 4096);
385  if (kMaxPCRelativeCodeRangeInMB == 0) {
386    // Return empty region if pc-relative calls/jumps are not supported.
387    return base::AddressRegion(kNullAddress, 0);
388  }
389  constexpr size_t max_size = std::numeric_limits<size_t>::max();
390  if (uint64_t{kMaxPCRelativeCodeRangeInMB} * MB > max_size) {
391    // The whole addressable space is reachable with pc-relative calls/jumps.
392    return base::AddressRegion(kNullAddress, max_size);
393  }
394  constexpr size_t radius = kMaxPCRelativeCodeRangeInMB * MB;
395
396  DCHECK_LT(CurrentEmbeddedBlobCodeSize(), radius);
397  Address embedded_blob_code_start =
398      reinterpret_cast<Address>(CurrentEmbeddedBlobCode());
399  if (embedded_blob_code_start == kNullAddress) {
400    // Return empty region if there's no embedded blob.
401    return base::AddressRegion(kNullAddress, 0);
402  }
403  Address embedded_blob_code_end =
404      embedded_blob_code_start + CurrentEmbeddedBlobCodeSize();
405  Address region_start =
406      (embedded_blob_code_end > radius) ? (embedded_blob_code_end - radius) : 0;
407  Address region_end = embedded_blob_code_start + radius;
408  if (region_end < embedded_blob_code_start) {
409    region_end = static_cast<Address>(-1);
410  }
411  return base::AddressRegion(region_start, region_end - region_start);
412}
413
414size_t Isolate::HashIsolateForEmbeddedBlob() {
415  DCHECK(builtins_.is_initialized());
416  DCHECK(Builtins::AllBuiltinsAreIsolateIndependent());
417
418  DisallowGarbageCollection no_gc;
419
420  static constexpr size_t kSeed = 0;
421  size_t hash = kSeed;
422
423  // Hash data sections of builtin code objects.
424  for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLast;
425       ++builtin) {
426    Code code = FromCodeT(builtins()->code(builtin));
427
428    DCHECK(Internals::HasHeapObjectTag(code.ptr()));
429    uint8_t* const code_ptr =
430        reinterpret_cast<uint8_t*>(code.ptr() - kHeapObjectTag);
431
432    // These static asserts ensure we don't miss relevant fields. We don't hash
433    // pointer compression base, instruction/metadata size value and flags since
434    // they change when creating the off-heap trampolines. Other data fields
435    // must remain the same.
436#ifdef V8_EXTERNAL_CODE_SPACE
437    STATIC_ASSERT(Code::kMainCageBaseUpper32BitsOffset == Code::kDataStart);
438    STATIC_ASSERT(Code::kInstructionSizeOffset ==
439                  Code::kMainCageBaseUpper32BitsOffsetEnd + 1);
440#else
441    STATIC_ASSERT(Code::kInstructionSizeOffset == Code::kDataStart);
442#endif  // V8_EXTERNAL_CODE_SPACE
443    STATIC_ASSERT(Code::kMetadataSizeOffset ==
444                  Code::kInstructionSizeOffsetEnd + 1);
445    STATIC_ASSERT(Code::kFlagsOffset == Code::kMetadataSizeOffsetEnd + 1);
446    STATIC_ASSERT(Code::kBuiltinIndexOffset == Code::kFlagsOffsetEnd + 1);
447    static constexpr int kStartOffset = Code::kBuiltinIndexOffset;
448
449    for (int j = kStartOffset; j < Code::kUnalignedHeaderSize; j++) {
450      hash = base::hash_combine(hash, size_t{code_ptr[j]});
451    }
452  }
453
454  // The builtins constants table is also tightly tied to embedded builtins.
455  hash = base::hash_combine(
456      hash, static_cast<size_t>(heap_.builtins_constants_table().length()));
457
458  return hash;
459}
460
461base::Thread::LocalStorageKey Isolate::isolate_key_;
462base::Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_;
463std::atomic<bool> Isolate::isolate_key_created_{false};
464
465namespace {
466// A global counter for all generated Isolates, might overflow.
467std::atomic<int> isolate_counter{0};
468}  // namespace
469
470Isolate::PerIsolateThreadData*
471Isolate::FindOrAllocatePerThreadDataForThisThread() {
472  ThreadId thread_id = ThreadId::Current();
473  PerIsolateThreadData* per_thread = nullptr;
474  {
475    base::MutexGuard lock_guard(&thread_data_table_mutex_);
476    per_thread = thread_data_table_.Lookup(thread_id);
477    if (per_thread == nullptr) {
478      if (FLAG_adjust_os_scheduling_parameters) {
479        base::OS::AdjustSchedulingParams();
480      }
481      per_thread = new PerIsolateThreadData(this, thread_id);
482      thread_data_table_.Insert(per_thread);
483    }
484    DCHECK(thread_data_table_.Lookup(thread_id) == per_thread);
485  }
486  return per_thread;
487}
488
489void Isolate::DiscardPerThreadDataForThisThread() {
490  ThreadId thread_id = ThreadId::TryGetCurrent();
491  if (thread_id.IsValid()) {
492    DCHECK_NE(thread_manager_->mutex_owner_.load(std::memory_order_relaxed),
493              thread_id);
494    base::MutexGuard lock_guard(&thread_data_table_mutex_);
495    PerIsolateThreadData* per_thread = thread_data_table_.Lookup(thread_id);
496    if (per_thread) {
497      DCHECK(!per_thread->thread_state_);
498      thread_data_table_.Remove(per_thread);
499    }
500  }
501}
502
503Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThisThread() {
504  ThreadId thread_id = ThreadId::Current();
505  return FindPerThreadDataForThread(thread_id);
506}
507
508Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThread(
509    ThreadId thread_id) {
510  PerIsolateThreadData* per_thread = nullptr;
511  {
512    base::MutexGuard lock_guard(&thread_data_table_mutex_);
513    per_thread = thread_data_table_.Lookup(thread_id);
514  }
515  return per_thread;
516}
517
518void Isolate::InitializeOncePerProcess() {
519  isolate_key_ = base::Thread::CreateThreadLocalKey();
520  bool expected = false;
521  CHECK(isolate_key_created_.compare_exchange_strong(
522      expected, true, std::memory_order_relaxed));
523  per_isolate_thread_data_key_ = base::Thread::CreateThreadLocalKey();
524
525  Heap::InitializeOncePerProcess();
526}
527
528void Isolate::DisposeOncePerProcess() {
529  base::Thread::DeleteThreadLocalKey(isolate_key_);
530  bool expected = true;
531  CHECK(isolate_key_created_.compare_exchange_strong(
532      expected, false, std::memory_order_relaxed));
533  base::Thread::DeleteThreadLocalKey(per_isolate_thread_data_key_);
534}
535
536Address Isolate::get_address_from_id(IsolateAddressId id) {
537  return isolate_addresses_[id];
538}
539
540char* Isolate::Iterate(RootVisitor* v, char* thread_storage) {
541  ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(thread_storage);
542  Iterate(v, thread);
543  return thread_storage + sizeof(ThreadLocalTop);
544}
545
546void Isolate::IterateThread(ThreadVisitor* v, char* t) {
547  ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(t);
548  v->VisitThread(this, thread);
549}
550
551void Isolate::Iterate(RootVisitor* v, ThreadLocalTop* thread) {
552  // Visit the roots from the top for a given thread.
553  v->VisitRootPointer(Root::kStackRoots, nullptr,
554                      FullObjectSlot(&thread->pending_exception_));
555  v->VisitRootPointer(Root::kStackRoots, nullptr,
556                      FullObjectSlot(&thread->pending_message_));
557  v->VisitRootPointer(Root::kStackRoots, nullptr,
558                      FullObjectSlot(&thread->context_));
559  v->VisitRootPointer(Root::kStackRoots, nullptr,
560                      FullObjectSlot(&thread->scheduled_exception_));
561
562  for (v8::TryCatch* block = thread->try_catch_handler_; block != nullptr;
563       block = block->next_) {
564    // TODO(3770): Make TryCatch::exception_ an Address (and message_obj_ too).
565    v->VisitRootPointer(
566        Root::kStackRoots, nullptr,
567        FullObjectSlot(reinterpret_cast<Address>(&(block->exception_))));
568    v->VisitRootPointer(
569        Root::kStackRoots, nullptr,
570        FullObjectSlot(reinterpret_cast<Address>(&(block->message_obj_))));
571  }
572
573#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
574  ConservativeStackVisitor stack_visitor(this, v);
575  thread_local_top()->stack_.IteratePointers(&stack_visitor);
576#endif
577
578  // Iterate over pointers on native execution stack.
579#if V8_ENABLE_WEBASSEMBLY
580  wasm::WasmCodeRefScope wasm_code_ref_scope;
581  if (FLAG_experimental_wasm_stack_switching) {
582    wasm::StackMemory* current = wasm_stacks_;
583    DCHECK_NOT_NULL(current);
584    do {
585      if (current->IsActive()) {
586        // The active stack's jump buffer does not match the current state, use
587        // the thread info below instead.
588        current = current->next();
589        continue;
590      }
591      for (StackFrameIterator it(this, current); !it.done(); it.Advance()) {
592        it.frame()->Iterate(v);
593      }
594      current = current->next();
595    } while (current != wasm_stacks_);
596  }
597#endif  // V8_ENABLE_WEBASSEMBLY
598  for (StackFrameIterator it(this, thread); !it.done(); it.Advance()) {
599    it.frame()->Iterate(v);
600  }
601}
602
603void Isolate::Iterate(RootVisitor* v) {
604  ThreadLocalTop* current_t = thread_local_top();
605  Iterate(v, current_t);
606}
607
608void Isolate::RegisterTryCatchHandler(v8::TryCatch* that) {
609  thread_local_top()->try_catch_handler_ = that;
610}
611
612void Isolate::UnregisterTryCatchHandler(v8::TryCatch* that) {
613  DCHECK(thread_local_top()->try_catch_handler_ == that);
614  thread_local_top()->try_catch_handler_ = that->next_;
615}
616
617Handle<String> Isolate::StackTraceString() {
618  if (stack_trace_nesting_level_ == 0) {
619    stack_trace_nesting_level_++;
620    HeapStringAllocator allocator;
621    StringStream::ClearMentionedObjectCache(this);
622    StringStream accumulator(&allocator);
623    incomplete_message_ = &accumulator;
624    PrintStack(&accumulator);
625    Handle<String> stack_trace = accumulator.ToString(this);
626    incomplete_message_ = nullptr;
627    stack_trace_nesting_level_ = 0;
628    return stack_trace;
629  } else if (stack_trace_nesting_level_ == 1) {
630    stack_trace_nesting_level_++;
631    base::OS::PrintError(
632        "\n\nAttempt to print stack while printing stack (double fault)\n");
633    base::OS::PrintError(
634        "If you are lucky you may find a partial stack dump on stdout.\n\n");
635    incomplete_message_->OutputToStdOut();
636    return factory()->empty_string();
637  } else {
638    base::OS::Abort();
639  }
640}
641
642void Isolate::PushStackTraceAndDie(void* ptr1, void* ptr2, void* ptr3,
643                                   void* ptr4) {
644  StackTraceFailureMessage message(this,
645                                   StackTraceFailureMessage::kIncludeStackTrace,
646                                   ptr1, ptr2, ptr3, ptr4);
647  message.Print();
648  base::OS::Abort();
649}
650
651void Isolate::PushParamsAndDie(void* ptr1, void* ptr2, void* ptr3, void* ptr4,
652                               void* ptr5, void* ptr6) {
653  StackTraceFailureMessage message(
654      this, StackTraceFailureMessage::kDontIncludeStackTrace, ptr1, ptr2, ptr3,
655      ptr4, ptr5, ptr6);
656  message.Print();
657  base::OS::Abort();
658}
659
660void StackTraceFailureMessage::Print() volatile {
661  // Print the details of this failure message object, including its own address
662  // to force stack allocation.
663  base::OS::PrintError(
664      "Stacktrace:\n    ptr1=%p\n    ptr2=%p\n    ptr3=%p\n    ptr4=%p\n    "
665      "ptr5=%p\n    ptr6=%p\n    failure_message_object=%p\n%s",
666      ptr1_, ptr2_, ptr3_, ptr4_, ptr5_, ptr6_, this, &js_stack_trace_[0]);
667}
668
669StackTraceFailureMessage::StackTraceFailureMessage(
670    Isolate* isolate, StackTraceFailureMessage::StackTraceMode mode, void* ptr1,
671    void* ptr2, void* ptr3, void* ptr4, void* ptr5, void* ptr6) {
672  isolate_ = isolate;
673  ptr1_ = ptr1;
674  ptr2_ = ptr2;
675  ptr3_ = ptr3;
676  ptr4_ = ptr4;
677  ptr5_ = ptr5;
678  ptr6_ = ptr6;
679  // Write a stracktrace into the {js_stack_trace_} buffer.
680  const size_t buffer_length = arraysize(js_stack_trace_);
681  memset(&js_stack_trace_, 0, buffer_length);
682  memset(&code_objects_, 0, sizeof(code_objects_));
683  if (mode == kIncludeStackTrace) {
684    FixedStringAllocator fixed(&js_stack_trace_[0], buffer_length - 1);
685    StringStream accumulator(&fixed, StringStream::kPrintObjectConcise);
686    isolate->PrintStack(&accumulator, Isolate::kPrintStackVerbose);
687    // Keeping a reference to the last code objects to increase likelyhood that
688    // they get included in the minidump.
689    const size_t code_objects_length = arraysize(code_objects_);
690    size_t i = 0;
691    StackFrameIterator it(isolate);
692    for (; !it.done() && i < code_objects_length; it.Advance()) {
693      code_objects_[i++] =
694          reinterpret_cast<void*>(it.frame()->unchecked_code().ptr());
695    }
696  }
697}
698
699bool NoExtension(const v8::FunctionCallbackInfo<v8::Value>&) { return false; }
700
701namespace {
702
703class CallSiteBuilder {
704 public:
705  CallSiteBuilder(Isolate* isolate, FrameSkipMode mode, int limit,
706                  Handle<Object> caller)
707      : isolate_(isolate),
708        mode_(mode),
709        limit_(limit),
710        caller_(caller),
711        skip_next_frame_(mode != SKIP_NONE) {
712    DCHECK_IMPLIES(mode_ == SKIP_UNTIL_SEEN, caller_->IsJSFunction());
713    // Modern web applications are usually built with multiple layers of
714    // framework and library code, and stack depth tends to be more than
715    // a dozen frames, so we over-allocate a bit here to avoid growing
716    // the elements array in the common case.
717    elements_ = isolate->factory()->NewFixedArray(std::min(64, limit));
718  }
719
720  bool Visit(FrameSummary const& summary) {
721    if (Full()) return false;
722#if V8_ENABLE_WEBASSEMBLY
723    if (summary.IsWasm()) {
724      AppendWasmFrame(summary.AsWasm());
725      return true;
726    }
727#endif  // V8_ENABLE_WEBASSEMBLY
728    AppendJavaScriptFrame(summary.AsJavaScript());
729    return true;
730  }
731
732  void AppendAsyncFrame(Handle<JSGeneratorObject> generator_object) {
733    Handle<JSFunction> function(generator_object->function(), isolate_);
734    if (!IsVisibleInStackTrace(function)) return;
735    int flags = CallSiteInfo::kIsAsync;
736    if (IsStrictFrame(function)) flags |= CallSiteInfo::kIsStrict;
737
738    Handle<Object> receiver(generator_object->receiver(), isolate_);
739    Handle<BytecodeArray> code(function->shared().GetBytecodeArray(isolate_),
740                               isolate_);
741    // The stored bytecode offset is relative to a different base than what
742    // is used in the source position table, hence the subtraction.
743    int offset = Smi::ToInt(generator_object->input_or_debug_pos()) -
744                 (BytecodeArray::kHeaderSize - kHeapObjectTag);
745
746    Handle<FixedArray> parameters = isolate_->factory()->empty_fixed_array();
747    if (V8_UNLIKELY(FLAG_detailed_error_stack_trace)) {
748      parameters = isolate_->factory()->CopyFixedArrayUpTo(
749          handle(generator_object->parameters_and_registers(), isolate_),
750          function->shared()
751              .internal_formal_parameter_count_without_receiver());
752    }
753
754    AppendFrame(receiver, function, code, offset, flags, parameters);
755  }
756
757  void AppendPromiseCombinatorFrame(Handle<JSFunction> element_function,
758                                    Handle<JSFunction> combinator) {
759    if (!IsVisibleInStackTrace(combinator)) return;
760    int flags =
761        CallSiteInfo::kIsAsync | CallSiteInfo::kIsSourcePositionComputed;
762
763    Handle<Object> receiver(combinator->native_context().promise_function(),
764                            isolate_);
765    // TODO(v8:11880): avoid roundtrips between cdc and code.
766    Handle<Code> code(FromCodeT(combinator->code()), isolate_);
767
768    // TODO(mmarchini) save Promises list from the Promise combinator
769    Handle<FixedArray> parameters = isolate_->factory()->empty_fixed_array();
770
771    // We store the offset of the promise into the element function's
772    // hash field for element callbacks.
773    int promise_index =
774        Smi::ToInt(Smi::cast(element_function->GetIdentityHash())) - 1;
775
776    AppendFrame(receiver, combinator, code, promise_index, flags, parameters);
777  }
778
779  void AppendJavaScriptFrame(
780      FrameSummary::JavaScriptFrameSummary const& summary) {
781    // Filter out internal frames that we do not want to show.
782    if (!IsVisibleInStackTrace(summary.function())) return;
783
784    int flags = 0;
785    Handle<JSFunction> function = summary.function();
786    if (IsStrictFrame(function)) flags |= CallSiteInfo::kIsStrict;
787    if (summary.is_constructor()) flags |= CallSiteInfo::kIsConstructor;
788
789    AppendFrame(summary.receiver(), function, summary.abstract_code(),
790                summary.code_offset(), flags, summary.parameters());
791  }
792
793#if V8_ENABLE_WEBASSEMBLY
794  void AppendWasmFrame(FrameSummary::WasmFrameSummary const& summary) {
795    if (summary.code()->kind() != wasm::WasmCode::kWasmFunction) return;
796    Handle<WasmInstanceObject> instance = summary.wasm_instance();
797    int flags = CallSiteInfo::kIsWasm;
798    if (instance->module_object().is_asm_js()) {
799      flags |= CallSiteInfo::kIsAsmJsWasm;
800      if (summary.at_to_number_conversion()) {
801        flags |= CallSiteInfo::kIsAsmJsAtNumberConversion;
802      }
803    }
804
805    auto code = Managed<wasm::GlobalWasmCodeRef>::Allocate(
806        isolate_, 0, summary.code(),
807        instance->module_object().shared_native_module());
808    AppendFrame(instance,
809                handle(Smi::FromInt(summary.function_index()), isolate_), code,
810                summary.code_offset(), flags,
811                isolate_->factory()->empty_fixed_array());
812  }
813#endif  // V8_ENABLE_WEBASSEMBLY
814
815  bool Full() { return index_ >= limit_; }
816
817  Handle<FixedArray> Build() {
818    return FixedArray::ShrinkOrEmpty(isolate_, elements_, index_);
819  }
820
821 private:
822  // Poison stack frames below the first strict mode frame.
823  // The stack trace API should not expose receivers and function
824  // objects on frames deeper than the top-most one with a strict mode
825  // function.
826  bool IsStrictFrame(Handle<JSFunction> function) {
827    if (!encountered_strict_function_) {
828      encountered_strict_function_ =
829          is_strict(function->shared().language_mode());
830    }
831    return encountered_strict_function_;
832  }
833
834  // Determines whether the given stack frame should be displayed in a stack
835  // trace.
836  bool IsVisibleInStackTrace(Handle<JSFunction> function) {
837    return ShouldIncludeFrame(function) && IsNotHidden(function);
838  }
839
840  // This mechanism excludes a number of uninteresting frames from the stack
841  // trace. This can be be the first frame (which will be a builtin-exit frame
842  // for the error constructor builtin) or every frame until encountering a
843  // user-specified function.
844  bool ShouldIncludeFrame(Handle<JSFunction> function) {
845    switch (mode_) {
846      case SKIP_NONE:
847        return true;
848      case SKIP_FIRST:
849        if (!skip_next_frame_) return true;
850        skip_next_frame_ = false;
851        return false;
852      case SKIP_UNTIL_SEEN:
853        if (skip_next_frame_ && (*function == *caller_)) {
854          skip_next_frame_ = false;
855          return false;
856        }
857        return !skip_next_frame_;
858    }
859    UNREACHABLE();
860  }
861
862  bool IsNotHidden(Handle<JSFunction> function) {
863    // TODO(szuend): Remove this check once the flag is enabled
864    //               by default.
865    if (!FLAG_experimental_stack_trace_frames &&
866        function->shared().IsApiFunction()) {
867      return false;
868    }
869    // Functions defined not in user scripts are not visible unless directly
870    // exposed, in which case the native flag is set.
871    // The --builtins-in-stack-traces command line flag allows including
872    // internal call sites in the stack trace for debugging purposes.
873    if (!FLAG_builtins_in_stack_traces &&
874        !function->shared().IsUserJavaScript()) {
875      return function->shared().native() || function->shared().IsApiFunction();
876    }
877    return true;
878  }
879
880  void AppendFrame(Handle<Object> receiver_or_instance, Handle<Object> function,
881                   Handle<HeapObject> code, int offset, int flags,
882                   Handle<FixedArray> parameters) {
883    if (receiver_or_instance->IsTheHole(isolate_)) {
884      // TODO(jgruber): Fix all cases in which frames give us a hole value
885      // (e.g. the receiver in RegExp constructor frames).
886      receiver_or_instance = isolate_->factory()->undefined_value();
887    }
888    auto info = isolate_->factory()->NewCallSiteInfo(
889        receiver_or_instance, function, code, offset, flags, parameters);
890    elements_ = FixedArray::SetAndGrow(isolate_, elements_, index_++, info);
891  }
892
893  Isolate* isolate_;
894  const FrameSkipMode mode_;
895  int index_ = 0;
896  const int limit_;
897  const Handle<Object> caller_;
898  bool skip_next_frame_;
899  bool encountered_strict_function_ = false;
900  Handle<FixedArray> elements_;
901};
902
903bool GetStackTraceLimit(Isolate* isolate, int* result) {
904  if (FLAG_correctness_fuzzer_suppressions) return false;
905  Handle<JSObject> error = isolate->error_function();
906
907  Handle<String> key = isolate->factory()->stackTraceLimit_string();
908  Handle<Object> stack_trace_limit =
909      JSReceiver::GetDataProperty(isolate, error, key);
910  if (!stack_trace_limit->IsNumber()) return false;
911
912  // Ensure that limit is not negative.
913  *result = std::max(FastD2IChecked(stack_trace_limit->Number()), 0);
914
915  if (*result != FLAG_stack_trace_limit) {
916    isolate->CountUsage(v8::Isolate::kErrorStackTraceLimit);
917  }
918
919  return true;
920}
921
922bool IsBuiltinFunction(Isolate* isolate, HeapObject object, Builtin builtin) {
923  if (!object.IsJSFunction()) return false;
924  JSFunction const function = JSFunction::cast(object);
925  return function.code() == isolate->builtins()->code(builtin);
926}
927
928void CaptureAsyncStackTrace(Isolate* isolate, Handle<JSPromise> promise,
929                            CallSiteBuilder* builder) {
930  while (!builder->Full()) {
931    // Check that the {promise} is not settled.
932    if (promise->status() != Promise::kPending) return;
933
934    // Check that we have exactly one PromiseReaction on the {promise}.
935    if (!promise->reactions().IsPromiseReaction()) return;
936    Handle<PromiseReaction> reaction(
937        PromiseReaction::cast(promise->reactions()), isolate);
938    if (!reaction->next().IsSmi()) return;
939
940    // Check if the {reaction} has one of the known async function or
941    // async generator continuations as its fulfill handler.
942    if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
943                          Builtin::kAsyncFunctionAwaitResolveClosure) ||
944        IsBuiltinFunction(isolate, reaction->fulfill_handler(),
945                          Builtin::kAsyncGeneratorAwaitResolveClosure) ||
946        IsBuiltinFunction(isolate, reaction->fulfill_handler(),
947                          Builtin::kAsyncGeneratorYieldResolveClosure)) {
948      // Now peek into the handlers' AwaitContext to get to
949      // the JSGeneratorObject for the async function.
950      Handle<Context> context(
951          JSFunction::cast(reaction->fulfill_handler()).context(), isolate);
952      Handle<JSGeneratorObject> generator_object(
953          JSGeneratorObject::cast(context->extension()), isolate);
954      CHECK(generator_object->is_suspended());
955
956      // Append async frame corresponding to the {generator_object}.
957      builder->AppendAsyncFrame(generator_object);
958
959      // Try to continue from here.
960      if (generator_object->IsJSAsyncFunctionObject()) {
961        Handle<JSAsyncFunctionObject> async_function_object =
962            Handle<JSAsyncFunctionObject>::cast(generator_object);
963        promise = handle(async_function_object->promise(), isolate);
964      } else {
965        Handle<JSAsyncGeneratorObject> async_generator_object =
966            Handle<JSAsyncGeneratorObject>::cast(generator_object);
967        if (async_generator_object->queue().IsUndefined(isolate)) return;
968        Handle<AsyncGeneratorRequest> async_generator_request(
969            AsyncGeneratorRequest::cast(async_generator_object->queue()),
970            isolate);
971        promise = handle(JSPromise::cast(async_generator_request->promise()),
972                         isolate);
973      }
974    } else if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
975                                 Builtin::kPromiseAllResolveElementClosure)) {
976      Handle<JSFunction> function(JSFunction::cast(reaction->fulfill_handler()),
977                                  isolate);
978      Handle<Context> context(function->context(), isolate);
979      Handle<JSFunction> combinator(context->native_context().promise_all(),
980                                    isolate);
981      builder->AppendPromiseCombinatorFrame(function, combinator);
982
983      // Now peak into the Promise.all() resolve element context to
984      // find the promise capability that's being resolved when all
985      // the concurrent promises resolve.
986      int const index =
987          PromiseBuiltins::kPromiseAllResolveElementCapabilitySlot;
988      Handle<PromiseCapability> capability(
989          PromiseCapability::cast(context->get(index)), isolate);
990      if (!capability->promise().IsJSPromise()) return;
991      promise = handle(JSPromise::cast(capability->promise()), isolate);
992    } else if (IsBuiltinFunction(
993                   isolate, reaction->fulfill_handler(),
994                   Builtin::kPromiseAllSettledResolveElementClosure)) {
995      Handle<JSFunction> function(JSFunction::cast(reaction->fulfill_handler()),
996                                  isolate);
997      Handle<Context> context(function->context(), isolate);
998      Handle<JSFunction> combinator(
999          context->native_context().promise_all_settled(), isolate);
1000      builder->AppendPromiseCombinatorFrame(function, combinator);
1001
1002      // Now peak into the Promise.allSettled() resolve element context to
1003      // find the promise capability that's being resolved when all
1004      // the concurrent promises resolve.
1005      int const index =
1006          PromiseBuiltins::kPromiseAllResolveElementCapabilitySlot;
1007      Handle<PromiseCapability> capability(
1008          PromiseCapability::cast(context->get(index)), isolate);
1009      if (!capability->promise().IsJSPromise()) return;
1010      promise = handle(JSPromise::cast(capability->promise()), isolate);
1011    } else if (IsBuiltinFunction(isolate, reaction->reject_handler(),
1012                                 Builtin::kPromiseAnyRejectElementClosure)) {
1013      Handle<JSFunction> function(JSFunction::cast(reaction->reject_handler()),
1014                                  isolate);
1015      Handle<Context> context(function->context(), isolate);
1016      Handle<JSFunction> combinator(context->native_context().promise_any(),
1017                                    isolate);
1018      builder->AppendPromiseCombinatorFrame(function, combinator);
1019
1020      // Now peak into the Promise.any() reject element context to
1021      // find the promise capability that's being resolved when any of
1022      // the concurrent promises resolve.
1023      int const index = PromiseBuiltins::kPromiseAnyRejectElementCapabilitySlot;
1024      Handle<PromiseCapability> capability(
1025          PromiseCapability::cast(context->get(index)), isolate);
1026      if (!capability->promise().IsJSPromise()) return;
1027      promise = handle(JSPromise::cast(capability->promise()), isolate);
1028    } else if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
1029                                 Builtin::kPromiseCapabilityDefaultResolve)) {
1030      Handle<JSFunction> function(JSFunction::cast(reaction->fulfill_handler()),
1031                                  isolate);
1032      Handle<Context> context(function->context(), isolate);
1033      promise =
1034          handle(JSPromise::cast(context->get(PromiseBuiltins::kPromiseSlot)),
1035                 isolate);
1036    } else {
1037      // We have some generic promise chain here, so try to
1038      // continue with the chained promise on the reaction
1039      // (only works for native promise chains).
1040      Handle<HeapObject> promise_or_capability(
1041          reaction->promise_or_capability(), isolate);
1042      if (promise_or_capability->IsJSPromise()) {
1043        promise = Handle<JSPromise>::cast(promise_or_capability);
1044      } else if (promise_or_capability->IsPromiseCapability()) {
1045        Handle<PromiseCapability> capability =
1046            Handle<PromiseCapability>::cast(promise_or_capability);
1047        if (!capability->promise().IsJSPromise()) return;
1048        promise = handle(JSPromise::cast(capability->promise()), isolate);
1049      } else {
1050        // Otherwise the {promise_or_capability} must be undefined here.
1051        CHECK(promise_or_capability->IsUndefined(isolate));
1052        return;
1053      }
1054    }
1055  }
1056}
1057
1058void CaptureAsyncStackTrace(Isolate* isolate, CallSiteBuilder* builder) {
1059  Handle<Object> current_microtask = isolate->factory()->current_microtask();
1060  if (current_microtask->IsPromiseReactionJobTask()) {
1061    Handle<PromiseReactionJobTask> promise_reaction_job_task =
1062        Handle<PromiseReactionJobTask>::cast(current_microtask);
1063    // Check if the {reaction} has one of the known async function or
1064    // async generator continuations as its fulfill handler.
1065    if (IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
1066                          Builtin::kAsyncFunctionAwaitResolveClosure) ||
1067        IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
1068                          Builtin::kAsyncGeneratorAwaitResolveClosure) ||
1069        IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
1070                          Builtin::kAsyncGeneratorYieldResolveClosure) ||
1071        IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
1072                          Builtin::kAsyncFunctionAwaitRejectClosure) ||
1073        IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
1074                          Builtin::kAsyncGeneratorAwaitRejectClosure)) {
1075      // Now peek into the handlers' AwaitContext to get to
1076      // the JSGeneratorObject for the async function.
1077      Handle<Context> context(
1078          JSFunction::cast(promise_reaction_job_task->handler()).context(),
1079          isolate);
1080      Handle<JSGeneratorObject> generator_object(
1081          JSGeneratorObject::cast(context->extension()), isolate);
1082      if (generator_object->is_executing()) {
1083        if (generator_object->IsJSAsyncFunctionObject()) {
1084          Handle<JSAsyncFunctionObject> async_function_object =
1085              Handle<JSAsyncFunctionObject>::cast(generator_object);
1086          Handle<JSPromise> promise(async_function_object->promise(), isolate);
1087          CaptureAsyncStackTrace(isolate, promise, builder);
1088        } else {
1089          Handle<JSAsyncGeneratorObject> async_generator_object =
1090              Handle<JSAsyncGeneratorObject>::cast(generator_object);
1091          Handle<Object> queue(async_generator_object->queue(), isolate);
1092          if (!queue->IsUndefined(isolate)) {
1093            Handle<AsyncGeneratorRequest> async_generator_request =
1094                Handle<AsyncGeneratorRequest>::cast(queue);
1095            Handle<JSPromise> promise(
1096                JSPromise::cast(async_generator_request->promise()), isolate);
1097            CaptureAsyncStackTrace(isolate, promise, builder);
1098          }
1099        }
1100      }
1101    } else {
1102      // The {promise_reaction_job_task} doesn't belong to an await (or
1103      // yield inside an async generator), but we might still be able to
1104      // find an async frame if we follow along the chain of promises on
1105      // the {promise_reaction_job_task}.
1106      Handle<HeapObject> promise_or_capability(
1107          promise_reaction_job_task->promise_or_capability(), isolate);
1108      if (promise_or_capability->IsJSPromise()) {
1109        Handle<JSPromise> promise =
1110            Handle<JSPromise>::cast(promise_or_capability);
1111        CaptureAsyncStackTrace(isolate, promise, builder);
1112      }
1113    }
1114  }
1115}
1116
1117template <typename Visitor>
1118void VisitStack(Isolate* isolate, Visitor* visitor,
1119                StackTrace::StackTraceOptions options = StackTrace::kDetailed) {
1120  DisallowJavascriptExecution no_js(isolate);
1121  for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
1122    StackFrame* frame = it.frame();
1123    switch (frame->type()) {
1124      case StackFrame::BUILTIN_EXIT:
1125      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION:
1126      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
1127      case StackFrame::OPTIMIZED:
1128      case StackFrame::INTERPRETED:
1129      case StackFrame::BASELINE:
1130      case StackFrame::BUILTIN:
1131#if V8_ENABLE_WEBASSEMBLY
1132      case StackFrame::WASM:
1133#endif  // V8_ENABLE_WEBASSEMBLY
1134      {
1135        // A standard frame may include many summarized frames (due to
1136        // inlining).
1137        std::vector<FrameSummary> summaries;
1138        CommonFrame::cast(frame)->Summarize(&summaries);
1139        for (auto rit = summaries.rbegin(); rit != summaries.rend(); ++rit) {
1140          FrameSummary& summary = *rit;
1141          // Skip frames from other origins when asked to do so.
1142          if (!(options & StackTrace::kExposeFramesAcrossSecurityOrigins) &&
1143              !summary.native_context()->HasSameSecurityTokenAs(
1144                  isolate->context())) {
1145            continue;
1146          }
1147          if (!visitor->Visit(summary)) return;
1148        }
1149        break;
1150      }
1151
1152      default:
1153        break;
1154    }
1155  }
1156}
1157
1158Handle<FixedArray> CaptureSimpleStackTrace(Isolate* isolate, int limit,
1159                                           FrameSkipMode mode,
1160                                           Handle<Object> caller) {
1161  TRACE_EVENT_BEGIN1(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__,
1162                     "maxFrameCount", limit);
1163
1164#if V8_ENABLE_WEBASSEMBLY
1165  wasm::WasmCodeRefScope code_ref_scope;
1166#endif  // V8_ENABLE_WEBASSEMBLY
1167
1168  CallSiteBuilder builder(isolate, mode, limit, caller);
1169  VisitStack(isolate, &builder);
1170
1171  // If --async-stack-traces are enabled and the "current microtask" is a
1172  // PromiseReactionJobTask, we try to enrich the stack trace with async
1173  // frames.
1174  if (FLAG_async_stack_traces) {
1175    CaptureAsyncStackTrace(isolate, &builder);
1176  }
1177
1178  Handle<FixedArray> stack_trace = builder.Build();
1179  TRACE_EVENT_END1(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__,
1180                   "frameCount", stack_trace->length());
1181  return stack_trace;
1182}
1183
1184}  // namespace
1185
1186MaybeHandle<JSObject> Isolate::CaptureAndSetErrorStack(
1187    Handle<JSObject> error_object, FrameSkipMode mode, Handle<Object> caller) {
1188  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__);
1189  Handle<Object> error_stack = factory()->undefined_value();
1190
1191  // Capture the "simple stack trace" for the error.stack property,
1192  // which can be disabled by setting Error.stackTraceLimit to a non
1193  // number value or simply deleting the property. If the inspector
1194  // is active, and requests more stack frames than the JavaScript
1195  // program itself, we collect up to the maximum.
1196  int stack_trace_limit = 0;
1197  if (GetStackTraceLimit(this, &stack_trace_limit)) {
1198    int limit = stack_trace_limit;
1199    if (capture_stack_trace_for_uncaught_exceptions_ &&
1200        !(stack_trace_for_uncaught_exceptions_options_ &
1201          StackTrace::kExposeFramesAcrossSecurityOrigins)) {
1202      // Collect up to the maximum of what the JavaScript program and
1203      // the inspector want. There's a special case here where the API
1204      // can ask the stack traces to also include cross-origin frames,
1205      // in which case we collect a separate trace below. Note that
1206      // the inspector doesn't use this option, so we could as well
1207      // just deprecate this in the future.
1208      if (limit < stack_trace_for_uncaught_exceptions_frame_limit_) {
1209        limit = stack_trace_for_uncaught_exceptions_frame_limit_;
1210      }
1211    }
1212    error_stack = CaptureSimpleStackTrace(this, limit, mode, caller);
1213  }
1214
1215  // Next is the inspector part: Depending on whether we got a "simple
1216  // stack trace" above and whether that's usable (meaning the API
1217  // didn't request to include cross-origin frames), we remember the
1218  // cap for the stack trace (either a positive limit indicating that
1219  // the Error.stackTraceLimit value was below what was requested via
1220  // the API, or a negative limit to indicate the opposite), or we
1221  // collect a "detailed stack trace" eagerly and stash that away.
1222  if (capture_stack_trace_for_uncaught_exceptions_) {
1223    Handle<Object> limit_or_stack_frame_infos;
1224    if (error_stack->IsUndefined(this) ||
1225        (stack_trace_for_uncaught_exceptions_options_ &
1226         StackTrace::kExposeFramesAcrossSecurityOrigins)) {
1227      limit_or_stack_frame_infos = CaptureDetailedStackTrace(
1228          stack_trace_for_uncaught_exceptions_frame_limit_,
1229          stack_trace_for_uncaught_exceptions_options_);
1230    } else {
1231      int limit =
1232          stack_trace_limit > stack_trace_for_uncaught_exceptions_frame_limit_
1233              ? -stack_trace_for_uncaught_exceptions_frame_limit_
1234              : stack_trace_limit;
1235      limit_or_stack_frame_infos = handle(Smi::FromInt(limit), this);
1236    }
1237    error_stack =
1238        factory()->NewErrorStackData(error_stack, limit_or_stack_frame_infos);
1239  }
1240
1241  RETURN_ON_EXCEPTION(
1242      this,
1243      JSObject::SetProperty(this, error_object, factory()->error_stack_symbol(),
1244                            error_stack, StoreOrigin::kMaybeKeyed,
1245                            Just(ShouldThrow::kThrowOnError)),
1246      JSObject);
1247  return error_object;
1248}
1249
1250Handle<FixedArray> Isolate::GetDetailedStackTrace(
1251    Handle<JSReceiver> error_object) {
1252  Handle<Object> error_stack = JSReceiver::GetDataProperty(
1253      this, error_object, factory()->error_stack_symbol());
1254  if (!error_stack->IsErrorStackData()) {
1255    return Handle<FixedArray>();
1256  }
1257  Handle<ErrorStackData> error_stack_data =
1258      Handle<ErrorStackData>::cast(error_stack);
1259  ErrorStackData::EnsureStackFrameInfos(this, error_stack_data);
1260  if (!error_stack_data->limit_or_stack_frame_infos().IsFixedArray()) {
1261    return Handle<FixedArray>();
1262  }
1263  return handle(
1264      FixedArray::cast(error_stack_data->limit_or_stack_frame_infos()), this);
1265}
1266
1267Handle<FixedArray> Isolate::GetSimpleStackTrace(
1268    Handle<JSReceiver> error_object) {
1269  Handle<Object> error_stack = JSReceiver::GetDataProperty(
1270      this, error_object, factory()->error_stack_symbol());
1271  if (error_stack->IsFixedArray()) {
1272    return Handle<FixedArray>::cast(error_stack);
1273  }
1274  if (!error_stack->IsErrorStackData()) {
1275    return factory()->empty_fixed_array();
1276  }
1277  Handle<ErrorStackData> error_stack_data =
1278      Handle<ErrorStackData>::cast(error_stack);
1279  if (!error_stack_data->HasCallSiteInfos()) {
1280    return factory()->empty_fixed_array();
1281  }
1282  return handle(error_stack_data->call_site_infos(), this);
1283}
1284
1285Address Isolate::GetAbstractPC(int* line, int* column) {
1286  JavaScriptFrameIterator it(this);
1287
1288  if (it.done()) {
1289    *line = -1;
1290    *column = -1;
1291    return kNullAddress;
1292  }
1293  JavaScriptFrame* frame = it.frame();
1294  DCHECK(!frame->is_builtin());
1295
1296  Handle<SharedFunctionInfo> shared = handle(frame->function().shared(), this);
1297  SharedFunctionInfo::EnsureSourcePositionsAvailable(this, shared);
1298  int position = frame->position();
1299
1300  Object maybe_script = frame->function().shared().script();
1301  if (maybe_script.IsScript()) {
1302    Handle<Script> script(Script::cast(maybe_script), this);
1303    Script::PositionInfo info;
1304    Script::GetPositionInfo(script, position, &info, Script::WITH_OFFSET);
1305    *line = info.line + 1;
1306    *column = info.column + 1;
1307  } else {
1308    *line = position;
1309    *column = -1;
1310  }
1311
1312  if (frame->is_unoptimized()) {
1313    UnoptimizedFrame* iframe = static_cast<UnoptimizedFrame*>(frame);
1314    Address bytecode_start =
1315        iframe->GetBytecodeArray().GetFirstBytecodeAddress();
1316    return bytecode_start + iframe->GetBytecodeOffset();
1317  }
1318
1319  return frame->pc();
1320}
1321
1322namespace {
1323
1324class StackFrameBuilder {
1325 public:
1326  StackFrameBuilder(Isolate* isolate, int limit)
1327      : isolate_(isolate),
1328        frames_(isolate_->factory()->empty_fixed_array()),
1329        index_(0),
1330        limit_(limit) {}
1331
1332  bool Visit(FrameSummary& summary) {
1333    // Check if we have enough capacity left.
1334    if (index_ >= limit_) return false;
1335    // Skip frames that aren't subject to debugging.
1336    if (!summary.is_subject_to_debugging()) return true;
1337    Handle<StackFrameInfo> frame = summary.CreateStackFrameInfo();
1338    frames_ = FixedArray::SetAndGrow(isolate_, frames_, index_++, frame);
1339    return true;
1340  }
1341
1342  Handle<FixedArray> Build() {
1343    return FixedArray::ShrinkOrEmpty(isolate_, frames_, index_);
1344  }
1345
1346 private:
1347  Isolate* isolate_;
1348  Handle<FixedArray> frames_;
1349  int index_;
1350  int limit_;
1351};
1352
1353}  // namespace
1354
1355Handle<FixedArray> Isolate::CaptureDetailedStackTrace(
1356    int limit, StackTrace::StackTraceOptions options) {
1357  TRACE_EVENT_BEGIN1(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__,
1358                     "maxFrameCount", limit);
1359  StackFrameBuilder builder(this, limit);
1360  VisitStack(this, &builder, options);
1361  Handle<FixedArray> stack_trace = builder.Build();
1362  TRACE_EVENT_END1(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__,
1363                   "frameCount", stack_trace->length());
1364  return stack_trace;
1365}
1366
1367namespace {
1368
1369class CurrentScriptNameStackVisitor {
1370 public:
1371  explicit CurrentScriptNameStackVisitor(Isolate* isolate)
1372      : isolate_(isolate) {}
1373
1374  bool Visit(FrameSummary& summary) {
1375    // Skip frames that aren't subject to debugging. Keep this in sync with
1376    // StackFrameBuilder::Visit so both visitors visit the same frames.
1377    if (!summary.is_subject_to_debugging()) return true;
1378
1379    // Frames that are subject to debugging always have a valid script object.
1380    Handle<Script> script = Handle<Script>::cast(summary.script());
1381    Handle<Object> name_or_url_obj =
1382        handle(script->GetNameOrSourceURL(), isolate_);
1383    if (!name_or_url_obj->IsString()) return true;
1384
1385    Handle<String> name_or_url = Handle<String>::cast(name_or_url_obj);
1386    if (!name_or_url->length()) return true;
1387
1388    name_or_url_ = name_or_url;
1389    return false;
1390  }
1391
1392  Handle<String> CurrentScriptNameOrSourceURL() const { return name_or_url_; }
1393
1394 private:
1395  Isolate* const isolate_;
1396  Handle<String> name_or_url_;
1397};
1398
1399}  // namespace
1400
1401Handle<String> Isolate::CurrentScriptNameOrSourceURL() {
1402  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__);
1403  CurrentScriptNameStackVisitor visitor(this);
1404  VisitStack(this, &visitor);
1405  return visitor.CurrentScriptNameOrSourceURL();
1406}
1407
1408void Isolate::PrintStack(FILE* out, PrintStackMode mode) {
1409  if (stack_trace_nesting_level_ == 0) {
1410    stack_trace_nesting_level_++;
1411    StringStream::ClearMentionedObjectCache(this);
1412    HeapStringAllocator allocator;
1413    StringStream accumulator(&allocator);
1414    incomplete_message_ = &accumulator;
1415    PrintStack(&accumulator, mode);
1416    accumulator.OutputToFile(out);
1417    InitializeLoggingAndCounters();
1418    accumulator.Log(this);
1419    incomplete_message_ = nullptr;
1420    stack_trace_nesting_level_ = 0;
1421  } else if (stack_trace_nesting_level_ == 1) {
1422    stack_trace_nesting_level_++;
1423    base::OS::PrintError(
1424        "\n\nAttempt to print stack while printing stack (double fault)\n");
1425    base::OS::PrintError(
1426        "If you are lucky you may find a partial stack dump on stdout.\n\n");
1427    incomplete_message_->OutputToFile(out);
1428  }
1429}
1430
1431static void PrintFrames(Isolate* isolate, StringStream* accumulator,
1432                        StackFrame::PrintMode mode) {
1433  StackFrameIterator it(isolate);
1434  for (int i = 0; !it.done(); it.Advance()) {
1435    it.frame()->Print(accumulator, mode, i++);
1436  }
1437}
1438
1439void Isolate::PrintStack(StringStream* accumulator, PrintStackMode mode) {
1440  HandleScope scope(this);
1441  DCHECK(accumulator->IsMentionedObjectCacheClear(this));
1442
1443  // Avoid printing anything if there are no frames.
1444  if (c_entry_fp(thread_local_top()) == 0) return;
1445
1446  accumulator->Add(
1447      "\n==== JS stack trace =========================================\n\n");
1448  PrintFrames(this, accumulator, StackFrame::OVERVIEW);
1449  if (mode == kPrintStackVerbose) {
1450    accumulator->Add(
1451        "\n==== Details ================================================\n\n");
1452    PrintFrames(this, accumulator, StackFrame::DETAILS);
1453    accumulator->PrintMentionedObjectCache(this);
1454  }
1455  accumulator->Add("=====================\n\n");
1456}
1457
1458void Isolate::SetFailedAccessCheckCallback(
1459    v8::FailedAccessCheckCallback callback) {
1460  thread_local_top()->failed_access_check_callback_ = callback;
1461}
1462
1463void Isolate::ReportFailedAccessCheck(Handle<JSObject> receiver) {
1464  if (!thread_local_top()->failed_access_check_callback_) {
1465    return ScheduleThrow(*factory()->NewTypeError(MessageTemplate::kNoAccess));
1466  }
1467
1468  DCHECK(receiver->IsAccessCheckNeeded());
1469  DCHECK(!context().is_null());
1470
1471  // Get the data object from access check info.
1472  HandleScope scope(this);
1473  Handle<Object> data;
1474  {
1475    DisallowGarbageCollection no_gc;
1476    AccessCheckInfo access_check_info = AccessCheckInfo::Get(this, receiver);
1477    if (access_check_info.is_null()) {
1478      no_gc.Release();
1479      return ScheduleThrow(
1480          *factory()->NewTypeError(MessageTemplate::kNoAccess));
1481    }
1482    data = handle(access_check_info.data(), this);
1483  }
1484
1485  // Leaving JavaScript.
1486  VMState<EXTERNAL> state(this);
1487  thread_local_top()->failed_access_check_callback_(
1488      v8::Utils::ToLocal(receiver), v8::ACCESS_HAS, v8::Utils::ToLocal(data));
1489}
1490
1491bool Isolate::MayAccess(Handle<Context> accessing_context,
1492                        Handle<JSObject> receiver) {
1493  DCHECK(receiver->IsJSGlobalProxy() || receiver->IsAccessCheckNeeded());
1494
1495  // Check for compatibility between the security tokens in the
1496  // current lexical context and the accessed object.
1497
1498  // During bootstrapping, callback functions are not enabled yet.
1499  if (bootstrapper()->IsActive()) return true;
1500  {
1501    DisallowGarbageCollection no_gc;
1502
1503    if (receiver->IsJSGlobalProxy()) {
1504      Object receiver_context = JSGlobalProxy::cast(*receiver).native_context();
1505      if (!receiver_context.IsContext()) return false;
1506
1507      // Get the native context of current top context.
1508      // avoid using Isolate::native_context() because it uses Handle.
1509      Context native_context =
1510          accessing_context->global_object().native_context();
1511      if (receiver_context == native_context) return true;
1512
1513      if (Context::cast(receiver_context).security_token() ==
1514          native_context.security_token())
1515        return true;
1516    }
1517  }
1518
1519  HandleScope scope(this);
1520  Handle<Object> data;
1521  v8::AccessCheckCallback callback = nullptr;
1522  {
1523    DisallowGarbageCollection no_gc;
1524    AccessCheckInfo access_check_info = AccessCheckInfo::Get(this, receiver);
1525    if (access_check_info.is_null()) return false;
1526    Object fun_obj = access_check_info.callback();
1527    callback = v8::ToCData<v8::AccessCheckCallback>(fun_obj);
1528    data = handle(access_check_info.data(), this);
1529  }
1530
1531  {
1532    // Leaving JavaScript.
1533    VMState<EXTERNAL> state(this);
1534    return callback(v8::Utils::ToLocal(accessing_context),
1535                    v8::Utils::ToLocal(receiver), v8::Utils::ToLocal(data));
1536  }
1537}
1538
1539Object Isolate::StackOverflow() {
1540  // Whoever calls this method should not have overflown the stack limit by too
1541  // much. Otherwise we risk actually running out of stack space.
1542  // We allow for up to 8kB overflow, because we typically allow up to 4KB
1543  // overflow per frame in generated code, but might call through more smaller
1544  // frames until we reach this method.
1545  // If this DCHECK fails, one of the frames on the stack should be augmented by
1546  // an additional stack check.
1547#if defined(V8_USE_ADDRESS_SANITIZER) || defined(MEMORY_SANITIZER)
1548  // Allow for a bit more overflow in sanitizer builds, because C++ frames take
1549  // significantly more space there.
1550  DCHECK_GE(GetCurrentStackPosition(), stack_guard()->real_climit() - 32 * KB);
1551#else
1552  DCHECK_GE(GetCurrentStackPosition(), stack_guard()->real_climit() - 8 * KB);
1553#endif
1554
1555  if (FLAG_correctness_fuzzer_suppressions) {
1556    FATAL("Aborting on stack overflow");
1557  }
1558
1559  DisallowJavascriptExecution no_js(this);
1560  HandleScope scope(this);
1561
1562  Handle<JSFunction> fun = range_error_function();
1563  Handle<Object> msg = factory()->NewStringFromAsciiChecked(
1564      MessageFormatter::TemplateString(MessageTemplate::kStackOverflow));
1565  Handle<Object> options = factory()->undefined_value();
1566  Handle<Object> no_caller;
1567  Handle<JSObject> exception;
1568  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
1569      this, exception,
1570      ErrorUtils::Construct(this, fun, fun, msg, options, SKIP_NONE, no_caller,
1571                            ErrorUtils::StackTraceCollection::kEnabled));
1572  JSObject::AddProperty(this, exception, factory()->wasm_uncatchable_symbol(),
1573                        factory()->true_value(), NONE);
1574
1575  Throw(*exception);
1576
1577#ifdef VERIFY_HEAP
1578  if (FLAG_verify_heap && FLAG_stress_compaction) {
1579    heap()->CollectAllGarbage(Heap::kNoGCFlags,
1580                              GarbageCollectionReason::kTesting);
1581  }
1582#endif  // VERIFY_HEAP
1583
1584  return ReadOnlyRoots(heap()).exception();
1585}
1586
1587Object Isolate::ThrowAt(Handle<JSObject> exception, MessageLocation* location) {
1588  Handle<Name> key_start_pos = factory()->error_start_pos_symbol();
1589  Object::SetProperty(this, exception, key_start_pos,
1590                      handle(Smi::FromInt(location->start_pos()), this),
1591                      StoreOrigin::kMaybeKeyed,
1592                      Just(ShouldThrow::kThrowOnError))
1593      .Check();
1594
1595  Handle<Name> key_end_pos = factory()->error_end_pos_symbol();
1596  Object::SetProperty(this, exception, key_end_pos,
1597                      handle(Smi::FromInt(location->end_pos()), this),
1598                      StoreOrigin::kMaybeKeyed,
1599                      Just(ShouldThrow::kThrowOnError))
1600      .Check();
1601
1602  Handle<Name> key_script = factory()->error_script_symbol();
1603  Object::SetProperty(this, exception, key_script, location->script(),
1604                      StoreOrigin::kMaybeKeyed,
1605                      Just(ShouldThrow::kThrowOnError))
1606      .Check();
1607
1608  return ThrowInternal(*exception, location);
1609}
1610
1611Object Isolate::TerminateExecution() {
1612  return Throw(ReadOnlyRoots(this).termination_exception());
1613}
1614
1615void Isolate::CancelTerminateExecution() {
1616  if (try_catch_handler()) {
1617    try_catch_handler()->has_terminated_ = false;
1618  }
1619  if (has_pending_exception() &&
1620      pending_exception() == ReadOnlyRoots(this).termination_exception()) {
1621    thread_local_top()->external_caught_exception_ = false;
1622    clear_pending_exception();
1623  }
1624  if (has_scheduled_exception() &&
1625      scheduled_exception() == ReadOnlyRoots(this).termination_exception()) {
1626    thread_local_top()->external_caught_exception_ = false;
1627    clear_scheduled_exception();
1628  }
1629}
1630
1631void Isolate::RequestInterrupt(InterruptCallback callback, void* data) {
1632  ExecutionAccess access(this);
1633  api_interrupts_queue_.push(InterruptEntry(callback, data));
1634  stack_guard()->RequestApiInterrupt();
1635}
1636
1637void Isolate::InvokeApiInterruptCallbacks() {
1638  RCS_SCOPE(this, RuntimeCallCounterId::kInvokeApiInterruptCallbacks);
1639  // Note: callback below should be called outside of execution access lock.
1640  while (true) {
1641    InterruptEntry entry;
1642    {
1643      ExecutionAccess access(this);
1644      if (api_interrupts_queue_.empty()) return;
1645      entry = api_interrupts_queue_.front();
1646      api_interrupts_queue_.pop();
1647    }
1648    VMState<EXTERNAL> state(this);
1649    HandleScope handle_scope(this);
1650    entry.first(reinterpret_cast<v8::Isolate*>(this), entry.second);
1651  }
1652}
1653
1654namespace {
1655
1656void ReportBootstrappingException(Handle<Object> exception,
1657                                  MessageLocation* location) {
1658  base::OS::PrintError("Exception thrown during bootstrapping\n");
1659  if (location == nullptr || location->script().is_null()) return;
1660  // We are bootstrapping and caught an error where the location is set
1661  // and we have a script for the location.
1662  // In this case we could have an extension (or an internal error
1663  // somewhere) and we print out the line number at which the error occurred
1664  // to the console for easier debugging.
1665  int line_number =
1666      location->script()->GetLineNumber(location->start_pos()) + 1;
1667  if (exception->IsString() && location->script()->name().IsString()) {
1668    base::OS::PrintError(
1669        "Extension or internal compilation error: %s in %s at line %d.\n",
1670        String::cast(*exception).ToCString().get(),
1671        String::cast(location->script()->name()).ToCString().get(),
1672        line_number);
1673  } else if (location->script()->name().IsString()) {
1674    base::OS::PrintError(
1675        "Extension or internal compilation error in %s at line %d.\n",
1676        String::cast(location->script()->name()).ToCString().get(),
1677        line_number);
1678  } else if (exception->IsString()) {
1679    base::OS::PrintError("Extension or internal compilation error: %s.\n",
1680                         String::cast(*exception).ToCString().get());
1681  } else {
1682    base::OS::PrintError("Extension or internal compilation error.\n");
1683  }
1684#ifdef OBJECT_PRINT
1685  // Since comments and empty lines have been stripped from the source of
1686  // builtins, print the actual source here so that line numbers match.
1687  if (location->script()->source().IsString()) {
1688    Handle<String> src(String::cast(location->script()->source()),
1689                       location->script()->GetIsolate());
1690    PrintF("Failing script:");
1691    int len = src->length();
1692    if (len == 0) {
1693      PrintF(" <not available>\n");
1694    } else {
1695      PrintF("\n");
1696      line_number = 1;
1697      PrintF("%5d: ", line_number);
1698      for (int i = 0; i < len; i++) {
1699        uint16_t character = src->Get(i);
1700        PrintF("%c", character);
1701        if (character == '\n' && i < len - 2) {
1702          PrintF("%5d: ", ++line_number);
1703        }
1704      }
1705      PrintF("\n");
1706    }
1707  }
1708#endif
1709}
1710
1711}  // anonymous namespace
1712
1713Handle<JSMessageObject> Isolate::CreateMessageOrAbort(
1714    Handle<Object> exception, MessageLocation* location) {
1715  Handle<JSMessageObject> message_obj = CreateMessage(exception, location);
1716
1717  // If the abort-on-uncaught-exception flag is specified, and if the
1718  // embedder didn't specify a custom uncaught exception callback,
1719  // or if the custom callback determined that V8 should abort, then
1720  // abort.
1721  if (FLAG_abort_on_uncaught_exception) {
1722    CatchType prediction = PredictExceptionCatcher();
1723    if ((prediction == NOT_CAUGHT || prediction == CAUGHT_BY_EXTERNAL) &&
1724        (!abort_on_uncaught_exception_callback_ ||
1725         abort_on_uncaught_exception_callback_(
1726             reinterpret_cast<v8::Isolate*>(this)))) {
1727      // Prevent endless recursion.
1728      FLAG_abort_on_uncaught_exception = false;
1729      // This flag is intended for use by JavaScript developers, so
1730      // print a user-friendly stack trace (not an internal one).
1731      PrintF(stderr, "%s\n\nFROM\n",
1732             MessageHandler::GetLocalizedMessage(this, message_obj).get());
1733      std::ostringstream stack_trace_stream;
1734      PrintCurrentStackTrace(stack_trace_stream);
1735      PrintF(stderr, "%s", stack_trace_stream.str().c_str());
1736      base::OS::Abort();
1737    }
1738  }
1739
1740  return message_obj;
1741}
1742
1743Object Isolate::ThrowInternal(Object raw_exception, MessageLocation* location) {
1744  DCHECK(!has_pending_exception());
1745  IF_WASM(DCHECK_IMPLIES, trap_handler::IsTrapHandlerEnabled(),
1746          !trap_handler::IsThreadInWasm());
1747
1748  HandleScope scope(this);
1749  Handle<Object> exception(raw_exception, this);
1750
1751  if (FLAG_print_all_exceptions) {
1752    PrintF("=========================================================\n");
1753    PrintF("Exception thrown:\n");
1754    if (location) {
1755      Handle<Script> script = location->script();
1756      Handle<Object> name(script->GetNameOrSourceURL(), this);
1757      PrintF("at ");
1758      if (name->IsString() && String::cast(*name).length() > 0)
1759        String::cast(*name).PrintOn(stdout);
1760      else
1761        PrintF("<anonymous>");
1762// Script::GetLineNumber and Script::GetColumnNumber can allocate on the heap to
1763// initialize the line_ends array, so be careful when calling them.
1764#ifdef DEBUG
1765      if (AllowGarbageCollection::IsAllowed()) {
1766#else
1767      if ((false)) {
1768#endif
1769        PrintF(", %d:%d - %d:%d\n",
1770               Script::GetLineNumber(script, location->start_pos()) + 1,
1771               Script::GetColumnNumber(script, location->start_pos()),
1772               Script::GetLineNumber(script, location->end_pos()) + 1,
1773               Script::GetColumnNumber(script, location->end_pos()));
1774        // Make sure to update the raw exception pointer in case it moved.
1775        raw_exception = *exception;
1776      } else {
1777        PrintF(", line %d\n", script->GetLineNumber(location->start_pos()) + 1);
1778      }
1779    }
1780    raw_exception.Print();
1781    PrintF("Stack Trace:\n");
1782    PrintStack(stdout);
1783    PrintF("=========================================================\n");
1784  }
1785
1786  // Determine whether a message needs to be created for the given exception
1787  // depending on the following criteria:
1788  // 1) External v8::TryCatch missing: Always create a message because any
1789  //    JavaScript handler for a finally-block might re-throw to top-level.
1790  // 2) External v8::TryCatch exists: Only create a message if the handler
1791  //    captures messages or is verbose (which reports despite the catch).
1792  // 3) ReThrow from v8::TryCatch: The message from a previous throw still
1793  //    exists and we preserve it instead of creating a new message.
1794  bool requires_message = try_catch_handler() == nullptr ||
1795                          try_catch_handler()->is_verbose_ ||
1796                          try_catch_handler()->capture_message_;
1797  bool rethrowing_message = thread_local_top()->rethrowing_message_;
1798
1799  thread_local_top()->rethrowing_message_ = false;
1800
1801  // Notify debugger of exception.
1802  if (is_catchable_by_javascript(raw_exception)) {
1803    base::Optional<Object> maybe_exception = debug()->OnThrow(exception);
1804    if (maybe_exception.has_value()) {
1805      return *maybe_exception;
1806    }
1807  }
1808
1809  // Generate the message if required.
1810  if (requires_message && !rethrowing_message) {
1811    MessageLocation computed_location;
1812    // If no location was specified we try to use a computed one instead.
1813    if (location == nullptr && ComputeLocation(&computed_location)) {
1814      location = &computed_location;
1815    }
1816    if (bootstrapper()->IsActive()) {
1817      // It's not safe to try to make message objects or collect stack traces
1818      // while the bootstrapper is active since the infrastructure may not have
1819      // been properly initialized.
1820      ReportBootstrappingException(exception, location);
1821    } else {
1822      Handle<Object> message_obj = CreateMessageOrAbort(exception, location);
1823      set_pending_message(*message_obj);
1824    }
1825  }
1826
1827  // Set the exception being thrown.
1828  set_pending_exception(*exception);
1829  return ReadOnlyRoots(heap()).exception();
1830}
1831
1832Object Isolate::ReThrow(Object exception) {
1833  DCHECK(!has_pending_exception());
1834
1835  // Set the exception being re-thrown.
1836  set_pending_exception(exception);
1837  return ReadOnlyRoots(heap()).exception();
1838}
1839
1840Object Isolate::ReThrow(Object exception, Object message) {
1841  DCHECK(!has_pending_exception());
1842  DCHECK(!has_pending_message());
1843
1844  set_pending_message(message);
1845  return ReThrow(exception);
1846}
1847
1848namespace {
1849#if V8_ENABLE_WEBASSEMBLY
1850// This scope will set the thread-in-wasm flag after the execution of all
1851// destructors. The thread-in-wasm flag is only set when the scope gets enabled.
1852class SetThreadInWasmFlagScope {
1853 public:
1854  SetThreadInWasmFlagScope() {
1855    DCHECK_IMPLIES(trap_handler::IsTrapHandlerEnabled(),
1856                   !trap_handler::IsThreadInWasm());
1857  }
1858
1859  ~SetThreadInWasmFlagScope() {
1860    if (enabled_) trap_handler::SetThreadInWasm();
1861  }
1862
1863  void Enable() { enabled_ = true; }
1864
1865 private:
1866  bool enabled_ = false;
1867};
1868#endif  // V8_ENABLE_WEBASSEMBLY
1869}  // namespace
1870
1871Object Isolate::UnwindAndFindHandler() {
1872  // TODO(v8:12676): Fix gcmole failures in this function.
1873  DisableGCMole no_gcmole;
1874#if V8_ENABLE_WEBASSEMBLY
1875  // Create the {SetThreadInWasmFlagScope} first in this function so that its
1876  // destructor gets called after all the other destructors. It is important
1877  // that the destructor sets the thread-in-wasm flag after all other
1878  // destructors. The other destructors may cause exceptions, e.g. ASan on
1879  // Windows, which would invalidate the thread-in-wasm flag when the wasm trap
1880  // handler handles such non-wasm exceptions.
1881  SetThreadInWasmFlagScope set_thread_in_wasm_flag_scope;
1882#endif  // V8_ENABLE_WEBASSEMBLY
1883  Object exception = pending_exception();
1884
1885  auto FoundHandler = [&](Context context, Address instruction_start,
1886                          intptr_t handler_offset,
1887                          Address constant_pool_address, Address handler_sp,
1888                          Address handler_fp, int num_frames_above_handler) {
1889    // Store information to be consumed by the CEntry.
1890    thread_local_top()->pending_handler_context_ = context;
1891    thread_local_top()->pending_handler_entrypoint_ =
1892        instruction_start + handler_offset;
1893    thread_local_top()->pending_handler_constant_pool_ = constant_pool_address;
1894    thread_local_top()->pending_handler_fp_ = handler_fp;
1895    thread_local_top()->pending_handler_sp_ = handler_sp;
1896    thread_local_top()->num_frames_above_pending_handler_ =
1897        num_frames_above_handler;
1898
1899    // Return and clear pending exception. The contract is that:
1900    // (1) the pending exception is stored in one place (no duplication), and
1901    // (2) within generated-code land, that one place is the return register.
1902    // If/when we unwind back into C++ (returning to the JSEntry stub,
1903    // or to Execution::CallWasm), the returned exception will be sent
1904    // back to isolate->set_pending_exception(...).
1905    clear_pending_exception();
1906    return exception;
1907  };
1908
1909  // Special handling of termination exceptions, uncatchable by JavaScript and
1910  // Wasm code, we unwind the handlers until the top ENTRY handler is found.
1911  bool catchable_by_js = is_catchable_by_javascript(exception);
1912  if (!catchable_by_js && !context().is_null()) {
1913    // Because the array join stack will not pop the elements when throwing the
1914    // uncatchable terminate exception, we need to clear the array join stack to
1915    // avoid leaving the stack in an invalid state.
1916    // See also CycleProtectedArrayJoin.
1917    raw_native_context().set_array_join_stack(
1918        ReadOnlyRoots(this).undefined_value());
1919  }
1920
1921  int visited_frames = 0;
1922
1923  // Compute handler and stack unwinding information by performing a full walk
1924  // over the stack and dispatching according to the frame type.
1925  for (StackFrameIterator iter(this);; iter.Advance(), visited_frames++) {
1926    // Handler must exist.
1927    DCHECK(!iter.done());
1928
1929    StackFrame* frame = iter.frame();
1930
1931    switch (frame->type()) {
1932      case StackFrame::ENTRY:
1933      case StackFrame::CONSTRUCT_ENTRY: {
1934        // For JSEntry frames we always have a handler.
1935        StackHandler* handler = frame->top_handler();
1936
1937        // Restore the next handler.
1938        thread_local_top()->handler_ = handler->next_address();
1939
1940        // Gather information from the handler.
1941        Code code = frame->LookupCode();
1942        HandlerTable table(code);
1943        return FoundHandler(Context(), code.InstructionStart(this, frame->pc()),
1944                            table.LookupReturn(0), code.constant_pool(),
1945                            handler->address() + StackHandlerConstants::kSize,
1946                            0, visited_frames);
1947      }
1948
1949#if V8_ENABLE_WEBASSEMBLY
1950      case StackFrame::C_WASM_ENTRY: {
1951        StackHandler* handler = frame->top_handler();
1952        thread_local_top()->handler_ = handler->next_address();
1953        Code code = frame->LookupCode();
1954        HandlerTable table(code);
1955        Address instruction_start = code.InstructionStart(this, frame->pc());
1956        int return_offset = static_cast<int>(frame->pc() - instruction_start);
1957        int handler_offset = table.LookupReturn(return_offset);
1958        DCHECK_NE(-1, handler_offset);
1959        // Compute the stack pointer from the frame pointer. This ensures that
1960        // argument slots on the stack are dropped as returning would.
1961        Address return_sp = frame->fp() +
1962                            StandardFrameConstants::kFixedFrameSizeAboveFp -
1963                            code.stack_slots() * kSystemPointerSize;
1964        return FoundHandler(Context(), instruction_start, handler_offset,
1965                            code.constant_pool(), return_sp, frame->fp(),
1966                            visited_frames);
1967      }
1968
1969      case StackFrame::WASM: {
1970        if (!is_catchable_by_wasm(exception)) break;
1971
1972        // For WebAssembly frames we perform a lookup in the handler table.
1973        // This code ref scope is here to avoid a check failure when looking up
1974        // the code. It's not actually necessary to keep the code alive as it's
1975        // currently being executed.
1976        wasm::WasmCodeRefScope code_ref_scope;
1977        WasmFrame* wasm_frame = static_cast<WasmFrame*>(frame);
1978        wasm::WasmCode* wasm_code =
1979            wasm::GetWasmCodeManager()->LookupCode(frame->pc());
1980        int offset = wasm_frame->LookupExceptionHandlerInTable();
1981        if (offset < 0) break;
1982        wasm::GetWasmEngine()->SampleCatchEvent(this);
1983        // Compute the stack pointer from the frame pointer. This ensures that
1984        // argument slots on the stack are dropped as returning would.
1985        Address return_sp = frame->fp() +
1986                            StandardFrameConstants::kFixedFrameSizeAboveFp -
1987                            wasm_code->stack_slots() * kSystemPointerSize;
1988
1989        // This is going to be handled by WebAssembly, so we need to set the TLS
1990        // flag. The {SetThreadInWasmFlagScope} will set the flag after all
1991        // destructors have been executed.
1992        set_thread_in_wasm_flag_scope.Enable();
1993        return FoundHandler(Context(), wasm_code->instruction_start(), offset,
1994                            wasm_code->constant_pool(), return_sp, frame->fp(),
1995                            visited_frames);
1996      }
1997
1998      case StackFrame::WASM_COMPILE_LAZY: {
1999        // Can only fail directly on invocation. This happens if an invalid
2000        // function was validated lazily.
2001        DCHECK(FLAG_wasm_lazy_validation);
2002        break;
2003      }
2004#endif  // V8_ENABLE_WEBASSEMBLY
2005
2006      case StackFrame::OPTIMIZED: {
2007        // For optimized frames we perform a lookup in the handler table.
2008        if (!catchable_by_js) break;
2009        OptimizedFrame* js_frame = static_cast<OptimizedFrame*>(frame);
2010        Code code = frame->LookupCode();
2011        int offset = js_frame->LookupExceptionHandlerInTable(nullptr, nullptr);
2012        if (offset < 0) break;
2013        // Compute the stack pointer from the frame pointer. This ensures
2014        // that argument slots on the stack are dropped as returning would.
2015        Address return_sp = frame->fp() +
2016                            StandardFrameConstants::kFixedFrameSizeAboveFp -
2017                            code.stack_slots() * kSystemPointerSize;
2018
2019        // TODO(bmeurer): Turbofanned BUILTIN frames appear as OPTIMIZED,
2020        // but do not have a code kind of TURBOFAN.
2021        if (CodeKindCanDeoptimize(code.kind()) &&
2022            code.marked_for_deoptimization()) {
2023          // If the target code is lazy deoptimized, we jump to the original
2024          // return address, but we make a note that we are throwing, so
2025          // that the deoptimizer can do the right thing.
2026          offset = static_cast<int>(frame->pc() - code.entry());
2027          set_deoptimizer_lazy_throw(true);
2028        }
2029
2030        return FoundHandler(Context(), code.InstructionStart(this, frame->pc()),
2031                            offset, code.constant_pool(), return_sp,
2032                            frame->fp(), visited_frames);
2033      }
2034
2035      case StackFrame::STUB: {
2036        // Some stubs are able to handle exceptions.
2037        if (!catchable_by_js) break;
2038        StubFrame* stub_frame = static_cast<StubFrame*>(frame);
2039#if defined(DEBUG) && V8_ENABLE_WEBASSEMBLY
2040        wasm::WasmCodeRefScope code_ref_scope;
2041        DCHECK_NULL(wasm::GetWasmCodeManager()->LookupCode(frame->pc()));
2042#endif  // defined(DEBUG) && V8_ENABLE_WEBASSEMBLY
2043        Code code = stub_frame->LookupCode();
2044        if (!code.IsCode() || code.kind() != CodeKind::BUILTIN ||
2045            !code.has_handler_table() || !code.is_turbofanned()) {
2046          break;
2047        }
2048
2049        int offset = stub_frame->LookupExceptionHandlerInTable();
2050        if (offset < 0) break;
2051
2052        // Compute the stack pointer from the frame pointer. This ensures
2053        // that argument slots on the stack are dropped as returning would.
2054        Address return_sp = frame->fp() +
2055                            StandardFrameConstants::kFixedFrameSizeAboveFp -
2056                            code.stack_slots() * kSystemPointerSize;
2057
2058        return FoundHandler(Context(), code.InstructionStart(this, frame->pc()),
2059                            offset, code.constant_pool(), return_sp,
2060                            frame->fp(), visited_frames);
2061      }
2062
2063      case StackFrame::INTERPRETED:
2064      case StackFrame::BASELINE: {
2065        // For interpreted frame we perform a range lookup in the handler table.
2066        if (!catchable_by_js) break;
2067        UnoptimizedFrame* js_frame = UnoptimizedFrame::cast(frame);
2068        int register_slots = UnoptimizedFrameConstants::RegisterStackSlotCount(
2069            js_frame->GetBytecodeArray().register_count());
2070        int context_reg = 0;  // Will contain register index holding context.
2071        int offset =
2072            js_frame->LookupExceptionHandlerInTable(&context_reg, nullptr);
2073        if (offset < 0) break;
2074        // Compute the stack pointer from the frame pointer. This ensures that
2075        // argument slots on the stack are dropped as returning would.
2076        // Note: This is only needed for interpreted frames that have been
2077        //       materialized by the deoptimizer. If there is a handler frame
2078        //       in between then {frame->sp()} would already be correct.
2079        Address return_sp = frame->fp() -
2080                            InterpreterFrameConstants::kFixedFrameSizeFromFp -
2081                            register_slots * kSystemPointerSize;
2082
2083        // Patch the bytecode offset in the interpreted frame to reflect the
2084        // position of the exception handler. The special builtin below will
2085        // take care of continuing to dispatch at that position. Also restore
2086        // the correct context for the handler from the interpreter register.
2087        Context context =
2088            Context::cast(js_frame->ReadInterpreterRegister(context_reg));
2089        DCHECK(context.IsContext());
2090
2091        if (frame->is_baseline()) {
2092          BaselineFrame* sp_frame = BaselineFrame::cast(js_frame);
2093          Code code = sp_frame->LookupCode();
2094          intptr_t pc_offset = sp_frame->GetPCForBytecodeOffset(offset);
2095          // Patch the context register directly on the frame, so that we don't
2096          // need to have a context read + write in the baseline code.
2097          sp_frame->PatchContext(context);
2098          return FoundHandler(
2099              Context(), code.InstructionStart(this, sp_frame->sp()), pc_offset,
2100              code.constant_pool(), return_sp, sp_frame->fp(), visited_frames);
2101        } else {
2102          InterpretedFrame::cast(js_frame)->PatchBytecodeOffset(
2103              static_cast<int>(offset));
2104
2105          Code code =
2106              FromCodeT(builtins()->code(Builtin::kInterpreterEnterAtBytecode));
2107          // We subtract a frame from visited_frames because otherwise the
2108          // shadow stack will drop the underlying interpreter entry trampoline
2109          // in which the handler runs.
2110          //
2111          // An interpreted frame cannot be the first frame we look at
2112          // because at a minimum, an exit frame into C++ has to separate
2113          // it and the context in which this C++ code runs.
2114          CHECK_GE(visited_frames, 1);
2115          return FoundHandler(context, code.InstructionStart(), 0,
2116                              code.constant_pool(), return_sp, frame->fp(),
2117                              visited_frames - 1);
2118        }
2119      }
2120
2121      case StackFrame::BUILTIN:
2122        // For builtin frames we are guaranteed not to find a handler.
2123        if (catchable_by_js) {
2124          CHECK_EQ(-1, BuiltinFrame::cast(frame)->LookupExceptionHandlerInTable(
2125                           nullptr, nullptr));
2126        }
2127        break;
2128
2129      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: {
2130        // Builtin continuation frames with catch can handle exceptions.
2131        if (!catchable_by_js) break;
2132        JavaScriptBuiltinContinuationWithCatchFrame* js_frame =
2133            JavaScriptBuiltinContinuationWithCatchFrame::cast(frame);
2134        js_frame->SetException(exception);
2135
2136        // Reconstruct the stack pointer from the frame pointer.
2137        Address return_sp = js_frame->fp() - js_frame->GetSPToFPDelta();
2138        Code code = js_frame->LookupCode();
2139        return FoundHandler(Context(), code.InstructionStart(), 0,
2140                            code.constant_pool(), return_sp, frame->fp(),
2141                            visited_frames);
2142      }
2143
2144      default:
2145        // All other types can not handle exception.
2146        break;
2147    }
2148
2149    if (frame->is_optimized()) {
2150      // Remove per-frame stored materialized objects.
2151      bool removed = materialized_object_store_->Remove(frame->fp());
2152      USE(removed);
2153      // If there were any materialized objects, the code should be
2154      // marked for deopt.
2155      DCHECK_IMPLIES(removed, frame->LookupCode().marked_for_deoptimization());
2156    }
2157  }
2158
2159  UNREACHABLE();
2160}
2161
2162namespace {
2163HandlerTable::CatchPrediction PredictException(JavaScriptFrame* frame) {
2164  HandlerTable::CatchPrediction prediction;
2165  if (frame->is_optimized()) {
2166    if (frame->LookupExceptionHandlerInTable(nullptr, nullptr) > 0) {
2167      // This optimized frame will catch. It's handler table does not include
2168      // exception prediction, and we need to use the corresponding handler
2169      // tables on the unoptimized code objects.
2170      std::vector<FrameSummary> summaries;
2171      frame->Summarize(&summaries);
2172      for (size_t i = summaries.size(); i != 0; i--) {
2173        const FrameSummary& summary = summaries[i - 1];
2174        Handle<AbstractCode> code = summary.AsJavaScript().abstract_code();
2175        if (code->IsCode() && code->kind() == CodeKind::BUILTIN) {
2176          prediction = code->GetCode().GetBuiltinCatchPrediction();
2177          if (prediction == HandlerTable::UNCAUGHT) continue;
2178          return prediction;
2179        }
2180
2181        // Must have been constructed from a bytecode array.
2182        CHECK_EQ(CodeKind::INTERPRETED_FUNCTION, code->kind());
2183        int code_offset = summary.code_offset();
2184        HandlerTable table(code->GetBytecodeArray());
2185        int index = table.LookupRange(code_offset, nullptr, &prediction);
2186        if (index <= 0) continue;
2187        if (prediction == HandlerTable::UNCAUGHT) continue;
2188        return prediction;
2189      }
2190    }
2191  } else if (frame->LookupExceptionHandlerInTable(nullptr, &prediction) > 0) {
2192    return prediction;
2193  }
2194  return HandlerTable::UNCAUGHT;
2195}
2196
2197Isolate::CatchType ToCatchType(HandlerTable::CatchPrediction prediction) {
2198  switch (prediction) {
2199    case HandlerTable::UNCAUGHT:
2200      return Isolate::NOT_CAUGHT;
2201    case HandlerTable::CAUGHT:
2202      return Isolate::CAUGHT_BY_JAVASCRIPT;
2203    case HandlerTable::PROMISE:
2204      return Isolate::CAUGHT_BY_PROMISE;
2205    case HandlerTable::UNCAUGHT_ASYNC_AWAIT:
2206    case HandlerTable::ASYNC_AWAIT:
2207      return Isolate::CAUGHT_BY_ASYNC_AWAIT;
2208    default:
2209      UNREACHABLE();
2210  }
2211}
2212}  // anonymous namespace
2213
2214Isolate::CatchType Isolate::PredictExceptionCatcher() {
2215  Address external_handler = thread_local_top()->try_catch_handler_address();
2216  if (TopExceptionHandlerType(Object()) ==
2217      ExceptionHandlerType::kExternalTryCatch) {
2218    return CAUGHT_BY_EXTERNAL;
2219  }
2220
2221  // Search for an exception handler by performing a full walk over the stack.
2222  for (StackFrameIterator iter(this); !iter.done(); iter.Advance()) {
2223    StackFrame* frame = iter.frame();
2224
2225    switch (frame->type()) {
2226      case StackFrame::ENTRY:
2227      case StackFrame::CONSTRUCT_ENTRY: {
2228        Address entry_handler = frame->top_handler()->next_address();
2229        // The exception has been externally caught if and only if there is an
2230        // external handler which is on top of the top-most JS_ENTRY handler.
2231        if (external_handler != kNullAddress &&
2232            !try_catch_handler()->is_verbose_) {
2233          if (entry_handler == kNullAddress ||
2234              entry_handler > external_handler) {
2235            return CAUGHT_BY_EXTERNAL;
2236          }
2237        }
2238      } break;
2239
2240      // For JavaScript frames we perform a lookup in the handler table.
2241      case StackFrame::OPTIMIZED:
2242      case StackFrame::INTERPRETED:
2243      case StackFrame::BASELINE:
2244      case StackFrame::BUILTIN: {
2245        JavaScriptFrame* js_frame = JavaScriptFrame::cast(frame);
2246        Isolate::CatchType prediction = ToCatchType(PredictException(js_frame));
2247        if (prediction == NOT_CAUGHT) break;
2248        return prediction;
2249      }
2250
2251      case StackFrame::STUB: {
2252        Handle<Code> code(frame->LookupCode(), this);
2253        if (!code->IsCode() || code->kind() != CodeKind::BUILTIN ||
2254            !code->has_handler_table() || !code->is_turbofanned()) {
2255          break;
2256        }
2257
2258        CatchType prediction = ToCatchType(code->GetBuiltinCatchPrediction());
2259        if (prediction != NOT_CAUGHT) return prediction;
2260      } break;
2261
2262      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: {
2263        Handle<Code> code(frame->LookupCode(), this);
2264        CatchType prediction = ToCatchType(code->GetBuiltinCatchPrediction());
2265        if (prediction != NOT_CAUGHT) return prediction;
2266      } break;
2267
2268      default:
2269        // All other types can not handle exception.
2270        break;
2271    }
2272  }
2273
2274  // Handler not found.
2275  return NOT_CAUGHT;
2276}
2277
2278Object Isolate::ThrowIllegalOperation() {
2279  if (FLAG_stack_trace_on_illegal) PrintStack(stdout);
2280  return Throw(ReadOnlyRoots(heap()).illegal_access_string());
2281}
2282
2283void Isolate::ScheduleThrow(Object exception) {
2284  // When scheduling a throw we first throw the exception to get the
2285  // error reporting if it is uncaught before rescheduling it.
2286  Throw(exception);
2287  PropagatePendingExceptionToExternalTryCatch(
2288      TopExceptionHandlerType(pending_exception()));
2289  if (has_pending_exception()) {
2290    set_scheduled_exception(pending_exception());
2291    thread_local_top()->external_caught_exception_ = false;
2292    clear_pending_exception();
2293  }
2294}
2295
2296void Isolate::RestorePendingMessageFromTryCatch(v8::TryCatch* handler) {
2297  DCHECK(handler == try_catch_handler());
2298  DCHECK(handler->HasCaught());
2299  DCHECK(handler->rethrow_);
2300  DCHECK(handler->capture_message_);
2301  Object message(reinterpret_cast<Address>(handler->message_obj_));
2302  DCHECK(message.IsJSMessageObject() || message.IsTheHole(this));
2303  set_pending_message(message);
2304}
2305
2306void Isolate::CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler) {
2307  DCHECK(has_scheduled_exception());
2308  if (reinterpret_cast<void*>(scheduled_exception().ptr()) ==
2309      handler->exception_) {
2310    DCHECK_NE(scheduled_exception(),
2311              ReadOnlyRoots(heap()).termination_exception());
2312    clear_scheduled_exception();
2313  } else {
2314    DCHECK_EQ(scheduled_exception(),
2315              ReadOnlyRoots(heap()).termination_exception());
2316    // Clear termination once we returned from all V8 frames.
2317    if (thread_local_top()->CallDepthIsZero()) {
2318      thread_local_top()->external_caught_exception_ = false;
2319      clear_scheduled_exception();
2320    }
2321  }
2322  if (reinterpret_cast<void*>(thread_local_top()->pending_message_.ptr()) ==
2323      handler->message_obj_) {
2324    clear_pending_message();
2325  }
2326}
2327
2328Object Isolate::PromoteScheduledException() {
2329  Object thrown = scheduled_exception();
2330  clear_scheduled_exception();
2331  // Re-throw the exception to avoid getting repeated error reporting.
2332  return ReThrow(thrown);
2333}
2334
2335void Isolate::PrintCurrentStackTrace(std::ostream& out) {
2336  Handle<FixedArray> frames = CaptureSimpleStackTrace(
2337      this, FixedArray::kMaxLength, SKIP_NONE, factory()->undefined_value());
2338
2339  IncrementalStringBuilder builder(this);
2340  for (int i = 0; i < frames->length(); ++i) {
2341    Handle<CallSiteInfo> frame(CallSiteInfo::cast(frames->get(i)), this);
2342    SerializeCallSiteInfo(this, frame, &builder);
2343  }
2344
2345  Handle<String> stack_trace = builder.Finish().ToHandleChecked();
2346  stack_trace->PrintOn(out);
2347}
2348
2349bool Isolate::ComputeLocation(MessageLocation* target) {
2350  StackTraceFrameIterator it(this);
2351  if (it.done()) return false;
2352  // Compute the location from the function and the relocation info of the
2353  // baseline code. For optimized code this will use the deoptimization
2354  // information to get canonical location information.
2355#if V8_ENABLE_WEBASSEMBLY
2356  wasm::WasmCodeRefScope code_ref_scope;
2357#endif  // V8_ENABLE_WEBASSEMBLY
2358  FrameSummary summary = it.GetTopValidFrame();
2359  Handle<SharedFunctionInfo> shared;
2360  Handle<Object> script = summary.script();
2361  if (!script->IsScript() || Script::cast(*script).source().IsUndefined(this)) {
2362    return false;
2363  }
2364
2365  if (summary.IsJavaScript()) {
2366    shared = handle(summary.AsJavaScript().function()->shared(), this);
2367  }
2368  if (summary.AreSourcePositionsAvailable()) {
2369    int pos = summary.SourcePosition();
2370    *target =
2371        MessageLocation(Handle<Script>::cast(script), pos, pos + 1, shared);
2372  } else {
2373    *target = MessageLocation(Handle<Script>::cast(script), shared,
2374                              summary.code_offset());
2375  }
2376  return true;
2377}
2378
2379bool Isolate::ComputeLocationFromException(MessageLocation* target,
2380                                           Handle<Object> exception) {
2381  if (!exception->IsJSObject()) return false;
2382
2383  Handle<Name> start_pos_symbol = factory()->error_start_pos_symbol();
2384  Handle<Object> start_pos = JSReceiver::GetDataProperty(
2385      this, Handle<JSObject>::cast(exception), start_pos_symbol);
2386  if (!start_pos->IsSmi()) return false;
2387  int start_pos_value = Handle<Smi>::cast(start_pos)->value();
2388
2389  Handle<Name> end_pos_symbol = factory()->error_end_pos_symbol();
2390  Handle<Object> end_pos = JSReceiver::GetDataProperty(
2391      this, Handle<JSObject>::cast(exception), end_pos_symbol);
2392  if (!end_pos->IsSmi()) return false;
2393  int end_pos_value = Handle<Smi>::cast(end_pos)->value();
2394
2395  Handle<Name> script_symbol = factory()->error_script_symbol();
2396  Handle<Object> script = JSReceiver::GetDataProperty(
2397      this, Handle<JSObject>::cast(exception), script_symbol);
2398  if (!script->IsScript()) return false;
2399
2400  Handle<Script> cast_script(Script::cast(*script), this);
2401  *target = MessageLocation(cast_script, start_pos_value, end_pos_value);
2402  return true;
2403}
2404
2405bool Isolate::ComputeLocationFromSimpleStackTrace(MessageLocation* target,
2406                                                  Handle<Object> exception) {
2407  if (!exception->IsJSReceiver()) {
2408    return false;
2409  }
2410  Handle<FixedArray> call_site_infos =
2411      GetSimpleStackTrace(Handle<JSReceiver>::cast(exception));
2412  for (int i = 0; i < call_site_infos->length(); ++i) {
2413    Handle<CallSiteInfo> call_site_info(
2414        CallSiteInfo::cast(call_site_infos->get(i)), this);
2415    if (CallSiteInfo::ComputeLocation(call_site_info, target)) {
2416      return true;
2417    }
2418  }
2419  return false;
2420}
2421
2422bool Isolate::ComputeLocationFromDetailedStackTrace(MessageLocation* target,
2423                                                    Handle<Object> exception) {
2424  if (!exception->IsJSReceiver()) return false;
2425
2426  Handle<FixedArray> stack_frame_infos =
2427      GetDetailedStackTrace(Handle<JSReceiver>::cast(exception));
2428  if (stack_frame_infos.is_null() || stack_frame_infos->length() == 0) {
2429    return false;
2430  }
2431
2432  Handle<StackFrameInfo> info(StackFrameInfo::cast(stack_frame_infos->get(0)),
2433                              this);
2434  const int pos = StackFrameInfo::GetSourcePosition(info);
2435  *target = MessageLocation(handle(info->script(), this), pos, pos + 1);
2436  return true;
2437}
2438
2439Handle<JSMessageObject> Isolate::CreateMessage(Handle<Object> exception,
2440                                               MessageLocation* location) {
2441  Handle<FixedArray> stack_trace_object;
2442  if (capture_stack_trace_for_uncaught_exceptions_) {
2443    if (exception->IsJSError()) {
2444      // We fetch the stack trace that corresponds to this error object.
2445      // If the lookup fails, the exception is probably not a valid Error
2446      // object. In that case, we fall through and capture the stack trace
2447      // at this throw site.
2448      stack_trace_object =
2449          GetDetailedStackTrace(Handle<JSObject>::cast(exception));
2450    }
2451    if (stack_trace_object.is_null()) {
2452      // Not an error object, we capture stack and location at throw site.
2453      stack_trace_object = CaptureDetailedStackTrace(
2454          stack_trace_for_uncaught_exceptions_frame_limit_,
2455          stack_trace_for_uncaught_exceptions_options_);
2456    }
2457  }
2458  MessageLocation computed_location;
2459  if (location == nullptr &&
2460      (ComputeLocationFromException(&computed_location, exception) ||
2461       ComputeLocationFromSimpleStackTrace(&computed_location, exception) ||
2462       ComputeLocation(&computed_location))) {
2463    location = &computed_location;
2464  }
2465
2466  return MessageHandler::MakeMessageObject(
2467      this, MessageTemplate::kUncaughtException, location, exception,
2468      stack_trace_object);
2469}
2470
2471Handle<JSMessageObject> Isolate::CreateMessageFromException(
2472    Handle<Object> exception) {
2473  Handle<FixedArray> stack_trace_object;
2474  if (exception->IsJSError()) {
2475    stack_trace_object =
2476        GetDetailedStackTrace(Handle<JSObject>::cast(exception));
2477  }
2478
2479  MessageLocation* location = nullptr;
2480  MessageLocation computed_location;
2481  if (ComputeLocationFromException(&computed_location, exception) ||
2482      ComputeLocationFromDetailedStackTrace(&computed_location, exception)) {
2483    location = &computed_location;
2484  }
2485
2486  return MessageHandler::MakeMessageObject(
2487      this, MessageTemplate::kPlaceholderOnly, location, exception,
2488      stack_trace_object);
2489}
2490
2491Isolate::ExceptionHandlerType Isolate::TopExceptionHandlerType(
2492    Object exception) {
2493  DCHECK_NE(ReadOnlyRoots(heap()).the_hole_value(), exception);
2494
2495  Address js_handler = Isolate::handler(thread_local_top());
2496  Address external_handler = thread_local_top()->try_catch_handler_address();
2497
2498  // A handler cannot be on top if it doesn't exist. For uncatchable exceptions,
2499  // the JavaScript handler cannot be on top.
2500  if (js_handler == kNullAddress || !is_catchable_by_javascript(exception)) {
2501    if (external_handler == kNullAddress) {
2502      return ExceptionHandlerType::kNone;
2503    }
2504    return ExceptionHandlerType::kExternalTryCatch;
2505  }
2506
2507  if (external_handler == kNullAddress) {
2508    return ExceptionHandlerType::kJavaScriptHandler;
2509  }
2510
2511  // The exception has been externally caught if and only if there is an
2512  // external handler which is on top of the top-most JS_ENTRY handler.
2513  //
2514  // Note, that finally clauses would re-throw an exception unless it's aborted
2515  // by jumps in control flow (like return, break, etc.) and we'll have another
2516  // chance to set proper v8::TryCatch later.
2517  DCHECK_NE(kNullAddress, external_handler);
2518  DCHECK_NE(kNullAddress, js_handler);
2519  if (external_handler < js_handler) {
2520    return ExceptionHandlerType::kExternalTryCatch;
2521  }
2522  return ExceptionHandlerType::kJavaScriptHandler;
2523}
2524
2525std::vector<MemoryRange>* Isolate::GetCodePages() const {
2526  return code_pages_.load(std::memory_order_acquire);
2527}
2528
2529void Isolate::SetCodePages(std::vector<MemoryRange>* new_code_pages) {
2530  code_pages_.store(new_code_pages, std::memory_order_release);
2531}
2532
2533void Isolate::ReportPendingMessages() {
2534  DCHECK(AllowExceptions::IsAllowed(this));
2535
2536  // The embedder might run script in response to an exception.
2537  AllowJavascriptExecutionDebugOnly allow_script(this);
2538
2539  Object exception_obj = pending_exception();
2540  ExceptionHandlerType top_handler = TopExceptionHandlerType(exception_obj);
2541
2542  // Try to propagate the exception to an external v8::TryCatch handler. If
2543  // propagation was unsuccessful, then we will get another chance at reporting
2544  // the pending message if the exception is re-thrown.
2545  bool has_been_propagated =
2546      PropagatePendingExceptionToExternalTryCatch(top_handler);
2547  if (!has_been_propagated) return;
2548
2549  // Clear the pending message object early to avoid endless recursion.
2550  Object message_obj = pending_message();
2551  clear_pending_message();
2552
2553  // For uncatchable exceptions we do nothing. If needed, the exception and the
2554  // message have already been propagated to v8::TryCatch.
2555  if (!is_catchable_by_javascript(exception_obj)) return;
2556
2557  // Determine whether the message needs to be reported to all message handlers
2558  // depending on whether the topmost external v8::TryCatch is verbose. We know
2559  // there's no JavaScript handler on top; if there was, we would've returned
2560  // early.
2561  DCHECK_NE(ExceptionHandlerType::kJavaScriptHandler, top_handler);
2562
2563  bool should_report_exception;
2564  if (top_handler == ExceptionHandlerType::kExternalTryCatch) {
2565    should_report_exception = try_catch_handler()->is_verbose_;
2566  } else {
2567    should_report_exception = true;
2568  }
2569
2570  // Actually report the pending message to all message handlers.
2571  if (!message_obj.IsTheHole(this) && should_report_exception) {
2572    HandleScope scope(this);
2573    Handle<JSMessageObject> message(JSMessageObject::cast(message_obj), this);
2574    Handle<Object> exception(exception_obj, this);
2575    Handle<Script> script(message->script(), this);
2576    // Clear the exception and restore it afterwards, otherwise
2577    // CollectSourcePositions will abort.
2578    clear_pending_exception();
2579    JSMessageObject::EnsureSourcePositionsAvailable(this, message);
2580    set_pending_exception(*exception);
2581    int start_pos = message->GetStartPosition();
2582    int end_pos = message->GetEndPosition();
2583    MessageLocation location(script, start_pos, end_pos);
2584    MessageHandler::ReportMessage(this, &location, message);
2585  }
2586}
2587
2588bool Isolate::OptionalRescheduleException(bool clear_exception) {
2589  DCHECK(has_pending_exception());
2590  PropagatePendingExceptionToExternalTryCatch(
2591      TopExceptionHandlerType(pending_exception()));
2592
2593  bool is_termination_exception =
2594      pending_exception() == ReadOnlyRoots(this).termination_exception();
2595
2596  if (is_termination_exception) {
2597    if (clear_exception) {
2598      thread_local_top()->external_caught_exception_ = false;
2599      clear_pending_exception();
2600      return false;
2601    }
2602  } else if (thread_local_top()->external_caught_exception_) {
2603    // If the exception is externally caught, clear it if there are no
2604    // JavaScript frames on the way to the C++ frame that has the
2605    // external handler.
2606    DCHECK_NE(thread_local_top()->try_catch_handler_address(), kNullAddress);
2607    Address external_handler_address =
2608        thread_local_top()->try_catch_handler_address();
2609    JavaScriptFrameIterator it(this);
2610    if (it.done() || (it.frame()->sp() > external_handler_address)) {
2611      clear_exception = true;
2612    }
2613  }
2614
2615  // Clear the exception if needed.
2616  if (clear_exception) {
2617    thread_local_top()->external_caught_exception_ = false;
2618    clear_pending_exception();
2619    return false;
2620  }
2621
2622  // Reschedule the exception.
2623  set_scheduled_exception(pending_exception());
2624  clear_pending_exception();
2625  return true;
2626}
2627
2628void Isolate::PushPromise(Handle<JSObject> promise) {
2629  Handle<Object> promise_on_stack(debug()->thread_local_.promise_stack_, this);
2630  promise_on_stack = factory()->NewPromiseOnStack(promise_on_stack, promise);
2631  debug()->thread_local_.promise_stack_ = *promise_on_stack;
2632}
2633
2634void Isolate::PopPromise() {
2635  if (!IsPromiseStackEmpty()) {
2636    debug()->thread_local_.promise_stack_ =
2637        PromiseOnStack::cast(debug()->thread_local_.promise_stack_).prev();
2638  }
2639}
2640
2641bool Isolate::IsPromiseStackEmpty() const {
2642  DCHECK_IMPLIES(!debug()->thread_local_.promise_stack_.IsSmi(),
2643                 debug()->thread_local_.promise_stack_.IsPromiseOnStack());
2644  return debug()->thread_local_.promise_stack_.IsSmi();
2645}
2646
2647namespace {
2648bool PromiseIsRejectHandler(Isolate* isolate, Handle<JSReceiver> handler) {
2649  // Recurse to the forwarding Promise (e.g. return false) due to
2650  //  - await reaction forwarding to the throwaway Promise, which has
2651  //    a dependency edge to the outer Promise.
2652  //  - PromiseIdResolveHandler forwarding to the output of .then
2653  //  - Promise.all/Promise.race forwarding to a throwaway Promise, which
2654  //    has a dependency edge to the generated outer Promise.
2655  // Otherwise, this is a real reject handler for the Promise.
2656  Handle<Symbol> key = isolate->factory()->promise_forwarding_handler_symbol();
2657  Handle<Object> forwarding_handler =
2658      JSReceiver::GetDataProperty(isolate, handler, key);
2659  return forwarding_handler->IsUndefined(isolate);
2660}
2661
2662bool PromiseHasUserDefinedRejectHandlerInternal(Isolate* isolate,
2663                                                Handle<JSPromise> promise) {
2664  Handle<Object> current(promise->reactions(), isolate);
2665  while (!current->IsSmi()) {
2666    Handle<PromiseReaction> reaction = Handle<PromiseReaction>::cast(current);
2667    Handle<HeapObject> promise_or_capability(reaction->promise_or_capability(),
2668                                             isolate);
2669    if (!promise_or_capability->IsUndefined(isolate)) {
2670      if (!promise_or_capability->IsJSPromise()) {
2671        promise_or_capability = handle(
2672            Handle<PromiseCapability>::cast(promise_or_capability)->promise(),
2673            isolate);
2674      }
2675      promise = Handle<JSPromise>::cast(promise_or_capability);
2676      if (!reaction->reject_handler().IsUndefined(isolate)) {
2677        Handle<JSReceiver> reject_handler(
2678            JSReceiver::cast(reaction->reject_handler()), isolate);
2679        if (PromiseIsRejectHandler(isolate, reject_handler)) return true;
2680      }
2681      if (isolate->PromiseHasUserDefinedRejectHandler(promise)) return true;
2682    }
2683    current = handle(reaction->next(), isolate);
2684  }
2685  return false;
2686}
2687
2688}  // namespace
2689
2690bool Isolate::PromiseHasUserDefinedRejectHandler(Handle<JSPromise> promise) {
2691  Handle<Symbol> key = factory()->promise_handled_by_symbol();
2692  std::stack<Handle<JSPromise>> promises;
2693  // First descend into the outermost promise and collect the stack of
2694  // Promises for reverse processing.
2695  while (true) {
2696    // If this promise was marked as being handled by a catch block
2697    // in an async function, then it has a user-defined reject handler.
2698    if (promise->handled_hint()) return true;
2699    if (promise->status() == Promise::kPending) {
2700      promises.push(promise);
2701    }
2702    Handle<Object> outer_promise_obj =
2703        JSObject::GetDataProperty(this, promise, key);
2704    if (!outer_promise_obj->IsJSPromise()) break;
2705    promise = Handle<JSPromise>::cast(outer_promise_obj);
2706  }
2707
2708  while (!promises.empty()) {
2709    promise = promises.top();
2710    if (PromiseHasUserDefinedRejectHandlerInternal(this, promise)) return true;
2711    promises.pop();
2712  }
2713  return false;
2714}
2715
2716Handle<Object> Isolate::GetPromiseOnStackOnThrow() {
2717  Handle<Object> undefined = factory()->undefined_value();
2718  if (IsPromiseStackEmpty()) return undefined;
2719  // Find the top-most try-catch or try-finally handler.
2720  CatchType prediction = PredictExceptionCatcher();
2721  if (prediction == NOT_CAUGHT || prediction == CAUGHT_BY_EXTERNAL) {
2722    return undefined;
2723  }
2724  Handle<Object> retval = undefined;
2725  Handle<Object> promise_stack(debug()->thread_local_.promise_stack_, this);
2726  for (StackFrameIterator it(this); !it.done(); it.Advance()) {
2727    StackFrame* frame = it.frame();
2728    HandlerTable::CatchPrediction catch_prediction;
2729    if (frame->is_java_script()) {
2730      catch_prediction = PredictException(JavaScriptFrame::cast(frame));
2731    } else if (frame->type() == StackFrame::STUB) {
2732      Code code = frame->LookupCode();
2733      if (!code.IsCode() || code.kind() != CodeKind::BUILTIN ||
2734          !code.has_handler_table() || !code.is_turbofanned()) {
2735        continue;
2736      }
2737      catch_prediction = code.GetBuiltinCatchPrediction();
2738    } else {
2739      continue;
2740    }
2741
2742    switch (catch_prediction) {
2743      case HandlerTable::UNCAUGHT:
2744        continue;
2745      case HandlerTable::CAUGHT:
2746        if (retval->IsJSPromise()) {
2747          // Caught the result of an inner async/await invocation.
2748          // Mark the inner promise as caught in the "synchronous case" so
2749          // that Debug::OnException will see. In the synchronous case,
2750          // namely in the code in an async function before the first
2751          // await, the function which has this exception event has not yet
2752          // returned, so the generated Promise has not yet been marked
2753          // by AsyncFunctionAwaitCaught with promiseHandledHintSymbol.
2754          Handle<JSPromise>::cast(retval)->set_handled_hint(true);
2755        }
2756        return retval;
2757      case HandlerTable::PROMISE: {
2758        Handle<JSObject> promise;
2759        if (promise_stack->IsPromiseOnStack() &&
2760            PromiseOnStack::GetPromise(
2761                Handle<PromiseOnStack>::cast(promise_stack))
2762                .ToHandle(&promise)) {
2763          return promise;
2764        }
2765        return undefined;
2766      }
2767      case HandlerTable::UNCAUGHT_ASYNC_AWAIT:
2768      case HandlerTable::ASYNC_AWAIT: {
2769        // If in the initial portion of async/await, continue the loop to pop up
2770        // successive async/await stack frames until an asynchronous one with
2771        // dependents is found, or a non-async stack frame is encountered, in
2772        // order to handle the synchronous async/await catch prediction case:
2773        // assume that async function calls are awaited.
2774        if (!promise_stack->IsPromiseOnStack()) {
2775          return retval;
2776        }
2777        Handle<PromiseOnStack> promise_on_stack =
2778            Handle<PromiseOnStack>::cast(promise_stack);
2779        if (!PromiseOnStack::GetPromise(promise_on_stack).ToHandle(&retval)) {
2780          return retval;
2781        }
2782        if (retval->IsJSPromise()) {
2783          if (PromiseHasUserDefinedRejectHandler(
2784                  Handle<JSPromise>::cast(retval))) {
2785            return retval;
2786          }
2787        }
2788        promise_stack = handle(promise_on_stack->prev(), this);
2789        continue;
2790      }
2791    }
2792  }
2793  return retval;
2794}
2795
2796void Isolate::SetCaptureStackTraceForUncaughtExceptions(
2797    bool capture, int frame_limit, StackTrace::StackTraceOptions options) {
2798  capture_stack_trace_for_uncaught_exceptions_ = capture;
2799  stack_trace_for_uncaught_exceptions_frame_limit_ = frame_limit;
2800  stack_trace_for_uncaught_exceptions_options_ = options;
2801}
2802
2803bool Isolate::get_capture_stack_trace_for_uncaught_exceptions() const {
2804  return capture_stack_trace_for_uncaught_exceptions_;
2805}
2806
2807void Isolate::SetAbortOnUncaughtExceptionCallback(
2808    v8::Isolate::AbortOnUncaughtExceptionCallback callback) {
2809  abort_on_uncaught_exception_callback_ = callback;
2810}
2811
2812void Isolate::InstallConditionalFeatures(Handle<Context> context) {
2813  Handle<JSGlobalObject> global = handle(context->global_object(), this);
2814  Handle<String> sab_name = factory()->SharedArrayBuffer_string();
2815  if (IsSharedArrayBufferConstructorEnabled(context)) {
2816    if (!JSObject::HasRealNamedProperty(this, global, sab_name)
2817             .FromMaybe(true)) {
2818      JSObject::AddProperty(this, global, factory()->SharedArrayBuffer_string(),
2819                            shared_array_buffer_fun(), DONT_ENUM);
2820    }
2821  }
2822}
2823
2824bool Isolate::IsSharedArrayBufferConstructorEnabled(Handle<Context> context) {
2825  if (!FLAG_harmony_sharedarraybuffer) return false;
2826
2827  if (!FLAG_enable_sharedarraybuffer_per_context) return true;
2828
2829  if (sharedarraybuffer_constructor_enabled_callback()) {
2830    v8::Local<v8::Context> api_context = v8::Utils::ToLocal(context);
2831    return sharedarraybuffer_constructor_enabled_callback()(api_context);
2832  }
2833  return false;
2834}
2835
2836bool Isolate::IsWasmSimdEnabled(Handle<Context> context) {
2837#if V8_ENABLE_WEBASSEMBLY
2838  if (wasm_simd_enabled_callback()) {
2839    v8::Local<v8::Context> api_context = v8::Utils::ToLocal(context);
2840    return wasm_simd_enabled_callback()(api_context);
2841  }
2842  return FLAG_experimental_wasm_simd;
2843#else
2844  return false;
2845#endif  // V8_ENABLE_WEBASSEMBLY
2846}
2847
2848bool Isolate::AreWasmExceptionsEnabled(Handle<Context> context) {
2849#if V8_ENABLE_WEBASSEMBLY
2850  if (wasm_exceptions_enabled_callback()) {
2851    v8::Local<v8::Context> api_context = v8::Utils::ToLocal(context);
2852    return wasm_exceptions_enabled_callback()(api_context);
2853  }
2854  return FLAG_experimental_wasm_eh;
2855#else
2856  return false;
2857#endif  // V8_ENABLE_WEBASSEMBLY
2858}
2859
2860bool Isolate::IsWasmDynamicTieringEnabled() {
2861#if V8_ENABLE_WEBASSEMBLY
2862  if (FLAG_wasm_dynamic_tiering) return true;
2863  if (wasm_dynamic_tiering_enabled_callback()) {
2864    HandleScope handle_scope(this);
2865    v8::Local<v8::Context> api_context =
2866        v8::Utils::ToLocal(handle(context(), this));
2867    return wasm_dynamic_tiering_enabled_callback()(api_context);
2868  }
2869#endif  // V8_ENABLE_WEBASSEMBLY
2870  return false;
2871}
2872
2873Handle<Context> Isolate::GetIncumbentContext() {
2874  JavaScriptFrameIterator it(this);
2875
2876  // 1st candidate: most-recently-entered author function's context
2877  // if it's newer than the last Context::BackupIncumbentScope entry.
2878  //
2879  // NOTE: This code assumes that the stack grows downward.
2880  Address top_backup_incumbent =
2881      top_backup_incumbent_scope()
2882          ? top_backup_incumbent_scope()->JSStackComparableAddressPrivate()
2883          : 0;
2884  if (!it.done() &&
2885      (!top_backup_incumbent || it.frame()->sp() < top_backup_incumbent)) {
2886    Context context = Context::cast(it.frame()->context());
2887    return Handle<Context>(context.native_context(), this);
2888  }
2889
2890  // 2nd candidate: the last Context::Scope's incumbent context if any.
2891  if (top_backup_incumbent_scope()) {
2892    return Utils::OpenHandle(
2893        *top_backup_incumbent_scope()->backup_incumbent_context_);
2894  }
2895
2896  // Last candidate: the entered context or microtask context.
2897  // Given that there is no other author function is running, there must be
2898  // no cross-context function running, then the incumbent realm must match
2899  // the entry realm.
2900  v8::Local<v8::Context> entered_context =
2901      reinterpret_cast<v8::Isolate*>(this)->GetEnteredOrMicrotaskContext();
2902  return Utils::OpenHandle(*entered_context);
2903}
2904
2905char* Isolate::ArchiveThread(char* to) {
2906  MemCopy(to, reinterpret_cast<char*>(thread_local_top()),
2907          sizeof(ThreadLocalTop));
2908  return to + sizeof(ThreadLocalTop);
2909}
2910
2911char* Isolate::RestoreThread(char* from) {
2912  MemCopy(reinterpret_cast<char*>(thread_local_top()), from,
2913          sizeof(ThreadLocalTop));
2914  DCHECK(context().is_null() || context().IsContext());
2915  return from + sizeof(ThreadLocalTop);
2916}
2917
2918void Isolate::ReleaseSharedPtrs() {
2919  base::MutexGuard lock(&managed_ptr_destructors_mutex_);
2920  while (managed_ptr_destructors_head_) {
2921    ManagedPtrDestructor* l = managed_ptr_destructors_head_;
2922    ManagedPtrDestructor* n = nullptr;
2923    managed_ptr_destructors_head_ = nullptr;
2924    for (; l != nullptr; l = n) {
2925      l->destructor_(l->shared_ptr_ptr_);
2926      n = l->next_;
2927      delete l;
2928    }
2929  }
2930}
2931
2932bool Isolate::IsBuiltinTableHandleLocation(Address* handle_location) {
2933  FullObjectSlot location(handle_location);
2934  FullObjectSlot first_root(builtin_table());
2935  FullObjectSlot last_root(first_root + Builtins::kBuiltinCount);
2936  if (location >= last_root) return false;
2937  if (location < first_root) return false;
2938  return true;
2939}
2940
2941void Isolate::RegisterManagedPtrDestructor(ManagedPtrDestructor* destructor) {
2942  base::MutexGuard lock(&managed_ptr_destructors_mutex_);
2943  DCHECK_NULL(destructor->prev_);
2944  DCHECK_NULL(destructor->next_);
2945  if (managed_ptr_destructors_head_) {
2946    managed_ptr_destructors_head_->prev_ = destructor;
2947  }
2948  destructor->next_ = managed_ptr_destructors_head_;
2949  managed_ptr_destructors_head_ = destructor;
2950}
2951
2952void Isolate::UnregisterManagedPtrDestructor(ManagedPtrDestructor* destructor) {
2953  base::MutexGuard lock(&managed_ptr_destructors_mutex_);
2954  if (destructor->prev_) {
2955    destructor->prev_->next_ = destructor->next_;
2956  } else {
2957    DCHECK_EQ(destructor, managed_ptr_destructors_head_);
2958    managed_ptr_destructors_head_ = destructor->next_;
2959  }
2960  if (destructor->next_) destructor->next_->prev_ = destructor->prev_;
2961  destructor->prev_ = nullptr;
2962  destructor->next_ = nullptr;
2963}
2964
2965#if V8_ENABLE_WEBASSEMBLY
2966void Isolate::AddSharedWasmMemory(Handle<WasmMemoryObject> memory_object) {
2967  HandleScope scope(this);
2968  Handle<WeakArrayList> shared_wasm_memories =
2969      factory()->shared_wasm_memories();
2970  shared_wasm_memories = WeakArrayList::AddToEnd(
2971      this, shared_wasm_memories, MaybeObjectHandle::Weak(memory_object));
2972  heap()->set_shared_wasm_memories(*shared_wasm_memories);
2973}
2974#endif  // V8_ENABLE_WEBASSEMBLY
2975
2976Isolate::PerIsolateThreadData::~PerIsolateThreadData() {
2977#if defined(USE_SIMULATOR)
2978  delete simulator_;
2979#endif
2980}
2981
2982Isolate::PerIsolateThreadData* Isolate::ThreadDataTable::Lookup(
2983    ThreadId thread_id) {
2984  auto t = table_.find(thread_id);
2985  if (t == table_.end()) return nullptr;
2986  return t->second;
2987}
2988
2989void Isolate::ThreadDataTable::Insert(Isolate::PerIsolateThreadData* data) {
2990  bool inserted = table_.insert(std::make_pair(data->thread_id_, data)).second;
2991  CHECK(inserted);
2992}
2993
2994void Isolate::ThreadDataTable::Remove(PerIsolateThreadData* data) {
2995  table_.erase(data->thread_id_);
2996  delete data;
2997}
2998
2999void Isolate::ThreadDataTable::RemoveAllThreads() {
3000  for (auto& x : table_) {
3001    delete x.second;
3002  }
3003  table_.clear();
3004}
3005
3006class TracingAccountingAllocator : public AccountingAllocator {
3007 public:
3008  explicit TracingAccountingAllocator(Isolate* isolate) : isolate_(isolate) {}
3009  ~TracingAccountingAllocator() = default;
3010
3011 protected:
3012  void TraceAllocateSegmentImpl(v8::internal::Segment* segment) override {
3013    base::MutexGuard lock(&mutex_);
3014    UpdateMemoryTrafficAndReportMemoryUsage(segment->total_size());
3015  }
3016
3017  void TraceZoneCreationImpl(const Zone* zone) override {
3018    base::MutexGuard lock(&mutex_);
3019    active_zones_.insert(zone);
3020    nesting_depth_++;
3021  }
3022
3023  void TraceZoneDestructionImpl(const Zone* zone) override {
3024    base::MutexGuard lock(&mutex_);
3025#ifdef V8_ENABLE_PRECISE_ZONE_STATS
3026    if (FLAG_trace_zone_type_stats) {
3027      type_stats_.MergeWith(zone->type_stats());
3028    }
3029#endif
3030    UpdateMemoryTrafficAndReportMemoryUsage(zone->segment_bytes_allocated());
3031    active_zones_.erase(zone);
3032    nesting_depth_--;
3033
3034#ifdef V8_ENABLE_PRECISE_ZONE_STATS
3035    if (FLAG_trace_zone_type_stats && active_zones_.empty()) {
3036      type_stats_.Dump();
3037    }
3038#endif
3039  }
3040
3041 private:
3042  void UpdateMemoryTrafficAndReportMemoryUsage(size_t memory_traffic_delta) {
3043    if (!FLAG_trace_zone_stats &&
3044        !(TracingFlags::zone_stats.load(std::memory_order_relaxed) &
3045          v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING)) {
3046      // Don't print anything if the zone tracing was enabled only because of
3047      // FLAG_trace_zone_type_stats.
3048      return;
3049    }
3050
3051    memory_traffic_since_last_report_ += memory_traffic_delta;
3052    if (memory_traffic_since_last_report_ < FLAG_zone_stats_tolerance) return;
3053    memory_traffic_since_last_report_ = 0;
3054
3055    Dump(buffer_, true);
3056
3057    {
3058      std::string trace_str = buffer_.str();
3059
3060      if (FLAG_trace_zone_stats) {
3061        PrintF(
3062            "{"
3063            "\"type\": \"v8-zone-trace\", "
3064            "\"stats\": %s"
3065            "}\n",
3066            trace_str.c_str());
3067      }
3068      if (V8_UNLIKELY(
3069              TracingFlags::zone_stats.load(std::memory_order_relaxed) &
3070              v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING)) {
3071        TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("v8.zone_stats"),
3072                             "V8.Zone_Stats", TRACE_EVENT_SCOPE_THREAD, "stats",
3073                             TRACE_STR_COPY(trace_str.c_str()));
3074      }
3075    }
3076
3077    // Clear the buffer.
3078    buffer_.str(std::string());
3079  }
3080
3081  void Dump(std::ostringstream& out, bool dump_details) {
3082    // Note: Neither isolate nor zones are locked, so be careful with accesses
3083    // as the allocator is potentially used on a concurrent thread.
3084    double time = isolate_->time_millis_since_init();
3085    out << "{"
3086        << "\"isolate\": \"" << reinterpret_cast<void*>(isolate_) << "\", "
3087        << "\"time\": " << time << ", ";
3088    size_t total_segment_bytes_allocated = 0;
3089    size_t total_zone_allocation_size = 0;
3090    size_t total_zone_freed_size = 0;
3091
3092    if (dump_details) {
3093      // Print detailed zone stats if memory usage changes direction.
3094      out << "\"zones\": [";
3095      bool first = true;
3096      for (const Zone* zone : active_zones_) {
3097        size_t zone_segment_bytes_allocated = zone->segment_bytes_allocated();
3098        size_t zone_allocation_size = zone->allocation_size_for_tracing();
3099        size_t freed_size = zone->freed_size_for_tracing();
3100        if (first) {
3101          first = false;
3102        } else {
3103          out << ", ";
3104        }
3105        out << "{"
3106            << "\"name\": \"" << zone->name() << "\", "
3107            << "\"allocated\": " << zone_segment_bytes_allocated << ", "
3108            << "\"used\": " << zone_allocation_size << ", "
3109            << "\"freed\": " << freed_size << "}";
3110        total_segment_bytes_allocated += zone_segment_bytes_allocated;
3111        total_zone_allocation_size += zone_allocation_size;
3112        total_zone_freed_size += freed_size;
3113      }
3114      out << "], ";
3115    } else {
3116      // Just calculate total allocated/used memory values.
3117      for (const Zone* zone : active_zones_) {
3118        total_segment_bytes_allocated += zone->segment_bytes_allocated();
3119        total_zone_allocation_size += zone->allocation_size_for_tracing();
3120        total_zone_freed_size += zone->freed_size_for_tracing();
3121      }
3122    }
3123    out << "\"allocated\": " << total_segment_bytes_allocated << ", "
3124        << "\"used\": " << total_zone_allocation_size << ", "
3125        << "\"freed\": " << total_zone_freed_size << "}";
3126  }
3127
3128  Isolate* const isolate_;
3129  std::atomic<size_t> nesting_depth_{0};
3130
3131  base::Mutex mutex_;
3132  std::unordered_set<const Zone*> active_zones_;
3133#ifdef V8_ENABLE_PRECISE_ZONE_STATS
3134  TypeStats type_stats_;
3135#endif
3136  std::ostringstream buffer_;
3137  // This value is increased on both allocations and deallocations.
3138  size_t memory_traffic_since_last_report_ = 0;
3139};
3140
3141#ifdef DEBUG
3142std::atomic<size_t> Isolate::non_disposed_isolates_;
3143#endif  // DEBUG
3144
3145// static
3146Isolate* Isolate::New() { return Isolate::Allocate(false); }
3147
3148// static
3149Isolate* Isolate::NewShared(const v8::Isolate::CreateParams& params) {
3150  DCHECK(ReadOnlyHeap::IsReadOnlySpaceShared());
3151  Isolate* isolate = Isolate::Allocate(true);
3152  v8::Isolate::Initialize(reinterpret_cast<v8::Isolate*>(isolate), params);
3153  return isolate;
3154}
3155
3156// static
3157Isolate* Isolate::Allocate(bool is_shared) {
3158  // v8::V8::Initialize() must be called before creating any isolates.
3159  DCHECK_NOT_NULL(V8::GetCurrentPlatform());
3160  // IsolateAllocator allocates the memory for the Isolate object according to
3161  // the given allocation mode.
3162  std::unique_ptr<IsolateAllocator> isolate_allocator =
3163      std::make_unique<IsolateAllocator>();
3164  // Construct Isolate object in the allocated memory.
3165  void* isolate_ptr = isolate_allocator->isolate_memory();
3166  Isolate* isolate =
3167      new (isolate_ptr) Isolate(std::move(isolate_allocator), is_shared);
3168#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
3169  DCHECK(IsAligned(isolate->isolate_root(), kPtrComprCageBaseAlignment));
3170  DCHECK_EQ(isolate->isolate_root(), isolate->cage_base());
3171#endif
3172
3173#ifdef DEBUG
3174  non_disposed_isolates_++;
3175#endif  // DEBUG
3176
3177  return isolate;
3178}
3179
3180// static
3181void Isolate::Delete(Isolate* isolate) {
3182  DCHECK_NOT_NULL(isolate);
3183  // v8::V8::Dispose() must only be called after deleting all isolates.
3184  DCHECK_NOT_NULL(V8::GetCurrentPlatform());
3185  // Temporarily set this isolate as current so that various parts of
3186  // the isolate can access it in their destructors without having a
3187  // direct pointer. We don't use Enter/Exit here to avoid
3188  // initializing the thread data.
3189  PerIsolateThreadData* saved_data = isolate->CurrentPerIsolateThreadData();
3190  DCHECK_EQ(true, isolate_key_created_.load(std::memory_order_relaxed));
3191  Isolate* saved_isolate = reinterpret_cast<Isolate*>(
3192      base::Thread::GetThreadLocal(isolate->isolate_key_));
3193  SetIsolateThreadLocals(isolate, nullptr);
3194  isolate->set_thread_id(ThreadId::Current());
3195
3196  isolate->Deinit();
3197
3198#ifdef DEBUG
3199  non_disposed_isolates_--;
3200#endif  // DEBUG
3201
3202  // Take ownership of the IsolateAllocator to ensure the Isolate memory will
3203  // be available during Isolate descructor call.
3204  std::unique_ptr<IsolateAllocator> isolate_allocator =
3205      std::move(isolate->isolate_allocator_);
3206  isolate->~Isolate();
3207  // Now free the memory owned by the allocator.
3208  isolate_allocator.reset();
3209
3210  // Restore the previous current isolate.
3211  SetIsolateThreadLocals(saved_isolate, saved_data);
3212}
3213
3214void Isolate::SetUpFromReadOnlyArtifacts(
3215    std::shared_ptr<ReadOnlyArtifacts> artifacts, ReadOnlyHeap* ro_heap) {
3216  if (ReadOnlyHeap::IsReadOnlySpaceShared()) {
3217    DCHECK_NOT_NULL(artifacts);
3218    artifacts_ = artifacts;
3219  } else {
3220    DCHECK_NULL(artifacts);
3221  }
3222  DCHECK_NOT_NULL(ro_heap);
3223  DCHECK_IMPLIES(read_only_heap_ != nullptr, read_only_heap_ == ro_heap);
3224  read_only_heap_ = ro_heap;
3225  heap_.SetUpFromReadOnlyHeap(read_only_heap_);
3226}
3227
3228v8::PageAllocator* Isolate::page_allocator() const {
3229  return isolate_allocator_->page_allocator();
3230}
3231
3232Isolate::Isolate(std::unique_ptr<i::IsolateAllocator> isolate_allocator,
3233                 bool is_shared)
3234    : isolate_data_(this, isolate_allocator->GetPtrComprCageBase()),
3235      is_shared_(is_shared),
3236      isolate_allocator_(std::move(isolate_allocator)),
3237      id_(isolate_counter.fetch_add(1, std::memory_order_relaxed)),
3238      allocator_(new TracingAccountingAllocator(this)),
3239      builtins_(this),
3240#if defined(DEBUG) || defined(VERIFY_HEAP)
3241      num_active_deserializers_(0),
3242#endif
3243      rail_mode_(PERFORMANCE_ANIMATION),
3244      code_event_dispatcher_(new CodeEventDispatcher()),
3245      detailed_source_positions_for_profiling_(FLAG_detailed_line_info),
3246      persistent_handles_list_(new PersistentHandlesList()),
3247      jitless_(FLAG_jitless),
3248#if V8_SFI_HAS_UNIQUE_ID
3249      next_unique_sfi_id_(0),
3250#endif
3251      next_module_async_evaluating_ordinal_(
3252          SourceTextModule::kFirstAsyncEvaluatingOrdinal),
3253      cancelable_task_manager_(new CancelableTaskManager()) {
3254  TRACE_ISOLATE(constructor);
3255  CheckIsolateLayout();
3256
3257  // ThreadManager is initialized early to support locking an isolate
3258  // before it is entered.
3259  thread_manager_ = new ThreadManager(this);
3260
3261  handle_scope_data_.Initialize();
3262
3263  // A shared Isolate is used to support JavaScript shared memory features
3264  // across Isolates. These features require all of the following to hold in the
3265  // build configuration:
3266  //
3267  // 1. The RO space is shared, so e.g. immortal RO maps can be shared across
3268  //   Isolates.
3269  // 2. HeapObjects are shareable across Isolates, which requires either
3270  //   pointers to be uncompressed (!COMPRESS_POINTER_BOOL), or that there is a
3271  //   single virtual memory reservation shared by all Isolates in the process
3272  //   for compressing pointers (COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL).
3273  CHECK_IMPLIES(is_shared_, V8_SHARED_RO_HEAP_BOOL &&
3274                                (!COMPRESS_POINTERS_BOOL ||
3275                                 COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL));
3276
3277#define ISOLATE_INIT_EXECUTE(type, name, initial_value) \
3278  name##_ = (initial_value);
3279  ISOLATE_INIT_LIST(ISOLATE_INIT_EXECUTE)
3280#undef ISOLATE_INIT_EXECUTE
3281
3282#define ISOLATE_INIT_ARRAY_EXECUTE(type, name, length) \
3283  memset(name##_, 0, sizeof(type) * length);
3284  ISOLATE_INIT_ARRAY_LIST(ISOLATE_INIT_ARRAY_EXECUTE)
3285#undef ISOLATE_INIT_ARRAY_EXECUTE
3286
3287  InitializeLoggingAndCounters();
3288  debug_ = new Debug(this);
3289
3290  InitializeDefaultEmbeddedBlob();
3291
3292  MicrotaskQueue::SetUpDefaultMicrotaskQueue(this);
3293
3294  if (is_shared_) {
3295    global_safepoint_ = std::make_unique<GlobalSafepoint>(this);
3296  }
3297}
3298
3299void Isolate::CheckIsolateLayout() {
3300  CHECK_EQ(OFFSET_OF(Isolate, isolate_data_), 0);
3301  CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.embedder_data_)),
3302           Internals::kIsolateEmbedderDataOffset);
3303  CHECK_EQ(static_cast<int>(
3304               OFFSET_OF(Isolate, isolate_data_.fast_c_call_caller_fp_)),
3305           Internals::kIsolateFastCCallCallerFpOffset);
3306  CHECK_EQ(static_cast<int>(
3307               OFFSET_OF(Isolate, isolate_data_.fast_c_call_caller_pc_)),
3308           Internals::kIsolateFastCCallCallerPcOffset);
3309  CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.cage_base_)),
3310           Internals::kIsolateCageBaseOffset);
3311  CHECK_EQ(static_cast<int>(
3312               OFFSET_OF(Isolate, isolate_data_.long_task_stats_counter_)),
3313           Internals::kIsolateLongTaskStatsCounterOffset);
3314  CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.stack_guard_)),
3315           Internals::kIsolateStackGuardOffset);
3316  CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.roots_table_)),
3317           Internals::kIsolateRootsOffset);
3318
3319  STATIC_ASSERT(Internals::kStackGuardSize == sizeof(StackGuard));
3320  STATIC_ASSERT(Internals::kBuiltinTier0TableSize ==
3321                Builtins::kBuiltinTier0Count * kSystemPointerSize);
3322  STATIC_ASSERT(Internals::kBuiltinTier0EntryTableSize ==
3323                Builtins::kBuiltinTier0Count * kSystemPointerSize);
3324
3325#ifdef V8_SANDBOXED_EXTERNAL_POINTERS
3326  CHECK_EQ(static_cast<int>(OFFSET_OF(ExternalPointerTable, buffer_)),
3327           Internals::kExternalPointerTableBufferOffset);
3328  CHECK_EQ(static_cast<int>(OFFSET_OF(ExternalPointerTable, capacity_)),
3329           Internals::kExternalPointerTableCapacityOffset);
3330  CHECK_EQ(static_cast<int>(OFFSET_OF(ExternalPointerTable, freelist_head_)),
3331           Internals::kExternalPointerTableFreelistHeadOffset);
3332#endif
3333}
3334
3335void Isolate::ClearSerializerData() {
3336  delete external_reference_map_;
3337  external_reference_map_ = nullptr;
3338}
3339
3340bool Isolate::LogObjectRelocation() {
3341  return FLAG_verify_predictable || logger()->is_logging() || is_profiling() ||
3342         heap()->isolate()->logger()->is_listening_to_code_events() ||
3343         (heap_profiler() != nullptr &&
3344          heap_profiler()->is_tracking_object_moves()) ||
3345         heap()->has_heap_object_allocation_tracker();
3346}
3347
3348void Isolate::Deinit() {
3349  TRACE_ISOLATE(deinit);
3350  DisallowHeapAllocation no_allocation;
3351
3352  tracing_cpu_profiler_.reset();
3353  if (FLAG_stress_sampling_allocation_profiler > 0) {
3354    heap_profiler()->StopSamplingHeapProfiler();
3355  }
3356
3357  metrics_recorder_->NotifyIsolateDisposal();
3358  recorder_context_id_map_.clear();
3359
3360#if defined(V8_OS_WIN64)
3361  if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
3362      heap()->memory_allocator() && RequiresCodeRange() &&
3363      heap()->code_range()->AtomicDecrementUnwindInfoUseCount() == 1) {
3364    const base::AddressRegion& code_region = heap()->code_region();
3365    void* start = reinterpret_cast<void*>(code_region.begin());
3366    win64_unwindinfo::UnregisterNonABICompliantCodeRange(start);
3367  }
3368#endif  // V8_OS_WIN64
3369
3370  FutexEmulation::IsolateDeinit(this);
3371
3372  debug()->Unload();
3373
3374#if V8_ENABLE_WEBASSEMBLY
3375  wasm::GetWasmEngine()->DeleteCompileJobsOnIsolate(this);
3376
3377  BackingStore::RemoveSharedWasmMemoryObjects(this);
3378#endif  // V8_ENABLE_WEBASSEMBLY
3379
3380  if (concurrent_recompilation_enabled()) {
3381    optimizing_compile_dispatcher_->Stop();
3382    delete optimizing_compile_dispatcher_;
3383    optimizing_compile_dispatcher_ = nullptr;
3384  }
3385
3386  // All client isolates should already be detached.
3387  if (is_shared()) global_safepoint()->AssertNoClients();
3388
3389  if (FLAG_print_deopt_stress) {
3390    PrintF(stdout, "=== Stress deopt counter: %u\n", stress_deopt_count_);
3391  }
3392
3393  // We must stop the logger before we tear down other components.
3394  sampler::Sampler* sampler = logger_->sampler();
3395  if (sampler && sampler->IsActive()) sampler->Stop();
3396
3397  FreeThreadResources();
3398  logger_->StopProfilerThread();
3399
3400  // We start with the heap tear down so that releasing managed objects does
3401  // not cause a GC.
3402  heap_.StartTearDown();
3403
3404  // This stops cancelable tasks (i.e. concurrent marking tasks).
3405  // Stop concurrent tasks before destroying resources since they might still
3406  // use those.
3407  {
3408    IgnoreLocalGCRequests ignore_gc_requests(heap());
3409    ParkedScope parked_scope(main_thread_local_heap());
3410    cancelable_task_manager()->CancelAndWait();
3411  }
3412
3413  // Cancel all compiler tasks.
3414  delete baseline_batch_compiler_;
3415  baseline_batch_compiler_ = nullptr;
3416
3417#ifdef V8_ENABLE_MAGLEV
3418  delete maglev_concurrent_dispatcher_;
3419  maglev_concurrent_dispatcher_ = nullptr;
3420#endif  // V8_ENABLE_MAGLEV
3421
3422  if (lazy_compile_dispatcher_) {
3423    lazy_compile_dispatcher_->AbortAll();
3424    lazy_compile_dispatcher_.reset();
3425  }
3426
3427  // At this point there are no more background threads left in this isolate.
3428  heap_.safepoint()->AssertMainThreadIsOnlyThread();
3429
3430  {
3431    // This isolate might have to park for a shared GC initiated by another
3432    // client isolate before it can actually detach from the shared isolate.
3433    AllowGarbageCollection allow_shared_gc;
3434    DetachFromSharedIsolate();
3435  }
3436
3437  ReleaseSharedPtrs();
3438
3439  builtins_.TearDown();
3440  bootstrapper_->TearDown();
3441
3442  if (tiering_manager_ != nullptr) {
3443    delete tiering_manager_;
3444    tiering_manager_ = nullptr;
3445  }
3446
3447  delete heap_profiler_;
3448  heap_profiler_ = nullptr;
3449
3450  string_table_.reset();
3451
3452#if USE_SIMULATOR
3453  delete simulator_data_;
3454  simulator_data_ = nullptr;
3455#endif
3456
3457  // After all concurrent tasks are stopped, we know for sure that stats aren't
3458  // updated anymore.
3459  DumpAndResetStats();
3460
3461  heap_.TearDown();
3462
3463  main_thread_local_isolate_.reset();
3464
3465  FILE* logfile = logger_->TearDownAndGetLogFile();
3466  if (logfile != nullptr) base::Fclose(logfile);
3467
3468#if V8_ENABLE_WEBASSEMBLY
3469  wasm::GetWasmEngine()->RemoveIsolate(this);
3470#endif  // V8_ENABLE_WEBASSEMBLY
3471
3472  TearDownEmbeddedBlob();
3473
3474  delete interpreter_;
3475  interpreter_ = nullptr;
3476
3477  delete ast_string_constants_;
3478  ast_string_constants_ = nullptr;
3479
3480  code_event_dispatcher_.reset();
3481
3482  delete root_index_map_;
3483  root_index_map_ = nullptr;
3484
3485  delete compiler_zone_;
3486  compiler_zone_ = nullptr;
3487  compiler_cache_ = nullptr;
3488
3489  SetCodePages(nullptr);
3490
3491  ClearSerializerData();
3492
3493#ifdef V8_SANDBOXED_EXTERNAL_POINTERS
3494  external_pointer_table().TearDown();
3495#endif  // V8_SANDBOXED_EXTERNAL_POINTERS
3496
3497  {
3498    base::MutexGuard lock_guard(&thread_data_table_mutex_);
3499    thread_data_table_.RemoveAllThreads();
3500  }
3501}
3502
3503void Isolate::SetIsolateThreadLocals(Isolate* isolate,
3504                                     PerIsolateThreadData* data) {
3505  base::Thread::SetThreadLocal(isolate_key_, isolate);
3506  base::Thread::SetThreadLocal(per_isolate_thread_data_key_, data);
3507}
3508
3509Isolate::~Isolate() {
3510  TRACE_ISOLATE(destructor);
3511
3512  // The entry stack must be empty when we get here.
3513  DCHECK(entry_stack_ == nullptr || entry_stack_->previous_item == nullptr);
3514
3515  delete entry_stack_;
3516  entry_stack_ = nullptr;
3517
3518  delete date_cache_;
3519  date_cache_ = nullptr;
3520
3521  delete regexp_stack_;
3522  regexp_stack_ = nullptr;
3523
3524  delete descriptor_lookup_cache_;
3525  descriptor_lookup_cache_ = nullptr;
3526
3527  delete load_stub_cache_;
3528  load_stub_cache_ = nullptr;
3529  delete store_stub_cache_;
3530  store_stub_cache_ = nullptr;
3531
3532  delete materialized_object_store_;
3533  materialized_object_store_ = nullptr;
3534
3535  delete logger_;
3536  logger_ = nullptr;
3537
3538  delete handle_scope_implementer_;
3539  handle_scope_implementer_ = nullptr;
3540
3541  delete code_tracer();
3542  set_code_tracer(nullptr);
3543
3544  delete compilation_cache_;
3545  compilation_cache_ = nullptr;
3546  delete bootstrapper_;
3547  bootstrapper_ = nullptr;
3548  delete inner_pointer_to_code_cache_;
3549  inner_pointer_to_code_cache_ = nullptr;
3550
3551  delete thread_manager_;
3552  thread_manager_ = nullptr;
3553
3554  bigint_processor_->Destroy();
3555
3556  delete global_handles_;
3557  global_handles_ = nullptr;
3558  delete eternal_handles_;
3559  eternal_handles_ = nullptr;
3560
3561  delete string_stream_debug_object_cache_;
3562  string_stream_debug_object_cache_ = nullptr;
3563
3564  delete random_number_generator_;
3565  random_number_generator_ = nullptr;
3566
3567  delete fuzzer_rng_;
3568  fuzzer_rng_ = nullptr;
3569
3570  delete debug_;
3571  debug_ = nullptr;
3572
3573  delete cancelable_task_manager_;
3574  cancelable_task_manager_ = nullptr;
3575
3576  delete allocator_;
3577  allocator_ = nullptr;
3578
3579  // Assert that |default_microtask_queue_| is the last MicrotaskQueue instance.
3580  DCHECK_IMPLIES(default_microtask_queue_,
3581                 default_microtask_queue_ == default_microtask_queue_->next());
3582  delete default_microtask_queue_;
3583  default_microtask_queue_ = nullptr;
3584
3585  // The ReadOnlyHeap should not be destroyed when sharing without pointer
3586  // compression as the object itself is shared.
3587  if (read_only_heap_->IsOwnedByIsolate()) {
3588    delete read_only_heap_;
3589    read_only_heap_ = nullptr;
3590  }
3591}
3592
3593void Isolate::InitializeThreadLocal() {
3594  thread_local_top()->Initialize(this);
3595  clear_pending_exception();
3596  clear_pending_message();
3597  clear_scheduled_exception();
3598}
3599
3600void Isolate::SetTerminationOnExternalTryCatch() {
3601  if (try_catch_handler() == nullptr) return;
3602  try_catch_handler()->can_continue_ = false;
3603  try_catch_handler()->has_terminated_ = true;
3604  try_catch_handler()->exception_ =
3605      reinterpret_cast<void*>(ReadOnlyRoots(heap()).null_value().ptr());
3606}
3607
3608bool Isolate::PropagatePendingExceptionToExternalTryCatch(
3609    ExceptionHandlerType top_handler) {
3610  Object exception = pending_exception();
3611
3612  if (top_handler == ExceptionHandlerType::kJavaScriptHandler) {
3613    thread_local_top()->external_caught_exception_ = false;
3614    return false;
3615  }
3616
3617  if (top_handler == ExceptionHandlerType::kNone) {
3618    thread_local_top()->external_caught_exception_ = false;
3619    return true;
3620  }
3621
3622  DCHECK_EQ(ExceptionHandlerType::kExternalTryCatch, top_handler);
3623  thread_local_top()->external_caught_exception_ = true;
3624  if (!is_catchable_by_javascript(exception)) {
3625    SetTerminationOnExternalTryCatch();
3626  } else {
3627    v8::TryCatch* handler = try_catch_handler();
3628    DCHECK(pending_message().IsJSMessageObject() ||
3629           pending_message().IsTheHole(this));
3630    handler->can_continue_ = true;
3631    handler->has_terminated_ = false;
3632    handler->exception_ = reinterpret_cast<void*>(exception.ptr());
3633    // Propagate to the external try-catch only if we got an actual message.
3634    if (!has_pending_message()) return true;
3635    handler->message_obj_ = reinterpret_cast<void*>(pending_message().ptr());
3636  }
3637  return true;
3638}
3639
3640bool Isolate::InitializeCounters() {
3641  if (async_counters_) return false;
3642  async_counters_ = std::make_shared<Counters>(this);
3643  return true;
3644}
3645
3646void Isolate::InitializeLoggingAndCounters() {
3647  if (logger_ == nullptr) {
3648    logger_ = new Logger(this);
3649  }
3650  InitializeCounters();
3651}
3652
3653namespace {
3654
3655void CreateOffHeapTrampolines(Isolate* isolate) {
3656  DCHECK_NOT_NULL(isolate->embedded_blob_code());
3657  DCHECK_NE(0, isolate->embedded_blob_code_size());
3658  DCHECK_NOT_NULL(isolate->embedded_blob_data());
3659  DCHECK_NE(0, isolate->embedded_blob_data_size());
3660
3661  HandleScope scope(isolate);
3662  Builtins* builtins = isolate->builtins();
3663
3664  EmbeddedData d = EmbeddedData::FromBlob(isolate);
3665
3666  STATIC_ASSERT(Builtins::kAllBuiltinsAreIsolateIndependent);
3667  for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLast;
3668       ++builtin) {
3669    Address instruction_start = d.InstructionStartOfBuiltin(builtin);
3670    // TODO(v8:11880): avoid roundtrips between cdc and code.
3671    Handle<Code> trampoline = isolate->factory()->NewOffHeapTrampolineFor(
3672        FromCodeT(builtins->code_handle(builtin), isolate), instruction_start);
3673
3674    // From this point onwards, the old builtin code object is unreachable and
3675    // will be collected by the next GC.
3676    builtins->set_code(builtin, ToCodeT(*trampoline));
3677  }
3678}
3679
3680#ifdef DEBUG
3681bool IsolateIsCompatibleWithEmbeddedBlob(Isolate* isolate) {
3682  EmbeddedData d = EmbeddedData::FromBlob(isolate);
3683  return (d.IsolateHash() == isolate->HashIsolateForEmbeddedBlob());
3684}
3685#endif  // DEBUG
3686
3687}  // namespace
3688
3689void Isolate::InitializeDefaultEmbeddedBlob() {
3690  const uint8_t* code = DefaultEmbeddedBlobCode();
3691  uint32_t code_size = DefaultEmbeddedBlobCodeSize();
3692  const uint8_t* data = DefaultEmbeddedBlobData();
3693  uint32_t data_size = DefaultEmbeddedBlobDataSize();
3694
3695  if (StickyEmbeddedBlobCode() != nullptr) {
3696    base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
3697    // Check again now that we hold the lock.
3698    if (StickyEmbeddedBlobCode() != nullptr) {
3699      code = StickyEmbeddedBlobCode();
3700      code_size = StickyEmbeddedBlobCodeSize();
3701      data = StickyEmbeddedBlobData();
3702      data_size = StickyEmbeddedBlobDataSize();
3703      current_embedded_blob_refs_++;
3704    }
3705  }
3706
3707  if (code == nullptr) {
3708    CHECK_EQ(0, code_size);
3709  } else {
3710    SetEmbeddedBlob(code, code_size, data, data_size);
3711  }
3712}
3713
3714void Isolate::CreateAndSetEmbeddedBlob() {
3715  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
3716
3717  PrepareBuiltinSourcePositionMap();
3718
3719  PrepareBuiltinLabelInfoMap();
3720
3721  // If a sticky blob has been set, we reuse it.
3722  if (StickyEmbeddedBlobCode() != nullptr) {
3723    CHECK_EQ(embedded_blob_code(), StickyEmbeddedBlobCode());
3724    CHECK_EQ(embedded_blob_data(), StickyEmbeddedBlobData());
3725    CHECK_EQ(CurrentEmbeddedBlobCode(), StickyEmbeddedBlobCode());
3726    CHECK_EQ(CurrentEmbeddedBlobData(), StickyEmbeddedBlobData());
3727  } else {
3728    // Create and set a new embedded blob.
3729    uint8_t* code;
3730    uint32_t code_size;
3731    uint8_t* data;
3732    uint32_t data_size;
3733    OffHeapInstructionStream::CreateOffHeapOffHeapInstructionStream(
3734        this, &code, &code_size, &data, &data_size);
3735
3736    CHECK_EQ(0, current_embedded_blob_refs_);
3737    const uint8_t* const_code = const_cast<const uint8_t*>(code);
3738    const uint8_t* const_data = const_cast<const uint8_t*>(data);
3739    SetEmbeddedBlob(const_code, code_size, const_data, data_size);
3740    current_embedded_blob_refs_++;
3741
3742    SetStickyEmbeddedBlob(code, code_size, data, data_size);
3743  }
3744
3745  MaybeRemapEmbeddedBuiltinsIntoCodeRange();
3746
3747  CreateOffHeapTrampolines(this);
3748}
3749
3750void Isolate::MaybeRemapEmbeddedBuiltinsIntoCodeRange() {
3751  if (!is_short_builtin_calls_enabled() || !RequiresCodeRange()) {
3752    return;
3753  }
3754  if (V8_ENABLE_NEAR_CODE_RANGE_BOOL &&
3755      GetShortBuiltinsCallRegion().contains(heap_.code_region())) {
3756    // The embedded builtins are within the pc-relative reach from the code
3757    // range, so there's no need to remap embedded builtins.
3758    return;
3759  }
3760
3761  CHECK_NOT_NULL(embedded_blob_code_);
3762  CHECK_NE(embedded_blob_code_size_, 0);
3763
3764  DCHECK_NOT_NULL(heap_.code_range_);
3765  embedded_blob_code_ = heap_.code_range_->RemapEmbeddedBuiltins(
3766      this, embedded_blob_code_, embedded_blob_code_size_);
3767  CHECK_NOT_NULL(embedded_blob_code_);
3768  // The un-embedded code blob is already a part of the registered code range
3769  // so it's not necessary to register it again.
3770}
3771
3772void Isolate::TearDownEmbeddedBlob() {
3773  // Nothing to do in case the blob is embedded into the binary or unset.
3774  if (StickyEmbeddedBlobCode() == nullptr) return;
3775
3776  if (!is_short_builtin_calls_enabled()) {
3777    CHECK_EQ(embedded_blob_code(), StickyEmbeddedBlobCode());
3778    CHECK_EQ(embedded_blob_data(), StickyEmbeddedBlobData());
3779  }
3780  CHECK_EQ(CurrentEmbeddedBlobCode(), StickyEmbeddedBlobCode());
3781  CHECK_EQ(CurrentEmbeddedBlobData(), StickyEmbeddedBlobData());
3782
3783  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
3784  current_embedded_blob_refs_--;
3785  if (current_embedded_blob_refs_ == 0 && enable_embedded_blob_refcounting_) {
3786    // We own the embedded blob and are the last holder. Free it.
3787    OffHeapInstructionStream::FreeOffHeapOffHeapInstructionStream(
3788        const_cast<uint8_t*>(CurrentEmbeddedBlobCode()),
3789        embedded_blob_code_size(),
3790        const_cast<uint8_t*>(CurrentEmbeddedBlobData()),
3791        embedded_blob_data_size());
3792    ClearEmbeddedBlob();
3793  }
3794}
3795
3796bool Isolate::InitWithoutSnapshot() {
3797  return Init(nullptr, nullptr, nullptr, false);
3798}
3799
3800bool Isolate::InitWithSnapshot(SnapshotData* startup_snapshot_data,
3801                               SnapshotData* read_only_snapshot_data,
3802                               SnapshotData* shared_heap_snapshot_data,
3803                               bool can_rehash) {
3804  DCHECK_NOT_NULL(startup_snapshot_data);
3805  DCHECK_NOT_NULL(read_only_snapshot_data);
3806  DCHECK_NOT_NULL(shared_heap_snapshot_data);
3807  return Init(startup_snapshot_data, read_only_snapshot_data,
3808              shared_heap_snapshot_data, can_rehash);
3809}
3810
3811static std::string AddressToString(uintptr_t address) {
3812  std::stringstream stream_address;
3813  stream_address << "0x" << std::hex << address;
3814  return stream_address.str();
3815}
3816
3817void Isolate::AddCrashKeysForIsolateAndHeapPointers() {
3818  DCHECK_NOT_NULL(add_crash_key_callback_);
3819
3820  const uintptr_t isolate_address = reinterpret_cast<uintptr_t>(this);
3821  add_crash_key_callback_(v8::CrashKeyId::kIsolateAddress,
3822                          AddressToString(isolate_address));
3823
3824  const uintptr_t ro_space_firstpage_address =
3825      heap()->read_only_space()->FirstPageAddress();
3826  add_crash_key_callback_(v8::CrashKeyId::kReadonlySpaceFirstPageAddress,
3827                          AddressToString(ro_space_firstpage_address));
3828
3829  if (heap()->map_space()) {
3830    const uintptr_t map_space_firstpage_address =
3831        heap()->map_space()->FirstPageAddress();
3832    add_crash_key_callback_(v8::CrashKeyId::kMapSpaceFirstPageAddress,
3833                            AddressToString(map_space_firstpage_address));
3834  }
3835
3836  const uintptr_t code_space_firstpage_address =
3837      heap()->code_space()->FirstPageAddress();
3838  add_crash_key_callback_(v8::CrashKeyId::kCodeSpaceFirstPageAddress,
3839                          AddressToString(code_space_firstpage_address));
3840}
3841
3842void Isolate::InitializeCodeRanges() {
3843  DCHECK_NULL(GetCodePages());
3844  MemoryRange embedded_range{
3845      reinterpret_cast<const void*>(embedded_blob_code()),
3846      embedded_blob_code_size()};
3847  code_pages_buffer1_.push_back(embedded_range);
3848  SetCodePages(&code_pages_buffer1_);
3849}
3850
3851namespace {
3852
3853// This global counter contains number of stack loads/stores per optimized/wasm
3854// function.
3855using MapOfLoadsAndStoresPerFunction =
3856    std::map<std::string /* function_name */,
3857             std::pair<uint64_t /* loads */, uint64_t /* stores */>>;
3858MapOfLoadsAndStoresPerFunction* stack_access_count_map = nullptr;
3859
3860class BigIntPlatform : public bigint::Platform {
3861 public:
3862  explicit BigIntPlatform(Isolate* isolate) : isolate_(isolate) {}
3863  ~BigIntPlatform() override = default;
3864
3865  bool InterruptRequested() override {
3866    StackLimitCheck interrupt_check(isolate_);
3867    return (interrupt_check.InterruptRequested() &&
3868            isolate_->stack_guard()->HasTerminationRequest());
3869  }
3870
3871 private:
3872  Isolate* isolate_;
3873};
3874}  // namespace
3875
3876VirtualMemoryCage* Isolate::GetPtrComprCodeCageForTesting() {
3877  return V8_EXTERNAL_CODE_SPACE_BOOL ? heap_.code_range() : GetPtrComprCage();
3878}
3879
3880bool Isolate::Init(SnapshotData* startup_snapshot_data,
3881                   SnapshotData* read_only_snapshot_data,
3882                   SnapshotData* shared_heap_snapshot_data, bool can_rehash) {
3883  TRACE_ISOLATE(init);
3884  const bool create_heap_objects = (read_only_snapshot_data == nullptr);
3885  // We either have all or none.
3886  DCHECK_EQ(create_heap_objects, startup_snapshot_data == nullptr);
3887  DCHECK_EQ(create_heap_objects, shared_heap_snapshot_data == nullptr);
3888
3889  base::ElapsedTimer timer;
3890  if (create_heap_objects && FLAG_profile_deserialization) timer.Start();
3891
3892  time_millis_at_init_ = heap_.MonotonicallyIncreasingTimeInMs();
3893
3894  stress_deopt_count_ = FLAG_deopt_every_n_times;
3895  force_slow_path_ = FLAG_force_slow_path;
3896
3897  has_fatal_error_ = false;
3898
3899  // The initialization process does not handle memory exhaustion.
3900  AlwaysAllocateScope always_allocate(heap());
3901
3902#define ASSIGN_ELEMENT(CamelName, hacker_name)                  \
3903  isolate_addresses_[IsolateAddressId::k##CamelName##Address] = \
3904      reinterpret_cast<Address>(hacker_name##_address());
3905  FOR_EACH_ISOLATE_ADDRESS_NAME(ASSIGN_ELEMENT)
3906#undef ASSIGN_ELEMENT
3907
3908  // We need to initialize code_pages_ before any on-heap code is allocated to
3909  // make sure we record all code allocations.
3910  InitializeCodeRanges();
3911
3912  compilation_cache_ = new CompilationCache(this);
3913  descriptor_lookup_cache_ = new DescriptorLookupCache();
3914  inner_pointer_to_code_cache_ = new InnerPointerToCodeCache(this);
3915  global_handles_ = new GlobalHandles(this);
3916  eternal_handles_ = new EternalHandles();
3917  bootstrapper_ = new Bootstrapper(this);
3918  handle_scope_implementer_ = new HandleScopeImplementer(this);
3919  load_stub_cache_ = new StubCache(this);
3920  store_stub_cache_ = new StubCache(this);
3921  materialized_object_store_ = new MaterializedObjectStore(this);
3922  regexp_stack_ = new RegExpStack();
3923  date_cache_ = new DateCache();
3924  heap_profiler_ = new HeapProfiler(heap());
3925  interpreter_ = new interpreter::Interpreter(this);
3926  bigint_processor_ = bigint::Processor::New(new BigIntPlatform(this));
3927
3928  if (FLAG_lazy_compile_dispatcher) {
3929    lazy_compile_dispatcher_ = std::make_unique<LazyCompileDispatcher>(
3930        this, V8::GetCurrentPlatform(), FLAG_stack_size);
3931  }
3932  baseline_batch_compiler_ = new baseline::BaselineBatchCompiler(this);
3933#ifdef V8_ENABLE_MAGLEV
3934  maglev_concurrent_dispatcher_ = new maglev::MaglevConcurrentDispatcher(this);
3935#endif  // V8_ENABLE_MAGLEV
3936
3937#if USE_SIMULATOR
3938  simulator_data_ = new SimulatorData;
3939#endif
3940
3941  // Enable logging before setting up the heap
3942  logger_->SetUp(this);
3943
3944  metrics_recorder_ = std::make_shared<metrics::Recorder>();
3945
3946  {
3947    // Ensure that the thread has a valid stack guard.  The v8::Locker object
3948    // will ensure this too, but we don't have to use lockers if we are only
3949    // using one thread.
3950    ExecutionAccess lock(this);
3951    stack_guard()->InitThread(lock);
3952  }
3953
3954  // Create LocalIsolate/LocalHeap for the main thread and set state to Running.
3955  main_thread_local_isolate_.reset(new LocalIsolate(this, ThreadKind::kMain));
3956
3957  {
3958    IgnoreLocalGCRequests ignore_gc_requests(heap());
3959    main_thread_local_heap()->Unpark();
3960  }
3961
3962  // Lock clients_mutex_ in order to prevent shared GCs from other clients
3963  // during deserialization.
3964  base::Optional<base::MutexGuard> clients_guard;
3965
3966  if (shared_isolate_) {
3967    clients_guard.emplace(&shared_isolate_->global_safepoint()->clients_mutex_);
3968  }
3969
3970  // The main thread LocalHeap needs to be set up when attaching to the shared
3971  // isolate. Otherwise a global safepoint would find an isolate without
3972  // LocalHeaps and not wait until this thread is ready for a GC.
3973  AttachToSharedIsolate();
3974
3975  // SetUp the object heap.
3976  DCHECK(!heap_.HasBeenSetUp());
3977  heap_.SetUp(main_thread_local_heap());
3978  ReadOnlyHeap::SetUp(this, read_only_snapshot_data, can_rehash);
3979  heap_.SetUpSpaces(&isolate_data_.new_allocation_info_,
3980                    &isolate_data_.old_allocation_info_);
3981
3982  if (OwnsStringTable()) {
3983    string_table_ = std::make_shared<StringTable>(this);
3984  } else {
3985    // Only refer to shared string table after attaching to the shared isolate.
3986    DCHECK_NOT_NULL(shared_isolate());
3987    string_table_ = shared_isolate()->string_table_;
3988  }
3989
3990  if (V8_SHORT_BUILTIN_CALLS_BOOL && FLAG_short_builtin_calls) {
3991    // Check if the system has more than 4GB of physical memory by comparing the
3992    // old space size with respective threshold value.
3993    //
3994    // Additionally, enable if there is already a process-wide CodeRange that
3995    // has re-embedded builtins.
3996    is_short_builtin_calls_enabled_ = (heap_.MaxOldGenerationSize() >=
3997                                       kShortBuiltinCallsOldSpaceSizeThreshold);
3998    if (COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL) {
3999      std::shared_ptr<CodeRange> code_range =
4000          CodeRange::GetProcessWideCodeRange();
4001      if (code_range && code_range->embedded_blob_code_copy() != nullptr) {
4002        is_short_builtin_calls_enabled_ = true;
4003      }
4004    }
4005    if (V8_ENABLE_NEAR_CODE_RANGE_BOOL) {
4006      // The short builtin calls could still be enabled if allocated code range
4007      // is close enough to embedded builtins so that the latter could be
4008      // reached using pc-relative (short) calls/jumps.
4009      is_short_builtin_calls_enabled_ |=
4010          GetShortBuiltinsCallRegion().contains(heap_.code_region());
4011    }
4012  }
4013#ifdef V8_EXTERNAL_CODE_SPACE
4014  if (heap_.code_range()) {
4015    code_cage_base_ = GetPtrComprCageBaseAddress(heap_.code_range()->base());
4016  } else {
4017    code_cage_base_ = cage_base();
4018  }
4019#endif  // V8_EXTERNAL_CODE_SPACE
4020
4021  isolate_data_.external_reference_table()->Init(this);
4022
4023#ifdef V8_SANDBOXED_EXTERNAL_POINTERS
4024  external_pointer_table().Init(this);
4025#endif  // V8_SANDBOXED_EXTERNAL_POINTERS
4026
4027#if V8_ENABLE_WEBASSEMBLY
4028  wasm::GetWasmEngine()->AddIsolate(this);
4029#endif  // V8_ENABLE_WEBASSEMBLY
4030
4031  if (setup_delegate_ == nullptr) {
4032    setup_delegate_ = new SetupIsolateDelegate(create_heap_objects);
4033  }
4034
4035  if (!FLAG_inline_new) heap_.DisableInlineAllocation();
4036
4037  if (!setup_delegate_->SetupHeap(&heap_)) {
4038    V8::FatalProcessOutOfMemory(this, "heap object creation");
4039  }
4040
4041  if (create_heap_objects) {
4042    // Terminate the startup and shared heap object caches so we can iterate.
4043    startup_object_cache_.push_back(ReadOnlyRoots(this).undefined_value());
4044    shared_heap_object_cache_.push_back(ReadOnlyRoots(this).undefined_value());
4045  }
4046
4047  InitializeThreadLocal();
4048
4049  // Profiler has to be created after ThreadLocal is initialized
4050  // because it makes use of interrupts.
4051  tracing_cpu_profiler_.reset(new TracingCpuProfilerImpl(this));
4052
4053  bootstrapper_->Initialize(create_heap_objects);
4054
4055  if (create_heap_objects) {
4056    builtins_constants_table_builder_ = new BuiltinsConstantsTableBuilder(this);
4057
4058    setup_delegate_->SetupBuiltins(this);
4059
4060#ifndef V8_TARGET_ARCH_ARM
4061    // Store the interpreter entry trampoline on the root list. It is used as a
4062    // template for further copies that may later be created to help profile
4063    // interpreted code.
4064    // We currently cannot do this on arm due to RELATIVE_CODE_TARGETs
4065    // assuming that all possible Code targets may be addressed with an int24
4066    // offset, effectively limiting code space size to 32MB. We can guarantee
4067    // this at mksnapshot-time, but not at runtime.
4068    // See also: https://crbug.com/v8/8713.
4069    heap_.SetInterpreterEntryTrampolineForProfiling(
4070        FromCodeT(builtins()->code(Builtin::kInterpreterEntryTrampoline)));
4071#endif
4072
4073    builtins_constants_table_builder_->Finalize();
4074    delete builtins_constants_table_builder_;
4075    builtins_constants_table_builder_ = nullptr;
4076
4077    CreateAndSetEmbeddedBlob();
4078  } else {
4079    setup_delegate_->SetupBuiltins(this);
4080    MaybeRemapEmbeddedBuiltinsIntoCodeRange();
4081  }
4082
4083  // Initialize custom memcopy and memmove functions (must happen after
4084  // embedded blob setup).
4085  init_memcopy_functions();
4086
4087  if (FLAG_log_internal_timer_events) {
4088    set_event_logger(Logger::DefaultEventLoggerSentinel);
4089  }
4090
4091  if (FLAG_trace_turbo || FLAG_trace_turbo_graph || FLAG_turbo_profiling) {
4092    PrintF("Concurrent recompilation has been disabled for tracing.\n");
4093  } else if (OptimizingCompileDispatcher::Enabled()) {
4094    optimizing_compile_dispatcher_ = new OptimizingCompileDispatcher(this);
4095  }
4096
4097  // Initialize before deserialization since collections may occur,
4098  // clearing/updating ICs (and thus affecting tiering decisions).
4099  tiering_manager_ = new TieringManager(this);
4100
4101  // If we are deserializing, read the state into the now-empty heap.
4102  {
4103    CodePageCollectionMemoryModificationScope modification_scope(heap());
4104
4105    if (create_heap_objects) {
4106      heap_.read_only_space()->ClearStringPaddingIfNeeded();
4107      read_only_heap_->OnCreateHeapObjectsComplete(this);
4108    } else {
4109      SharedHeapDeserializer shared_heap_deserializer(
4110          this, shared_heap_snapshot_data, can_rehash);
4111      shared_heap_deserializer.DeserializeIntoIsolate();
4112
4113      StartupDeserializer startup_deserializer(this, startup_snapshot_data,
4114                                               can_rehash);
4115      startup_deserializer.DeserializeIntoIsolate();
4116    }
4117    load_stub_cache_->Initialize();
4118    store_stub_cache_->Initialize();
4119    interpreter_->Initialize();
4120    heap_.NotifyDeserializationComplete();
4121  }
4122
4123#ifdef VERIFY_HEAP
4124  if (FLAG_verify_heap) {
4125    heap_.VerifyReadOnlyHeap();
4126  }
4127#endif
4128
4129  delete setup_delegate_;
4130  setup_delegate_ = nullptr;
4131
4132  Builtins::InitializeIsolateDataTables(this);
4133
4134  // Extra steps in the logger after the heap has been set up.
4135  logger_->LateSetup(this);
4136
4137#ifdef DEBUG
4138  // Verify that the current heap state (usually deserialized from the snapshot)
4139  // is compatible with the embedded blob. If this DCHECK fails, we've likely
4140  // loaded a snapshot generated by a different V8 version or build-time
4141  // configuration.
4142  if (!IsolateIsCompatibleWithEmbeddedBlob(this)) {
4143    FATAL(
4144        "The Isolate is incompatible with the embedded blob. This is usually "
4145        "caused by incorrect usage of mksnapshot. When generating custom "
4146        "snapshots, embedders must ensure they pass the same flags as during "
4147        "the V8 build process (e.g.: --turbo-instruction-scheduling).");
4148  }
4149#endif  // DEBUG
4150
4151#ifndef V8_TARGET_ARCH_ARM
4152  // The IET for profiling should always be a full on-heap Code object.
4153  DCHECK(!Code::cast(heap_.interpreter_entry_trampoline_for_profiling())
4154              .is_off_heap_trampoline());
4155#endif  // V8_TARGET_ARCH_ARM
4156
4157  if (FLAG_print_builtin_code) builtins()->PrintBuiltinCode();
4158  if (FLAG_print_builtin_size) builtins()->PrintBuiltinSize();
4159
4160  // Finish initialization of ThreadLocal after deserialization is done.
4161  clear_pending_exception();
4162  clear_pending_message();
4163  clear_scheduled_exception();
4164
4165  // Quiet the heap NaN if needed on target platform.
4166  if (!create_heap_objects)
4167    Assembler::QuietNaN(ReadOnlyRoots(this).nan_value());
4168
4169  if (FLAG_trace_turbo) {
4170    // Create an empty file.
4171    std::ofstream(GetTurboCfgFileName(this).c_str(), std::ios_base::trunc);
4172  }
4173
4174  {
4175    HandleScope scope(this);
4176    ast_string_constants_ = new AstStringConstants(this, HashSeed(this));
4177  }
4178
4179  initialized_from_snapshot_ = !create_heap_objects;
4180
4181  if (FLAG_stress_sampling_allocation_profiler > 0) {
4182    uint64_t sample_interval = FLAG_stress_sampling_allocation_profiler;
4183    int stack_depth = 128;
4184    v8::HeapProfiler::SamplingFlags sampling_flags =
4185        v8::HeapProfiler::SamplingFlags::kSamplingForceGC;
4186    heap_profiler()->StartSamplingHeapProfiler(sample_interval, stack_depth,
4187                                               sampling_flags);
4188  }
4189
4190#if defined(V8_OS_WIN64)
4191  if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
4192      heap()->code_range()->AtomicIncrementUnwindInfoUseCount() == 0) {
4193    const base::AddressRegion& code_region = heap()->code_region();
4194    void* start = reinterpret_cast<void*>(code_region.begin());
4195    size_t size_in_bytes = code_region.size();
4196    win64_unwindinfo::RegisterNonABICompliantCodeRange(start, size_in_bytes);
4197  }
4198#endif  // V8_OS_WIN64
4199
4200  if (create_heap_objects && FLAG_profile_deserialization) {
4201    double ms = timer.Elapsed().InMillisecondsF();
4202    PrintF("[Initializing isolate from scratch took %0.3f ms]\n", ms);
4203  }
4204
4205#ifdef V8_ENABLE_WEBASSEMBLY
4206  if (FLAG_experimental_wasm_stack_switching) {
4207    std::unique_ptr<wasm::StackMemory> stack(
4208        wasm::StackMemory::GetCurrentStackView(this));
4209    this->wasm_stacks() = stack.get();
4210    if (FLAG_trace_wasm_stack_switching) {
4211      PrintF("Set up native stack object (limit: %p, base: %p)\n",
4212             stack->jslimit(), reinterpret_cast<void*>(stack->base()));
4213    }
4214    HandleScope scope(this);
4215    Handle<WasmContinuationObject> continuation =
4216        WasmContinuationObject::New(this, std::move(stack));
4217    heap()
4218        ->roots_table()
4219        .slot(RootIndex::kActiveContinuation)
4220        .store(*continuation);
4221  }
4222#endif
4223
4224  initialized_ = true;
4225
4226  return true;
4227}
4228
4229void Isolate::Enter() {
4230  Isolate* current_isolate = nullptr;
4231  PerIsolateThreadData* current_data = CurrentPerIsolateThreadData();
4232  if (current_data != nullptr) {
4233    current_isolate = current_data->isolate_;
4234    DCHECK_NOT_NULL(current_isolate);
4235    if (current_isolate == this) {
4236      DCHECK(Current() == this);
4237      DCHECK_NOT_NULL(entry_stack_);
4238      DCHECK(entry_stack_->previous_thread_data == nullptr ||
4239             entry_stack_->previous_thread_data->thread_id() ==
4240                 ThreadId::Current());
4241      // Same thread re-enters the isolate, no need to re-init anything.
4242      entry_stack_->entry_count++;
4243      return;
4244    }
4245  }
4246
4247  PerIsolateThreadData* data = FindOrAllocatePerThreadDataForThisThread();
4248  DCHECK_NOT_NULL(data);
4249  DCHECK(data->isolate_ == this);
4250
4251  EntryStackItem* item =
4252      new EntryStackItem(current_data, current_isolate, entry_stack_);
4253  entry_stack_ = item;
4254
4255  SetIsolateThreadLocals(this, data);
4256
4257  // In case it's the first time some thread enters the isolate.
4258  set_thread_id(data->thread_id());
4259}
4260
4261void Isolate::Exit() {
4262  DCHECK_NOT_NULL(entry_stack_);
4263  DCHECK(entry_stack_->previous_thread_data == nullptr ||
4264         entry_stack_->previous_thread_data->thread_id() ==
4265             ThreadId::Current());
4266
4267  if (--entry_stack_->entry_count > 0) return;
4268
4269  DCHECK_NOT_NULL(CurrentPerIsolateThreadData());
4270  DCHECK(CurrentPerIsolateThreadData()->isolate_ == this);
4271
4272  // Pop the stack.
4273  EntryStackItem* item = entry_stack_;
4274  entry_stack_ = item->previous_item;
4275
4276  PerIsolateThreadData* previous_thread_data = item->previous_thread_data;
4277  Isolate* previous_isolate = item->previous_isolate;
4278
4279  delete item;
4280
4281  // Reinit the current thread for the isolate it was running before this one.
4282  SetIsolateThreadLocals(previous_isolate, previous_thread_data);
4283}
4284
4285std::unique_ptr<PersistentHandles> Isolate::NewPersistentHandles() {
4286  return std::make_unique<PersistentHandles>(this);
4287}
4288
4289void Isolate::DumpAndResetStats() {
4290  if (FLAG_trace_turbo_stack_accesses) {
4291    StdoutStream os;
4292    uint64_t total_loads = 0;
4293    uint64_t total_stores = 0;
4294    os << "=== Stack access counters === " << std::endl;
4295    if (!stack_access_count_map) {
4296      os << "No stack accesses in optimized/wasm functions found.";
4297    } else {
4298      DCHECK_NOT_NULL(stack_access_count_map);
4299      os << "Number of optimized/wasm stack-access functions: "
4300         << stack_access_count_map->size() << std::endl;
4301      for (auto it = stack_access_count_map->cbegin();
4302           it != stack_access_count_map->cend(); it++) {
4303        std::string function_name((*it).first);
4304        std::pair<uint64_t, uint64_t> per_func_count = (*it).second;
4305        os << "Name: " << function_name << ", Loads: " << per_func_count.first
4306           << ", Stores: " << per_func_count.second << std::endl;
4307        total_loads += per_func_count.first;
4308        total_stores += per_func_count.second;
4309      }
4310      os << "Total Loads: " << total_loads << ", Total Stores: " << total_stores
4311         << std::endl;
4312      stack_access_count_map = nullptr;
4313    }
4314  }
4315  if (turbo_statistics() != nullptr) {
4316    DCHECK(FLAG_turbo_stats || FLAG_turbo_stats_nvp);
4317    StdoutStream os;
4318    if (FLAG_turbo_stats) {
4319      AsPrintableStatistics ps = {*turbo_statistics(), false};
4320      os << ps << std::endl;
4321    }
4322    if (FLAG_turbo_stats_nvp) {
4323      AsPrintableStatistics ps = {*turbo_statistics(), true};
4324      os << ps << std::endl;
4325    }
4326    delete turbo_statistics_;
4327    turbo_statistics_ = nullptr;
4328  }
4329#if V8_ENABLE_WEBASSEMBLY
4330  // TODO(7424): There is no public API for the {WasmEngine} yet. So for now we
4331  // just dump and reset the engines statistics together with the Isolate.
4332  if (FLAG_turbo_stats_wasm) {
4333    wasm::GetWasmEngine()->DumpAndResetTurboStatistics();
4334  }
4335#endif  // V8_ENABLE_WEBASSEMBLY
4336#if V8_RUNTIME_CALL_STATS
4337  if (V8_UNLIKELY(TracingFlags::runtime_stats.load(std::memory_order_relaxed) ==
4338                  v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE)) {
4339    counters()->worker_thread_runtime_call_stats()->AddToMainTable(
4340        counters()->runtime_call_stats());
4341    counters()->runtime_call_stats()->Print();
4342    counters()->runtime_call_stats()->Reset();
4343  }
4344#endif  // V8_RUNTIME_CALL_STATS
4345  if (BasicBlockProfiler::Get()->HasData(this)) {
4346    StdoutStream out;
4347    BasicBlockProfiler::Get()->Print(out, this);
4348    BasicBlockProfiler::Get()->ResetCounts(this);
4349  }
4350}
4351
4352void Isolate::AbortConcurrentOptimization(BlockingBehavior behavior) {
4353  if (concurrent_recompilation_enabled()) {
4354    DisallowGarbageCollection no_recursive_gc;
4355    optimizing_compile_dispatcher()->Flush(behavior);
4356  }
4357}
4358
4359CompilationStatistics* Isolate::GetTurboStatistics() {
4360  if (turbo_statistics() == nullptr)
4361    set_turbo_statistics(new CompilationStatistics());
4362  return turbo_statistics();
4363}
4364
4365CodeTracer* Isolate::GetCodeTracer() {
4366  if (code_tracer() == nullptr) set_code_tracer(new CodeTracer(id()));
4367  return code_tracer();
4368}
4369
4370bool Isolate::use_optimizer() {
4371  // TODO(v8:7700): Update this predicate for a world with multiple tiers.
4372  return (FLAG_opt || FLAG_maglev) && !serializer_enabled_ &&
4373         CpuFeatures::SupportsOptimizer() && !is_precise_count_code_coverage();
4374}
4375
4376void Isolate::IncreaseTotalRegexpCodeGenerated(Handle<HeapObject> code) {
4377  PtrComprCageBase cage_base(this);
4378  DCHECK(code->IsCode(cage_base) || code->IsByteArray(cage_base));
4379  total_regexp_code_generated_ += code->Size(cage_base);
4380}
4381
4382bool Isolate::NeedsDetailedOptimizedCodeLineInfo() const {
4383  return NeedsSourcePositionsForProfiling() ||
4384         detailed_source_positions_for_profiling();
4385}
4386
4387bool Isolate::NeedsSourcePositionsForProfiling() const {
4388  return
4389      // Static conditions.
4390      FLAG_trace_deopt || FLAG_trace_turbo || FLAG_trace_turbo_graph ||
4391      FLAG_turbo_profiling || FLAG_perf_prof || FLAG_log_maps || FLAG_log_ic ||
4392      // Dynamic conditions; changing any of these conditions triggers source
4393      // position collection for the entire heap
4394      // (CollectSourcePositionsForAllBytecodeArrays).
4395      is_profiling() || debug_->is_active() || logger_->is_logging();
4396}
4397
4398void Isolate::SetFeedbackVectorsForProfilingTools(Object value) {
4399  DCHECK(value.IsUndefined(this) || value.IsArrayList());
4400  heap()->set_feedback_vectors_for_profiling_tools(value);
4401}
4402
4403void Isolate::MaybeInitializeVectorListFromHeap() {
4404  if (!heap()->feedback_vectors_for_profiling_tools().IsUndefined(this)) {
4405    // Already initialized, return early.
4406    DCHECK(heap()->feedback_vectors_for_profiling_tools().IsArrayList());
4407    return;
4408  }
4409
4410  // Collect existing feedback vectors.
4411  std::vector<Handle<FeedbackVector>> vectors;
4412
4413  {
4414    HeapObjectIterator heap_iterator(heap());
4415    for (HeapObject current_obj = heap_iterator.Next(); !current_obj.is_null();
4416         current_obj = heap_iterator.Next()) {
4417      if (!current_obj.IsFeedbackVector()) continue;
4418
4419      FeedbackVector vector = FeedbackVector::cast(current_obj);
4420      SharedFunctionInfo shared = vector.shared_function_info();
4421
4422      // No need to preserve the feedback vector for non-user-visible functions.
4423      if (!shared.IsSubjectToDebugging()) continue;
4424
4425      vectors.emplace_back(vector, this);
4426    }
4427  }
4428
4429  // Add collected feedback vectors to the root list lest we lose them to GC.
4430  Handle<ArrayList> list =
4431      ArrayList::New(this, static_cast<int>(vectors.size()));
4432  for (const auto& vector : vectors) list = ArrayList::Add(this, list, vector);
4433  SetFeedbackVectorsForProfilingTools(*list);
4434}
4435
4436void Isolate::set_date_cache(DateCache* date_cache) {
4437  if (date_cache != date_cache_) {
4438    delete date_cache_;
4439  }
4440  date_cache_ = date_cache;
4441}
4442
4443Isolate::KnownPrototype Isolate::IsArrayOrObjectOrStringPrototype(
4444    Object object) {
4445  Object context = heap()->native_contexts_list();
4446  while (!context.IsUndefined(this)) {
4447    Context current_context = Context::cast(context);
4448    if (current_context.initial_object_prototype() == object) {
4449      return KnownPrototype::kObject;
4450    } else if (current_context.initial_array_prototype() == object) {
4451      return KnownPrototype::kArray;
4452    } else if (current_context.initial_string_prototype() == object) {
4453      return KnownPrototype::kString;
4454    }
4455    context = current_context.next_context_link();
4456  }
4457  return KnownPrototype::kNone;
4458}
4459
4460bool Isolate::IsInAnyContext(Object object, uint32_t index) {
4461  DisallowGarbageCollection no_gc;
4462  Object context = heap()->native_contexts_list();
4463  while (!context.IsUndefined(this)) {
4464    Context current_context = Context::cast(context);
4465    if (current_context.get(index) == object) {
4466      return true;
4467    }
4468    context = current_context.next_context_link();
4469  }
4470  return false;
4471}
4472
4473void Isolate::UpdateNoElementsProtectorOnSetElement(Handle<JSObject> object) {
4474  DisallowGarbageCollection no_gc;
4475  if (!object->map().is_prototype_map()) return;
4476  if (!Protectors::IsNoElementsIntact(this)) return;
4477  KnownPrototype obj_type = IsArrayOrObjectOrStringPrototype(*object);
4478  if (obj_type == KnownPrototype::kNone) return;
4479  if (obj_type == KnownPrototype::kObject) {
4480    this->CountUsage(v8::Isolate::kObjectPrototypeHasElements);
4481  } else if (obj_type == KnownPrototype::kArray) {
4482    this->CountUsage(v8::Isolate::kArrayPrototypeHasElements);
4483  }
4484  Protectors::InvalidateNoElements(this);
4485}
4486
4487static base::RandomNumberGenerator* ensure_rng_exists(
4488    base::RandomNumberGenerator** rng, int seed) {
4489  if (*rng == nullptr) {
4490    if (seed != 0) {
4491      *rng = new base::RandomNumberGenerator(seed);
4492    } else {
4493      *rng = new base::RandomNumberGenerator();
4494    }
4495  }
4496  return *rng;
4497}
4498
4499base::RandomNumberGenerator* Isolate::random_number_generator() {
4500  // TODO(bmeurer) Initialized lazily because it depends on flags; can
4501  // be fixed once the default isolate cleanup is done.
4502  return ensure_rng_exists(&random_number_generator_, FLAG_random_seed);
4503}
4504
4505base::RandomNumberGenerator* Isolate::fuzzer_rng() {
4506  if (fuzzer_rng_ == nullptr) {
4507    int64_t seed = FLAG_fuzzer_random_seed;
4508    if (seed == 0) {
4509      seed = random_number_generator()->initial_seed();
4510    }
4511
4512    fuzzer_rng_ = new base::RandomNumberGenerator(seed);
4513  }
4514
4515  return fuzzer_rng_;
4516}
4517
4518int Isolate::GenerateIdentityHash(uint32_t mask) {
4519  int hash;
4520  int attempts = 0;
4521  do {
4522    hash = random_number_generator()->NextInt() & mask;
4523  } while (hash == 0 && attempts++ < 30);
4524  return hash != 0 ? hash : 1;
4525}
4526
4527Code Isolate::FindCodeObject(Address a) {
4528  return heap()->GcSafeFindCodeForInnerPointer(a);
4529}
4530
4531#ifdef DEBUG
4532#define ISOLATE_FIELD_OFFSET(type, name, ignored) \
4533  const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_);
4534ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
4535ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
4536#undef ISOLATE_FIELD_OFFSET
4537#endif
4538
4539Handle<Symbol> Isolate::SymbolFor(RootIndex dictionary_index,
4540                                  Handle<String> name, bool private_symbol) {
4541  Handle<String> key = factory()->InternalizeString(name);
4542  Handle<RegisteredSymbolTable> dictionary =
4543      Handle<RegisteredSymbolTable>::cast(root_handle(dictionary_index));
4544  InternalIndex entry = dictionary->FindEntry(this, key);
4545  Handle<Symbol> symbol;
4546  if (entry.is_not_found()) {
4547    symbol =
4548        private_symbol ? factory()->NewPrivateSymbol() : factory()->NewSymbol();
4549    symbol->set_description(*key);
4550    dictionary = RegisteredSymbolTable::Add(this, dictionary, key, symbol);
4551
4552    switch (dictionary_index) {
4553      case RootIndex::kPublicSymbolTable:
4554        symbol->set_is_in_public_symbol_table(true);
4555        heap()->set_public_symbol_table(*dictionary);
4556        break;
4557      case RootIndex::kApiSymbolTable:
4558        heap()->set_api_symbol_table(*dictionary);
4559        break;
4560      case RootIndex::kApiPrivateSymbolTable:
4561        heap()->set_api_private_symbol_table(*dictionary);
4562        break;
4563      default:
4564        UNREACHABLE();
4565    }
4566  } else {
4567    symbol = Handle<Symbol>(Symbol::cast(dictionary->ValueAt(entry)), this);
4568  }
4569  return symbol;
4570}
4571
4572void Isolate::AddBeforeCallEnteredCallback(BeforeCallEnteredCallback callback) {
4573  auto pos = std::find(before_call_entered_callbacks_.begin(),
4574                       before_call_entered_callbacks_.end(), callback);
4575  if (pos != before_call_entered_callbacks_.end()) return;
4576  before_call_entered_callbacks_.push_back(callback);
4577}
4578
4579void Isolate::RemoveBeforeCallEnteredCallback(
4580    BeforeCallEnteredCallback callback) {
4581  auto pos = std::find(before_call_entered_callbacks_.begin(),
4582                       before_call_entered_callbacks_.end(), callback);
4583  if (pos == before_call_entered_callbacks_.end()) return;
4584  before_call_entered_callbacks_.erase(pos);
4585}
4586
4587void Isolate::AddCallCompletedCallback(CallCompletedCallback callback) {
4588  auto pos = std::find(call_completed_callbacks_.begin(),
4589                       call_completed_callbacks_.end(), callback);
4590  if (pos != call_completed_callbacks_.end()) return;
4591  call_completed_callbacks_.push_back(callback);
4592}
4593
4594void Isolate::RemoveCallCompletedCallback(CallCompletedCallback callback) {
4595  auto pos = std::find(call_completed_callbacks_.begin(),
4596                       call_completed_callbacks_.end(), callback);
4597  if (pos == call_completed_callbacks_.end()) return;
4598  call_completed_callbacks_.erase(pos);
4599}
4600
4601void Isolate::FireCallCompletedCallbackInternal(
4602    MicrotaskQueue* microtask_queue) {
4603  DCHECK(thread_local_top()->CallDepthIsZero());
4604
4605  bool perform_checkpoint =
4606      microtask_queue &&
4607      microtask_queue->microtasks_policy() == v8::MicrotasksPolicy::kAuto;
4608
4609  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this);
4610  if (perform_checkpoint) microtask_queue->PerformCheckpoint(isolate);
4611
4612  if (call_completed_callbacks_.empty()) return;
4613  // Fire callbacks.  Increase call depth to prevent recursive callbacks.
4614  v8::Isolate::SuppressMicrotaskExecutionScope suppress(isolate);
4615  std::vector<CallCompletedCallback> callbacks(call_completed_callbacks_);
4616  for (auto& callback : callbacks) {
4617    callback(reinterpret_cast<v8::Isolate*>(this));
4618  }
4619}
4620
4621void Isolate::UpdatePromiseHookProtector() {
4622  if (Protectors::IsPromiseHookIntact(this)) {
4623    HandleScope scope(this);
4624    Protectors::InvalidatePromiseHook(this);
4625  }
4626}
4627
4628void Isolate::PromiseHookStateUpdated() {
4629  promise_hook_flags_ =
4630    (promise_hook_flags_ & PromiseHookFields::HasContextPromiseHook::kMask) |
4631    PromiseHookFields::HasIsolatePromiseHook::encode(promise_hook_) |
4632    PromiseHookFields::HasAsyncEventDelegate::encode(async_event_delegate_) |
4633    PromiseHookFields::IsDebugActive::encode(debug()->is_active());
4634
4635  if (promise_hook_flags_ != 0) {
4636    UpdatePromiseHookProtector();
4637  }
4638}
4639
4640namespace {
4641
4642MaybeHandle<JSPromise> NewRejectedPromise(Isolate* isolate,
4643                                          v8::Local<v8::Context> api_context,
4644                                          Handle<Object> exception) {
4645  v8::Local<v8::Promise::Resolver> resolver;
4646  ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
4647      isolate, resolver, v8::Promise::Resolver::New(api_context),
4648      MaybeHandle<JSPromise>());
4649
4650  RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
4651      isolate, resolver->Reject(api_context, v8::Utils::ToLocal(exception)),
4652      MaybeHandle<JSPromise>());
4653
4654  v8::Local<v8::Promise> promise = resolver->GetPromise();
4655  return v8::Utils::OpenHandle(*promise);
4656}
4657
4658}  // namespace
4659
4660MaybeHandle<JSPromise> Isolate::RunHostImportModuleDynamicallyCallback(
4661    Handle<Script> referrer, Handle<Object> specifier,
4662    MaybeHandle<Object> maybe_import_assertions_argument) {
4663  v8::Local<v8::Context> api_context =
4664      v8::Utils::ToLocal(Handle<Context>::cast(native_context()));
4665  if (host_import_module_dynamically_with_import_assertions_callback_ ==
4666          nullptr &&
4667      host_import_module_dynamically_callback_ == nullptr) {
4668    Handle<Object> exception =
4669        factory()->NewError(error_function(), MessageTemplate::kUnsupported);
4670    return NewRejectedPromise(this, api_context, exception);
4671  }
4672
4673  Handle<String> specifier_str;
4674  MaybeHandle<String> maybe_specifier = Object::ToString(this, specifier);
4675  if (!maybe_specifier.ToHandle(&specifier_str)) {
4676    Handle<Object> exception(pending_exception(), this);
4677    clear_pending_exception();
4678    return NewRejectedPromise(this, api_context, exception);
4679  }
4680  DCHECK(!has_pending_exception());
4681
4682  v8::Local<v8::Promise> promise;
4683  Handle<FixedArray> import_assertions_array;
4684  if (!GetImportAssertionsFromArgument(maybe_import_assertions_argument)
4685           .ToHandle(&import_assertions_array)) {
4686    Handle<Object> exception(pending_exception(), this);
4687    clear_pending_exception();
4688    return NewRejectedPromise(this, api_context, exception);
4689  }
4690  if (host_import_module_dynamically_callback_) {
4691    ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
4692        this, promise,
4693        host_import_module_dynamically_callback_(
4694            api_context,
4695            v8::Utils::ToLocal(handle(referrer->host_defined_options(), this)),
4696            v8::Utils::ToLocal(handle(referrer->name(), this)),
4697            v8::Utils::ToLocal(specifier_str),
4698            ToApiHandle<v8::FixedArray>(import_assertions_array)),
4699        MaybeHandle<JSPromise>());
4700  } else {
4701    // TODO(cbruni, v8:12302): Avoid creating tempory ScriptOrModule objects.
4702    auto script_or_module = i::Handle<i::ScriptOrModule>::cast(
4703        this->factory()->NewStruct(i::SCRIPT_OR_MODULE_TYPE));
4704    script_or_module->set_resource_name(referrer->name());
4705    script_or_module->set_host_defined_options(
4706        referrer->host_defined_options());
4707    ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
4708        this, promise,
4709        host_import_module_dynamically_with_import_assertions_callback_(
4710            api_context, v8::Utils::ToLocal(script_or_module),
4711            v8::Utils::ToLocal(specifier_str),
4712            ToApiHandle<v8::FixedArray>(import_assertions_array)),
4713        MaybeHandle<JSPromise>());
4714  }
4715  return v8::Utils::OpenHandle(*promise);
4716}
4717
4718MaybeHandle<FixedArray> Isolate::GetImportAssertionsFromArgument(
4719    MaybeHandle<Object> maybe_import_assertions_argument) {
4720  Handle<FixedArray> import_assertions_array = factory()->empty_fixed_array();
4721  Handle<Object> import_assertions_argument;
4722  if (!maybe_import_assertions_argument.ToHandle(&import_assertions_argument) ||
4723      import_assertions_argument->IsUndefined()) {
4724    return import_assertions_array;
4725  }
4726
4727  // The parser shouldn't have allowed the second argument to import() if
4728  // the flag wasn't enabled.
4729  DCHECK(FLAG_harmony_import_assertions || FLAG_harmony_import_attributes);
4730
4731  if (!import_assertions_argument->IsJSReceiver()) {
4732    this->Throw(
4733        *factory()->NewTypeError(MessageTemplate::kNonObjectImportArgument));
4734    return MaybeHandle<FixedArray>();
4735  }
4736
4737  Handle<JSReceiver> import_assertions_argument_receiver =
4738      Handle<JSReceiver>::cast(import_assertions_argument);
4739
4740  Handle<Object> import_assertions_object;
4741
4742  if (FLAG_harmony_import_attributes) {
4743    Handle<Name> with_key = factory()->with_string();
4744    if (!JSReceiver::GetProperty(this, import_assertions_argument_receiver,
4745                                 with_key)
4746             .ToHandle(&import_assertions_object)) {
4747      // This can happen if the property has a getter function that throws
4748      // an error.
4749      return MaybeHandle<FixedArray>();
4750    }
4751  }
4752
4753  if (FLAG_harmony_import_assertions &&
4754      (!FLAG_harmony_import_attributes ||
4755       import_assertions_object->IsUndefined())) {
4756    Handle<Name> assert_key = factory()->assert_string();
4757    if (!JSReceiver::GetProperty(this, import_assertions_argument_receiver,
4758                                 assert_key)
4759             .ToHandle(&import_assertions_object)) {
4760      // This can happen if the property has a getter function that throws
4761      // an error.
4762      return MaybeHandle<FixedArray>();
4763    }
4764  }
4765
4766  // If there is no 'with' or 'assert' option in the options bag, it's not an
4767  // error. Just do the import() as if no assertions were provided.
4768  if (import_assertions_object->IsUndefined()) return import_assertions_array;
4769
4770  if (!import_assertions_object->IsJSReceiver()) {
4771    this->Throw(
4772        *factory()->NewTypeError(MessageTemplate::kNonObjectAssertOption));
4773    return MaybeHandle<FixedArray>();
4774  }
4775
4776  Handle<JSReceiver> import_assertions_object_receiver =
4777      Handle<JSReceiver>::cast(import_assertions_object);
4778
4779  Handle<FixedArray> assertion_keys;
4780  if (!KeyAccumulator::GetKeys(import_assertions_object_receiver,
4781                               KeyCollectionMode::kOwnOnly, ENUMERABLE_STRINGS,
4782                               GetKeysConversion::kConvertToString)
4783           .ToHandle(&assertion_keys)) {
4784    // This happens if the assertions object is a Proxy whose ownKeys() or
4785    // getOwnPropertyDescriptor() trap throws.
4786    return MaybeHandle<FixedArray>();
4787  }
4788
4789  bool has_non_string_attribute = false;
4790
4791  // The assertions will be passed to the host in the form: [key1,
4792  // value1, key2, value2, ...].
4793  constexpr size_t kAssertionEntrySizeForDynamicImport = 2;
4794  import_assertions_array = factory()->NewFixedArray(static_cast<int>(
4795      assertion_keys->length() * kAssertionEntrySizeForDynamicImport));
4796  for (int i = 0; i < assertion_keys->length(); i++) {
4797    Handle<String> assertion_key(String::cast(assertion_keys->get(i)), this);
4798    Handle<Object> assertion_value;
4799    if (!Object::GetPropertyOrElement(this, import_assertions_object_receiver,
4800                                      assertion_key)
4801             .ToHandle(&assertion_value)) {
4802      // This can happen if the property has a getter function that throws
4803      // an error.
4804      return MaybeHandle<FixedArray>();
4805    }
4806
4807    if (!assertion_value->IsString()) {
4808      has_non_string_attribute = true;
4809    }
4810
4811    import_assertions_array->set((i * kAssertionEntrySizeForDynamicImport),
4812                                 *assertion_key);
4813    import_assertions_array->set((i * kAssertionEntrySizeForDynamicImport) + 1,
4814                                 *assertion_value);
4815  }
4816
4817  if (has_non_string_attribute) {
4818    this->Throw(*factory()->NewTypeError(
4819        MessageTemplate::kNonStringImportAssertionValue));
4820    return MaybeHandle<FixedArray>();
4821  }
4822
4823  return import_assertions_array;
4824}
4825
4826void Isolate::ClearKeptObjects() { heap()->ClearKeptObjects(); }
4827
4828void Isolate::SetHostImportModuleDynamicallyCallback(
4829    HostImportModuleDynamicallyCallback callback) {
4830  DCHECK_NULL(host_import_module_dynamically_with_import_assertions_callback_);
4831  host_import_module_dynamically_callback_ = callback;
4832}
4833
4834void Isolate::SetHostImportModuleDynamicallyCallback(
4835    HostImportModuleDynamicallyWithImportAssertionsCallback callback) {
4836  DCHECK_NULL(host_import_module_dynamically_callback_);
4837  host_import_module_dynamically_with_import_assertions_callback_ = callback;
4838}
4839
4840MaybeHandle<JSObject> Isolate::RunHostInitializeImportMetaObjectCallback(
4841    Handle<SourceTextModule> module) {
4842  CHECK(module->import_meta(kAcquireLoad).IsTheHole(this));
4843  Handle<JSObject> import_meta = factory()->NewJSObjectWithNullProto();
4844  if (host_initialize_import_meta_object_callback_ != nullptr) {
4845    v8::Local<v8::Context> api_context =
4846        v8::Utils::ToLocal(Handle<Context>(native_context()));
4847    host_initialize_import_meta_object_callback_(
4848        api_context, Utils::ToLocal(Handle<Module>::cast(module)),
4849        v8::Local<v8::Object>::Cast(v8::Utils::ToLocal(import_meta)));
4850    if (has_scheduled_exception()) {
4851      PromoteScheduledException();
4852      return {};
4853    }
4854  }
4855  return import_meta;
4856}
4857
4858void Isolate::SetHostInitializeImportMetaObjectCallback(
4859    HostInitializeImportMetaObjectCallback callback) {
4860  host_initialize_import_meta_object_callback_ = callback;
4861}
4862
4863void Isolate::SetHostCreateShadowRealmContextCallback(
4864    HostCreateShadowRealmContextCallback callback) {
4865  host_create_shadow_realm_context_callback_ = callback;
4866}
4867
4868MaybeHandle<NativeContext> Isolate::RunHostCreateShadowRealmContextCallback() {
4869  if (host_create_shadow_realm_context_callback_ == nullptr) {
4870    Handle<Object> exception =
4871        factory()->NewError(error_function(), MessageTemplate::kUnsupported);
4872    Throw(*exception);
4873    return kNullMaybeHandle;
4874  }
4875
4876  v8::Local<v8::Context> api_context =
4877      v8::Utils::ToLocal(Handle<Context>(native_context()));
4878  v8::Local<v8::Context> shadow_realm_context;
4879  ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
4880      this, shadow_realm_context,
4881      host_create_shadow_realm_context_callback_(api_context),
4882      MaybeHandle<NativeContext>());
4883  Handle<Context> shadow_realm_context_handle =
4884      v8::Utils::OpenHandle(*shadow_realm_context);
4885  DCHECK(shadow_realm_context_handle->IsNativeContext());
4886  return Handle<NativeContext>::cast(shadow_realm_context_handle);
4887}
4888
4889MaybeHandle<Object> Isolate::RunPrepareStackTraceCallback(
4890    Handle<Context> context, Handle<JSObject> error, Handle<JSArray> sites) {
4891  v8::Local<v8::Context> api_context = Utils::ToLocal(context);
4892
4893  v8::Local<v8::Value> stack;
4894  ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
4895      this, stack,
4896      prepare_stack_trace_callback_(api_context, Utils::ToLocal(error),
4897                                    Utils::ToLocal(sites)),
4898      MaybeHandle<Object>());
4899  return Utils::OpenHandle(*stack);
4900}
4901
4902int Isolate::LookupOrAddExternallyCompiledFilename(const char* filename) {
4903  if (embedded_file_writer_ != nullptr) {
4904    return embedded_file_writer_->LookupOrAddExternallyCompiledFilename(
4905        filename);
4906  }
4907  return 0;
4908}
4909
4910const char* Isolate::GetExternallyCompiledFilename(int index) const {
4911  if (embedded_file_writer_ != nullptr) {
4912    return embedded_file_writer_->GetExternallyCompiledFilename(index);
4913  }
4914  return "";
4915}
4916
4917int Isolate::GetExternallyCompiledFilenameCount() const {
4918  if (embedded_file_writer_ != nullptr) {
4919    return embedded_file_writer_->GetExternallyCompiledFilenameCount();
4920  }
4921  return 0;
4922}
4923
4924void Isolate::PrepareBuiltinSourcePositionMap() {
4925  if (embedded_file_writer_ != nullptr) {
4926    return embedded_file_writer_->PrepareBuiltinSourcePositionMap(
4927        this->builtins());
4928  }
4929}
4930
4931void Isolate::PrepareBuiltinLabelInfoMap() {
4932  if (embedded_file_writer_ != nullptr) {
4933    embedded_file_writer_->PrepareBuiltinLabelInfoMap(
4934        heap()->construct_stub_create_deopt_pc_offset().value(),
4935        heap()->construct_stub_invoke_deopt_pc_offset().value());
4936  }
4937}
4938
4939#if defined(V8_OS_WIN64)
4940void Isolate::SetBuiltinUnwindData(
4941    Builtin builtin,
4942    const win64_unwindinfo::BuiltinUnwindInfo& unwinding_info) {
4943  if (embedded_file_writer_ != nullptr) {
4944    embedded_file_writer_->SetBuiltinUnwindData(builtin, unwinding_info);
4945  }
4946}
4947#endif  // V8_OS_WIN64
4948
4949void Isolate::SetPrepareStackTraceCallback(PrepareStackTraceCallback callback) {
4950  prepare_stack_trace_callback_ = callback;
4951}
4952
4953bool Isolate::HasPrepareStackTraceCallback() const {
4954  return prepare_stack_trace_callback_ != nullptr;
4955}
4956
4957void Isolate::SetAddCrashKeyCallback(AddCrashKeyCallback callback) {
4958  add_crash_key_callback_ = callback;
4959
4960  // Log the initial set of data.
4961  AddCrashKeysForIsolateAndHeapPointers();
4962}
4963
4964void Isolate::SetAtomicsWaitCallback(v8::Isolate::AtomicsWaitCallback callback,
4965                                     void* data) {
4966  atomics_wait_callback_ = callback;
4967  atomics_wait_callback_data_ = data;
4968}
4969
4970void Isolate::RunAtomicsWaitCallback(v8::Isolate::AtomicsWaitEvent event,
4971                                     Handle<JSArrayBuffer> array_buffer,
4972                                     size_t offset_in_bytes, int64_t value,
4973                                     double timeout_in_ms,
4974                                     AtomicsWaitWakeHandle* stop_handle) {
4975  DCHECK(array_buffer->is_shared());
4976  if (atomics_wait_callback_ == nullptr) return;
4977  HandleScope handle_scope(this);
4978  atomics_wait_callback_(
4979      event, v8::Utils::ToLocalShared(array_buffer), offset_in_bytes, value,
4980      timeout_in_ms,
4981      reinterpret_cast<v8::Isolate::AtomicsWaitWakeHandle*>(stop_handle),
4982      atomics_wait_callback_data_);
4983}
4984
4985void Isolate::SetPromiseHook(PromiseHook hook) {
4986  promise_hook_ = hook;
4987  PromiseHookStateUpdated();
4988}
4989
4990void Isolate::RunAllPromiseHooks(PromiseHookType type,
4991                                 Handle<JSPromise> promise,
4992                                 Handle<Object> parent) {
4993#ifdef V8_ENABLE_JAVASCRIPT_PROMISE_HOOKS
4994  if (HasContextPromiseHooks()) {
4995    native_context()->RunPromiseHook(type, promise, parent);
4996  }
4997#endif
4998  if (HasIsolatePromiseHooks() || HasAsyncEventDelegate()) {
4999    RunPromiseHook(type, promise, parent);
5000  }
5001}
5002
5003void Isolate::RunPromiseHook(PromiseHookType type, Handle<JSPromise> promise,
5004                             Handle<Object> parent) {
5005  if (!HasIsolatePromiseHooks()) return;
5006  DCHECK(promise_hook_ != nullptr);
5007  promise_hook_(type, v8::Utils::PromiseToLocal(promise),
5008                v8::Utils::ToLocal(parent));
5009}
5010
5011void Isolate::OnAsyncFunctionSuspended(Handle<JSPromise> promise,
5012                                       Handle<JSPromise> parent) {
5013  DCHECK_EQ(0, promise->async_task_id());
5014  RunAllPromiseHooks(PromiseHookType::kInit, promise, parent);
5015  if (HasAsyncEventDelegate()) {
5016    DCHECK_NE(nullptr, async_event_delegate_);
5017    promise->set_async_task_id(++async_task_count_);
5018    async_event_delegate_->AsyncEventOccurred(debug::kDebugAwait,
5019                                              promise->async_task_id(), false);
5020  }
5021  if (debug()->is_active()) {
5022    // We are about to suspend execution of the current async function,
5023    // so pop the outer promise from the isolate's promise stack.
5024    PopPromise();
5025  }
5026}
5027
5028void Isolate::OnPromiseThen(Handle<JSPromise> promise) {
5029  if (!HasAsyncEventDelegate()) return;
5030  Maybe<debug::DebugAsyncActionType> action_type =
5031      Nothing<debug::DebugAsyncActionType>();
5032  for (JavaScriptFrameIterator it(this); !it.done(); it.Advance()) {
5033    std::vector<Handle<SharedFunctionInfo>> infos;
5034    it.frame()->GetFunctions(&infos);
5035    for (auto it = infos.rbegin(); it != infos.rend(); ++it) {
5036      Handle<SharedFunctionInfo> info = *it;
5037      if (info->HasBuiltinId()) {
5038        // We should not report PromiseThen and PromiseCatch which is called
5039        // indirectly, e.g. Promise.all calls Promise.then internally.
5040        switch (info->builtin_id()) {
5041          case Builtin::kPromisePrototypeCatch:
5042            action_type = Just(debug::kDebugPromiseCatch);
5043            continue;
5044          case Builtin::kPromisePrototypeFinally:
5045            action_type = Just(debug::kDebugPromiseFinally);
5046            continue;
5047          case Builtin::kPromisePrototypeThen:
5048            action_type = Just(debug::kDebugPromiseThen);
5049            continue;
5050          default:
5051            return;
5052        }
5053      }
5054      if (info->IsUserJavaScript() && action_type.IsJust()) {
5055        DCHECK_EQ(0, promise->async_task_id());
5056        promise->set_async_task_id(++async_task_count_);
5057        async_event_delegate_->AsyncEventOccurred(action_type.FromJust(),
5058                                                  promise->async_task_id(),
5059                                                  debug()->IsBlackboxed(info));
5060      }
5061      return;
5062    }
5063  }
5064}
5065
5066void Isolate::OnPromiseBefore(Handle<JSPromise> promise) {
5067  RunPromiseHook(PromiseHookType::kBefore, promise,
5068                 factory()->undefined_value());
5069  if (HasAsyncEventDelegate()) {
5070    if (promise->async_task_id()) {
5071      async_event_delegate_->AsyncEventOccurred(
5072          debug::kDebugWillHandle, promise->async_task_id(), false);
5073    }
5074  }
5075  if (debug()->is_active()) PushPromise(promise);
5076}
5077
5078void Isolate::OnPromiseAfter(Handle<JSPromise> promise) {
5079  RunPromiseHook(PromiseHookType::kAfter, promise,
5080                 factory()->undefined_value());
5081  if (HasAsyncEventDelegate()) {
5082    if (promise->async_task_id()) {
5083      async_event_delegate_->AsyncEventOccurred(
5084          debug::kDebugDidHandle, promise->async_task_id(), false);
5085    }
5086  }
5087  if (debug()->is_active()) PopPromise();
5088}
5089
5090void Isolate::OnTerminationDuringRunMicrotasks() {
5091  // This performs cleanup for when RunMicrotasks (in
5092  // builtins-microtask-queue-gen.cc) is aborted via a termination exception.
5093  // This has to be kept in sync with the code in said file. Currently this
5094  // includes:
5095  //
5096  //  (1) Resetting the |current_microtask| slot on the Isolate to avoid leaking
5097  //      memory (and also to keep |current_microtask| not being undefined as an
5098  //      indicator that we're currently pumping the microtask queue).
5099  //  (2) Empty the promise stack to avoid leaking memory.
5100  //  (3) If the |current_microtask| is a promise reaction or resolve thenable
5101  //      job task, then signal the async event delegate and debugger that the
5102  //      microtask finished running.
5103  //
5104
5105  // Reset the |current_microtask| global slot.
5106  Handle<Microtask> current_microtask(
5107      Microtask::cast(heap()->current_microtask()), this);
5108  heap()->set_current_microtask(ReadOnlyRoots(this).undefined_value());
5109
5110  // Empty the promise stack.
5111  debug()->thread_local_.promise_stack_ = Smi::zero();
5112
5113  if (current_microtask->IsPromiseReactionJobTask()) {
5114    Handle<PromiseReactionJobTask> promise_reaction_job_task =
5115        Handle<PromiseReactionJobTask>::cast(current_microtask);
5116    Handle<HeapObject> promise_or_capability(
5117        promise_reaction_job_task->promise_or_capability(), this);
5118    if (promise_or_capability->IsPromiseCapability()) {
5119      promise_or_capability = handle(
5120          Handle<PromiseCapability>::cast(promise_or_capability)->promise(),
5121          this);
5122    }
5123    if (promise_or_capability->IsJSPromise()) {
5124      OnPromiseAfter(Handle<JSPromise>::cast(promise_or_capability));
5125    }
5126  } else if (current_microtask->IsPromiseResolveThenableJobTask()) {
5127    Handle<PromiseResolveThenableJobTask> promise_resolve_thenable_job_task =
5128        Handle<PromiseResolveThenableJobTask>::cast(current_microtask);
5129    Handle<JSPromise> promise_to_resolve(
5130        promise_resolve_thenable_job_task->promise_to_resolve(), this);
5131    OnPromiseAfter(promise_to_resolve);
5132  }
5133
5134  SetTerminationOnExternalTryCatch();
5135}
5136
5137void Isolate::SetPromiseRejectCallback(PromiseRejectCallback callback) {
5138  promise_reject_callback_ = callback;
5139}
5140
5141void Isolate::ReportPromiseReject(Handle<JSPromise> promise,
5142                                  Handle<Object> value,
5143                                  v8::PromiseRejectEvent event) {
5144  if (promise_reject_callback_ == nullptr) return;
5145  promise_reject_callback_(v8::PromiseRejectMessage(
5146      v8::Utils::PromiseToLocal(promise), event, v8::Utils::ToLocal(value)));
5147}
5148
5149void Isolate::SetUseCounterCallback(v8::Isolate::UseCounterCallback callback) {
5150  DCHECK(!use_counter_callback_);
5151  use_counter_callback_ = callback;
5152}
5153
5154void Isolate::CountUsage(v8::Isolate::UseCounterFeature feature) {
5155  // The counter callback
5156  // - may cause the embedder to call into V8, which is not generally possible
5157  //   during GC.
5158  // - requires a current native context, which may not always exist.
5159  // TODO(jgruber): Consider either removing the native context requirement in
5160  // blink, or passing it to the callback explicitly.
5161  if (heap_.gc_state() == Heap::NOT_IN_GC && !context().is_null()) {
5162    DCHECK(context().IsContext());
5163    DCHECK(context().native_context().IsNativeContext());
5164    if (use_counter_callback_) {
5165      HandleScope handle_scope(this);
5166      use_counter_callback_(reinterpret_cast<v8::Isolate*>(this), feature);
5167    }
5168  } else {
5169    heap_.IncrementDeferredCount(feature);
5170  }
5171}
5172
5173void Isolate::CountUsage(v8::Isolate::UseCounterFeature feature, int count) {
5174  for (int i = 0; i < count; ++i) {
5175    CountUsage(feature);
5176  }
5177}
5178
5179int Isolate::GetNextScriptId() { return heap()->NextScriptId(); }
5180
5181// static
5182std::string Isolate::GetTurboCfgFileName(Isolate* isolate) {
5183  if (FLAG_trace_turbo_cfg_file == nullptr) {
5184    std::ostringstream os;
5185    os << "turbo-" << base::OS::GetCurrentProcessId() << "-";
5186    if (isolate != nullptr) {
5187      os << isolate->id();
5188    } else {
5189      os << "any";
5190    }
5191    os << ".cfg";
5192    return os.str();
5193  } else {
5194    return FLAG_trace_turbo_cfg_file;
5195  }
5196}
5197
5198// Heap::detached_contexts tracks detached contexts as pairs
5199// (number of GC since the context was detached, the context).
5200void Isolate::AddDetachedContext(Handle<Context> context) {
5201  HandleScope scope(this);
5202  Handle<WeakArrayList> detached_contexts = factory()->detached_contexts();
5203  detached_contexts = WeakArrayList::AddToEnd(
5204      this, detached_contexts, MaybeObjectHandle(Smi::zero(), this),
5205      MaybeObjectHandle::Weak(context));
5206  heap()->set_detached_contexts(*detached_contexts);
5207}
5208
5209void Isolate::CheckDetachedContextsAfterGC() {
5210  HandleScope scope(this);
5211  Handle<WeakArrayList> detached_contexts = factory()->detached_contexts();
5212  int length = detached_contexts->length();
5213  if (length == 0) return;
5214  int new_length = 0;
5215  for (int i = 0; i < length; i += 2) {
5216    int mark_sweeps = detached_contexts->Get(i).ToSmi().value();
5217    MaybeObject context = detached_contexts->Get(i + 1);
5218    DCHECK(context->IsWeakOrCleared());
5219    if (!context->IsCleared()) {
5220      detached_contexts->Set(
5221          new_length, MaybeObject::FromSmi(Smi::FromInt(mark_sweeps + 1)));
5222      detached_contexts->Set(new_length + 1, context);
5223      new_length += 2;
5224    }
5225  }
5226  detached_contexts->set_length(new_length);
5227  while (new_length < length) {
5228    detached_contexts->Set(new_length, MaybeObject::FromSmi(Smi::zero()));
5229    ++new_length;
5230  }
5231
5232  if (FLAG_trace_detached_contexts) {
5233    PrintF("%d detached contexts are collected out of %d\n",
5234           length - new_length, length);
5235    for (int i = 0; i < new_length; i += 2) {
5236      int mark_sweeps = detached_contexts->Get(i).ToSmi().value();
5237      MaybeObject context = detached_contexts->Get(i + 1);
5238      DCHECK(context->IsWeakOrCleared());
5239      if (mark_sweeps > 3) {
5240        PrintF("detached context %p\n survived %d GCs (leak?)\n",
5241               reinterpret_cast<void*>(context.ptr()), mark_sweeps);
5242      }
5243    }
5244  }
5245}
5246
5247void Isolate::DetachGlobal(Handle<Context> env) {
5248  counters()->errors_thrown_per_context()->AddSample(
5249      env->native_context().GetErrorsThrown());
5250
5251  ReadOnlyRoots roots(this);
5252  Handle<JSGlobalProxy> global_proxy(env->global_proxy(), this);
5253  global_proxy->set_native_context(roots.null_value());
5254  // NOTE: Turbofan's JSNativeContextSpecialization depends on DetachGlobal
5255  // causing a map change.
5256  JSObject::ForceSetPrototype(this, global_proxy, factory()->null_value());
5257  global_proxy->map().set_constructor_or_back_pointer(roots.null_value(),
5258                                                      kRelaxedStore);
5259  if (FLAG_track_detached_contexts) AddDetachedContext(env);
5260  DCHECK(global_proxy->IsDetached());
5261
5262  env->native_context().set_microtask_queue(this, nullptr);
5263}
5264
5265double Isolate::LoadStartTimeMs() {
5266  base::MutexGuard guard(&rail_mutex_);
5267  return load_start_time_ms_;
5268}
5269
5270void Isolate::UpdateLoadStartTime() {
5271  base::MutexGuard guard(&rail_mutex_);
5272  load_start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
5273}
5274
5275void Isolate::SetRAILMode(RAILMode rail_mode) {
5276  RAILMode old_rail_mode = rail_mode_.load();
5277  if (old_rail_mode != PERFORMANCE_LOAD && rail_mode == PERFORMANCE_LOAD) {
5278    base::MutexGuard guard(&rail_mutex_);
5279    load_start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
5280  }
5281  rail_mode_.store(rail_mode);
5282  if (old_rail_mode == PERFORMANCE_LOAD && rail_mode != PERFORMANCE_LOAD) {
5283    heap()->incremental_marking()->incremental_marking_job()->ScheduleTask(
5284        heap());
5285  }
5286  if (FLAG_trace_rail) {
5287    PrintIsolate(this, "RAIL mode: %s\n", RAILModeName(rail_mode));
5288  }
5289}
5290
5291void Isolate::IsolateInBackgroundNotification() {
5292  is_isolate_in_background_ = true;
5293  heap()->ActivateMemoryReducerIfNeeded();
5294}
5295
5296void Isolate::IsolateInForegroundNotification() {
5297  is_isolate_in_background_ = false;
5298}
5299
5300void Isolate::PrintWithTimestamp(const char* format, ...) {
5301  base::OS::Print("[%d:%p] %8.0f ms: ", base::OS::GetCurrentProcessId(),
5302                  static_cast<void*>(this), time_millis_since_init());
5303  va_list arguments;
5304  va_start(arguments, format);
5305  base::OS::VPrint(format, arguments);
5306  va_end(arguments);
5307}
5308
5309void Isolate::SetIdle(bool is_idle) {
5310  StateTag state = current_vm_state();
5311  if (js_entry_sp() != kNullAddress) return;
5312  DCHECK(state == EXTERNAL || state == IDLE);
5313  if (is_idle) {
5314    set_current_vm_state(IDLE);
5315  } else if (state == IDLE) {
5316    set_current_vm_state(EXTERNAL);
5317  }
5318}
5319
5320void Isolate::CollectSourcePositionsForAllBytecodeArrays() {
5321  if (!initialized_) return;
5322
5323  HandleScope scope(this);
5324  std::vector<Handle<SharedFunctionInfo>> sfis;
5325  {
5326    HeapObjectIterator iterator(heap());
5327    for (HeapObject obj = iterator.Next(); !obj.is_null();
5328         obj = iterator.Next()) {
5329      if (!obj.IsSharedFunctionInfo()) continue;
5330      SharedFunctionInfo sfi = SharedFunctionInfo::cast(obj);
5331      if (!sfi.CanCollectSourcePosition(this)) continue;
5332      sfis.push_back(Handle<SharedFunctionInfo>(sfi, this));
5333    }
5334  }
5335  for (auto sfi : sfis) {
5336    SharedFunctionInfo::EnsureSourcePositionsAvailable(this, sfi);
5337  }
5338}
5339
5340#ifdef V8_INTL_SUPPORT
5341
5342namespace {
5343
5344std::string GetStringFromLocales(Isolate* isolate, Handle<Object> locales) {
5345  if (locales->IsUndefined(isolate)) return "";
5346  return std::string(String::cast(*locales).ToCString().get());
5347}
5348
5349bool StringEqualsLocales(Isolate* isolate, const std::string& str,
5350                         Handle<Object> locales) {
5351  if (locales->IsUndefined(isolate)) return str == "";
5352  return Handle<String>::cast(locales)->IsEqualTo(
5353      base::VectorOf(str.c_str(), str.length()));
5354}
5355
5356}  // namespace
5357
5358const std::string& Isolate::DefaultLocale() {
5359  if (default_locale_.empty()) {
5360    icu::Locale default_locale;
5361    // Translate ICU's fallback locale to a well-known locale.
5362    if (strcmp(default_locale.getName(), "en_US_POSIX") == 0 ||
5363        strcmp(default_locale.getName(), "c") == 0) {
5364      set_default_locale("en-US");
5365    } else {
5366      // Set the locale
5367      set_default_locale(default_locale.isBogus()
5368                             ? "und"
5369                             : Intl::ToLanguageTag(default_locale).FromJust());
5370    }
5371    DCHECK(!default_locale_.empty());
5372  }
5373  return default_locale_;
5374}
5375
5376void Isolate::ResetDefaultLocale() {
5377  default_locale_.clear();
5378  clear_cached_icu_objects();
5379  // We inline fast paths assuming certain locales. Since this path is rarely
5380  // taken, we deoptimize everything to keep things simple.
5381  Deoptimizer::DeoptimizeAll(this);
5382}
5383
5384icu::UMemory* Isolate::get_cached_icu_object(ICUObjectCacheType cache_type,
5385                                             Handle<Object> locales) {
5386  const ICUObjectCacheEntry& entry =
5387      icu_object_cache_[static_cast<int>(cache_type)];
5388  return StringEqualsLocales(this, entry.locales, locales) ? entry.obj.get()
5389                                                           : nullptr;
5390}
5391
5392void Isolate::set_icu_object_in_cache(ICUObjectCacheType cache_type,
5393                                      Handle<Object> locales,
5394                                      std::shared_ptr<icu::UMemory> obj) {
5395  icu_object_cache_[static_cast<int>(cache_type)] = {
5396      GetStringFromLocales(this, locales), std::move(obj)};
5397}
5398
5399void Isolate::clear_cached_icu_object(ICUObjectCacheType cache_type) {
5400  icu_object_cache_[static_cast<int>(cache_type)] = ICUObjectCacheEntry{};
5401}
5402
5403void Isolate::clear_cached_icu_objects() {
5404  for (int i = 0; i < kICUObjectCacheTypeCount; i++) {
5405    clear_cached_icu_object(static_cast<ICUObjectCacheType>(i));
5406  }
5407}
5408
5409#endif  // V8_INTL_SUPPORT
5410
5411bool StackLimitCheck::JsHasOverflowed(uintptr_t gap) const {
5412  StackGuard* stack_guard = isolate_->stack_guard();
5413#ifdef USE_SIMULATOR
5414  // The simulator uses a separate JS stack.
5415  Address jssp_address = Simulator::current(isolate_)->get_sp();
5416  uintptr_t jssp = static_cast<uintptr_t>(jssp_address);
5417  if (jssp - gap < stack_guard->real_jslimit()) return true;
5418#endif  // USE_SIMULATOR
5419  return GetCurrentStackPosition() - gap < stack_guard->real_climit();
5420}
5421
5422SaveContext::SaveContext(Isolate* isolate) : isolate_(isolate) {
5423  if (!isolate->context().is_null()) {
5424    context_ = Handle<Context>(isolate->context(), isolate);
5425  }
5426
5427  c_entry_fp_ = isolate->c_entry_fp(isolate->thread_local_top());
5428}
5429
5430SaveContext::~SaveContext() {
5431  isolate_->set_context(context_.is_null() ? Context() : *context_);
5432}
5433
5434bool SaveContext::IsBelowFrame(CommonFrame* frame) {
5435  return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
5436}
5437
5438SaveAndSwitchContext::SaveAndSwitchContext(Isolate* isolate,
5439                                           Context new_context)
5440    : SaveContext(isolate) {
5441  isolate->set_context(new_context);
5442}
5443
5444#ifdef DEBUG
5445AssertNoContextChange::AssertNoContextChange(Isolate* isolate)
5446    : isolate_(isolate), context_(isolate->context(), isolate) {}
5447
5448namespace {
5449
5450bool Overlapping(const MemoryRange& a, const MemoryRange& b) {
5451  uintptr_t a1 = reinterpret_cast<uintptr_t>(a.start);
5452  uintptr_t a2 = a1 + a.length_in_bytes;
5453  uintptr_t b1 = reinterpret_cast<uintptr_t>(b.start);
5454  uintptr_t b2 = b1 + b.length_in_bytes;
5455  // Either b1 or b2 are in the [a1, a2) range.
5456  return (a1 <= b1 && b1 < a2) || (a1 <= b2 && b2 < a2);
5457}
5458
5459}  // anonymous namespace
5460
5461#endif  // DEBUG
5462
5463void Isolate::AddCodeMemoryRange(MemoryRange range) {
5464  base::MutexGuard guard(&code_pages_mutex_);
5465  std::vector<MemoryRange>* old_code_pages = GetCodePages();
5466  DCHECK_NOT_NULL(old_code_pages);
5467#ifdef DEBUG
5468  auto overlapping = [range](const MemoryRange& a) {
5469    return Overlapping(range, a);
5470  };
5471  DCHECK_EQ(old_code_pages->end(),
5472            std::find_if(old_code_pages->begin(), old_code_pages->end(),
5473                         overlapping));
5474#endif
5475
5476  std::vector<MemoryRange>* new_code_pages;
5477  if (old_code_pages == &code_pages_buffer1_) {
5478    new_code_pages = &code_pages_buffer2_;
5479  } else {
5480    new_code_pages = &code_pages_buffer1_;
5481  }
5482
5483  // Copy all existing data from the old vector to the new vector and insert the
5484  // new page.
5485  new_code_pages->clear();
5486  new_code_pages->reserve(old_code_pages->size() + 1);
5487  std::merge(old_code_pages->begin(), old_code_pages->end(), &range, &range + 1,
5488             std::back_inserter(*new_code_pages),
5489             [](const MemoryRange& a, const MemoryRange& b) {
5490               return a.start < b.start;
5491             });
5492
5493  // Atomically switch out the pointer
5494  SetCodePages(new_code_pages);
5495}
5496
5497// |chunk| is either a Page or an executable LargePage.
5498void Isolate::AddCodeMemoryChunk(MemoryChunk* chunk) {
5499  // We only keep track of individual code pages/allocations if we are on arm32,
5500  // because on x64 and arm64 we have a code range which makes this unnecessary.
5501#if !defined(V8_TARGET_ARCH_ARM)
5502  return;
5503#else
5504  void* new_page_start = reinterpret_cast<void*>(chunk->area_start());
5505  size_t new_page_size = chunk->area_size();
5506
5507  MemoryRange new_range{new_page_start, new_page_size};
5508
5509  AddCodeMemoryRange(new_range);
5510#endif  // !defined(V8_TARGET_ARCH_ARM)
5511}
5512
5513void Isolate::AddCodeRange(Address begin, size_t length_in_bytes) {
5514  AddCodeMemoryRange(
5515      MemoryRange{reinterpret_cast<void*>(begin), length_in_bytes});
5516}
5517
5518bool Isolate::RequiresCodeRange() const {
5519  return kPlatformRequiresCodeRange && !jitless_;
5520}
5521
5522v8::metrics::Recorder::ContextId Isolate::GetOrRegisterRecorderContextId(
5523    Handle<NativeContext> context) {
5524  if (serializer_enabled_) return v8::metrics::Recorder::ContextId::Empty();
5525  i::Object id = context->recorder_context_id();
5526  if (id.IsNullOrUndefined()) {
5527    CHECK_LT(last_recorder_context_id_, i::Smi::kMaxValue);
5528    context->set_recorder_context_id(
5529        i::Smi::FromIntptr(++last_recorder_context_id_));
5530    v8::HandleScope handle_scope(reinterpret_cast<v8::Isolate*>(this));
5531    auto result = recorder_context_id_map_.emplace(
5532        std::piecewise_construct,
5533        std::forward_as_tuple(last_recorder_context_id_),
5534        std::forward_as_tuple(reinterpret_cast<v8::Isolate*>(this),
5535                              ToApiHandle<v8::Context>(context)));
5536    result.first->second.SetWeak(
5537        reinterpret_cast<void*>(last_recorder_context_id_),
5538        RemoveContextIdCallback, v8::WeakCallbackType::kParameter);
5539    return v8::metrics::Recorder::ContextId(last_recorder_context_id_);
5540  } else {
5541    DCHECK(id.IsSmi());
5542    return v8::metrics::Recorder::ContextId(
5543        static_cast<uintptr_t>(i::Smi::ToInt(id)));
5544  }
5545}
5546
5547MaybeLocal<v8::Context> Isolate::GetContextFromRecorderContextId(
5548    v8::metrics::Recorder::ContextId id) {
5549  auto result = recorder_context_id_map_.find(id.id_);
5550  if (result == recorder_context_id_map_.end() || result->second.IsEmpty())
5551    return MaybeLocal<v8::Context>();
5552  return result->second.Get(reinterpret_cast<v8::Isolate*>(this));
5553}
5554
5555void Isolate::UpdateLongTaskStats() {
5556  if (last_long_task_stats_counter_ != isolate_data_.long_task_stats_counter_) {
5557    last_long_task_stats_counter_ = isolate_data_.long_task_stats_counter_;
5558    long_task_stats_ = v8::metrics::LongTaskStats{};
5559  }
5560}
5561
5562v8::metrics::LongTaskStats* Isolate::GetCurrentLongTaskStats() {
5563  UpdateLongTaskStats();
5564  return &long_task_stats_;
5565}
5566
5567void Isolate::RemoveContextIdCallback(const v8::WeakCallbackInfo<void>& data) {
5568  Isolate* isolate = reinterpret_cast<Isolate*>(data.GetIsolate());
5569  uintptr_t context_id = reinterpret_cast<uintptr_t>(data.GetParameter());
5570  isolate->recorder_context_id_map_.erase(context_id);
5571}
5572
5573LocalHeap* Isolate::main_thread_local_heap() {
5574  return main_thread_local_isolate()->heap();
5575}
5576
5577LocalHeap* Isolate::CurrentLocalHeap() {
5578  LocalHeap* local_heap = LocalHeap::Current();
5579  return local_heap ? local_heap : main_thread_local_heap();
5580}
5581
5582// |chunk| is either a Page or an executable LargePage.
5583void Isolate::RemoveCodeMemoryChunk(MemoryChunk* chunk) {
5584  // We only keep track of individual code pages/allocations if we are on arm32,
5585  // because on x64 and arm64 we have a code range which makes this unnecessary.
5586#if !defined(V8_TARGET_ARCH_ARM)
5587  return;
5588#else
5589  void* removed_page_start = reinterpret_cast<void*>(chunk->area_start());
5590  std::vector<MemoryRange>* old_code_pages = GetCodePages();
5591  DCHECK_NOT_NULL(old_code_pages);
5592
5593  std::vector<MemoryRange>* new_code_pages;
5594  if (old_code_pages == &code_pages_buffer1_) {
5595    new_code_pages = &code_pages_buffer2_;
5596  } else {
5597    new_code_pages = &code_pages_buffer1_;
5598  }
5599
5600  // Copy all existing data from the old vector to the new vector except the
5601  // removed page.
5602  new_code_pages->clear();
5603  new_code_pages->reserve(old_code_pages->size() - 1);
5604  std::remove_copy_if(old_code_pages->begin(), old_code_pages->end(),
5605                      std::back_inserter(*new_code_pages),
5606                      [removed_page_start](const MemoryRange& range) {
5607                        return range.start == removed_page_start;
5608                      });
5609  DCHECK_EQ(old_code_pages->size(), new_code_pages->size() + 1);
5610  // Atomically switch out the pointer
5611  SetCodePages(new_code_pages);
5612#endif  // !defined(V8_TARGET_ARCH_ARM)
5613}
5614
5615#undef TRACE_ISOLATE
5616
5617// static
5618Address Isolate::load_from_stack_count_address(const char* function_name) {
5619  DCHECK_NOT_NULL(function_name);
5620  if (!stack_access_count_map) {
5621    stack_access_count_map = new MapOfLoadsAndStoresPerFunction{};
5622  }
5623  auto& map = *stack_access_count_map;
5624  std::string name(function_name);
5625  // It is safe to return the address of std::map values.
5626  // Only iterators and references to the erased elements are invalidated.
5627  return reinterpret_cast<Address>(&map[name].first);
5628}
5629
5630// static
5631Address Isolate::store_to_stack_count_address(const char* function_name) {
5632  DCHECK_NOT_NULL(function_name);
5633  if (!stack_access_count_map) {
5634    stack_access_count_map = new MapOfLoadsAndStoresPerFunction{};
5635  }
5636  auto& map = *stack_access_count_map;
5637  std::string name(function_name);
5638  // It is safe to return the address of std::map values.
5639  // Only iterators and references to the erased elements are invalidated.
5640  return reinterpret_cast<Address>(&map[name].second);
5641}
5642
5643void Isolate::AttachToSharedIsolate() {
5644  DCHECK(!attached_to_shared_isolate_);
5645
5646  if (shared_isolate_) {
5647    DCHECK(shared_isolate_->is_shared());
5648    shared_isolate_->global_safepoint()->AppendClient(this);
5649  }
5650
5651#if DEBUG
5652  attached_to_shared_isolate_ = true;
5653#endif  // DEBUG
5654}
5655
5656void Isolate::DetachFromSharedIsolate() {
5657  DCHECK(attached_to_shared_isolate_);
5658
5659  if (shared_isolate_) {
5660    shared_isolate_->global_safepoint()->RemoveClient(this);
5661    shared_isolate_ = nullptr;
5662  }
5663
5664#if DEBUG
5665  attached_to_shared_isolate_ = false;
5666#endif  // DEBUG
5667}
5668
5669}  // namespace internal
5670}  // namespace v8
5671