xref: /third_party/node/deps/v8/src/heap/local-heap.cc (revision 1cb0ef41)
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/local-heap.h"

#include <algorithm>  // For std::find in the GC epilogue callback helpers.
#include <atomic>
#include <memory>

#include "src/base/logging.h"
#include "src/base/optional.h"
#include "src/base/platform/mutex.h"
#include "src/common/globals.h"
#include "src/execution/isolate.h"
#include "src/handles/local-handles.h"
#include "src/heap/collection-barrier.h"
#include "src/heap/concurrent-allocator.h"
#include "src/heap/gc-tracer-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap-write-barrier.h"
#include "src/heap/heap.h"
#include "src/heap/local-heap-inl.h"
#include "src/heap/marking-barrier.h"
#include "src/heap/parked-scope.h"
#include "src/heap/safepoint.h"

namespace v8 {
namespace internal {

namespace {
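// Each background thread records its LocalHeap here so that Current() and
// the debug checks below can find it. The slot is only set for background
// threads and stays null on the main thread (see the constructor and
// VerifyCurrent()).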
thread_local LocalHeap* current_local_heap = nullptr;
}  // namespace

LocalHeap* LocalHeap::Current() { return current_local_heap; }

#ifdef DEBUG
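// Checks that this LocalHeap is the one the current thread registered:
// background threads must find themselves in current_local_heap, while the
// main thread never registers there and expects the slot to be null.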
void LocalHeap::VerifyCurrent() {
  LocalHeap* current = LocalHeap::Current();

  if (is_main_thread())
    DCHECK_NULL(current);
  else
    DCHECK_EQ(current, this);
}
#endif

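// Creates the LocalHeap in the parked state and registers it with the heap's
// safepoint so that GC safepoints can account for this thread. Background
// threads additionally set up their allocators and thread-local marking
// barrier here.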
LocalHeap::LocalHeap(Heap* heap, ThreadKind kind,
                     std::unique_ptr<PersistentHandles> persistent_handles)
    : heap_(heap),
      is_main_thread_(kind == ThreadKind::kMain),
      state_(ThreadState::Parked()),
      allocation_failed_(false),
      main_thread_parked_(false),
      prev_(nullptr),
      next_(nullptr),
      handles_(new LocalHandles),
      persistent_handles_(std::move(persistent_handles)) {
  DCHECK_IMPLIES(!is_main_thread(), heap_->deserialization_complete());
  if (!is_main_thread()) SetUp();

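  // The callback runs while this heap is being registered with the
  // safepoint: background threads install their thread-local write barrier
  // here and activate it right away if incremental marking is already in
  // progress.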
  heap_->safepoint()->AddLocalHeap(this, [this] {
    if (!is_main_thread()) {
      WriteBarrier::SetForThread(marking_barrier_.get());
      if (heap_->incremental_marking()->IsMarking()) {
        marking_barrier_->Activate(
            heap_->incremental_marking()->IsCompacting());
      }
    }
  });

  if (persistent_handles_) {
    persistent_handles_->Attach(this);
  }
  DCHECK_NULL(current_local_heap);
  if (!is_main_thread()) current_local_heap = this;
}

LocalHeap::~LocalHeap() {
  // Park thread since removing the local heap could block.
  EnsureParkedBeforeDestruction();

  heap_->safepoint()->RemoveLocalHeap(this, [this] {
    FreeLinearAllocationArea();

    if (!is_main_thread()) {
      marking_barrier_->Publish();
      WriteBarrier::ClearForThread(marking_barrier_.get());
    }
  });

  if (!is_main_thread()) {
    DCHECK_EQ(current_local_heap, this);
    current_local_heap = nullptr;
  }

  DCHECK(gc_epilogue_callbacks_.empty());
}

void LocalHeap::SetUpMainThreadForTesting() { SetUpMainThread(); }

void LocalHeap::SetUpMainThread() {
  DCHECK(is_main_thread());
  SetUp();
}

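// Creates the concurrent allocators for the old and code spaces, a shared
// old space allocator when the isolate has a shared isolate, and this
// thread's MarkingBarrier. Called from the constructor for background
// threads and from SetUpMainThread() for the main thread.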
void LocalHeap::SetUp() {
  DCHECK_NULL(old_space_allocator_);
  old_space_allocator_ =
      std::make_unique<ConcurrentAllocator>(this, heap_->old_space());

  DCHECK_NULL(code_space_allocator_);
  code_space_allocator_ =
      std::make_unique<ConcurrentAllocator>(this, heap_->code_space());

  DCHECK_NULL(shared_old_space_allocator_);
  if (heap_->isolate()->shared_isolate()) {
    shared_old_space_allocator_ =
        std::make_unique<ConcurrentAllocator>(this, heap_->shared_old_space());
  }

  DCHECK_NULL(marking_barrier_);
  marking_barrier_ = std::make_unique<MarkingBarrier>(this);
}

void LocalHeap::EnsurePersistentHandles() {
  if (!persistent_handles_) {
    persistent_handles_.reset(
        heap_->isolate()->NewPersistentHandles().release());
    persistent_handles_->Attach(this);
  }
}

void LocalHeap::AttachPersistentHandles(
    std::unique_ptr<PersistentHandles> persistent_handles) {
  DCHECK_NULL(persistent_handles_);
  persistent_handles_ = std::move(persistent_handles);
  persistent_handles_->Attach(this);
}

std::unique_ptr<PersistentHandles> LocalHeap::DetachPersistentHandles() {
  if (persistent_handles_) persistent_handles_->Detach();
  return std::move(persistent_handles_);
}

#ifdef DEBUG
bool LocalHeap::ContainsPersistentHandle(Address* location) {
  return persistent_handles_ ? persistent_handles_->Contains(location) : false;
}

bool LocalHeap::ContainsLocalHandle(Address* location) {
  return handles_ ? handles_->Contains(location) : false;
}

bool LocalHeap::IsHandleDereferenceAllowed() {
  VerifyCurrent();
  return IsRunning();
}
#endif

bool LocalHeap::IsParked() {
#ifdef DEBUG
  VerifyCurrent();
#endif
  return state_.load_relaxed().IsParked();
}

bool LocalHeap::IsRunning() {
#ifdef DEBUG
  VerifyCurrent();
#endif
  return state_.load_relaxed().IsRunning();
}

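// Taken when the fast-path CAS from plain Running to Parked fails, i.e. a
// safepoint or collection was requested while this thread was running. Loops
// until the request has been acknowledged or served and the state is parked.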
void LocalHeap::ParkSlowPath() {
  while (true) {
    ThreadState current_state = ThreadState::Running();
    if (state_.CompareExchangeStrong(current_state, ThreadState::Parked()))
      return;

    // CAS above failed, so state is Running with some additional flag.
    DCHECK(current_state.IsRunning());

    if (is_main_thread()) {
      DCHECK(current_state.IsSafepointRequested() ||
             current_state.IsCollectionRequested());

      if (current_state.IsSafepointRequested()) {
        ThreadState old_state = state_.SetParked();
        heap_->safepoint()->NotifyPark();
        if (old_state.IsCollectionRequested())
          heap_->collection_barrier_->CancelCollectionAndResumeThreads();
        return;
      }

      if (current_state.IsCollectionRequested()) {
        if (!heap()->ignore_local_gc_requests()) {
          heap_->CollectGarbageForBackground(this);
          continue;
        }

        DCHECK(!current_state.IsSafepointRequested());

        if (state_.CompareExchangeStrong(current_state,
                                         current_state.SetParked())) {
          heap_->collection_barrier_->CancelCollectionAndResumeThreads();
          return;
        } else {
          continue;
        }
      }
    } else {
      DCHECK(current_state.IsSafepointRequested());
      DCHECK(!current_state.IsCollectionRequested());

      ThreadState old_state = state_.SetParked();
      CHECK(old_state.IsRunning());
      CHECK(old_state.IsSafepointRequested());
      CHECK(!old_state.IsCollectionRequested());

      heap_->safepoint()->NotifyPark();
      return;
    }
  }
}

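// Counterpart of ParkSlowPath(): the fast-path CAS from plain Parked to
// Running failed because a safepoint or collection request arrived while the
// thread was parked. The thread must not resume until that request has been
// handled, so it waits (or performs the GC) before switching to Running.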
void LocalHeap::UnparkSlowPath() {
  while (true) {
    ThreadState current_state = ThreadState::Parked();
    if (state_.CompareExchangeStrong(current_state, ThreadState::Running()))
      return;

    // CAS above failed, so state is Parked with some additional flag.
    DCHECK(current_state.IsParked());

    if (is_main_thread()) {
      DCHECK(current_state.IsSafepointRequested() ||
             current_state.IsCollectionRequested());

      if (current_state.IsSafepointRequested()) {
        SleepInUnpark();
        continue;
      }

      if (current_state.IsCollectionRequested()) {
        DCHECK(!current_state.IsSafepointRequested());

        if (!state_.CompareExchangeStrong(current_state,
                                          current_state.SetRunning()))
          continue;

        if (!heap()->ignore_local_gc_requests()) {
          heap_->CollectGarbageForBackground(this);
        }

        return;
      }
    } else {
      DCHECK(current_state.IsSafepointRequested());
      DCHECK(!current_state.IsCollectionRequested());

      SleepInUnpark();
    }
  }
}

void LocalHeap::SleepInUnpark() {
  GCTracer::Scope::ScopeId scope_id;
  ThreadKind thread_kind;

  if (is_main_thread()) {
    scope_id = GCTracer::Scope::UNPARK;
    thread_kind = ThreadKind::kMain;
  } else {
    scope_id = GCTracer::Scope::BACKGROUND_UNPARK;
    thread_kind = ThreadKind::kBackground;
  }

  TRACE_GC1(heap_->tracer(), scope_id, thread_kind);
  heap_->safepoint()->WaitInUnpark();
}

void LocalHeap::EnsureParkedBeforeDestruction() {
  DCHECK_IMPLIES(!is_main_thread(), IsParked());
}

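// Reached from a safepoint check while the thread is Running and a safepoint
// or collection flag is set: background threads simply wait out the
// safepoint, while the main thread additionally performs a requested
// collection.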
void LocalHeap::SafepointSlowPath() {
  ThreadState current_state = state_.load_relaxed();
  DCHECK(current_state.IsRunning());

  if (is_main_thread()) {
    DCHECK(current_state.IsSafepointRequested() ||
           current_state.IsCollectionRequested());

    if (current_state.IsSafepointRequested()) {
      SleepInSafepoint();
    }

    if (current_state.IsCollectionRequested()) {
      heap_->CollectGarbageForBackground(this);
    }
  } else {
    DCHECK(current_state.IsSafepointRequested());
    DCHECK(!current_state.IsCollectionRequested());

    SleepInSafepoint();
  }
}

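// Parks the thread for the duration of the safepoint and unparks it again
// afterwards; the time spent waiting is attributed to the GC tracer under
// the (BACKGROUND_)SAFEPOINT scope.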
void LocalHeap::SleepInSafepoint() {
  GCTracer::Scope::ScopeId scope_id;
  ThreadKind thread_kind;

  if (is_main_thread()) {
    scope_id = GCTracer::Scope::SAFEPOINT;
    thread_kind = ThreadKind::kMain;
  } else {
    scope_id = GCTracer::Scope::BACKGROUND_SAFEPOINT;
    thread_kind = ThreadKind::kBackground;
  }

  TRACE_GC1(heap_->tracer(), scope_id, thread_kind);

  // Parking the running thread here is an optimization. We do not need to
  // wake this thread up to reach the next safepoint.
  ThreadState old_state = state_.SetParked();
  CHECK(old_state.IsRunning());
  CHECK(old_state.IsSafepointRequested());
  CHECK_IMPLIES(old_state.IsCollectionRequested(), is_main_thread());

  heap_->safepoint()->WaitInSafepoint();

  base::Optional<IgnoreLocalGCRequests> ignore_gc_requests;
  if (is_main_thread()) ignore_gc_requests.emplace(heap());
  Unpark();
}

void LocalHeap::FreeLinearAllocationArea() {
  old_space_allocator_->FreeLinearAllocationArea();
  code_space_allocator_->FreeLinearAllocationArea();
}

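// Only valid when SetUp() created a shared old space allocator, i.e. when
// the isolate has a shared isolate; otherwise shared_old_space_allocator_ is
// null.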
void LocalHeap::FreeSharedLinearAllocationArea() {
  shared_old_space_allocator_->FreeLinearAllocationArea();
}

void LocalHeap::MakeLinearAllocationAreaIterable() {
  old_space_allocator_->MakeLinearAllocationAreaIterable();
  code_space_allocator_->MakeLinearAllocationAreaIterable();
}

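// Black-allocation support: while incremental marking is active, objects in
// this thread's linear allocation areas are treated as marked (black);
// unmarking reverses this when marking finishes or is aborted.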
void LocalHeap::MarkLinearAllocationAreaBlack() {
  old_space_allocator_->MarkLinearAllocationAreaBlack();
  code_space_allocator_->MarkLinearAllocationAreaBlack();
}

void LocalHeap::UnmarkLinearAllocationArea() {
  old_space_allocator_->UnmarkLinearAllocationArea();
  code_space_allocator_->UnmarkLinearAllocationArea();
}

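// On the main thread a collection can be performed directly. A background
// thread instead requests a GC from the main thread and waits for it, which
// only succeeds if the main thread is running; if it is parked, this returns
// false without performing a collection.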
bool LocalHeap::TryPerformCollection() {
  if (is_main_thread()) {
    heap_->CollectGarbageForBackground(this);
    return true;
  } else {
    DCHECK(IsRunning());
    if (!heap_->collection_barrier_->TryRequestGC()) return false;

    LocalHeap* main_thread = heap_->main_thread_local_heap();

    const ThreadState old_state = main_thread->state_.SetCollectionRequested();

    if (old_state.IsRunning()) {
      const bool performed_gc =
          heap_->collection_barrier_->AwaitCollectionBackground(this);
      return performed_gc;
    } else {
      DCHECK(old_state.IsParked());
      return false;
    }
  }
}

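// Retries a failed allocation up to kMaxNumberOfRetries times, triggering a
// collection before each attempt. If no GC could be performed (e.g. because
// the main thread is parked), main_thread_parked_ is set before retrying the
// allocation. Fails fatally with an OOM if all retries are exhausted.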
Address LocalHeap::PerformCollectionAndAllocateAgain(
    int object_size, AllocationType type, AllocationOrigin origin,
    AllocationAlignment alignment) {
  CHECK(!allocation_failed_);
  CHECK(!main_thread_parked_);
  allocation_failed_ = true;
  static const int kMaxNumberOfRetries = 3;

  for (int i = 0; i < kMaxNumberOfRetries; i++) {
    if (!TryPerformCollection()) {
      main_thread_parked_ = true;
    }

    AllocationResult result = AllocateRaw(object_size, type, origin, alignment);

    if (!result.IsFailure()) {
      allocation_failed_ = false;
      main_thread_parked_ = false;
      return result.ToObjectChecked().address();
    }
  }

  heap_->FatalProcessOutOfMemory("LocalHeap: allocation failed");
}

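// Registers a (callback, data) pair to be invoked in the safepoint at the
// end of a GC cycle; registering the same pair twice is a bug (see the
// DCHECK below).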
void LocalHeap::AddGCEpilogueCallback(GCEpilogueCallback* callback,
                                      void* data) {
  DCHECK(!IsParked());
  std::pair<GCEpilogueCallback*, void*> callback_and_data(callback, data);
  DCHECK_EQ(std::find(gc_epilogue_callbacks_.begin(),
                      gc_epilogue_callbacks_.end(), callback_and_data),
            gc_epilogue_callbacks_.end());
  gc_epilogue_callbacks_.push_back(callback_and_data);
}

void LocalHeap::RemoveGCEpilogueCallback(GCEpilogueCallback* callback,
                                         void* data) {
  DCHECK(!IsParked());
  std::pair<GCEpilogueCallback*, void*> callback_and_data(callback, data);
  auto it = std::find(gc_epilogue_callbacks_.begin(),
                      gc_epilogue_callbacks_.end(), callback_and_data);
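  // The callback must have been registered before; removal swaps the found
  // entry with the last one, so element order is not preserved.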
  DCHECK_NE(it, gc_epilogue_callbacks_.end());
  *it = gc_epilogue_callbacks_.back();
  gc_epilogue_callbacks_.pop_back();
}

void LocalHeap::InvokeGCEpilogueCallbacksInSafepoint() {
  for (auto callback_and_data : gc_epilogue_callbacks_) {
    callback_and_data.first(callback_and_data.second);
  }
}

}  // namespace internal
}  // namespace v8