// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/cppgc/heap-base.h"

#include "include/cppgc/heap-consistency.h"
#include "src/base/platform/platform.h"
#include "src/base/sanitizer/lsan-page-allocator.h"
#include "src/heap/base/stack.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/heap-statistics-collector.h"
#include "src/heap/cppgc/heap-visitor.h"
#include "src/heap/cppgc/marking-verifier.h"
#include "src/heap/cppgc/object-view.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/platform.h"
#include "src/heap/cppgc/prefinalizer-handler.h"
#include "src/heap/cppgc/stats-collector.h"
#include "src/heap/cppgc/unmarker.h"

namespace cppgc {
namespace internal {

namespace {

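// Heap visitor that sums the size of all live (non-free) objects. Used below
// to compute the heap's current object payload size.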
class ObjectSizeCounter : private HeapVisitor<ObjectSizeCounter> {
  friend class HeapVisitor<ObjectSizeCounter>;

 public:
  size_t GetSize(RawHeap& heap) {
    Traverse(heap);
    return accumulated_size_;
  }

 private:
  static size_t ObjectSize(const HeapObjectHeader& header) {
    return ObjectView<>(header).Size();
  }

  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
    if (header.IsFree()) return true;
    accumulated_size_ += ObjectSize(header);
    return true;
  }

  size_t accumulated_size_ = 0;
};

}  // namespace

HeapBase::HeapBase(
    std::shared_ptr<cppgc::Platform> platform,
    const std::vector<std::unique_ptr<CustomSpaceBase>>& custom_spaces,
    StackSupport stack_support, MarkingType marking_support,
    SweepingType sweeping_support)
    : raw_heap_(this, custom_spaces),
      platform_(std::move(platform)),
      oom_handler_(std::make_unique<FatalOutOfMemoryHandler>(this)),
#if defined(LEAK_SANITIZER)
      lsan_page_allocator_(std::make_unique<v8::base::LsanPageAllocator>(
          platform_->GetPageAllocator())),
#endif  // LEAK_SANITIZER
#if defined(CPPGC_CAGED_HEAP)
      caged_heap_(*this, *page_allocator()),
      page_backend_(std::make_unique<PageBackend>(caged_heap_.allocator(),
                                                  *oom_handler_.get())),
#else   // !CPPGC_CAGED_HEAP
      page_backend_(std::make_unique<PageBackend>(*page_allocator(),
                                                  *oom_handler_.get())),
#endif  // !CPPGC_CAGED_HEAP
      stats_collector_(std::make_unique<StatsCollector>(platform_.get())),
      stack_(std::make_unique<heap::base::Stack>(
          v8::base::Stack::GetStackStart())),
      prefinalizer_handler_(std::make_unique<PreFinalizerHandler>(*this)),
      compactor_(raw_heap_),
      object_allocator_(raw_heap_, *page_backend_, *stats_collector_,
                        *prefinalizer_handler_),
      sweeper_(*this),
      strong_persistent_region_(*oom_handler_.get()),
      weak_persistent_region_(*oom_handler_.get()),
      strong_cross_thread_persistent_region_(*oom_handler_.get()),
      weak_cross_thread_persistent_region_(*oom_handler_.get()),
#if defined(CPPGC_YOUNG_GENERATION)
      remembered_set_(*this),
#endif  // defined(CPPGC_YOUNG_GENERATION)
      stack_support_(stack_support),
      marking_support_(marking_support),
      sweeping_support_(sweeping_support) {
  stats_collector_->RegisterObserver(
      &allocation_observer_for_PROCESS_HEAP_STATISTICS_);
}

HeapBase::~HeapBase() = default;

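// Returns the platform's page allocator, wrapped by LsanPageAllocator when
// built with LEAK_SANITIZER.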
PageAllocator* HeapBase::page_allocator() const {
#if defined(LEAK_SANITIZER)
  return lsan_page_allocator_.get();
#else   // !LEAK_SANITIZER
  return platform_->GetPageAllocator();
#endif  // !LEAK_SANITIZER
}

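// Computes the current payload size of all live objects by traversing the
// whole heap.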
size_t HeapBase::ObjectPayloadSize() const {
  return ObjectSizeCounter().GetSize(const_cast<RawHeap&>(raw_heap()));
}

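// Invokes all registered pre-finalizers and returns the number of bytes that
// were allocated while running them.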
size_t HeapBase::ExecutePreFinalizers() {
#ifdef CPPGC_ALLOW_ALLOCATIONS_IN_PREFINALIZERS
  // Allocations in pre-finalizers should not trigger another GC.
  cppgc::subtle::NoGarbageCollectionScope no_gc_scope(*this);
#else
  // Pre-finalizers are forbidden from allocating objects.
  cppgc::subtle::DisallowGarbageCollectionScope no_gc_scope(*this);
#endif  // CPPGC_ALLOW_ALLOCATIONS_IN_PREFINALIZERS
  prefinalizer_handler_->InvokePreFinalizers();
  return prefinalizer_handler_->ExtractBytesAllocatedInPrefinalizers();
}

#if defined(CPPGC_YOUNG_GENERATION)
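// Resets the remembered set and the age table used by the young generation.
// Requires that all linear allocation buffers (LABs) are empty, which is
// verified by the DCHECK below.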
void HeapBase::ResetRememberedSet() {
  class AllLABsAreEmpty final : protected HeapVisitor<AllLABsAreEmpty> {
    friend class HeapVisitor<AllLABsAreEmpty>;

   public:
    explicit AllLABsAreEmpty(RawHeap& raw_heap) { Traverse(raw_heap); }

    bool value() const { return !some_lab_is_set_; }

   protected:
    bool VisitNormalPageSpace(NormalPageSpace& space) {
      some_lab_is_set_ |=
          static_cast<bool>(space.linear_allocation_buffer().size());
      return true;
    }

   private:
    bool some_lab_is_set_ = false;
  };
  DCHECK(AllLABsAreEmpty(raw_heap()).value());
  caged_heap().local_data().age_table.Reset(&caged_heap().allocator());
  remembered_set_.Reset();
}
#endif  // defined(CPPGC_YOUNG_GENERATION)

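// Tears the heap down: repeatedly clears all persistent roots and runs
// termination GCs until no persistent nodes remain (bounded by
// kMaxTerminationGCs), then shuts down the object allocator and permanently
// disallows further garbage collection.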
void HeapBase::Terminate() {
  DCHECK(!IsMarking());
  CHECK(!in_disallow_gc_scope());

  sweeper().FinishIfRunning();

  constexpr size_t kMaxTerminationGCs = 20;
  size_t gc_count = 0;
  bool more_termination_gcs_needed = false;

  do {
    CHECK_LT(gc_count++, kMaxTerminationGCs);

    // Clear root sets.
    strong_persistent_region_.ClearAllUsedNodes();
    weak_persistent_region_.ClearAllUsedNodes();
    {
      PersistentRegionLock guard;
      strong_cross_thread_persistent_region_.ClearAllUsedNodes();
      weak_cross_thread_persistent_region_.ClearAllUsedNodes();
    }

#if defined(CPPGC_YOUNG_GENERATION)
    // Unmark the heap so that the sweeper destructs all objects.
    // TODO(chromium:1029379): Merge two heap iterations (unmarking + sweeping)
    // into forced finalization.
    SequentialUnmarker unmarker(raw_heap());
#endif  // defined(CPPGC_YOUNG_GENERATION)

    in_atomic_pause_ = true;
    stats_collector()->NotifyMarkingStarted(
        GarbageCollector::Config::CollectionType::kMajor,
        GarbageCollector::Config::IsForcedGC::kForced);
    object_allocator().ResetLinearAllocationBuffers();
    stats_collector()->NotifyMarkingCompleted(0);
    ExecutePreFinalizers();
    // TODO(chromium:1029379): Prefinalizers may black-allocate objects (under a
    // compile-time option). Run sweeping with forced finalization here.
    sweeper().Start(
        {Sweeper::SweepingConfig::SweepingType::kAtomic,
         Sweeper::SweepingConfig::CompactableSpaceHandling::kSweep});
    in_atomic_pause_ = false;

    sweeper().NotifyDoneIfNeeded();
    more_termination_gcs_needed =
        strong_persistent_region_.NodesInUse() ||
        weak_persistent_region_.NodesInUse() || [this]() {
          PersistentRegionLock guard;
          return strong_cross_thread_persistent_region_.NodesInUse() ||
                 weak_cross_thread_persistent_region_.NodesInUse();
        }();
  } while (more_termination_gcs_needed);

  object_allocator().Terminate();
  disallow_gc_scope_++;

  CHECK_EQ(0u, strong_persistent_region_.NodesInUse());
  CHECK_EQ(0u, weak_persistent_region_.NodesInUse());
  CHECK_EQ(0u, strong_cross_thread_persistent_region_.NodesInUse());
  CHECK_EQ(0u, weak_cross_thread_persistent_region_.NodesInUse());
}

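// Collects heap statistics. For kBrief, only globally tracked counters are
// returned; for detailed statistics, sweeping is finished and linear
// allocation buffers are reset before the heap is traversed.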
HeapStatistics HeapBase::CollectStatistics(
    HeapStatistics::DetailLevel detail_level) {
  if (detail_level == HeapStatistics::DetailLevel::kBrief) {
    return {stats_collector_->allocated_memory_size(),
            stats_collector_->resident_memory_size(),
            stats_collector_->allocated_object_size(),
            HeapStatistics::DetailLevel::kBrief,
            {},
            {}};
  }

  sweeper_.FinishIfRunning();
  object_allocator_.ResetLinearAllocationBuffers();
  return HeapStatisticsCollector().CollectDetailedStatistics(this);
}

}  // namespace internal
}  // namespace cppgc