// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CPPGC_PAGE_MEMORY_H_
#define V8_HEAP_CPPGC_PAGE_MEMORY_H_

#include <array>
#include <map>
#include <memory>
#include <unordered_map>
#include <vector>

#include "include/cppgc/platform.h"
#include "src/base/macros.h"
#include "src/base/platform/mutex.h"
#include "src/heap/cppgc/globals.h"

namespace cppgc {
namespace internal {

class FatalOutOfMemoryHandler;

class V8_EXPORT_PRIVATE MemoryRegion final {
 public:
  MemoryRegion() = default;
  MemoryRegion(Address base, size_t size) : base_(base), size_(size) {
    DCHECK(base);
    DCHECK_LT(0u, size);
  }

  Address base() const { return base_; }
  size_t size() const { return size_; }
  Address end() const { return base_ + size_; }

  bool Contains(ConstAddress addr) const {
    return (reinterpret_cast<uintptr_t>(addr) -
            reinterpret_cast<uintptr_t>(base_)) < size_;
  }

  bool Contains(const MemoryRegion& other) const {
    return base_ <= other.base() && other.end() <= end();
  }

 private:
  Address base_ = nullptr;
  size_t size_ = 0;
};

// PageMemory provides the backing of a single normal or large page.
class V8_EXPORT_PRIVATE PageMemory final {
 public:
  PageMemory(MemoryRegion overall, MemoryRegion writeable)
      : overall_(overall), writable_(writeable) {
    DCHECK(overall.Contains(writeable));
  }

  const MemoryRegion writeable_region() const { return writable_; }
  const MemoryRegion overall_region() const { return overall_; }

 private:
  MemoryRegion overall_;
  MemoryRegion writable_;
};

class V8_EXPORT_PRIVATE PageMemoryRegion {
 public:
  virtual ~PageMemoryRegion();

  const MemoryRegion reserved_region() const { return reserved_region_; }
  bool is_large() const { return is_large_; }

  // Lookup writeable base for an |address| that's contained in
  // PageMemoryRegion. Filters out addresses that are contained in
  // non-writeable regions (e.g. guard pages).
  inline Address Lookup(ConstAddress address) const;

  // Disallow copy/move.
  PageMemoryRegion(const PageMemoryRegion&) = delete;
  PageMemoryRegion& operator=(const PageMemoryRegion&) = delete;

  virtual void UnprotectForTesting() = 0;

 protected:
  PageMemoryRegion(PageAllocator&, FatalOutOfMemoryHandler&, MemoryRegion,
                   bool);

  PageAllocator& allocator_;
  FatalOutOfMemoryHandler& oom_handler_;
  const MemoryRegion reserved_region_;
  const bool is_large_;
};

// NormalPageMemoryRegion serves kNumPageRegions normal-sized PageMemory
// objects.
class V8_EXPORT_PRIVATE NormalPageMemoryRegion final : public PageMemoryRegion {
 public:
  static constexpr size_t kNumPageRegions = 10;

  NormalPageMemoryRegion(PageAllocator&, FatalOutOfMemoryHandler&);
  ~NormalPageMemoryRegion() override;

  const PageMemory GetPageMemory(size_t index) const {
    DCHECK_LT(index, kNumPageRegions);
    return PageMemory(
        MemoryRegion(reserved_region().base() + kPageSize * index, kPageSize),
        MemoryRegion(
            reserved_region().base() + kPageSize * index + kGuardPageSize,
            kPageSize - 2 * kGuardPageSize));
  }

  // Allocates a normal page at |writeable_base| address. Changes page
  // protection.
  void Allocate(Address writeable_base);

  // Frees a normal page at |writeable_base| address. Changes page
  // protection.
  void Free(Address);

  inline Address Lookup(ConstAddress) const;

  void UnprotectForTesting() final;

 private:
  void ChangeUsed(size_t index, bool value) {
    DCHECK_LT(index, kNumPageRegions);
    DCHECK_EQ(value, !page_memories_in_use_[index]);
    page_memories_in_use_[index] = value;
  }

  size_t GetIndex(ConstAddress address) const {
    return static_cast<size_t>(address - reserved_region().base()) >>
           kPageSizeLog2;
  }

  std::array<bool, kNumPageRegions> page_memories_in_use_ = {};
};
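
// Documentation-only sketch of how GetPageMemory() carves one slot out of the
// reservation. `region` below is a placeholder for an already-constructed
// NormalPageMemoryRegion; concrete numbers depend on the platform's kPageSize
// and kGuardPageSize.
//
//   const PageMemory pm = region.GetPageMemory(2);
//   // Overall region: the full slot inside the reservation, i.e.
//   //   [base + 2 * kPageSize, base + 3 * kPageSize).
//   // Writeable region: the same slot minus a guard page at either end, i.e.
//   //   [base + 2 * kPageSize + kGuardPageSize,
//   //    base + 3 * kPageSize - kGuardPageSize).
//   DCHECK(pm.overall_region().Contains(pm.writeable_region()));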
// LargePageMemoryRegion serves a single large PageMemory object.
class V8_EXPORT_PRIVATE LargePageMemoryRegion final : public PageMemoryRegion {
 public:
  LargePageMemoryRegion(PageAllocator&, FatalOutOfMemoryHandler&, size_t);
  ~LargePageMemoryRegion() override;

  const PageMemory GetPageMemory() const {
    return PageMemory(
        MemoryRegion(reserved_region().base(), reserved_region().size()),
        MemoryRegion(reserved_region().base() + kGuardPageSize,
                     reserved_region().size() - 2 * kGuardPageSize));
  }

  inline Address Lookup(ConstAddress) const;

  void UnprotectForTesting() final;
};

// A PageMemoryRegionTree is a binary search tree of PageMemoryRegions sorted
// by reserved base addresses.
//
// The tree does not keep its elements alive but merely provides indexing
// capabilities.
class V8_EXPORT_PRIVATE PageMemoryRegionTree final {
 public:
  PageMemoryRegionTree();
  ~PageMemoryRegionTree();

  void Add(PageMemoryRegion*);
  void Remove(PageMemoryRegion*);

  inline PageMemoryRegion* Lookup(ConstAddress) const;

 private:
  std::map<ConstAddress, PageMemoryRegion*> set_;
};

// A pool of PageMemory objects represented by the writeable base addresses.
//
// The pool does not keep its elements alive but merely provides pooling
// capabilities.
class V8_EXPORT_PRIVATE NormalPageMemoryPool final {
 public:
  static constexpr size_t kNumPoolBuckets = 16;

  using Result = std::pair<NormalPageMemoryRegion*, Address>;

  NormalPageMemoryPool();
  ~NormalPageMemoryPool();

  void Add(size_t, NormalPageMemoryRegion*, Address);
  Result Take(size_t);

 private:
  std::vector<Result> pool_[kNumPoolBuckets];
};

// A backend that is used for allocating and freeing normal and large pages.
//
// Internally maintains a set of PageMemoryRegions. The backend keeps its used
// regions alive.
class V8_EXPORT_PRIVATE PageBackend final {
 public:
  PageBackend(PageAllocator&, FatalOutOfMemoryHandler&);
  ~PageBackend();

  // Allocates a normal page from the backend.
  //
  // Returns the writeable base of the region.
  Address AllocateNormalPageMemory(size_t);

  // Returns normal page memory back to the backend. Expects the
  // |writeable_base| returned by |AllocateNormalPageMemory()|.
  void FreeNormalPageMemory(size_t, Address writeable_base);

  // Allocates a large page from the backend.
  //
  // Returns the writeable base of the region.
  Address AllocateLargePageMemory(size_t size);

  // Returns large page memory back to the backend. Expects the
  // |writeable_base| returned by |AllocateLargePageMemory()|.
  void FreeLargePageMemory(Address writeable_base);

  // Returns the writeable base if |address| is contained in a valid page
  // memory.
  inline Address Lookup(ConstAddress) const;

  // Disallow copy/move.
  PageBackend(const PageBackend&) = delete;
  PageBackend& operator=(const PageBackend&) = delete;

 private:
  // Guards against concurrent uses of `Lookup()`.
  mutable v8::base::Mutex mutex_;
  PageAllocator& allocator_;
  FatalOutOfMemoryHandler& oom_handler_;
  NormalPageMemoryPool page_pool_;
  PageMemoryRegionTree page_memory_region_tree_;
  std::vector<std::unique_ptr<PageMemoryRegion>> normal_page_memory_regions_;
  std::unordered_map<PageMemoryRegion*, std::unique_ptr<PageMemoryRegion>>
      large_page_memory_regions_;
};
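
// Documentation-only usage sketch for PageBackend. `allocator`, `oom_handler`,
// and `bucket` are placeholders assumed to exist at the call site (`bucket`
// stands in for the unnamed size_t parameter of the normal-page calls), and
// error handling is omitted.
//
//   PageBackend backend(allocator, oom_handler);
//
//   // Normal pages: the returned address is the writeable base, i.e. the
//   // payload between the two guard pages.
//   Address normal_base = backend.AllocateNormalPageMemory(bucket);
//
//   // Lookup() maps an interior address back to its page's writeable base and
//   // returns nullptr for guard pages or memory the backend does not own.
//   DCHECK_EQ(normal_base, backend.Lookup(normal_base + 1));
//
//   // Freeing expects the writeable base; in this sketch the same size_t
//   // argument used for allocation is passed back as well.
//   backend.FreeNormalPageMemory(bucket, normal_base);
//
//   // Large pages are sized per allocation and identified by writeable base.
//   Address large_base = backend.AllocateLargePageMemory(1024 * 1024);
//   backend.FreeLargePageMemory(large_base);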
// Returns true if the provided allocator supports committing at the required
// granularity.
inline bool SupportsCommittingGuardPages(PageAllocator& allocator) {
  return kGuardPageSize != 0 &&
         kGuardPageSize % allocator.CommitPageSize() == 0;
}

Address NormalPageMemoryRegion::Lookup(ConstAddress address) const {
  size_t index = GetIndex(address);
  if (!page_memories_in_use_[index]) return nullptr;
  const MemoryRegion writeable_region = GetPageMemory(index).writeable_region();
  return writeable_region.Contains(address) ? writeable_region.base() : nullptr;
}

Address LargePageMemoryRegion::Lookup(ConstAddress address) const {
  const MemoryRegion writeable_region = GetPageMemory().writeable_region();
  return writeable_region.Contains(address) ? writeable_region.base() : nullptr;
}

Address PageMemoryRegion::Lookup(ConstAddress address) const {
  DCHECK(reserved_region().Contains(address));
  return is_large()
             ? static_cast<const LargePageMemoryRegion*>(this)->Lookup(address)
             : static_cast<const NormalPageMemoryRegion*>(this)->Lookup(
                   address);
}

PageMemoryRegion* PageMemoryRegionTree::Lookup(ConstAddress address) const {
  auto it = set_.upper_bound(address);
  // This check also covers set_.size() > 0, since for an empty map it is
  // guaranteed that begin() == end().
  if (it == set_.begin()) return nullptr;
  auto* result = std::next(it, -1)->second;
  if (address < result->reserved_region().end()) return result;
  return nullptr;
}

Address PageBackend::Lookup(ConstAddress address) const {
  v8::base::MutexGuard guard(&mutex_);
  PageMemoryRegion* pmr = page_memory_region_tree_.Lookup(address);
  return pmr ? pmr->Lookup(address) : nullptr;
}

}  // namespace internal
}  // namespace cppgc

#endif  // V8_HEAP_CPPGC_PAGE_MEMORY_H_