// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CPPGC_OBJECT_ALLOCATOR_H_
#define V8_HEAP_CPPGC_OBJECT_ALLOCATOR_H_

#include "include/cppgc/allocation.h"
#include "include/cppgc/internal/gc-info.h"
#include "include/cppgc/macros.h"
#include "src/base/logging.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/heap-space.h"
#include "src/heap/cppgc/memory.h"
#include "src/heap/cppgc/object-start-bitmap.h"
#include "src/heap/cppgc/raw-heap.h"

namespace cppgc {

namespace internal {
class ObjectAllocator;
class PreFinalizerHandler;
}  // namespace internal

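// Opaque handle used by the public allocation API (see
// include/cppgc/allocation.h); only ObjectAllocator may construct it.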
class V8_EXPORT AllocationHandle {
 private:
  AllocationHandle() = default;
  friend class internal::ObjectAllocator;
};

namespace internal {

class StatsCollector;
class PageBackend;

class V8_EXPORT_PRIVATE ObjectAllocator final : public cppgc::AllocationHandle {
 public:
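  // Smallest size bucket served by a regular space; see
  // GetInitialSpaceIndexForSize().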
  static constexpr size_t kSmallestSpaceSize = 32;

  ObjectAllocator(RawHeap& heap, PageBackend& page_backend,
                  StatsCollector& stats_collector,
                  PreFinalizerHandler& prefinalizer_handler);

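  // Allocates an object of |size| bytes for |gcinfo|. Overloads allow
  // requesting a specific alignment and/or allocation in a custom space.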
  inline void* AllocateObject(size_t size, GCInfoIndex gcinfo);
  inline void* AllocateObject(size_t size, AlignVal alignment,
                              GCInfoIndex gcinfo);
  inline void* AllocateObject(size_t size, GCInfoIndex gcinfo,
                              CustomSpaceIndex space_index);
  inline void* AllocateObject(size_t size, AlignVal alignment,
                              GCInfoIndex gcinfo, CustomSpaceIndex space_index);

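  // Resets the linear allocation buffers (LABs) of all normal page spaces.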
  void ResetLinearAllocationBuffers();

  // Terminate the allocator. Subsequent allocation calls result in a crash.
  void Terminate();

 private:
  bool in_disallow_gc_scope() const;

  // Returns the initially tried SpaceType to allocate an object of |size|
  // bytes on. Returns the largest regular object size bucket for large
  // objects.
  inline static RawHeap::RegularSpaceType GetInitialSpaceIndexForSize(
      size_t size);

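  // Fast-path allocation from the linear allocation buffer (LAB) of |space|;
  // falls back to OutOfLineAllocate() when the LAB cannot serve the request.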
  inline void* AllocateObjectOnSpace(NormalPageSpace& space, size_t size,
                                     GCInfoIndex gcinfo);
  inline void* AllocateObjectOnSpace(NormalPageSpace& space, size_t size,
                                     AlignVal alignment, GCInfoIndex gcinfo);
  void* OutOfLineAllocate(NormalPageSpace&, size_t, AlignVal, GCInfoIndex);
  void* OutOfLineAllocateImpl(NormalPageSpace&, size_t, AlignVal, GCInfoIndex);

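  // Refills the LAB of a space so that it can serve an allocation of at least
  // the given size; the free-list variant returns whether a fitting entry was
  // found.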
  void RefillLinearAllocationBuffer(NormalPageSpace&, size_t);
  bool RefillLinearAllocationBufferFromFreeList(NormalPageSpace&, size_t);

  RawHeap& raw_heap_;
  PageBackend& page_backend_;
  StatsCollector& stats_collector_;
  PreFinalizerHandler& prefinalizer_handler_;
};

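// All AllocateObject() variants reserve space for the HeapObjectHeader and
// round the allocation size up to kAllocationGranularity before dispatching
// to the selected space.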
void* ObjectAllocator::AllocateObject(size_t size, GCInfoIndex gcinfo) {
  DCHECK(!in_disallow_gc_scope());
  const size_t allocation_size =
      RoundUp<kAllocationGranularity>(size + sizeof(HeapObjectHeader));
  const RawHeap::RegularSpaceType type =
      GetInitialSpaceIndexForSize(allocation_size);
  return AllocateObjectOnSpace(NormalPageSpace::From(*raw_heap_.Space(type)),
                               allocation_size, gcinfo);
}

void* ObjectAllocator::AllocateObject(size_t size, AlignVal alignment,
                                      GCInfoIndex gcinfo) {
  DCHECK(!in_disallow_gc_scope());
  const size_t allocation_size =
      RoundUp<kAllocationGranularity>(size + sizeof(HeapObjectHeader));
  const RawHeap::RegularSpaceType type =
      GetInitialSpaceIndexForSize(allocation_size);
  return AllocateObjectOnSpace(NormalPageSpace::From(*raw_heap_.Space(type)),
                               allocation_size, alignment, gcinfo);
}

void* ObjectAllocator::AllocateObject(size_t size, GCInfoIndex gcinfo,
                                      CustomSpaceIndex space_index) {
  DCHECK(!in_disallow_gc_scope());
  const size_t allocation_size =
      RoundUp<kAllocationGranularity>(size + sizeof(HeapObjectHeader));
  return AllocateObjectOnSpace(
      NormalPageSpace::From(*raw_heap_.CustomSpace(space_index)),
      allocation_size, gcinfo);
}

void* ObjectAllocator::AllocateObject(size_t size, AlignVal alignment,
                                      GCInfoIndex gcinfo,
                                      CustomSpaceIndex space_index) {
  DCHECK(!in_disallow_gc_scope());
  const size_t allocation_size =
      RoundUp<kAllocationGranularity>(size + sizeof(HeapObjectHeader));
  return AllocateObjectOnSpace(
      NormalPageSpace::From(*raw_heap_.CustomSpace(space_index)),
      allocation_size, alignment, gcinfo);
}

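// Size buckets: <32 bytes -> kNormal1, <64 -> kNormal2, <128 -> kNormal3,
// everything else (including large objects) -> kNormal4.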
// static
RawHeap::RegularSpaceType ObjectAllocator::GetInitialSpaceIndexForSize(
    size_t size) {
  static_assert(kSmallestSpaceSize == 32,
                "should be half the next larger size");
  if (size < 64) {
    if (size < kSmallestSpaceSize) return RawHeap::RegularSpaceType::kNormal1;
    return RawHeap::RegularSpaceType::kNormal2;
  }
  if (size < 128) return RawHeap::RegularSpaceType::kNormal3;
  return RawHeap::RegularSpaceType::kNormal4;
}

void* ObjectAllocator::AllocateObjectOnSpace(NormalPageSpace& space,
                                             size_t size, AlignVal alignment,
                                             GCInfoIndex gcinfo) {
  // The APIs are set up to support general alignment. Since we want to keep
  // track of the actual usage, the alignment support currently only covers
  // double-word alignment (8 bytes on 32-bit and 16 bytes on 64-bit
  // architectures). This is enforced on the public API via static_asserts
  // against alignof(T).
  STATIC_ASSERT(2 * kAllocationGranularity ==
                api_constants::kMaxSupportedAlignment);
  STATIC_ASSERT(kAllocationGranularity == sizeof(HeapObjectHeader));
  STATIC_ASSERT(kAllocationGranularity ==
                api_constants::kAllocationGranularity);
  DCHECK_EQ(2 * sizeof(HeapObjectHeader), static_cast<size_t>(alignment));
  constexpr size_t kAlignment = 2 * kAllocationGranularity;
  constexpr size_t kAlignmentMask = kAlignment - 1;
  constexpr size_t kPaddingSize = kAlignment - sizeof(HeapObjectHeader);
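  // E.g. on 64-bit, where the granularity is 8 bytes: kAlignment is 16,
  // kAlignmentMask is 0xF, and kPaddingSize is 8 (room for one header-sized
  // filler).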

  NormalPageSpace::LinearAllocationBuffer& current_lab =
      space.linear_allocation_buffer();
  const size_t current_lab_size = current_lab.size();
  // Case 1: The LAB fits the request and the resulting object (LAB start plus
  // header) is already properly aligned.
  bool lab_allocation_will_succeed =
      current_lab_size >= size &&
      (reinterpret_cast<uintptr_t>(current_lab.start() +
                                   sizeof(HeapObjectHeader)) &
       kAlignmentMask) == 0;
  // Case 2: The LAB fits an extended request to manually align the second
  // allocation.
  if (!lab_allocation_will_succeed &&
      (current_lab_size >= (size + kPaddingSize))) {
    void* filler_memory = current_lab.Allocate(kPaddingSize);
    auto& filler = Filler::CreateAt(filler_memory, kPaddingSize);
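    // Also record the filler in the object start bitmap, mirroring the
    // regular object path below.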
    NormalPage::From(BasePage::FromPayload(&filler))
        ->object_start_bitmap()
        .SetBit<AccessMode::kAtomic>(reinterpret_cast<ConstAddress>(&filler));
    lab_allocation_will_succeed = true;
  }
  if (lab_allocation_will_succeed) {
    void* object = AllocateObjectOnSpace(space, size, gcinfo);
    DCHECK_NOT_NULL(object);
    DCHECK_EQ(0u, reinterpret_cast<uintptr_t>(object) & kAlignmentMask);
    return object;
  }
  return OutOfLineAllocate(space, size, alignment, gcinfo);
}

void* ObjectAllocator::AllocateObjectOnSpace(NormalPageSpace& space,
                                             size_t size, GCInfoIndex gcinfo) {
  DCHECK_LT(0u, gcinfo);

  NormalPageSpace::LinearAllocationBuffer& current_lab =
      space.linear_allocation_buffer();
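  // The fast path requires the LAB to fit the request; otherwise defer to the
  // out-of-line slow path.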
  if (current_lab.size() < size) {
    return OutOfLineAllocate(
        space, size, static_cast<AlignVal>(kAllocationGranularity), gcinfo);
  }

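  // Bump-pointer allocation from the LAB: the HeapObjectHeader is placed at
  // the start of the block and the payload follows it.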
  void* raw = current_lab.Allocate(size);
#if !defined(V8_USE_MEMORY_SANITIZER) && !defined(V8_USE_ADDRESS_SANITIZER) && \
    DEBUG
  // For debug builds, unzap only the payload.
  SetMemoryAccessible(static_cast<char*>(raw) + sizeof(HeapObjectHeader),
                      size - sizeof(HeapObjectHeader));
#else
  SetMemoryAccessible(raw, size);
#endif
  auto* header = new (raw) HeapObjectHeader(size, gcinfo);

  // The marker needs to find the object start concurrently.
  NormalPage::From(BasePage::FromPayload(header))
      ->object_start_bitmap()
      .SetBit<AccessMode::kAtomic>(reinterpret_cast<ConstAddress>(header));

  return header->ObjectStart();
}

}  // namespace internal
}  // namespace cppgc

#endif  // V8_HEAP_CPPGC_OBJECT_ALLOCATOR_H_