1 // Copyright 2020 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_HEAP_CPPGC_OBJECT_START_BITMAP_H_
6 #define V8_HEAP_CPPGC_OBJECT_START_BITMAP_H_
7 
8 #include <limits.h>
9 #include <stdint.h>
10 
11 #include <array>
12 
13 #include "include/cppgc/internal/write-barrier.h"
14 #include "src/base/atomic-utils.h"
15 #include "src/base/bits.h"
16 #include "src/base/macros.h"
17 #include "src/heap/cppgc/globals.h"
18 #include "src/heap/cppgc/heap-object-header.h"
19 
20 namespace cppgc {
21 namespace internal {
22 
// A bitmap for recording object starts. Objects have to be allocated at
// minimum granularity of kGranularity.
//
// Depends on internals such as:
// - kPageSize
// - kAllocationGranularity
//
// ObjectStartBitmap supports concurrent reads from multiple threads but
// only a single mutator thread can write to it.
class V8_EXPORT_PRIVATE ObjectStartBitmap {
 public:
  // Granularity of addresses added to the bitmap.
  static constexpr size_t Granularity() { return kAllocationGranularity; }

  // Maximum number of entries in the bitmap.
  static constexpr size_t MaxEntries() {
    return kReservedForBitmap * kBitsPerCell;
  }

  // `offset` is the base address covered by this bitmap; all addresses passed
  // to the accessors below must be >= `offset`.
  explicit inline ObjectStartBitmap(Address offset);

  // Finds an object header based on a
  // address_maybe_pointing_to_the_middle_of_object. Will search for an object
  // start in decreasing address order.
  //
  // Must only be called on a fully populated bitmap (see
  // MarkAsFullyPopulated()).
  template <AccessMode = AccessMode::kNonAtomic>
  inline HeapObjectHeader* FindHeader(
      ConstAddress address_maybe_pointing_to_the_middle_of_object) const;

  // Records the given address as an object start.
  template <AccessMode = AccessMode::kNonAtomic>
  inline void SetBit(ConstAddress);
  // Removes a previously recorded object start.
  template <AccessMode = AccessMode::kNonAtomic>
  inline void ClearBit(ConstAddress);
  // Returns whether the given address is recorded as an object start.
  template <AccessMode = AccessMode::kNonAtomic>
  inline bool CheckBit(ConstAddress) const;

  // Iterates all object starts recorded in the bitmap.
  //
  // The callback is of type
  //   void(Address)
  // and is passed the object start address as parameter.
  template <typename Callback>
  inline void Iterate(Callback) const;

  // Clear the object start bitmap.
  inline void Clear();

  // Marks the bitmap as fully populated. Unpopulated bitmaps are in an
  // inconsistent state and must be populated before they can be used to find
  // object headers.
  inline void MarkAsFullyPopulated();

 private:
  template <AccessMode = AccessMode::kNonAtomic>
  inline void store(size_t cell_index, uint8_t value);
  template <AccessMode = AccessMode::kNonAtomic>
  inline uint8_t load(size_t cell_index) const;

  // One cell is one byte; each cell tracks kBitsPerCell object starts.
  static constexpr size_t kBitsPerCell = sizeof(uint8_t) * CHAR_BIT;
  static constexpr size_t kCellMask = kBitsPerCell - 1;
  // Number of cells needed to cover a page, rounded up.
  static constexpr size_t kBitmapSize =
      (kPageSize + ((kBitsPerCell * kAllocationGranularity) - 1)) /
      (kBitsPerCell * kAllocationGranularity);
  // Bitmap size rounded up to kAllocationGranularity.
  static constexpr size_t kReservedForBitmap =
      ((kBitmapSize + kAllocationMask) & ~kAllocationMask);

  // Translates an object start address into the index of the containing cell
  // and the bit position within that cell.
  inline void ObjectStartIndexAndBit(ConstAddress, size_t*, size_t*) const;

  // Base address covered by this bitmap.
  const Address offset_;
  // `fully_populated_` is used to denote that the bitmap is populated with all
  // currently allocated objects on the page and is in a consistent state. It is
  // used to guard against using the bitmap for finding headers during
  // concurrent sweeping.
  //
  // Although this flag can be used by both the main thread and concurrent
  // sweeping threads, it is not atomic. The flag should never be accessed by
  // multiple threads at the same time. If data races are observed on this flag,
  // it likely means that the bitmap is queried while concurrent sweeping is
  // active, which is not supported and should be avoided.
  bool fully_populated_ = false;
  // The bitmap contains a bit for every kGranularity aligned address on a
  // NormalPage, i.e., for a page of size kPageSize.
  std::array<uint8_t, kReservedForBitmap> object_start_bit_map_;
};
106 
ObjectStartBitmap::ObjectStartBitmap(Address offset) : offset_(offset) {
  // A freshly created bitmap is empty and thus trivially consistent with a
  // page that has no recorded objects yet, so it can immediately be marked as
  // fully populated. Note that Clear() resets `fully_populated_` and must
  // therefore run first.
  Clear();
  MarkAsFullyPopulated();
}
111 
template <AccessMode mode>
HeapObjectHeader* ObjectStartBitmap::FindHeader(
    ConstAddress address_maybe_pointing_to_the_middle_of_object) const {
  DCHECK(fully_populated_);
  DCHECK_LE(offset_, address_maybe_pointing_to_the_middle_of_object);
  size_t object_offset =
      address_maybe_pointing_to_the_middle_of_object - offset_;
  size_t object_start_number = object_offset / kAllocationGranularity;
  size_t cell_index = object_start_number / kBitsPerCell;
  DCHECK_GT(object_start_bit_map_.size(), cell_index);
  const size_t bit = object_start_number & kCellMask;
  // Mask out all bits above `bit`: only object starts at or before the queried
  // address are candidates.
  uint8_t byte = load<mode>(cell_index) & ((1 << (bit + 1)) - 1);
  // Scan backwards through cells until one containing a recorded object start
  // is found. On a fully populated bitmap the scan terminates at the latest at
  // cell 0 with a non-zero byte.
  while (!byte && cell_index) {
    DCHECK_LT(0u, cell_index);
    byte = load<mode>(--cell_index);
  }
  // The highest set bit in `byte` is the closest preceding object start.
  const int leading_zeroes = v8::base::bits::CountLeadingZeros(byte);
  object_start_number =
      (cell_index * kBitsPerCell) + (kBitsPerCell - 1) - leading_zeroes;
  object_offset = object_start_number * kAllocationGranularity;
  return reinterpret_cast<HeapObjectHeader*>(object_offset + offset_);
}
134 
135 template <AccessMode mode>
SetBit(ConstAddress header_address)136 void ObjectStartBitmap::SetBit(ConstAddress header_address) {
137   size_t cell_index, object_bit;
138   ObjectStartIndexAndBit(header_address, &cell_index, &object_bit);
139   // Only a single mutator thread can write to the bitmap, so no need for CAS.
140   store<mode>(cell_index,
141               static_cast<uint8_t>(load(cell_index) | (1 << object_bit)));
142 }
143 
144 template <AccessMode mode>
ClearBit(ConstAddress header_address)145 void ObjectStartBitmap::ClearBit(ConstAddress header_address) {
146   size_t cell_index, object_bit;
147   ObjectStartIndexAndBit(header_address, &cell_index, &object_bit);
148   store<mode>(cell_index,
149               static_cast<uint8_t>(load(cell_index) & ~(1 << object_bit)));
150 }
151 
152 template <AccessMode mode>
CheckBit(ConstAddress header_address) const153 bool ObjectStartBitmap::CheckBit(ConstAddress header_address) const {
154   size_t cell_index, object_bit;
155   ObjectStartIndexAndBit(header_address, &cell_index, &object_bit);
156   return load<mode>(cell_index) & (1 << object_bit);
157 }
158 
159 template <AccessMode mode>
store(size_t cell_index, uint8_t value)160 void ObjectStartBitmap::store(size_t cell_index, uint8_t value) {
161   if (mode == AccessMode::kNonAtomic) {
162     object_start_bit_map_[cell_index] = value;
163     return;
164   }
165   v8::base::AsAtomicPtr(&object_start_bit_map_[cell_index])
166       ->store(value, std::memory_order_release);
167 }
168 
169 template <AccessMode mode>
load(size_t cell_index) const170 uint8_t ObjectStartBitmap::load(size_t cell_index) const {
171   if (mode == AccessMode::kNonAtomic) {
172     return object_start_bit_map_[cell_index];
173   }
174   return v8::base::AsAtomicPtr(&object_start_bit_map_[cell_index])
175       ->load(std::memory_order_acquire);
176 }
177 
ObjectStartIndexAndBit(ConstAddress header_address, size_t* cell_index, size_t* bit) const178 void ObjectStartBitmap::ObjectStartIndexAndBit(ConstAddress header_address,
179                                                size_t* cell_index,
180                                                size_t* bit) const {
181   const size_t object_offset = header_address - offset_;
182   DCHECK(!(object_offset & kAllocationMask));
183   const size_t object_start_number = object_offset / kAllocationGranularity;
184   *cell_index = object_start_number / kBitsPerCell;
185   DCHECK_GT(kBitmapSize, *cell_index);
186   *bit = object_start_number & kCellMask;
187 }
188 
189 template <typename Callback>
Iterate(Callback callback) const190 inline void ObjectStartBitmap::Iterate(Callback callback) const {
191   for (size_t cell_index = 0; cell_index < kReservedForBitmap; cell_index++) {
192     if (!object_start_bit_map_[cell_index]) continue;
193 
194     uint8_t value = object_start_bit_map_[cell_index];
195     while (value) {
196       const int trailing_zeroes = v8::base::bits::CountTrailingZeros(value);
197       const size_t object_start_number =
198           (cell_index * kBitsPerCell) + trailing_zeroes;
199       const Address object_address =
200           offset_ + (kAllocationGranularity * object_start_number);
201       callback(object_address);
202       // Clear current object bit in temporary value to advance iteration.
203       value &= ~(1 << (object_start_number & kCellMask));
204     }
205   }
206 }
207 
void ObjectStartBitmap::MarkAsFullyPopulated() {
  // Must only be called on a bitmap that is not already marked as populated,
  // i.e., after Clear() once all object starts have been (re-)recorded.
  DCHECK(!fully_populated_);
  fully_populated_ = true;
}
212 
Clear()213 void ObjectStartBitmap::Clear() {
214   fully_populated_ = false;
215   std::fill(object_start_bit_map_.begin(), object_start_bit_map_.end(), 0);
216 }
217 
// A platform aware version of ObjectStartBitmap to provide platform specific
// optimizations (e.g. Use non-atomic stores on ARMv7 when not marking).
class V8_EXPORT_PRIVATE PlatformAwareObjectStartBitmap
    : public ObjectStartBitmap {
 public:
  explicit inline PlatformAwareObjectStartBitmap(Address offset);

  // Same contract as the ObjectStartBitmap counterparts; atomic accesses may
  // be downgraded to non-atomic ones where the platform allows it.
  template <AccessMode = AccessMode::kNonAtomic>
  inline void SetBit(ConstAddress);
  template <AccessMode = AccessMode::kNonAtomic>
  inline void ClearBit(ConstAddress);

 private:
  // Returns true when an atomic access may safely be replaced by a
  // non-atomic one (see the definition below for the conditions).
  template <AccessMode>
  static bool ShouldForceNonAtomic();
};
234 
// Simply forwards to the base class; no platform-specific state is needed.
PlatformAwareObjectStartBitmap::PlatformAwareObjectStartBitmap(Address offset)
    : ObjectStartBitmap(offset) {}
237 
// static
template <AccessMode mode>
bool PlatformAwareObjectStartBitmap::ShouldForceNonAtomic() {
#if defined(V8_TARGET_ARCH_ARM)
  // Use non-atomic accesses on ARMv7 when marking is not active. Without
  // concurrent marking there are no concurrent readers, so the atomicity is
  // not needed — TODO confirm this reasoning against the callers.
  if (mode == AccessMode::kAtomic) {
    if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking()))
      return true;
  }
#endif  // defined(V8_TARGET_ARCH_ARM)
  // On all other architectures the requested access mode is kept as-is.
  return false;
}
250 
251 template <AccessMode mode>
SetBit(ConstAddress header_address)252 void PlatformAwareObjectStartBitmap::SetBit(ConstAddress header_address) {
253   if (ShouldForceNonAtomic<mode>()) {
254     ObjectStartBitmap::SetBit<AccessMode::kNonAtomic>(header_address);
255     return;
256   }
257   ObjectStartBitmap::SetBit<mode>(header_address);
258 }
259 
260 template <AccessMode mode>
ClearBit(ConstAddress header_address)261 void PlatformAwareObjectStartBitmap::ClearBit(ConstAddress header_address) {
262   if (ShouldForceNonAtomic<mode>()) {
263     ObjectStartBitmap::ClearBit<AccessMode::kNonAtomic>(header_address);
264     return;
265   }
266   ObjectStartBitmap::ClearBit<mode>(header_address);
267 }
268 
269 }  // namespace internal
270 }  // namespace cppgc
271 
272 #endif  // V8_HEAP_CPPGC_OBJECT_START_BITMAP_H_
273