1// Copyright 2020 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
6#define INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
7
8#include <cstddef>
9#include <cstdint>
10
11#include "cppgc/heap-handle.h"
12#include "cppgc/heap-state.h"
13#include "cppgc/internal/api-constants.h"
14#include "cppgc/internal/atomic-entry-flag.h"
15#include "cppgc/internal/base-page-handle.h"
16#include "cppgc/internal/member-storage.h"
17#include "cppgc/platform.h"
18#include "cppgc/sentinel-pointer.h"
19#include "cppgc/trace-trait.h"
20#include "v8config.h"  // NOLINT(build/include_directory)
21
22#if defined(CPPGC_CAGED_HEAP)
23#include "cppgc/internal/caged-heap-local-data.h"
24#include "cppgc/internal/caged-heap.h"
25#endif
26
27namespace cppgc {
28
29class HeapHandle;
30
31namespace internal {
32
33#if defined(CPPGC_CAGED_HEAP)
34class WriteBarrierTypeForCagedHeapPolicy;
35#else   // !CPPGC_CAGED_HEAP
36class WriteBarrierTypeForNonCagedHeapPolicy;
37#endif  // !CPPGC_CAGED_HEAP
38
// Entry point for cppgc write barriers. Computes which barrier (if any) a
// pointer write requires and provides the inlined fast paths; the slow paths
// are implemented out-of-line in the cppgc library.
class V8_EXPORT WriteBarrier final {
 public:
  // The kind of barrier that must be executed for a given write, as computed
  // by GetWriteBarrierType().
  enum class Type : uint8_t {
    kNone,
    kMarking,
    kGenerational,
  };

  // Refines how the slot passed to GenerationalBarrier() is interpreted.
  enum class GenerationalBarrierType : uint8_t {
    kPreciseSlot,
    kPreciseUncompressedSlot,
    // Only the containing (source) object is known, not the exact slot.
    kImpreciseSlot,
  };

  // Out-parameter of GetWriteBarrierType(); carries the data needed by the
  // corresponding barrier. Which fields are filled depends on the returned
  // Type and the build configuration.
  struct Params {
    HeapHandle* heap = nullptr;
#if V8_ENABLE_CHECKS
    // Barrier type recorded at computation time; validated against the
    // actually executed barrier by CheckParams().
    Type type = Type::kNone;
#endif  // !V8_ENABLE_CHECKS
#if defined(CPPGC_CAGED_HEAP)
    // Cage-relative offsets of slot and value, consumed by the generational
    // barrier.
    uintptr_t slot_offset = 0;
    uintptr_t value_offset = 0;
#endif  // CPPGC_CAGED_HEAP
  };

  // Whether the written value is available when computing the barrier type.
  enum class ValueMode {
    kValuePresent,
    kNoValuePresent,
  };

  // Returns the required write barrier for a given `slot` and `value`.
  static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value,
                                            Params& params);
  // Returns the required write barrier for a given `slot` and `value` held in
  // member storage.
  template <typename MemberStorage>
  static V8_INLINE Type GetWriteBarrierType(const void* slot, MemberStorage,
                                            Params& params);
  // Returns the required write barrier for a given `slot`; the heap is
  // obtained lazily via `callback` only when actually needed.
  template <typename HeapHandleCallback>
  static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
                                            HeapHandleCallback callback);
  // Returns the required write barrier for a given `value`.
  static V8_INLINE Type GetWriteBarrierType(const void* value, Params& params);

#ifdef CPPGC_SLIM_WRITE_BARRIER
  // A write barrier that combines `GenerationalBarrier()` and
  // `DijkstraMarkingBarrier()`. We only pass a single parameter here to clobber
  // as few registers as possible.
  template <WriteBarrierSlotType>
  static V8_NOINLINE void V8_PRESERVE_MOST
  CombinedWriteBarrierSlow(const void* slot);
#endif  // CPPGC_SLIM_WRITE_BARRIER

  // Dijkstra-style marking barrier for a single object. Must only be used
  // with params computed as Type::kMarking (validated by CheckParams()).
  static V8_INLINE void DijkstraMarkingBarrier(const Params& params,
                                               const void* object);
  // Range version of the Dijkstra barrier, tracing `number_of_elements`
  // elements of `element_size` bytes via `trace_callback`.
  static V8_INLINE void DijkstraMarkingBarrierRange(
      const Params& params, const void* first_element, size_t element_size,
      size_t number_of_elements, TraceCallback trace_callback);
  // Steele-style marking barrier for a single object.
  static V8_INLINE void SteeleMarkingBarrier(const Params& params,
                                             const void* object);
#if defined(CPPGC_YOUNG_GENERATION)
  // Generational barrier recording an old-to-young write for `slot`.
  template <GenerationalBarrierType>
  static V8_INLINE void GenerationalBarrier(const Params& params,
                                            const void* slot);
#else  // !CPPGC_YOUNG_GENERATION
  // No-op when the young generation is compiled out.
  template <GenerationalBarrierType>
  static V8_INLINE void GenerationalBarrier(const Params& params,
                                            const void* slot){}
#endif  // CPPGC_YOUNG_GENERATION

#if V8_ENABLE_CHECKS
  // Verifies that `params` was produced for a barrier of `expected_type`.
  static void CheckParams(Type expected_type, const Params& params);
#else   // !V8_ENABLE_CHECKS
  static void CheckParams(Type expected_type, const Params& params) {}
#endif  // !V8_ENABLE_CHECKS

  // The FlagUpdater class allows cppgc internal to update
  // |write_barrier_enabled_|.
  class FlagUpdater;
  // Cheap global check whether any barrier may currently be required.
  static bool IsEnabled() { return write_barrier_enabled_.MightBeEntered(); }

 private:
  WriteBarrier() = delete;

  // Policy selected at build time; see the classes below.
#if defined(CPPGC_CAGED_HEAP)
  using WriteBarrierTypePolicy = WriteBarrierTypeForCagedHeapPolicy;
#else   // !CPPGC_CAGED_HEAP
  using WriteBarrierTypePolicy = WriteBarrierTypeForNonCagedHeapPolicy;
#endif  // !CPPGC_CAGED_HEAP

  // Out-of-line slow paths, implemented in the cppgc library.
  static void DijkstraMarkingBarrierSlow(const void* value);
  static void DijkstraMarkingBarrierSlowWithSentinelCheck(const void* value);
  static void DijkstraMarkingBarrierRangeSlow(HeapHandle& heap_handle,
                                              const void* first_element,
                                              size_t element_size,
                                              size_t number_of_elements,
                                              TraceCallback trace_callback);
  static void SteeleMarkingBarrierSlow(const void* value);
  static void SteeleMarkingBarrierSlowWithSentinelCheck(const void* value);

#if defined(CPPGC_YOUNG_GENERATION)
  static CagedHeapLocalData& GetLocalData(HeapHandle&);
  static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
                                      const AgeTable& age_table,
                                      const void* slot, uintptr_t value_offset,
                                      HeapHandle* heap_handle);
  static void GenerationalBarrierForUncompressedSlotSlow(
      const CagedHeapLocalData& local_data, const AgeTable& age_table,
      const void* slot, uintptr_t value_offset, HeapHandle* heap_handle);
  static void GenerationalBarrierForSourceObjectSlow(
      const CagedHeapLocalData& local_data, const void* object,
      HeapHandle* heap_handle);
#endif  // CPPGC_YOUNG_GENERATION

  // Entered while any barrier might be needed; queried via IsEnabled().
  static AtomicEntryFlag write_barrier_enabled_;
};
155
156template <WriteBarrier::Type type>
157V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) {
158  if constexpr (type == WriteBarrier::Type::kNone)
159    return WriteBarrier::Type::kNone;
160#if V8_ENABLE_CHECKS
161  params.type = type;
162#endif  // !V8_ENABLE_CHECKS
163  return type;
164}
165
166#if defined(CPPGC_CAGED_HEAP)
// Barrier-type computation for builds with a caged heap. Slot/value
// membership in the cage is tested via CagedHeapBase before any page
// metadata is touched.
class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
 public:
  // Computes the barrier for writing raw pointer `value` into `slot`.
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  // Computes the barrier for writing member storage `value` into `slot`.
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback,
            typename MemberStorage>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, MemberStorage value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  // Computes the barrier when only the written `value` is known (no slot).
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return GetNoSlot(value, params, callback);
  }

 private:
  WriteBarrierTypeForCagedHeapPolicy() = delete;

  // Slot-less path: without a slot no generational barrier can be recorded,
  // so only the marking barrier is considered here.
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type GetNoSlot(const void* value,
                                                WriteBarrier::Params& params,
                                                HeapHandleCallback) {
    // Values outside the cage are not managed by cppgc — no barrier.
    const bool within_cage = CagedHeapBase::IsWithinCage(value);
    if (!within_cage) return WriteBarrier::Type::kNone;

    // We know that |value| points either within the normal page or to the
    // beginning of large-page, so extract the page header by bitmasking.
    BasePageHandle* page =
        BasePageHandle::FromPayload(const_cast<void*>(value));

    HeapHandle& heap_handle = page->heap_handle();
    if (V8_UNLIKELY(heap_handle.is_incremental_marking_in_progress())) {
      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
    }

    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
  }

  template <WriteBarrier::ValueMode value_mode>
  struct ValueModeDispatch;
};
217
// Caged-heap dispatch for writes where the written value is available.
template <>
struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kValuePresent> {
  // Member-storage overload: loading the raw pointer (`storage.Load()`) is
  // deferred until barriers are known to be enabled.
  template <typename HeapHandleCallback, typename MemberStorage>
  static V8_INLINE WriteBarrier::Type Get(const void* slot,
                                          MemberStorage storage,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback) {
    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);

    return BarrierEnabledGet(slot, storage.Load(), params);
  }

  // Raw-pointer overload.
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback) {
    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);

    return BarrierEnabledGet(slot, value, params);
  }

 private:
  // Reached only while some barrier is enabled; decides between marking and
  // generational barriers based on the owning heap's state.
  static V8_INLINE WriteBarrier::Type BarrierEnabledGet(
      const void* slot, const void* value, WriteBarrier::Params& params) {
    // No barrier unless slot and value are both within the cage.
    const bool within_cage = CagedHeapBase::AreWithinCage(slot, value);
    if (!within_cage) return WriteBarrier::Type::kNone;

    // We know that |value| points either within the normal page or to the
    // beginning of large-page, so extract the page header by bitmasking.
    BasePageHandle* page =
        BasePageHandle::FromPayload(const_cast<void*>(value));

    HeapHandle& heap_handle = page->heap_handle();
    if (V8_LIKELY(!heap_handle.is_incremental_marking_in_progress())) {
#if defined(CPPGC_YOUNG_GENERATION)
      // Not marking: the only remaining candidate is the generational
      // barrier, which needs cage-relative offsets for slot and value.
      if (!heap_handle.is_young_generation_enabled())
        return WriteBarrier::Type::kNone;
      params.heap = &heap_handle;
      params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
      params.value_offset = CagedHeapBase::OffsetFromAddress(value);
      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
#else   // !CPPGC_YOUNG_GENERATION
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
#endif  // !CPPGC_YOUNG_GENERATION
    }

    // Use marking barrier.
    params.heap = &heap_handle;
    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
  }
};
272
// Caged-heap dispatch for writes where the written value is not available;
// the heap must instead be obtained lazily via `callback`.
template <>
struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kNoValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    // Common case: no barrier enabled anywhere.
    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);

    HeapHandle& handle = callback();
#if defined(CPPGC_YOUNG_GENERATION)
    if (V8_LIKELY(!handle.is_incremental_marking_in_progress())) {
      if (!handle.is_young_generation_enabled()) {
        return WriteBarrier::Type::kNone;
      }
      params.heap = &handle;
      // Check if slot is on stack.
      if (V8_UNLIKELY(!CagedHeapBase::IsWithinCage(slot))) {
        return SetAndReturnType<WriteBarrier::Type::kNone>(params);
      }
      params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
    }
#else   // !defined(CPPGC_YOUNG_GENERATION)
    if (V8_UNLIKELY(!handle.is_incremental_marking_in_progress())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
#endif  // !defined(CPPGC_YOUNG_GENERATION)
    // Marking is in progress on this heap.
    params.heap = &handle;
    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
  }
};
306
307#endif  // CPPGC_CAGED_HEAP
308
// Barrier-type computation for builds without a caged heap. Page headers are
// recovered directly from object pointers; there is no cage to test against.
class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
 public:
  // Computes the barrier for writing raw pointer `value` into `slot`.
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  // RawPointer overload: unwraps the storage and forwards.
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, RawPointer value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value.Load(), params,
                                              callback);
  }

  // Computes the barrier when only the written `value` is known.
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    // The slot will never be used in `Get()` below.
    return Get<WriteBarrier::ValueMode::kValuePresent>(nullptr, value, params,
                                                       callback);
  }

 private:
  template <WriteBarrier::ValueMode value_mode>
  struct ValueModeDispatch;

  WriteBarrierTypeForNonCagedHeapPolicy() = delete;
};
341
342template <>
343struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
344    WriteBarrier::ValueMode::kValuePresent> {
345  template <typename HeapHandleCallback>
346  static V8_INLINE WriteBarrier::Type Get(const void*, const void* object,
347                                          WriteBarrier::Params& params,
348                                          HeapHandleCallback callback) {
349    // The following check covers nullptr as well as sentinel pointer.
350    if (object <= static_cast<void*>(kSentinelPointer)) {
351      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
352    }
353    if (V8_LIKELY(!WriteBarrier::IsEnabled())) {
354      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
355    }
356    // We know that |object| is within the normal page or in the beginning of a
357    // large page, so extract the page header by bitmasking.
358    BasePageHandle* page =
359        BasePageHandle::FromPayload(const_cast<void*>(object));
360
361    HeapHandle& heap_handle = page->heap_handle();
362    if (V8_LIKELY(heap_handle.is_incremental_marking_in_progress())) {
363      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
364    }
365    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
366  }
367};
368
369template <>
370struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
371    WriteBarrier::ValueMode::kNoValuePresent> {
372  template <typename HeapHandleCallback>
373  static V8_INLINE WriteBarrier::Type Get(const void*, const void*,
374                                          WriteBarrier::Params& params,
375                                          HeapHandleCallback callback) {
376    if (V8_UNLIKELY(WriteBarrier::IsEnabled())) {
377      HeapHandle& handle = callback();
378      if (V8_LIKELY(handle.is_incremental_marking_in_progress())) {
379        params.heap = &handle;
380        return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
381      }
382    }
383    return WriteBarrier::Type::kNone;
384  }
385};
386
// static
// Slot+value overload; dispatches to the configured policy. The callback is
// empty: with the value present the heap need not be supplied by the caller.
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* slot, const void* value, WriteBarrier::Params& params) {
  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
                                                               params, []() {});
}
393
// static
// Member-storage overload; forwards the storage to the configured policy,
// which loads the raw pointer as needed.
template <typename MemberStorage>
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* slot, MemberStorage value, WriteBarrier::Params& params) {
  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
                                                               params, []() {});
}
401
// static
// Slot-only overload: the written value is unknown, so the policy may need
// the heap; `callback` provides it lazily and is invoked only when barriers
// are enabled.
template <typename HeapHandleCallback>
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* slot, WriteBarrier::Params& params,
    HeapHandleCallback callback) {
  return WriteBarrierTypePolicy::Get<ValueMode::kNoValuePresent>(
      slot, nullptr, params, callback);
}
410
// static
// Value-only overload: no slot is available; the heap is reached through the
// value's page, so an empty heap callback suffices.
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* value, WriteBarrier::Params& params) {
  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(value, params,
                                                               []() {});
}
417
// static
// Dijkstra marking barrier fast path; `params` must stem from a
// GetWriteBarrierType() call that returned kMarking (checked builds verify).
void WriteBarrier::DijkstraMarkingBarrier(const Params& params,
                                          const void* object) {
  CheckParams(Type::kMarking, params);
#if defined(CPPGC_CAGED_HEAP)
  // Caged heap already filters out sentinels.
  DijkstraMarkingBarrierSlow(object);
#else   // !CPPGC_CAGED_HEAP
  DijkstraMarkingBarrierSlowWithSentinelCheck(object);
#endif  // !CPPGC_CAGED_HEAP
}
429
// static
// Range version of the Dijkstra barrier. Requires `params.heap` to be set;
// it is dereferenced unconditionally below.
void WriteBarrier::DijkstraMarkingBarrierRange(const Params& params,
                                               const void* first_element,
                                               size_t element_size,
                                               size_t number_of_elements,
                                               TraceCallback trace_callback) {
  CheckParams(Type::kMarking, params);
  DijkstraMarkingBarrierRangeSlow(*params.heap, first_element, element_size,
                                  number_of_elements, trace_callback);
}
440
// static
// Steele marking barrier fast path; mirrors DijkstraMarkingBarrier() in how
// it validates params and selects the slow path per build configuration.
void WriteBarrier::SteeleMarkingBarrier(const Params& params,
                                        const void* object) {
  CheckParams(Type::kMarking, params);
#if defined(CPPGC_CAGED_HEAP)
  // Caged heap already filters out sentinels.
  SteeleMarkingBarrierSlow(object);
#else   // !CPPGC_CAGED_HEAP
  SteeleMarkingBarrierSlowWithSentinelCheck(object);
#endif  // !CPPGC_CAGED_HEAP
}
452
453#if defined(CPPGC_YOUNG_GENERATION)
454
// static
// Generational barrier fast path: consults the age table and forwards to the
// slow path matching the compile-time slot precision `type`.
template <WriteBarrier::GenerationalBarrierType type>
void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
  CheckParams(Type::kGenerational, params);

  const CagedHeapLocalData& local_data = CagedHeapLocalData::Get();
  const AgeTable& age_table = local_data.age_table;

  // Bail out if the slot (precise or imprecise) is in young generation.
  if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
    return;

  // Dispatch between different types of barriers.
  // TODO(chromium:1029379): Consider reload local_data in the slow path to
  // reduce register pressure.
  if constexpr (type == GenerationalBarrierType::kPreciseSlot) {
    GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset,
                            params.heap);
  } else if constexpr (type ==
                       GenerationalBarrierType::kPreciseUncompressedSlot) {
    GenerationalBarrierForUncompressedSlotSlow(
        local_data, age_table, slot, params.value_offset, params.heap);
  } else {
    // Imprecise: `slot` designates the source object, not an exact slot.
    GenerationalBarrierForSourceObjectSlow(local_data, slot, params.heap);
  }
}

#endif  // CPPGC_YOUNG_GENERATION
483
484}  // namespace internal
485}  // namespace cppgc
486
487#endif  // INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
488