// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
#define INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_

#include <cstddef>
#include <cstdint>

#include "cppgc/heap-state.h"
#include "cppgc/internal/api-constants.h"
#include "cppgc/internal/atomic-entry-flag.h"
#include "cppgc/platform.h"
#include "cppgc/sentinel-pointer.h"
#include "cppgc/trace-trait.h"
#include "v8config.h"  // NOLINT(build/include_directory)

#if defined(CPPGC_CAGED_HEAP)
#include "cppgc/internal/caged-heap-local-data.h"
#endif

namespace cppgc {

class HeapHandle;

namespace internal {

#if defined(CPPGC_CAGED_HEAP)
class WriteBarrierTypeForCagedHeapPolicy;
#else   // !CPPGC_CAGED_HEAP
class WriteBarrierTypeForNonCagedHeapPolicy;
#endif  // !CPPGC_CAGED_HEAP

class V8_EXPORT WriteBarrier final {
 public:
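  // The kind of barrier the caller must emit for a given write: kNone needs
  // no action, kMarking needs a marking barrier (Dijkstra or Steele) while
  // incremental/concurrent marking is running, and kGenerational needs a
  // generational barrier that records old-to-young pointers.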
  enum class Type : uint8_t {
    kNone,
    kMarking,
    kGenerational,
  };

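  // Scratch data populated by GetWriteBarrierType() and consumed by the
  // barrier functions below; callers should treat it as opaque.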
  struct Params {
    HeapHandle* heap = nullptr;
#if V8_ENABLE_CHECKS
    Type type = Type::kNone;
#endif  // V8_ENABLE_CHECKS
#if defined(CPPGC_CAGED_HEAP)
    uintptr_t start = 0;
    CagedHeapLocalData& caged_heap() const {
      return *reinterpret_cast<CagedHeapLocalData*>(start);
    }
    uintptr_t slot_offset = 0;
    uintptr_t value_offset = 0;
#endif  // CPPGC_CAGED_HEAP
  };

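  // Whether the written value is available to the barrier. With kValuePresent
  // the heap can be derived from the value itself; with kNoValuePresent no
  // value is passed and the heap handle must be obtained via the callback.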
  enum class ValueMode {
    kValuePresent,
    kNoValuePresent,
  };

  // Returns the required write barrier for a given `slot` and `value`.
  static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value,
                                            Params& params);
  // Returns the required write barrier for a given `slot`.
  template <typename HeapHandleCallback>
  static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
                                            HeapHandleCallback callback);
  // Returns the required write barrier for a given `value`.
  static V8_INLINE Type GetWriteBarrierType(const void* value, Params& params);

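  // Barriers for Type::kMarking. Dijkstra-style barriers mark the written
  // value so it cannot be lost by the ongoing marking phase; Steele-style
  // barriers instead cause the written-to object to be re-processed by the
  // marker.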
  static V8_INLINE void DijkstraMarkingBarrier(const Params& params,
                                               const void* object);
  static V8_INLINE void DijkstraMarkingBarrierRange(
      const Params& params, const void* first_element, size_t element_size,
      size_t number_of_elements, TraceCallback trace_callback);
  static V8_INLINE void SteeleMarkingBarrier(const Params& params,
                                             const void* object);
#if defined(CPPGC_YOUNG_GENERATION)
  static V8_INLINE void GenerationalBarrier(const Params& params,
                                            const void* slot);
  static V8_INLINE void GenerationalBarrierForSourceObject(
      const Params& params, const void* inner_pointer);
#else   // !CPPGC_YOUNG_GENERATION
  static V8_INLINE void GenerationalBarrier(const Params& params,
                                            const void* slot) {}
  static V8_INLINE void GenerationalBarrierForSourceObject(
      const Params& params, const void* inner_pointer) {}
#endif  // CPPGC_YOUNG_GENERATION

#if V8_ENABLE_CHECKS
  static void CheckParams(Type expected_type, const Params& params);
#else   // !V8_ENABLE_CHECKS
  static void CheckParams(Type expected_type, const Params& params) {}
#endif  // !V8_ENABLE_CHECKS

  // The IncrementalOrConcurrentMarkingFlagUpdater class allows cppgc
  // internals to update |incremental_or_concurrent_marking_flag_|.
  class IncrementalOrConcurrentMarkingFlagUpdater;
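  // Fast global check for whether any heap might be marking. The underlying
  // flag may report true spuriously (it covers all heaps), so callers must
  // still verify that the heap in question is actually marking.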
  static bool IsAnyIncrementalOrConcurrentMarking() {
    return incremental_or_concurrent_marking_flag_.MightBeEntered();
  }

 private:
  WriteBarrier() = delete;

#if defined(CPPGC_CAGED_HEAP)
  using WriteBarrierTypePolicy = WriteBarrierTypeForCagedHeapPolicy;
#else   // !CPPGC_CAGED_HEAP
  using WriteBarrierTypePolicy = WriteBarrierTypeForNonCagedHeapPolicy;
#endif  // !CPPGC_CAGED_HEAP

  static void DijkstraMarkingBarrierSlow(const void* value);
  static void DijkstraMarkingBarrierSlowWithSentinelCheck(const void* value);
  static void DijkstraMarkingBarrierRangeSlow(HeapHandle& heap_handle,
                                              const void* first_element,
                                              size_t element_size,
                                              size_t number_of_elements,
                                              TraceCallback trace_callback);
  static void SteeleMarkingBarrierSlow(const void* value);
  static void SteeleMarkingBarrierSlowWithSentinelCheck(const void* value);

#if defined(CPPGC_YOUNG_GENERATION)
  static CagedHeapLocalData& GetLocalData(HeapHandle&);
  static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
                                      const AgeTable& age_table,
                                      const void* slot, uintptr_t value_offset);
  static void GenerationalBarrierForSourceObjectSlow(
      const CagedHeapLocalData& local_data, const void* object);
#endif  // CPPGC_YOUNG_GENERATION

  static AtomicEntryFlag incremental_or_concurrent_marking_flag_;
};
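
// Typical dispatch at a call site (a sketch; real call sites live in cppgc's
// pointer wrappers, e.g. Member, and are not part of this header):
//
//   WriteBarrier::Params params;
//   switch (WriteBarrier::GetWriteBarrierType(slot, value, params)) {
//     case WriteBarrier::Type::kMarking:
//       WriteBarrier::DijkstraMarkingBarrier(params, value);
//       break;
//     case WriteBarrier::Type::kGenerational:
//       WriteBarrier::GenerationalBarrier(params, slot);
//       break;
//     case WriteBarrier::Type::kNone:
//       break;
//   }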
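// Records the computed barrier type in Params (only when checks are enabled)
// and returns it. Since `type` is a template parameter, the early return for
// kNone folds away at compile time.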
template <WriteBarrier::Type type>
V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) {
  if (type == WriteBarrier::Type::kNone) return WriteBarrier::Type::kNone;
#if V8_ENABLE_CHECKS
  params.type = type;
#endif  // V8_ENABLE_CHECKS
  return type;
}

#if defined(CPPGC_CAGED_HEAP)
class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
 public:
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return GetNoSlot(value, params, callback);
  }

 private:
  WriteBarrierTypeForCagedHeapPolicy() = delete;

  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type GetNoSlot(const void* value,
                                                WriteBarrier::Params& params,
                                                HeapHandleCallback) {
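    // No slot is available here, so `value` is passed for both arguments;
    // effectively only the value's membership in the cage is checked.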
    if (!TryGetCagedHeap(value, value, params)) {
      return WriteBarrier::Type::kNone;
    }
    if (V8_UNLIKELY(params.caged_heap().is_incremental_marking_in_progress)) {
      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
    }
    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
  }

  template <WriteBarrier::ValueMode value_mode>
  struct ValueModeDispatch;

  static V8_INLINE bool TryGetCagedHeap(const void* slot, const void* value,
                                        WriteBarrier::Params& params) {
    // TODO(chromium:1056170): Check if the null check can be folded in with
    // the rest of the write barrier.
    if (!value) return false;
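    // The cage reservation is aligned to its size, so masking the value's low
    // bits yields the cage base. For slots outside the cage (e.g. on stack),
    // the unsigned subtraction below wraps around and fails the bounds check.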
    params.start = reinterpret_cast<uintptr_t>(value) &
                   ~(api_constants::kCagedHeapReservationAlignment - 1);
    const uintptr_t slot_offset =
        reinterpret_cast<uintptr_t>(slot) - params.start;
    if (slot_offset > api_constants::kCagedHeapReservationSize) {
      // Check if slot is on stack or value is sentinel or nullptr. This relies
      // on the fact that kSentinelPointer is encoded as 0x1.
      return false;
    }
    return true;
  }

  // Returns whether marking is in progress. If marking is not in progress,
  // sets the start of the cage accordingly.
  //
  // TODO(chromium:1056170): Create fast path on API.
  static bool IsMarking(const HeapHandle&, WriteBarrier::Params&);
};

template <>
struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback) {
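    // Without young generation support, a cleared global marking flag alone
    // proves that no barrier is needed. With young generation enabled there
    // is no such fast path: a generational barrier may be required even when
    // no marking is in progress.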
#if !defined(CPPGC_YOUNG_GENERATION)
    if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
#endif  // !CPPGC_YOUNG_GENERATION
    bool within_cage = TryGetCagedHeap(slot, value, params);
    if (!within_cage) {
      return WriteBarrier::Type::kNone;
    }
    if (V8_LIKELY(!params.caged_heap().is_incremental_marking_in_progress)) {
#if defined(CPPGC_YOUNG_GENERATION)
      params.heap = reinterpret_cast<HeapHandle*>(params.start);
      params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
      params.value_offset = reinterpret_cast<uintptr_t>(value) - params.start;
      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
#else   // !CPPGC_YOUNG_GENERATION
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
#endif  // !CPPGC_YOUNG_GENERATION
    }
    params.heap = reinterpret_cast<HeapHandle*>(params.start);
    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
  }
};

template <>
struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kNoValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
#if defined(CPPGC_YOUNG_GENERATION)
    HeapHandle& handle = callback();
    if (V8_LIKELY(!IsMarking(handle, params))) {
      // params.start is populated by IsMarking().
      params.heap = &handle;
      params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
      // params.value_offset stays 0.
      if (params.slot_offset > api_constants::kCagedHeapReservationSize) {
        // Check if slot is on stack.
        return SetAndReturnType<WriteBarrier::Type::kNone>(params);
      }
      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
    }
#else   // !CPPGC_YOUNG_GENERATION
    if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
    HeapHandle& handle = callback();
    if (V8_UNLIKELY(!subtle::HeapState::IsMarking(handle))) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
#endif  // !CPPGC_YOUNG_GENERATION
    params.heap = &handle;
    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
  }
};

#endif  // CPPGC_CAGED_HEAP

class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
 public:
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    // The slot will never be used in `Get()` below.
    return Get<WriteBarrier::ValueMode::kValuePresent>(nullptr, value, params,
                                                       callback);
  }

 private:
  template <WriteBarrier::ValueMode value_mode>
  struct ValueModeDispatch;

  // TODO(chromium:1056170): Create fast path on API.
  static bool IsMarking(const void*, HeapHandle**);
  // TODO(chromium:1056170): Create fast path on API.
  static bool IsMarking(HeapHandle&);

  WriteBarrierTypeForNonCagedHeapPolicy() = delete;
};

template <>
struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void*, const void* object,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    // The following check covers nullptr as well as the sentinel pointer.
    if (object <= static_cast<void*>(kSentinelPointer)) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
    if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
    if (IsMarking(object, &params.heap)) {
      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
    }
    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
  }
};

template <>
struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kNoValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void*, const void*,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    if (V8_UNLIKELY(WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
      HeapHandle& handle = callback();
      if (IsMarking(handle)) {
        params.heap = &handle;
        return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
      }
    }
    return WriteBarrier::Type::kNone;
  }
};

// static
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* slot, const void* value, WriteBarrier::Params& params) {
  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
                                                               params, []() {});
}

// static
template <typename HeapHandleCallback>
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* slot, WriteBarrier::Params& params,
    HeapHandleCallback callback) {
  return WriteBarrierTypePolicy::Get<ValueMode::kNoValuePresent>(
      slot, nullptr, params, callback);
}

// static
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* value, WriteBarrier::Params& params) {
  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(value, params,
                                                               []() {});
}

// static
void WriteBarrier::DijkstraMarkingBarrier(const Params& params,
                                          const void* object) {
  CheckParams(Type::kMarking, params);
#if defined(CPPGC_CAGED_HEAP)
  // Caged heap already filters out sentinels.
  DijkstraMarkingBarrierSlow(object);
#else   // !CPPGC_CAGED_HEAP
  DijkstraMarkingBarrierSlowWithSentinelCheck(object);
#endif  // !CPPGC_CAGED_HEAP
}

// static
void WriteBarrier::DijkstraMarkingBarrierRange(const Params& params,
                                               const void* first_element,
                                               size_t element_size,
                                               size_t number_of_elements,
                                               TraceCallback trace_callback) {
  CheckParams(Type::kMarking, params);
  DijkstraMarkingBarrierRangeSlow(*params.heap, first_element, element_size,
                                  number_of_elements, trace_callback);
}

// static
void WriteBarrier::SteeleMarkingBarrier(const Params& params,
                                        const void* object) {
  CheckParams(Type::kMarking, params);
#if defined(CPPGC_CAGED_HEAP)
  // Caged heap already filters out sentinels.
  SteeleMarkingBarrierSlow(object);
#else   // !CPPGC_CAGED_HEAP
  SteeleMarkingBarrierSlowWithSentinelCheck(object);
#endif  // !CPPGC_CAGED_HEAP
}

#if defined(CPPGC_YOUNG_GENERATION)
// static
void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
  CheckParams(Type::kGenerational, params);

  const CagedHeapLocalData& local_data = params.caged_heap();
  const AgeTable& age_table = local_data.age_table;

  // Bail out if the slot is in the young generation.
  if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
    return;

  GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset);
}

// static
void WriteBarrier::GenerationalBarrierForSourceObject(
    const Params& params, const void* inner_pointer) {
  CheckParams(Type::kGenerational, params);

  const CagedHeapLocalData& local_data = params.caged_heap();
  const AgeTable& age_table = local_data.age_table;

  // Bail out if the source object is in the young generation.
  if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
    return;

  GenerationalBarrierForSourceObjectSlow(local_data, inner_pointer);
}

#endif  // CPPGC_YOUNG_GENERATION

}  // namespace internal
}  // namespace cppgc

#endif  // INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_