write-barrier.h
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
#define INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_

#include <cstddef>
#include <cstdint>

#include "cppgc/heap-handle.h"
#include "cppgc/heap-state.h"
#include "cppgc/internal/api-constants.h"
#include "cppgc/internal/atomic-entry-flag.h"
#include "cppgc/internal/base-page-handle.h"
#include "cppgc/internal/member-storage.h"
#include "cppgc/platform.h"
#include "cppgc/sentinel-pointer.h"
#include "cppgc/trace-trait.h"
#include "v8config.h"  // NOLINT(build/include_directory)

#if defined(CPPGC_CAGED_HEAP)
#include "cppgc/internal/caged-heap-local-data.h"
#include "cppgc/internal/caged-heap.h"
#endif

namespace cppgc {

class HeapHandle;

namespace internal {

#if defined(CPPGC_CAGED_HEAP)
class WriteBarrierTypeForCagedHeapPolicy;
#else   // !CPPGC_CAGED_HEAP
class WriteBarrierTypeForNonCagedHeapPolicy;
#endif  // !CPPGC_CAGED_HEAP

class V8_EXPORT WriteBarrier final {
 public:
  enum class Type : uint8_t {
    kNone,
    kMarking,
    kGenerational,
  };

  enum class GenerationalBarrierType : uint8_t {
    kPreciseSlot,
    kPreciseUncompressedSlot,
    kImpreciseSlot,
  };

  struct Params {
    HeapHandle* heap = nullptr;
#if V8_ENABLE_CHECKS
    Type type = Type::kNone;
#endif  // !V8_ENABLE_CHECKS
#if defined(CPPGC_CAGED_HEAP)
    uintptr_t slot_offset = 0;
    uintptr_t value_offset = 0;
#endif  // CPPGC_CAGED_HEAP
  };

  enum class ValueMode {
    kValuePresent,
    kNoValuePresent,
  };

  // Returns the required write barrier for a given `slot` and `value`.
  static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value,
                                            Params& params);
  // Returns the required write barrier for a given `slot` and `value`.
  template <typename MemberStorage>
  static V8_INLINE Type GetWriteBarrierType(const void* slot, MemberStorage,
                                            Params& params);
  // Returns the required write barrier for a given `slot`.
  template <typename HeapHandleCallback>
  static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
                                            HeapHandleCallback callback);
  // Returns the required write barrier for a given `value`.
  static V8_INLINE Type GetWriteBarrierType(const void* value, Params& params);

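  // Example (an illustrative sketch, not part of this header's API): a caller
  // such as a Member<T> assignment is expected to first classify the write
  // and then dispatch on the result, roughly as follows, where `slot` and
  // `value` stand for the caller's slot address and the newly written value:
  //
  //   WriteBarrier::Params params;
  //   const WriteBarrier::Type type =
  //       WriteBarrier::GetWriteBarrierType(slot, value, params);
  //   switch (type) {
  //     case WriteBarrier::Type::kGenerational:
  //       WriteBarrier::GenerationalBarrier<
  //           WriteBarrier::GenerationalBarrierType::kPreciseSlot>(params,
  //                                                                slot);
  //       break;
  //     case WriteBarrier::Type::kMarking:
  //       WriteBarrier::DijkstraMarkingBarrier(params, value);
  //       break;
  //     case WriteBarrier::Type::kNone:
  //       break;
  //   }
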
#ifdef CPPGC_SLIM_WRITE_BARRIER
  // A write barrier that combines `GenerationalBarrier()` and
  // `DijkstraMarkingBarrier()`. We only pass a single parameter here to clobber
  // as few registers as possible.
  template <WriteBarrierSlotType>
  static V8_NOINLINE void V8_PRESERVE_MOST
  CombinedWriteBarrierSlow(const void* slot);
#endif  // CPPGC_SLIM_WRITE_BARRIER

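  // Marking barriers used while incremental/concurrent marking is in
  // progress. Informally: the Dijkstra-style barrier marks the newly written
  // target object so the marker cannot miss it, whereas the Steele-style
  // barrier conservatively causes the passed object to be traced again; see
  // the marker implementation for the authoritative semantics.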
  static V8_INLINE void DijkstraMarkingBarrier(const Params& params,
                                               const void* object);
  static V8_INLINE void DijkstraMarkingBarrierRange(
      const Params& params, const void* first_element, size_t element_size,
      size_t number_of_elements, TraceCallback trace_callback);
  static V8_INLINE void SteeleMarkingBarrier(const Params& params,
                                             const void* object);
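  // Generational barrier (only effective with CPPGC_YOUNG_GENERATION).
  // Informally, it records old-to-young references so a minor GC can treat
  // them as roots without scanning the old generation. The template argument
  // states how precisely the slot is known: a precise (possibly compressed)
  // slot, a precise uncompressed slot, or only the containing source object.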
#if defined(CPPGC_YOUNG_GENERATION)
  template <GenerationalBarrierType>
  static V8_INLINE void GenerationalBarrier(const Params& params,
                                            const void* slot);
#else   // !CPPGC_YOUNG_GENERATION
  template <GenerationalBarrierType>
  static V8_INLINE void GenerationalBarrier(const Params& params,
                                            const void* slot) {}
#endif  // CPPGC_YOUNG_GENERATION

#if V8_ENABLE_CHECKS
  static void CheckParams(Type expected_type, const Params& params);
#else   // !V8_ENABLE_CHECKS
  static void CheckParams(Type expected_type, const Params& params) {}
#endif  // !V8_ENABLE_CHECKS

  // The FlagUpdater class allows cppgc internal to update
  // |write_barrier_enabled_|.
  class FlagUpdater;
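  // Note: IsEnabled() is a cheap, conservative filter backed by an
  // AtomicEntryFlag: a true result only means that some barrier *might* be
  // required, and the policies below still re-check the concrete heap state
  // (e.g. whether incremental marking is actually in progress).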
  static bool IsEnabled() { return write_barrier_enabled_.MightBeEntered(); }

 private:
  WriteBarrier() = delete;

#if defined(CPPGC_CAGED_HEAP)
  using WriteBarrierTypePolicy = WriteBarrierTypeForCagedHeapPolicy;
#else   // !CPPGC_CAGED_HEAP
  using WriteBarrierTypePolicy = WriteBarrierTypeForNonCagedHeapPolicy;
#endif  // !CPPGC_CAGED_HEAP

  static void DijkstraMarkingBarrierSlow(const void* value);
  static void DijkstraMarkingBarrierSlowWithSentinelCheck(const void* value);
  static void DijkstraMarkingBarrierRangeSlow(HeapHandle& heap_handle,
                                              const void* first_element,
                                              size_t element_size,
                                              size_t number_of_elements,
                                              TraceCallback trace_callback);
  static void SteeleMarkingBarrierSlow(const void* value);
  static void SteeleMarkingBarrierSlowWithSentinelCheck(const void* value);

#if defined(CPPGC_YOUNG_GENERATION)
  static CagedHeapLocalData& GetLocalData(HeapHandle&);
  static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
                                      const AgeTable& age_table,
                                      const void* slot, uintptr_t value_offset,
                                      HeapHandle* heap_handle);
  static void GenerationalBarrierForUncompressedSlotSlow(
      const CagedHeapLocalData& local_data, const AgeTable& age_table,
      const void* slot, uintptr_t value_offset, HeapHandle* heap_handle);
  static void GenerationalBarrierForSourceObjectSlow(
      const CagedHeapLocalData& local_data, const void* object,
      HeapHandle* heap_handle);
#endif  // CPPGC_YOUNG_GENERATION

  static AtomicEntryFlag write_barrier_enabled_;
};

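// SetAndReturnType() is a small helper used by the policies below: it returns
// `type` and, in V8_ENABLE_CHECKS builds, also records it in `params` so that
// CheckParams() can later verify that the barrier being emitted matches the
// type computed here.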
template <WriteBarrier::Type type>
V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) {
  if constexpr (type == WriteBarrier::Type::kNone)
    return WriteBarrier::Type::kNone;
#if V8_ENABLE_CHECKS
  params.type = type;
#endif  // !V8_ENABLE_CHECKS
  return type;
}

#if defined(CPPGC_CAGED_HEAP)
class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
 public:
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback,
            typename MemberStorage>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, MemberStorage value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return GetNoSlot(value, params, callback);
  }

 private:
  WriteBarrierTypeForCagedHeapPolicy() = delete;

  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type GetNoSlot(const void* value,
                                                WriteBarrier::Params& params,
                                                HeapHandleCallback) {
    const bool within_cage = CagedHeapBase::IsWithinCage(value);
    if (!within_cage) return WriteBarrier::Type::kNone;

    // We know that |value| points either within the normal page or to the
    // beginning of large-page, so extract the page header by bitmasking.
    BasePageHandle* page =
        BasePageHandle::FromPayload(const_cast<void*>(value));

    HeapHandle& heap_handle = page->heap_handle();
    if (V8_UNLIKELY(heap_handle.is_incremental_marking_in_progress())) {
      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
    }

    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
  }

  template <WriteBarrier::ValueMode value_mode>
  struct ValueModeDispatch;
};

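// The ValueModeDispatch specializations below implement the fast-path
// classification for caged heaps. Roughly, the checks run from cheapest to
// most specific: barrier globally disabled -> kNone; slot/value outside the
// cage -> kNone; incremental marking in progress -> kMarking; otherwise, if
// the young generation is enabled, record cage offsets and request a
// generational barrier -> kGenerational.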
template <>
struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kValuePresent> {
  template <typename HeapHandleCallback, typename MemberStorage>
  static V8_INLINE WriteBarrier::Type Get(const void* slot,
                                          MemberStorage storage,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback) {
    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);

    return BarrierEnabledGet(slot, storage.Load(), params);
  }

  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback) {
    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);

    return BarrierEnabledGet(slot, value, params);
  }

 private:
  static V8_INLINE WriteBarrier::Type BarrierEnabledGet(
      const void* slot, const void* value, WriteBarrier::Params& params) {
    const bool within_cage = CagedHeapBase::AreWithinCage(slot, value);
    if (!within_cage) return WriteBarrier::Type::kNone;

    // We know that |value| points either within the normal page or to the
    // beginning of large-page, so extract the page header by bitmasking.
    BasePageHandle* page =
        BasePageHandle::FromPayload(const_cast<void*>(value));

    HeapHandle& heap_handle = page->heap_handle();
    if (V8_LIKELY(!heap_handle.is_incremental_marking_in_progress())) {
#if defined(CPPGC_YOUNG_GENERATION)
      if (!heap_handle.is_young_generation_enabled())
        return WriteBarrier::Type::kNone;
      params.heap = &heap_handle;
      params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
      params.value_offset = CagedHeapBase::OffsetFromAddress(value);
      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
#else   // !CPPGC_YOUNG_GENERATION
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
#endif  // !CPPGC_YOUNG_GENERATION
    }

    // Use marking barrier.
    params.heap = &heap_handle;
    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
  }
};

template <>
struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kNoValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);

    HeapHandle& handle = callback();
#if defined(CPPGC_YOUNG_GENERATION)
    if (V8_LIKELY(!handle.is_incremental_marking_in_progress())) {
      if (!handle.is_young_generation_enabled()) {
        return WriteBarrier::Type::kNone;
      }
      params.heap = &handle;
      // Check if slot is on stack.
      if (V8_UNLIKELY(!CagedHeapBase::IsWithinCage(slot))) {
        return SetAndReturnType<WriteBarrier::Type::kNone>(params);
      }
      params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
    }
#else   // !defined(CPPGC_YOUNG_GENERATION)
    if (V8_UNLIKELY(!handle.is_incremental_marking_in_progress())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
#endif  // !defined(CPPGC_YOUNG_GENERATION)
    params.heap = &handle;
    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
  }
};

#endif  // CPPGC_CAGED_HEAP

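// Without a caged heap there is no cheap address-range check and no offset
// encoding. This policy therefore derives the heap from the value's page
// header (BasePageHandle::FromPayload) when a value is available and
// otherwise obtains a HeapHandle from the callback; only the marking barrier
// is dispatched here, as the generational barrier presumably relies on the
// caged heap's age table.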
class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
 public:
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, RawPointer value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value.Load(), params,
                                              callback);
  }

  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    // The slot will never be used in `Get()` below.
    return Get<WriteBarrier::ValueMode::kValuePresent>(nullptr, value, params,
                                                       callback);
  }

 private:
  template <WriteBarrier::ValueMode value_mode>
  struct ValueModeDispatch;

  WriteBarrierTypeForNonCagedHeapPolicy() = delete;
};

template <>
struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void*, const void* object,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    // The following check covers nullptr as well as sentinel pointer.
    if (object <= static_cast<void*>(kSentinelPointer)) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
    if (V8_LIKELY(!WriteBarrier::IsEnabled())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
    // We know that |object| is within the normal page or in the beginning of a
    // large page, so extract the page header by bitmasking.
    BasePageHandle* page =
        BasePageHandle::FromPayload(const_cast<void*>(object));

    HeapHandle& heap_handle = page->heap_handle();
    if (V8_LIKELY(heap_handle.is_incremental_marking_in_progress())) {
      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
    }
    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
  }
};

template <>
struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kNoValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void*, const void*,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    if (V8_UNLIKELY(WriteBarrier::IsEnabled())) {
      HeapHandle& handle = callback();
      if (V8_LIKELY(handle.is_incremental_marking_in_progress())) {
        params.heap = &handle;
        return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
      }
    }
    return WriteBarrier::Type::kNone;
  }
};

// static
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* slot, const void* value, WriteBarrier::Params& params) {
  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
                                                               params, []() {});
}

// static
template <typename MemberStorage>
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* slot, MemberStorage value, WriteBarrier::Params& params) {
  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
                                                               params, []() {});
}

// static
template <typename HeapHandleCallback>
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* slot, WriteBarrier::Params& params,
    HeapHandleCallback callback) {
  return WriteBarrierTypePolicy::Get<ValueMode::kNoValuePresent>(
      slot, nullptr, params, callback);
}

// static
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* value, WriteBarrier::Params& params) {
  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(value, params,
                                                               []() {});
}

// static
void WriteBarrier::DijkstraMarkingBarrier(const Params& params,
                                          const void* object) {
  CheckParams(Type::kMarking, params);
#if defined(CPPGC_CAGED_HEAP)
  // Caged heap already filters out sentinels.
  DijkstraMarkingBarrierSlow(object);
#else   // !CPPGC_CAGED_HEAP
  DijkstraMarkingBarrierSlowWithSentinelCheck(object);
#endif  // !CPPGC_CAGED_HEAP
}

// static
void WriteBarrier::DijkstraMarkingBarrierRange(const Params& params,
                                               const void* first_element,
                                               size_t element_size,
                                               size_t number_of_elements,
                                               TraceCallback trace_callback) {
  CheckParams(Type::kMarking, params);
  DijkstraMarkingBarrierRangeSlow(*params.heap, first_element, element_size,
                                  number_of_elements, trace_callback);
}

// static
void WriteBarrier::SteeleMarkingBarrier(const Params& params,
                                        const void* object) {
  CheckParams(Type::kMarking, params);
#if defined(CPPGC_CAGED_HEAP)
  // Caged heap already filters out sentinels.
  SteeleMarkingBarrierSlow(object);
#else   // !CPPGC_CAGED_HEAP
  SteeleMarkingBarrierSlowWithSentinelCheck(object);
#endif  // !CPPGC_CAGED_HEAP
}

#if defined(CPPGC_YOUNG_GENERATION)

// static
template <WriteBarrier::GenerationalBarrierType type>
void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
  CheckParams(Type::kGenerational, params);

  const CagedHeapLocalData& local_data = CagedHeapLocalData::Get();
  const AgeTable& age_table = local_data.age_table;

  // Bail out if the slot (precise or imprecise) is in young generation.
  if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
    return;

  // Dispatch between different types of barriers.
  // TODO(chromium:1029379): Consider reload local_data in the slow path to
  // reduce register pressure.
  if constexpr (type == GenerationalBarrierType::kPreciseSlot) {
    GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset,
                            params.heap);
  } else if constexpr (type ==
                       GenerationalBarrierType::kPreciseUncompressedSlot) {
    GenerationalBarrierForUncompressedSlotSlow(
        local_data, age_table, slot, params.value_offset, params.heap);
  } else {
    GenerationalBarrierForSourceObjectSlow(local_data, slot, params.heap);
  }
}

#endif  // CPPGC_YOUNG_GENERATION

}  // namespace internal
}  // namespace cppgc

#endif  // INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_