Loading...
Searching...
No Matches
write-barrier.h
Go to the documentation of this file.
1// Copyright 2020 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
6#define INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
7
#include <cstddef>
#include <cstdint>

#include "cppgc/heap-handle.h"
#include "cppgc/heap-state.h"
#include "cppgc/internal/api-constants.h"
#include "cppgc/internal/atomic-entry-flag.h"
#include "cppgc/internal/base-page-handle.h"
#include "cppgc/internal/member-storage.h"
#include "cppgc/platform.h"
#include "cppgc/sentinel-pointer.h"
#include "cppgc/trace-trait.h"
#include "v8config.h"  // NOLINT(build/include_directory)

#if defined(CPPGC_CAGED_HEAP)
#include "cppgc/internal/caged-heap-local-data.h"
#include "cppgc/internal/caged-heap.h"
#endif
26
27namespace cppgc {
28
29class HeapHandle;
30
31namespace internal {
32
33#if defined(CPPGC_CAGED_HEAP)
34class WriteBarrierTypeForCagedHeapPolicy;
35#else // !CPPGC_CAGED_HEAP
36class WriteBarrierTypeForNonCagedHeapPolicy;
37#endif // !CPPGC_CAGED_HEAP
38
40 public:
41 enum class Type : uint8_t {
42 kNone,
43 kMarking,
44 kGenerational,
45 };
46
47 enum class GenerationalBarrierType : uint8_t {
48 kPreciseSlot,
49 kPreciseUncompressedSlot,
50 kImpreciseSlot,
51 };
52
53 struct Params {
54 HeapHandle* heap = nullptr;
55#if V8_ENABLE_CHECKS
56 Type type = Type::kNone;
57#endif // !V8_ENABLE_CHECKS
58#if defined(CPPGC_CAGED_HEAP)
59 uintptr_t slot_offset = 0;
60 uintptr_t value_offset = 0;
61#endif // CPPGC_CAGED_HEAP
62 };
63
64 enum class ValueMode {
65 kValuePresent,
66 kNoValuePresent,
67 };
68
69 // Returns the required write barrier for a given `slot` and `value`.
70 static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value,
71 Params& params);
72 // Returns the required write barrier for a given `slot` and `value`.
73 template <typename MemberStorage>
74 static V8_INLINE Type GetWriteBarrierType(const void* slot, MemberStorage,
75 Params& params);
76 // Returns the required write barrier for a given `slot`.
77 template <typename HeapHandleCallback>
78 static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
79 HeapHandleCallback callback);
80 // Returns the required write barrier for a given `value`.
81 static V8_INLINE Type GetWriteBarrierType(const void* value, Params& params);
82
83#ifdef CPPGC_SLIM_WRITE_BARRIER
84 // A write barrier that combines `GenerationalBarrier()` and
85 // `DijkstraMarkingBarrier()`. We only pass a single parameter here to clobber
86 // as few registers as possible.
87 template <WriteBarrierSlotType>
89 CombinedWriteBarrierSlow(const void* slot);
90#endif // CPPGC_SLIM_WRITE_BARRIER
91
92 static V8_INLINE void DijkstraMarkingBarrier(const Params& params,
93 const void* object);
94 static V8_INLINE void DijkstraMarkingBarrierRange(
95 const Params& params, const void* first_element, size_t element_size,
96 size_t number_of_elements, TraceCallback trace_callback);
97 static V8_INLINE void SteeleMarkingBarrier(const Params& params,
98 const void* object);
99#if defined(CPPGC_YOUNG_GENERATION)
100 template <GenerationalBarrierType>
101 static V8_INLINE void GenerationalBarrier(const Params& params,
102 const void* slot);
103#else // !CPPGC_YOUNG_GENERATION
104 template <GenerationalBarrierType>
105 static V8_INLINE void GenerationalBarrier(const Params& params,
106 const void* slot){}
107#endif // CPPGC_YOUNG_GENERATION
108
109#if V8_ENABLE_CHECKS
110 static void CheckParams(Type expected_type, const Params& params);
111#else // !V8_ENABLE_CHECKS
112 static void CheckParams(Type expected_type, const Params& params) {}
113#endif // !V8_ENABLE_CHECKS
114
115 // The FlagUpdater class allows cppgc internal to update
116 // |write_barrier_enabled_|.
117 class FlagUpdater;
118 static bool IsEnabled() { return write_barrier_enabled_.MightBeEntered(); }
119
120 private:
121 WriteBarrier() = delete;
122
123#if defined(CPPGC_CAGED_HEAP)
124 using WriteBarrierTypePolicy = WriteBarrierTypeForCagedHeapPolicy;
125#else // !CPPGC_CAGED_HEAP
126 using WriteBarrierTypePolicy = WriteBarrierTypeForNonCagedHeapPolicy;
127#endif // !CPPGC_CAGED_HEAP
128
129 static void DijkstraMarkingBarrierSlow(const void* value);
130 static void DijkstraMarkingBarrierSlowWithSentinelCheck(const void* value);
131 static void DijkstraMarkingBarrierRangeSlow(HeapHandle& heap_handle,
132 const void* first_element,
133 size_t element_size,
134 size_t number_of_elements,
135 TraceCallback trace_callback);
136 static void SteeleMarkingBarrierSlow(const void* value);
137 static void SteeleMarkingBarrierSlowWithSentinelCheck(const void* value);
138
139#if defined(CPPGC_YOUNG_GENERATION)
140 static CagedHeapLocalData& GetLocalData(HeapHandle&);
141 static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
142 const AgeTable& age_table,
143 const void* slot, uintptr_t value_offset,
144 HeapHandle* heap_handle);
145 static void GenerationalBarrierForUncompressedSlotSlow(
146 const CagedHeapLocalData& local_data, const AgeTable& age_table,
147 const void* slot, uintptr_t value_offset, HeapHandle* heap_handle);
148 static void GenerationalBarrierForSourceObjectSlow(
149 const CagedHeapLocalData& local_data, const void* object,
150 HeapHandle* heap_handle);
151#endif // CPPGC_YOUNG_GENERATION
152
153 static AtomicEntryFlag write_barrier_enabled_;
154};
155
156template <WriteBarrier::Type type>
158 if constexpr (type == WriteBarrier::Type::kNone) {
160 }
161#if V8_ENABLE_CHECKS
162 params.type = type;
163#endif // !V8_ENABLE_CHECKS
164 return type;
165}
166
167#if defined(CPPGC_CAGED_HEAP)
// Barrier-type computation policy used when the heap is caged: pointers are
// classified by checking them against the cage via CagedHeapBase, which
// avoids a page-table lookup for out-of-cage (e.g. off-heap) pointers.
class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
 public:
  // Computes the barrier type for a raw `slot`/`value` pair; dispatches on
  // whether the stored value is available (`value_mode`).
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  // Overload for values held in a (possibly compressed) MemberStorage.
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback,
            typename MemberStorage>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, MemberStorage value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  // Value-only overload: no slot is known (e.g. stack-to-heap references).
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return GetNoSlot(value, params, callback);
  }

 private:
  WriteBarrierTypeForCagedHeapPolicy() = delete;

  // Slot-less path: can only ever yield a marking barrier (generational
  // barriers need a slot to age-check), so only marking state is consulted.
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type GetNoSlot(const void* value,
                                                WriteBarrier::Params& params,
                                                HeapHandleCallback) {
    const bool within_cage = CagedHeapBase::IsWithinCage(value);
    if (!within_cage) return WriteBarrier::Type::kNone;

    // We know that |value| points either within the normal page or to the
    // beginning of large-page, so extract the page header by bitmasking.
    BasePageHandle* page =
        BasePageHandle::FromPayload(const_cast<void*>(value));

    HeapHandle& heap_handle = page->heap_handle();
    if (V8_UNLIKELY(heap_handle.is_incremental_marking_in_progress())) {
      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
    }

    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
  }

  // Specialized below for kValuePresent / kNoValuePresent.
  template <WriteBarrier::ValueMode value_mode>
  struct ValueModeDispatch;
};
218
// Caged-heap dispatch when the stored value is known. Fast-path returns
// kNone while no barrier-requiring GC phase is active; otherwise selects
// between marking and generational barriers based on heap state.
template <>
struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kValuePresent> {
  // MemberStorage overload: decompresses the value only after the cheap
  // enabled-check, keeping the disabled path minimal.
  template <typename HeapHandleCallback, typename MemberStorage>
  static V8_INLINE WriteBarrier::Type Get(const void* slot,
                                          MemberStorage storage,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback) {
    if (V8_LIKELY(!WriteBarrier::IsEnabled())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }

    return BarrierEnabledGet(slot, storage.Load(), params);
  }

  // Raw-pointer overload; same structure as above.
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback) {
    if (V8_LIKELY(!WriteBarrier::IsEnabled())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }

    return BarrierEnabledGet(slot, value, params);
  }

 private:
  // Slow(er) path run only while some barrier is enabled: classifies the
  // store as generational, marking, or none.
  static V8_INLINE WriteBarrier::Type BarrierEnabledGet(
      const void* slot, const void* value, WriteBarrier::Params& params) {
    // Stores where either side lives outside the cage never need a barrier.
    const bool within_cage = CagedHeapBase::AreWithinCage(slot, value);
    if (!within_cage) return WriteBarrier::Type::kNone;

    // We know that |value| points either within the normal page or to the
    // beginning of large-page, so extract the page header by bitmasking.
    BasePageHandle* page =
        BasePageHandle::FromPayload(const_cast<void*>(value));

    HeapHandle& heap_handle = page->heap_handle();
    if (V8_LIKELY(!heap_handle.is_incremental_marking_in_progress())) {
#if defined(CPPGC_YOUNG_GENERATION)
      // Not marking: only a generational barrier may be needed.
      if (!heap_handle.is_young_generation_enabled()) {
        return WriteBarrier::Type::kNone;
      }
      params.heap = &heap_handle;
      params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
      params.value_offset = CagedHeapBase::OffsetFromAddress(value);
      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
#else   // !CPPGC_YOUNG_GENERATION
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
#endif  // !CPPGC_YOUNG_GENERATION
    }

    // Use marking barrier.
    params.heap = &heap_handle;
    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
  }
};
276
// Caged-heap dispatch when the stored value is not known (e.g. the slot is
// being initialized/cleared). The heap is obtained lazily via `callback` so
// the disabled fast path pays nothing.
template <>
struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kNoValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    if (V8_LIKELY(!WriteBarrier::IsEnabled())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }

    HeapHandle& handle = callback();
#if defined(CPPGC_YOUNG_GENERATION)
    if (V8_LIKELY(!handle.is_incremental_marking_in_progress())) {
      // Not marking: only a generational barrier can apply.
      if (!handle.is_young_generation_enabled()) {
        return WriteBarrier::Type::kNone;
      }
      params.heap = &handle;
      // Check if slot is on stack.
      if (V8_UNLIKELY(!CagedHeapBase::IsWithinCage(slot))) {
        return SetAndReturnType<WriteBarrier::Type::kNone>(params);
      }
      params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
    }
#else   // !defined(CPPGC_YOUNG_GENERATION)
    if (V8_UNLIKELY(!handle.is_incremental_marking_in_progress())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
#endif  // !defined(CPPGC_YOUNG_GENERATION)
    // Marking is in progress: conservatively emit a marking barrier.
    params.heap = &handle;
    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
  }
};
311
312#endif // CPPGC_CAGED_HEAP
313
315 public:
316 template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
317 static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
318 WriteBarrier::Params& params,
319 HeapHandleCallback callback) {
320 return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
321 }
322
323 template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
324 static V8_INLINE WriteBarrier::Type Get(const void* slot, RawPointer value,
325 WriteBarrier::Params& params,
326 HeapHandleCallback callback) {
327 return ValueModeDispatch<value_mode>::Get(slot, value.Load(), params,
328 callback);
329 }
330
331 template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
332 static V8_INLINE WriteBarrier::Type Get(const void* value,
333 WriteBarrier::Params& params,
334 HeapHandleCallback callback) {
335 // The slot will never be used in `Get()` below.
336 return Get<WriteBarrier::ValueMode::kValuePresent>(nullptr, value, params,
337 callback);
338 }
339
340 private:
341 template <WriteBarrier::ValueMode value_mode>
342 struct ValueModeDispatch;
343
345};
346
347template <>
348struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
349 WriteBarrier::ValueMode::kValuePresent> {
350 template <typename HeapHandleCallback>
351 static V8_INLINE WriteBarrier::Type Get(const void*, const void* object,
352 WriteBarrier::Params& params,
353 HeapHandleCallback callback) {
354 // The following check covers nullptr as well as sentinel pointer.
355 if (object <= static_cast<void*>(kSentinelPointer)) {
356 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
357 }
359 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
360 }
361 // We know that |object| is within the normal page or in the beginning of a
362 // large page, so extract the page header by bitmasking.
363 BasePageHandle* page =
364 BasePageHandle::FromPayload(const_cast<void*>(object));
365
366 HeapHandle& heap_handle = page->heap_handle();
367 if (V8_LIKELY(heap_handle.is_incremental_marking_in_progress())) {
368 return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
369 }
370 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
371 }
372};
373
374template <>
375struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
376 WriteBarrier::ValueMode::kNoValuePresent> {
377 template <typename HeapHandleCallback>
378 static V8_INLINE WriteBarrier::Type Get(const void*, const void*,
379 WriteBarrier::Params& params,
380 HeapHandleCallback callback) {
382 HeapHandle& handle = callback();
383 if (V8_LIKELY(handle.is_incremental_marking_in_progress())) {
384 params.heap = &handle;
385 return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
386 }
387 }
389 }
390};
391
392// static
394 const void* slot, const void* value, WriteBarrier::Params& params) {
395 return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
396 params, []() {});
397}
398
399// static
400template <typename MemberStorage>
402 const void* slot, MemberStorage value, WriteBarrier::Params& params) {
403 return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
404 params, []() {});
405}
406
407// static
408template <typename HeapHandleCallback>
410 const void* slot, WriteBarrier::Params& params,
411 HeapHandleCallback callback) {
412 return WriteBarrierTypePolicy::Get<ValueMode::kNoValuePresent>(
413 slot, nullptr, params, callback);
414}
415
416// static
418 const void* value, WriteBarrier::Params& params) {
419 return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(value, params,
420 []() {});
421}
422
423// static
425 const void* object) {
427#if defined(CPPGC_CAGED_HEAP)
428 // Caged heap already filters out sentinels.
429 DijkstraMarkingBarrierSlow(object);
430#else // !CPPGC_CAGED_HEAP
431 DijkstraMarkingBarrierSlowWithSentinelCheck(object);
432#endif // !CPPGC_CAGED_HEAP
433}
434
435// static
437 const void* first_element,
438 size_t element_size,
439 size_t number_of_elements,
440 TraceCallback trace_callback) {
442 DijkstraMarkingBarrierRangeSlow(*params.heap, first_element, element_size,
443 number_of_elements, trace_callback);
444}
445
446// static
448 const void* object) {
450#if defined(CPPGC_CAGED_HEAP)
451 // Caged heap already filters out sentinels.
452 SteeleMarkingBarrierSlow(object);
453#else // !CPPGC_CAGED_HEAP
454 SteeleMarkingBarrierSlowWithSentinelCheck(object);
455#endif // !CPPGC_CAGED_HEAP
456}
457
#if defined(CPPGC_YOUNG_GENERATION)

// static
// Generational barrier: records old-to-young stores so minor GCs can find
// them. Bails out early when the slot itself is in the young generation.
// NOTE(review): the `CheckParams` call and the wrapped `else if` condition
// line were lost in extraction; restored here.
template <WriteBarrier::GenerationalBarrierType type>
void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
  CheckParams(Type::kGenerational, params);

  const CagedHeapLocalData& local_data = CagedHeapLocalData::Get();
  const AgeTable& age_table = local_data.age_table;

  // Bail out if the slot (precise or imprecise) is in young generation.
  if (V8_LIKELY(age_table.GetAge(params.slot_offset) ==
                AgeTable::Age::kYoung)) {
    return;
  }

  // Dispatch between different types of barriers.
  // TODO(chromium:1029379): Consider reload local_data in the slow path to
  // reduce register pressure.
  if constexpr (type == GenerationalBarrierType::kPreciseSlot) {
    GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset,
                            params.heap);
  } else if constexpr (type ==
                       GenerationalBarrierType::kPreciseUncompressedSlot) {
    GenerationalBarrierForUncompressedSlotSlow(
        local_data, age_table, slot, params.value_offset, params.heap);
  } else {
    GenerationalBarrierForSourceObjectSlow(local_data, slot, params.heap);
  }
}

#endif  // !CPPGC_YOUNG_GENERATION
490
491} // namespace internal
492} // namespace cppgc
493
494#endif // INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
Definition: heap-handle.h:21
Definition: atomic-entry-flag.h:29
Definition: base-page-handle.h:18
HeapHandle & heap_handle()
Definition: base-page-handle.h:28
static BasePageHandle * FromPayload(void *payload)
Definition: base-page-handle.h:20
Definition: member-storage.h:228
const void * Load() const
Definition: member-storage.h:242
static WriteBarrier::Type Get(const void *slot, RawPointer value, WriteBarrier::Params &params, HeapHandleCallback callback)
Definition: write-barrier.h:324
static WriteBarrier::Type Get(const void *slot, const void *value, WriteBarrier::Params &params, HeapHandleCallback callback)
Definition: write-barrier.h:317
static WriteBarrier::Type Get(const void *value, WriteBarrier::Params &params, HeapHandleCallback callback)
Definition: write-barrier.h:332
Definition: write-barrier.h:39
GenerationalBarrierType
Definition: write-barrier.h:47
static void CheckParams(Type expected_type, const Params &params)
Definition: write-barrier.h:112
Type
Definition: write-barrier.h:41
static Type GetWriteBarrierType(const void *slot, const void *value, Params &params)
Definition: write-barrier.h:393
static void DijkstraMarkingBarrierRange(const Params &params, const void *first_element, size_t element_size, size_t number_of_elements, TraceCallback trace_callback)
Definition: write-barrier.h:436
static void GenerationalBarrier(const Params &params, const void *slot)
Definition: write-barrier.h:105
static bool IsEnabled()
Definition: write-barrier.h:118
static void SteeleMarkingBarrier(const Params &params, const void *object)
Definition: write-barrier.h:447
static void DijkstraMarkingBarrier(const Params &params, const void *object)
Definition: write-barrier.h:424
ValueMode
Definition: write-barrier.h:64
WriteBarrier::Type SetAndReturnType(WriteBarrier::Params &params)
Definition: write-barrier.h:157
Definition: allocation.h:38
void(*)(Visitor *visitor, const void *object) TraceCallback
Definition: trace-trait.h:37
constexpr internal::SentinelPointer kSentinelPointer
Definition: sentinel-pointer.h:35
static WriteBarrier::Type Get(const void *, const void *, WriteBarrier::Params &params, HeapHandleCallback callback)
Definition: write-barrier.h:378
static WriteBarrier::Type Get(const void *, const void *object, WriteBarrier::Params &params, HeapHandleCallback callback)
Definition: write-barrier.h:351
Definition: write-barrier.h:53
HeapHandle * heap
Definition: write-barrier.h:54
#define V8_EXPORT
Definition: v8config.h:854
#define V8_INLINE
Definition: v8config.h:508
#define V8_LIKELY(condition)
Definition: v8config.h:668
#define V8_UNLIKELY(condition)
Definition: v8config.h:667
#define V8_NOINLINE
Definition: v8config.h:593
#define V8_PRESERVE_MOST
Definition: v8config.h:605