#ifndef INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
#define INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_

#include <cstddef>
#include <cstdint>

#include "cppgc/heap-handle.h"
#include "cppgc/internal/atomic-entry-flag.h"
#include "cppgc/internal/base-page-handle.h"
#include "cppgc/internal/member-storage.h"
#include "cppgc/sentinel-pointer.h"
#include "cppgc/trace-trait.h"
#include "v8config.h"  // NOLINT(build/include_directory)

#if defined(CPPGC_CAGED_HEAP)
#include "cppgc/internal/caged-heap-local-data.h"
#include "cppgc/internal/caged-heap.h"
#endif  // CPPGC_CAGED_HEAP

namespace cppgc {
namespace internal {

#if defined(CPPGC_CAGED_HEAP)
class WriteBarrierTypeForCagedHeapPolicy;
#else   // !CPPGC_CAGED_HEAP
class WriteBarrierTypeForNonCagedHeapPolicy;
#endif  // !CPPGC_CAGED_HEAP

class V8_EXPORT WriteBarrier final {
 public:
  enum class Type : uint8_t {
    kNone,
    kMarking,
    kGenerational,
  };

  enum class GenerationalBarrierType : uint8_t {
    kPreciseSlot,
    kPreciseUncompressedSlot,
    kImpreciseSlot,
  };

  struct Params {
    HeapHandle* heap = nullptr;
    Type type = Type::kNone;
#if defined(CPPGC_CAGED_HEAP)
    uintptr_t slot_offset = 0;
    uintptr_t value_offset = 0;
#endif  // CPPGC_CAGED_HEAP
  };

  enum class ValueMode {
    kValuePresent,
    kNoValuePresent,
  };
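  // Params acts as an out-parameter of the GetWriteBarrierType() overloads
  // below and is then handed to the barrier functions: `heap` identifies the
  // heap the write belongs to, and on caged-heap builds `slot_offset` and
  // `value_offset` cache the cage-relative offsets that the generational
  // barrier needs for its age-table lookup. ValueMode records whether the
  // written value is known at the call site (kValuePresent) or only the slot
  // is (kNoValuePresent).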
  static V8_INLINE Type GetWriteBarrierType(const void* slot,
                                            const void* value, Params& params);
  template <typename MemberStorage>
  static V8_INLINE Type GetWriteBarrierType(const void* slot, MemberStorage,
                                            Params& params);
  template <typename HeapHandleCallback>
  static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
                                            HeapHandleCallback callback);
  static V8_INLINE Type GetWriteBarrierType(const void* value, Params& params);
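  // The four overloads differ only in which operands the caller knows: slot
  // and raw value, slot and a MemberStorage-wrapped value, slot only (the heap
  // is produced lazily via `callback`), or value only.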
#ifdef CPPGC_SLIM_WRITE_BARRIER
  template <WriteBarrierSlotType>
  static V8_NOINLINE void V8_PRESERVE_MOST
  CombinedWriteBarrierSlow(const void* slot);
#endif  // CPPGC_SLIM_WRITE_BARRIER
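  // When CPPGC_SLIM_WRITE_BARRIER is enabled, CombinedWriteBarrierSlow() above
  // appears to serve as a single out-of-line barrier entry point that takes
  // only the slot; the V8_PRESERVE_MOST annotation suggests it is meant to
  // keep the inlined call site small and register-friendly. (Inferred reading;
  // the header does not state the rationale.)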
  static V8_INLINE void DijkstraMarkingBarrier(const Params& params,
                                               const void* object);
  static V8_INLINE void DijkstraMarkingBarrierRange(
      const Params& params, const void* first_element, size_t element_size,
      size_t number_of_elements, TraceCallback trace_callback);
  static V8_INLINE void SteeleMarkingBarrier(const Params& params,
                                             const void* object);
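  // In standard barrier terminology (the header itself does not spell this
  // out): DijkstraMarkingBarrier() marks the freshly written value so
  // incremental marking cannot lose it, SteeleMarkingBarrier() re-visits the
  // mutated object itself, and DijkstraMarkingBarrierRange() traces a whole
  // array of elements via `trace_callback`.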
#if defined(CPPGC_YOUNG_GENERATION)
  template <GenerationalBarrierType>
  static V8_INLINE void GenerationalBarrier(const Params& params,
                                            const void* slot);
#else   // !CPPGC_YOUNG_GENERATION
  template <GenerationalBarrierType>
  static V8_INLINE void GenerationalBarrier(const Params& params,
                                            const void* slot) {}
#endif  // CPPGC_YOUNG_GENERATION

  static void CheckParams(Type expected_type, const Params& params);

  static bool IsEnabled() { return write_barrier_enabled_.MightBeEntered(); }
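  // IsEnabled() is the common fast-path check: write_barrier_enabled_ (an
  // AtomicEntryFlag, declared below) is raised while some heap is in a phase
  // that needs barriers, e.g. incremental/concurrent marking, so callers can
  // bail out with a single flag test in the common case.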
 private:
  WriteBarrier() = delete;

#if defined(CPPGC_CAGED_HEAP)
  using WriteBarrierTypePolicy = WriteBarrierTypeForCagedHeapPolicy;
#else   // !CPPGC_CAGED_HEAP
  using WriteBarrierTypePolicy = WriteBarrierTypeForNonCagedHeapPolicy;
#endif  // !CPPGC_CAGED_HEAP
  static void DijkstraMarkingBarrierSlow(const void* value);
  static void DijkstraMarkingBarrierSlowWithSentinelCheck(const void* value);
  static void DijkstraMarkingBarrierRangeSlow(HeapHandle& heap_handle,
                                              const void* first_element,
                                              size_t element_size,
                                              size_t number_of_elements,
                                              TraceCallback trace_callback);
  static void SteeleMarkingBarrierSlow(const void* value);
  static void SteeleMarkingBarrierSlowWithSentinelCheck(const void* value);

#if defined(CPPGC_YOUNG_GENERATION)
  static CagedHeapLocalData& GetLocalData(HeapHandle&);
  static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
                                      const AgeTable& age_table,
                                      const void* slot, uintptr_t value_offset,
                                      HeapHandle* heap_handle);
  static void GenerationalBarrierForUncompressedSlotSlow(
      const CagedHeapLocalData& local_data, const AgeTable& age_table,
      const void* slot, uintptr_t value_offset, HeapHandle* heap_handle);
  static void GenerationalBarrierForSourceObjectSlow(
      const CagedHeapLocalData& local_data, const void* object,
      HeapHandle* heap_handle);
#endif  // CPPGC_YOUNG_GENERATION

  static AtomicEntryFlag write_barrier_enabled_;
};

template <WriteBarrier::Type type>
V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) {
  if constexpr (type == WriteBarrier::Type::kNone)
    return WriteBarrier::Type::kNone;
  params.type = type;
  return type;
}
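// SetAndReturnType() records the computed barrier type in `params` (so that
// CheckParams() can later verify that the caller passed matching Params) and
// returns it; for Type::kNone nothing needs to be recorded.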
#if defined(CPPGC_CAGED_HEAP)

class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
 public:
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback,
            typename MemberStorage>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, MemberStorage value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return GetNoSlot(value, params, callback);
  }

 private:
  WriteBarrierTypeForCagedHeapPolicy() = delete;

  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type GetNoSlot(const void* value,
                                                WriteBarrier::Params& params,
                                                HeapHandleCallback) {
    const bool within_cage = CagedHeapBase::IsWithinCage(value);
    if (!within_cage) return WriteBarrier::Type::kNone;

    BasePageHandle* page =
        BasePageHandle::FromPayload(const_cast<void*>(value));

    HeapHandle& heap_handle = page->heap_handle();
    if (V8_UNLIKELY(heap_handle.is_incremental_marking_in_progress())) {
      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
    }
    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
  }

  template <WriteBarrier::ValueMode value_mode>
  struct ValueModeDispatch;
};
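// With a caged heap the policy can classify writes from pointer arithmetic
// alone: CagedHeapBase::IsWithinCage()/AreWithinCage() filter out off-heap
// (and sentinel) pointers, and the page header reached via
// BasePageHandle::FromPayload() reveals whether the owning heap is currently
// marking. The per-ValueMode details live in the ValueModeDispatch
// specializations below.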
template <>
struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kValuePresent> {
  template <typename HeapHandleCallback, typename MemberStorage>
  static V8_INLINE WriteBarrier::Type Get(const void* slot,
                                          MemberStorage storage,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback) {
    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);

    return BarrierEnabledGet(slot, storage.Load(), params);
  }

  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback) {
    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);

    return BarrierEnabledGet(slot, value, params);
  }

 private:
  static V8_INLINE WriteBarrier::Type BarrierEnabledGet(
      const void* slot, const void* value, WriteBarrier::Params& params) {
    const bool within_cage = CagedHeapBase::AreWithinCage(slot, value);
    if (!within_cage) return WriteBarrier::Type::kNone;

    BasePageHandle* page =
        BasePageHandle::FromPayload(const_cast<void*>(value));

    HeapHandle& heap_handle = page->heap_handle();
    if (V8_LIKELY(!heap_handle.is_incremental_marking_in_progress())) {
#if defined(CPPGC_YOUNG_GENERATION)
      if (!heap_handle.is_young_generation_enabled())
        return WriteBarrier::Type::kNone;
      params.heap = &heap_handle;
      params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
      params.value_offset = CagedHeapBase::OffsetFromAddress(value);
      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
#else   // !CPPGC_YOUNG_GENERATION
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
#endif  // !CPPGC_YOUNG_GENERATION
    }

    params.heap = &heap_handle;
    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
  }
};
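// Summary of BarrierEnabledGet() above: writes whose slot or value lie outside
// the cage need no barrier; if the owning heap is marking, a marking barrier
// is requested; otherwise, with CPPGC_YOUNG_GENERATION enabled, the slot and
// value offsets are stashed in `params` and a generational barrier is
// requested.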
template <>
struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kNoValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);

    HeapHandle& handle = callback();
#if defined(CPPGC_YOUNG_GENERATION)
    if (V8_LIKELY(!handle.is_incremental_marking_in_progress())) {
      if (!handle.is_young_generation_enabled()) {
        return WriteBarrier::Type::kNone;
      }
      params.heap = &handle;
      if (V8_UNLIKELY(!CagedHeapBase::IsWithinCage(slot))) {
        return SetAndReturnType<WriteBarrier::Type::kNone>(params);
      }
      params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
    }
#else   // !defined(CPPGC_YOUNG_GENERATION)
    if (V8_UNLIKELY(!handle.is_incremental_marking_in_progress())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
#endif  // !defined(CPPGC_YOUNG_GENERATION)
    params.heap = &handle;
    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
  }
};

#endif  // CPPGC_CAGED_HEAP
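// In the kNoValuePresent case the heap cannot be derived from the (unknown)
// value, so it is obtained from the HeapHandleCallback instead; slots that lie
// outside the cage (for example on-stack slots) never get a generational
// barrier.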
class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
 public:
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, RawPointer value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value.Load(), params,
                                              callback);
  }

  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return Get<WriteBarrier::ValueMode::kValuePresent>(nullptr, value, params,
                                                       callback);
  }

 private:
  WriteBarrierTypeForNonCagedHeapPolicy() = delete;

  template <WriteBarrier::ValueMode value_mode>
  struct ValueModeDispatch;
};
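// Without a caged heap there are no cage-relative offsets and no generational
// barrier; the specializations below only distinguish "marking" from "no
// barrier", using the page header reachable from the written value.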
template <>
struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void*, const void* object,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    // Covers nullptr as well as the sentinel pointer.
    if (object <= static_cast<void*>(kSentinelPointer)) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
    if (V8_LIKELY(!WriteBarrier::IsEnabled())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
    BasePageHandle* page =
        BasePageHandle::FromPayload(const_cast<void*>(object));

    HeapHandle& heap_handle = page->heap_handle();
    if (V8_LIKELY(heap_handle.is_incremental_marking_in_progress())) {
      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
    }
    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
  }
};
template <>
struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kNoValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void*, const void*,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    if (V8_UNLIKELY(WriteBarrier::IsEnabled())) {
      HeapHandle& handle = callback();
      if (V8_LIKELY(handle.is_incremental_marking_in_progress())) {
        params.heap = &handle;
        return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
      }
    }
    return WriteBarrier::Type::kNone;
  }
};
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(const void* slot,
                                                     const void* value,
                                                     Params& params) {
  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
                                                               params, []() {});
}

template <typename MemberStorage>
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(const void* slot,
                                                     MemberStorage value,
                                                     Params& params) {
  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
                                                               params, []() {});
}

template <typename HeapHandleCallback>
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* slot, Params& params, HeapHandleCallback callback) {
  return WriteBarrierTypePolicy::Get<ValueMode::kNoValuePresent>(
      slot, nullptr, params, callback);
}

WriteBarrier::Type WriteBarrier::GetWriteBarrierType(const void* value,
                                                     Params& params) {
  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(value, params,
                                                               []() {});
}
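// Illustrative usage sketch (not part of this header): a pointer store in
// cppgc-managed code is expected to combine the classification above with the
// barrier calls below roughly as follows. `ExampleAssign` and its parameters
// are hypothetical names.
//
//   void ExampleAssign(void* slot, void* value) {
//     WriteBarrier::Params params;
//     const WriteBarrier::Type type =
//         WriteBarrier::GetWriteBarrierType(slot, value, params);
//     switch (type) {
//       case WriteBarrier::Type::kMarking:
//         WriteBarrier::DijkstraMarkingBarrier(params, value);
//         break;
//       case WriteBarrier::Type::kGenerational:
//         WriteBarrier::GenerationalBarrier<
//             WriteBarrier::GenerationalBarrierType::kPreciseSlot>(params,
//                                                                  slot);
//         break;
//       case WriteBarrier::Type::kNone:
//         break;
//     }
//   }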
void WriteBarrier::DijkstraMarkingBarrier(const Params& params,
                                          const void* object) {
  CheckParams(Type::kMarking, params);
#if defined(CPPGC_CAGED_HEAP)
  // The caged-heap policy has already filtered out sentinel values.
  DijkstraMarkingBarrierSlow(object);
#else   // !CPPGC_CAGED_HEAP
  DijkstraMarkingBarrierSlowWithSentinelCheck(object);
#endif  // !CPPGC_CAGED_HEAP
}

void WriteBarrier::DijkstraMarkingBarrierRange(const Params& params,
                                               const void* first_element,
                                               size_t element_size,
                                               size_t number_of_elements,
                                               TraceCallback trace_callback) {
  CheckParams(Type::kMarking, params);
  DijkstraMarkingBarrierRangeSlow(*params.heap, first_element, element_size,
                                  number_of_elements, trace_callback);
}

void WriteBarrier::SteeleMarkingBarrier(const Params& params,
                                        const void* object) {
  CheckParams(Type::kMarking, params);
#if defined(CPPGC_CAGED_HEAP)
  SteeleMarkingBarrierSlow(object);
#else   // !CPPGC_CAGED_HEAP
  SteeleMarkingBarrierSlowWithSentinelCheck(object);
#endif  // !CPPGC_CAGED_HEAP
}
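// GenerationalBarrier() below bails out early when the age table says the
// slot's memory is still young; only writes from older memory need to be
// recorded. Which slow path records them is chosen at compile time via the
// GenerationalBarrierType template parameter: a precise (compressed) slot, a
// precise uncompressed slot, or an imprecise barrier that only knows the
// source object.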
#if defined(CPPGC_YOUNG_GENERATION)

template <WriteBarrier::GenerationalBarrierType type>
void WriteBarrier::GenerationalBarrier(const Params& params,
                                       const void* slot) {
  CheckParams(Type::kGenerational, params);

  const CagedHeapLocalData& local_data = CagedHeapLocalData::Get();
  const AgeTable& age_table = local_data.age_table;

  // Bail out if the slot itself is still in the young generation.
  if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
    return;

  if constexpr (type == GenerationalBarrierType::kPreciseSlot) {
    GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset,
                            params.heap);
  } else if constexpr (type ==
                       GenerationalBarrierType::kPreciseUncompressedSlot) {
    GenerationalBarrierForUncompressedSlotSlow(
        local_data, age_table, slot, params.value_offset, params.heap);
  } else {
    GenerationalBarrierForSourceObjectSlow(local_data, slot, params.heap);
  }
}

#endif  // CPPGC_YOUNG_GENERATION

}  // namespace internal
}  // namespace cppgc

#endif  // INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_