member-storage.h
1// Copyright 2022 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_
6#define INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_
7
8#include <atomic>
9#include <cstddef>
10#include <type_traits>
11
16#include "v8config.h" // NOLINT(build/include_directory)
17
18namespace cppgc {
19namespace internal {
20
21enum class WriteBarrierSlotType {
22 kCompressed,
23 kUncompressed,
24};
25
26#if defined(CPPGC_POINTER_COMPRESSION)
27
28#if defined(__clang__)
29// Attribute const allows the compiler to assume that CageBaseGlobal::g_base_
30// doesn't change (e.g. across calls) and thereby avoid redundant loads.
31#define CPPGC_CONST __attribute__((const))
32#define CPPGC_REQUIRE_CONSTANT_INIT \
33 __attribute__((require_constant_initialization))
34#else // defined(__clang__)
35#define CPPGC_CONST
36#define CPPGC_REQUIRE_CONSTANT_INIT
37#endif // defined(__clang__)
38
39class V8_EXPORT CageBaseGlobal final {
40 public:
41 V8_INLINE CPPGC_CONST static uintptr_t Get() {
42 CPPGC_DCHECK(IsBaseConsistent());
43 return g_base_.base;
44 }
45
46 V8_INLINE CPPGC_CONST static bool IsSet() {
47 CPPGC_DCHECK(IsBaseConsistent());
48 return (g_base_.base & ~kLowerHalfWordMask) != 0;
49 }
50
51 private:
52 // We keep the lower halfword as ones to speed up decompression.
53 static constexpr uintptr_t kLowerHalfWordMask =
54 (api_constants::kCagedHeapReservationAlignment - 1);
55
56 static union alignas(api_constants::kCachelineSize) Base {
57 uintptr_t base;
58 char cache_line[api_constants::kCachelineSize];
59 } g_base_ CPPGC_REQUIRE_CONSTANT_INIT;
60
61 CageBaseGlobal() = delete;
62
63 V8_INLINE static bool IsBaseConsistent() {
64 return kLowerHalfWordMask == (g_base_.base & kLowerHalfWordMask);
65 }
66
67 friend class CageBaseGlobalUpdater;
68};
69
70#undef CPPGC_REQUIRE_CONSTANT_INIT
71#undef CPPGC_CONST
72
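The comment in CageBaseGlobal above notes that the lower halfword of the stored base is kept as all ones. The effect is that decompression can finish with a single AND against the base: the ones pass the in-cage offset bits through untouched while the upper bits supply the cage address, and values without upper bits (nullptr, the sentinel) come out unchanged. The snippet below is a minimal, self-contained sketch of the invariants that IsSet() and IsBaseConsistent() check; the 4 GiB alignment, the example cage address, and the assumption that the base starts out as just the mask are illustrative only, not the values V8 necessarily uses, and a 64-bit platform is assumed.

#include <cassert>
#include <cstdint>

int main() {
  // Assumed 4 GiB cage alignment for the example; kLowerHalfWordMask is then
  // the all-ones lower halfword, as in CageBaseGlobal.
  constexpr uint64_t kAlignment = uint64_t{4} * 1024 * 1024 * 1024;
  constexpr uint64_t kLowerHalfWordMask = kAlignment - 1;

  // Before a cage exists, the stored word presumably holds only the mask:
  // IsBaseConsistent() already holds, IsSet() does not.
  uint64_t base = kLowerHalfWordMask;
  assert((base & kLowerHalfWordMask) == kLowerHalfWordMask);  // IsBaseConsistent()
  assert((base & ~kLowerHalfWordMask) == 0);                  // !IsSet()

  // Once a cage is reserved, the stored value is the (hypothetical) cage
  // address with the lower halfword forced to ones; both checks pass.
  constexpr uint64_t kCageBase = uint64_t{0x45} << 32;
  base = kCageBase | kLowerHalfWordMask;
  assert((base & kLowerHalfWordMask) == kLowerHalfWordMask);  // IsBaseConsistent()
  assert((base & ~kLowerHalfWordMask) != 0);                  // IsSet()
  return 0;
}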
73class V8_TRIVIAL_ABI CompressedPointer final {
74 public:
75 struct AtomicInitializerTag {};
76
77 using IntegralType = uint32_t;
78 static constexpr auto kWriteBarrierSlotType =
79 WriteBarrierSlotType::kCompressed;
80
81 V8_INLINE CompressedPointer() : value_(0u) {}
82 V8_INLINE explicit CompressedPointer(const void* value,
83 AtomicInitializerTag) {
84 StoreAtomic(value);
85 }
86 V8_INLINE explicit CompressedPointer(const void* ptr)
87 : value_(Compress(ptr)) {}
88 V8_INLINE explicit CompressedPointer(std::nullptr_t) : value_(0u) {}
89 V8_INLINE explicit CompressedPointer(SentinelPointer)
90 : value_(kCompressedSentinel) {}
91
92 V8_INLINE const void* Load() const { return Decompress(value_); }
93 V8_INLINE const void* LoadAtomic() const {
94 return Decompress(
95 reinterpret_cast<const std::atomic<IntegralType>&>(value_).load(
96 std::memory_order_relaxed));
97 }
98
99 V8_INLINE void Store(const void* ptr) { value_ = Compress(ptr); }
100 V8_INLINE void StoreAtomic(const void* value) {
101 reinterpret_cast<std::atomic<IntegralType>&>(value_).store(
102 Compress(value), std::memory_order_relaxed);
103 }
104
105 V8_INLINE void Clear() { value_ = 0u; }
106 V8_INLINE bool IsCleared() const { return !value_; }
107
108 V8_INLINE bool IsSentinel() const { return value_ == kCompressedSentinel; }
109
110 V8_INLINE uint32_t GetAsInteger() const { return value_; }
111
112 V8_INLINE friend bool operator==(CompressedPointer a, CompressedPointer b) {
113 return a.value_ == b.value_;
114 }
115 V8_INLINE friend bool operator!=(CompressedPointer a, CompressedPointer b) {
116 return a.value_ != b.value_;
117 }
118 V8_INLINE friend bool operator<(CompressedPointer a, CompressedPointer b) {
119 return a.value_ < b.value_;
120 }
121 V8_INLINE friend bool operator<=(CompressedPointer a, CompressedPointer b) {
122 return a.value_ <= b.value_;
123 }
124 V8_INLINE friend bool operator>(CompressedPointer a, CompressedPointer b) {
125 return a.value_ > b.value_;
126 }
127 V8_INLINE friend bool operator>=(CompressedPointer a, CompressedPointer b) {
128 return a.value_ >= b.value_;
129 }
130
131 static V8_INLINE IntegralType Compress(const void* ptr) {
132 static_assert(SentinelPointer::kSentinelValue ==
133 1 << api_constants::kPointerCompressionShift,
134 "The compression scheme relies on the sentinel encoded as 1 "
135 "<< kPointerCompressionShift");
136 static constexpr size_t kGigaCageMask =
137 ~(api_constants::kCagedHeapReservationAlignment - 1);
138 static constexpr size_t kPointerCompressionShiftMask =
139 (1 << api_constants::kPointerCompressionShift) - 1;
140
141 CPPGC_DCHECK(CageBaseGlobal::IsSet());
142 const uintptr_t base = CageBaseGlobal::Get();
143 CPPGC_DCHECK(!ptr || ptr == kSentinelPointer ||
144 (base & kGigaCageMask) ==
145 (reinterpret_cast<uintptr_t>(ptr) & kGigaCageMask));
146 CPPGC_DCHECK(
147 (reinterpret_cast<uintptr_t>(ptr) & kPointerCompressionShiftMask) == 0);
148
149 const auto uptr = reinterpret_cast<uintptr_t>(ptr);
150 // Shift the pointer and truncate.
151 auto compressed = static_cast<IntegralType>(
152 uptr >> api_constants::kPointerCompressionShift);
153 // Normal compressed pointers must have the MSB set. This is guaranteed by
154 // the cage alignment.
155 CPPGC_DCHECK((!compressed || compressed == kCompressedSentinel) ||
156 (compressed & (1 << 31)));
157 return compressed;
158 }
159
160 static V8_INLINE void* Decompress(IntegralType ptr) {
161 CPPGC_DCHECK(CageBaseGlobal::IsSet());
162 const uintptr_t base = CageBaseGlobal::Get();
163 return Decompress(ptr, base);
164 }
165
166 static V8_INLINE void* Decompress(IntegralType ptr, uintptr_t base) {
167 CPPGC_DCHECK(CageBaseGlobal::IsSet());
168 CPPGC_DCHECK(base == CageBaseGlobal::Get());
169 // Sign-extend compressed pointer to full width. This ensures that normal
170 // pointers have only 1s in the base part of the address. It's also
171 // important to shift the unsigned value, as otherwise it would result in
172 // undefined behavior.
173 const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(ptr))
174 << api_constants::kPointerCompressionShift;
175 // Set the base part of the address for normal compressed pointers. Note
176 // that nullptr and the sentinel value do not have 1s in the base part and
177 // remain as-is in this operation.
178 return reinterpret_cast<void*>(mask & base);
179 }
180
181 // For a given memory `address`, this method iterates all possible pointers
182 // that can be reasonably recovered with the current compression scheme and
183 // passes them to `callback`.
184 template <typename Callback>
185 static V8_INLINE void VisitPossiblePointers(const void* address,
186 Callback callback);
187
188 private:
189 static constexpr IntegralType kCompressedSentinel =
190 SentinelPointer::kSentinelValue >>
191 api_constants::kPointerCompressionShift;
192 // All constructors initialize `value_`. Do not add a default value here as it
193 // results in a non-atomic write on some builds, even when the atomic version
194 // of the constructor is used.
195 IntegralType value_;
196};
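Compress() and Decompress() above implement the scheme as shift-and-truncate on the way in, then sign-extend, shift back, and AND with the cage base on the way out. The standalone sketch below walks one value through that round trip with assumed constants: a shift of 3, a hypothetical cage base at 2^48, and a base whose bits below the cage boundary are all ones. The real kPointerCompressionShift, cage size, and addresses are configuration dependent; the point is only the arithmetic, including why nullptr and the sentinel survive unchanged.

#include <cassert>
#include <cstdint>

namespace {

constexpr unsigned kShift = 3;                                // assumed shift
constexpr uint64_t kCageSize = uint64_t{1} << (32 + kShift);  // 32 GiB here
constexpr uint64_t kCageBase = uint64_t{1} << 48;             // hypothetical
constexpr uint64_t kBase = kCageBase | (kCageSize - 1);       // low bits as ones
constexpr uint64_t kSentinel = uint64_t{1} << kShift;         // per the static_assert

uint32_t Compress(uint64_t ptr) {
  // Shift the pointer and truncate to 32 bit.
  return static_cast<uint32_t>(ptr >> kShift);
}

uint64_t Decompress(uint32_t compressed) {
  // Sign-extend to full width; for normal pointers the MSB is set, so the
  // upper bits become ones and the AND with the base fills in the cage bits.
  const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(compressed))
                        << kShift;
  return mask & kBase;
}

}  // namespace

int main() {
  // An 8-byte-aligned pointer in the upper half of the cage, so that the
  // compressed value has its MSB set.
  const uint64_t ptr = kCageBase + 0x423456788;
  const uint32_t compressed = Compress(ptr);
  assert(compressed & (uint32_t{1} << 31));
  assert(Decompress(compressed) == ptr);

  // nullptr and the sentinel have no bits in the base part and therefore
  // decompress to themselves.
  assert(Decompress(Compress(0)) == 0);
  assert(Decompress(Compress(kSentinel)) == kSentinel);
  return 0;
}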
197
198template <typename Callback>
199// static
200void CompressedPointer::VisitPossiblePointers(const void* address,
201 Callback callback) {
202 const uintptr_t base = CageBaseGlobal::Get();
203 CPPGC_DCHECK(base);
204 // We may have random compressed pointers on the stack (e.g. due to inlined
205 // collections). These could be present in both halfwords.
206 const uint32_t compressed_low =
207 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(address));
208 callback(CompressedPointer::Decompress(compressed_low, base));
209 const uint32_t compressed_high = static_cast<uint32_t>(
210 reinterpret_cast<uintptr_t>(address) >> (sizeof(uint32_t) * CHAR_BIT));
211 callback(CompressedPointer::Decompress(compressed_high, base));
212 // Iterate possible intermediate values, see `Decompress()`. The intermediate
213 // value of decompression is a 64-bit value where 35 bits are the offset. We
214 // don't assume that the sign-extension bits are stored, so we recover that part here.
215 //
216 // Note that this case conveniently also recovers the full pointer.
217 static constexpr uintptr_t kBitForIntermediateValue =
218 (sizeof(uint32_t) * CHAR_BIT) + api_constants::kPointerCompressionShift;
219 static constexpr uintptr_t kSignExtensionMask =
220 ~((uintptr_t{1} << kBitForIntermediateValue) - 1);
221 const uintptr_t intermediate_sign_extended =
222 reinterpret_cast<uintptr_t>(address) | kSignExtensionMask;
223 callback(reinterpret_cast<void*>(intermediate_sign_extended & base));
224}
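VisitPossiblePointers() exists for conservative scanning: a raw machine word found on the stack may hold a compressed pointer in its low half, one in its high half (for example two adjacent 32-bit slots), or the 64-bit intermediate value of a decompression whose sign-extension bits have not been materialized yet. The sketch below derives the three candidates from one hypothetical stack word, reusing the illustrative constants from the previous sketch; names and values are assumptions for the example only.

#include <climits>
#include <cstdint>
#include <cstdio>

namespace {

constexpr unsigned kShift = 3;  // assumed shift, as in the previous sketch
constexpr uint64_t kBase =
    (uint64_t{1} << 48) | ((uint64_t{1} << (32 + kShift)) - 1);  // hypothetical

uint64_t Decompress(uint32_t compressed) {
  return (static_cast<uint64_t>(static_cast<int32_t>(compressed)) << kShift) &
         kBase;
}

}  // namespace

int main() {
  const uint64_t word = 0x8468ACF19ABCDEF0ull;  // hypothetical stack word

  // Candidate 1: the low 32 bits read as a compressed pointer.
  const uint32_t low = static_cast<uint32_t>(word);
  // Candidate 2: the high 32 bits read as a compressed pointer.
  const uint32_t high =
      static_cast<uint32_t>(word >> (sizeof(uint32_t) * CHAR_BIT));
  // Candidate 3: the whole word read as an intermediate decompression value.
  // Restore the sign-extension bits that may not have been written yet, then
  // apply the base; this also recovers the word if it was a full pointer.
  constexpr uint64_t kBitForIntermediateValue =
      (sizeof(uint32_t) * CHAR_BIT) + kShift;
  constexpr uint64_t kSignExtensionMask =
      ~((uint64_t{1} << kBitForIntermediateValue) - 1);
  const uint64_t intermediate = (word | kSignExtensionMask) & kBase;

  std::printf("%#llx %#llx %#llx\n",
              static_cast<unsigned long long>(Decompress(low)),
              static_cast<unsigned long long>(Decompress(high)),
              static_cast<unsigned long long>(intermediate));
  return 0;
}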
225
226#endif // defined(CPPGC_POINTER_COMPRESSION)
227
228class V8_TRIVIAL_ABI RawPointer final {
229 public:
230 struct AtomicInitializerTag {};
231
232 using IntegralType = uintptr_t;
233 static constexpr auto kWriteBarrierSlotType =
234 WriteBarrierSlotType::kUncompressed;
235
236 V8_INLINE RawPointer() : ptr_(nullptr) {}
237 V8_INLINE explicit RawPointer(const void* ptr, AtomicInitializerTag) {
238 StoreAtomic(ptr);
239 }
240 V8_INLINE explicit RawPointer(const void* ptr) : ptr_(ptr) {}
241
242 V8_INLINE const void* Load() const { return ptr_; }
243 V8_INLINE const void* LoadAtomic() const {
244 return reinterpret_cast<const std::atomic<const void*>&>(ptr_).load(
245 std::memory_order_relaxed);
246 }
247
248 V8_INLINE void Store(const void* ptr) { ptr_ = ptr; }
249 V8_INLINE void StoreAtomic(const void* ptr) {
250 reinterpret_cast<std::atomic<const void*>&>(ptr_).store(
251 ptr, std::memory_order_relaxed);
252 }
253
254 V8_INLINE void Clear() { ptr_ = nullptr; }
255 V8_INLINE bool IsCleared() const { return !ptr_; }
256
257 V8_INLINE bool IsSentinel() const { return ptr_ == kSentinelPointer; }
258
259 V8_INLINE uintptr_t GetAsInteger() const {
260 return reinterpret_cast<uintptr_t>(ptr_);
261 }
262
263 V8_INLINE friend bool operator==(RawPointer a, RawPointer b) {
264 return a.ptr_ == b.ptr_;
265 }
266 V8_INLINE friend bool operator!=(RawPointer a, RawPointer b) {
267 return a.ptr_ != b.ptr_;
268 }
269 V8_INLINE friend bool operator<(RawPointer a, RawPointer b) {
270 return a.ptr_ < b.ptr_;
271 }
272 V8_INLINE friend bool operator<=(RawPointer a, RawPointer b) {
273 return a.ptr_ <= b.ptr_;
274 }
275 V8_INLINE friend bool operator>(RawPointer a, RawPointer b) {
276 return a.ptr_ > b.ptr_;
277 }
278 V8_INLINE friend bool operator>=(RawPointer a, RawPointer b) {
279 return a.ptr_ >= b.ptr_;
280 }
281
282 template <typename Callback>
283 static V8_INLINE void VisitPossiblePointers(const void* address,
284 Callback callback) {
285 // Pass along the full pointer.
286 return callback(const_cast<void*>(address));
287 }
288
289 private:
290 // All constructors initialize `ptr_`. Do not add a default value here as it
291 // results in a non-atomic write on some builds, even when the atomic version
292 // of the constructor is used.
293 const void* ptr_;
294};
295
296#if defined(CPPGC_POINTER_COMPRESSION)
297using DefaultMemberStorage = CompressedPointer;
298#else // !defined(CPPGC_POINTER_COMPRESSION)
299using DefaultMemberStorage = RawPointer;
300#endif // !defined(CPPGC_POINTER_COMPRESSION)
301
302} // namespace internal
303} // namespace cppgc
304
305#endif // INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_
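Whichever storage type is selected, CompressedPointer and RawPointer deliberately expose the same small surface (Load/Store, LoadAtomic/StoreAtomic, Clear/IsCleared, IsSentinel, GetAsInteger, the comparison operators), which is what lets Member-style wrappers such as BasicMember be parameterized on a storage type and lets DefaultMemberStorage pick one at build time. The following is a hypothetical, self-contained illustration of that pattern; UncompressedStorage, DefaultStorage, and ResetIfSet are stand-ins invented for the example, not part of the cppgc API.

#include <cassert>
#include <cstdint>

// Stand-in with the same shape as RawPointer above (non-atomic parts only).
class UncompressedStorage {
 public:
  using IntegralType = uintptr_t;

  UncompressedStorage() : ptr_(nullptr) {}
  explicit UncompressedStorage(const void* ptr) : ptr_(ptr) {}

  const void* Load() const { return ptr_; }
  void Store(const void* ptr) { ptr_ = ptr; }
  void Clear() { ptr_ = nullptr; }
  bool IsCleared() const { return !ptr_; }

 private:
  const void* ptr_;
};

// Mirrors how DefaultMemberStorage selects a storage type at build time.
using DefaultStorage = UncompressedStorage;

// Generic code relies only on the shared interface, the way BasicMember is
// parameterized on a StorageType.
template <typename Storage>
bool ResetIfSet(Storage& slot) {
  if (slot.IsCleared()) return false;
  slot.Clear();
  return true;
}

int main() {
  int object = 42;
  DefaultStorage slot(&object);
  assert(slot.Load() == &object);
  assert(ResetIfSet(slot));
  assert(slot.IsCleared());
  assert(!ResetIfSet(slot));
  return 0;
}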