// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_
#define INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_

#include <atomic>
#include <climits>
#include <cstddef>
#include <cstdint>
#include <type_traits>

#include "cppgc/internal/api-constants.h"
#include "cppgc/internal/logging.h"
#include "cppgc/sentinel-pointer.h"
#include "v8config.h"  // NOLINT(build/include_directory)

namespace cppgc {
namespace internal {

enum class WriteBarrierSlotType {
  kCompressed,
  kUncompressed,
};

#if defined(CPPGC_POINTER_COMPRESSION)

#if defined(__clang__)
// Attribute const allows the compiler to assume that CageBaseGlobal::g_base_
// doesn't change (e.g. across calls) and thereby avoid redundant loads.
#define CPPGC_CONST __attribute__((const))
#define CPPGC_REQUIRE_CONSTANT_INIT \
  __attribute__((require_constant_initialization))
#else  // defined(__clang__)
#define CPPGC_CONST
#define CPPGC_REQUIRE_CONSTANT_INIT
#endif  // defined(__clang__)

class V8_EXPORT CageBaseGlobal final {
 public:
  V8_INLINE CPPGC_CONST static uintptr_t Get() {
    CPPGC_DCHECK(IsBaseConsistent());
    return g_base_.base;
  }

  V8_INLINE CPPGC_CONST static bool IsSet() {
    CPPGC_DCHECK(IsBaseConsistent());
    return (g_base_.base & ~kLowerHalfWordMask) != 0;
  }

 private:
  // We keep the lower halfword as ones to speed up decompression.
  static constexpr uintptr_t kLowerHalfWordMask =
      (api_constants::kCagedHeapReservationAlignment - 1);
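  // Illustrative value (assumed, not the real constant): with a 4GB cage
  // alignment, kLowerHalfWordMask is 0xFFFF'FFFF, so a cage placed at
  // 0x0000'4501'0000'0000 is stored as
  //   g_base_.base == 0x0000'4501'FFFF'FFFF.
  // The ones in the lower halfword let Decompress() merge base and offset
  // with a single AND instead of an ADD.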

  static union alignas(api_constants::kCachelineSize) Base {
    uintptr_t base;
    char cache_line[api_constants::kCachelineSize];
  } g_base_ CPPGC_REQUIRE_CONSTANT_INIT;

  CageBaseGlobal() = delete;

  V8_INLINE static bool IsBaseConsistent() {
    return kLowerHalfWordMask == (g_base_.base & kLowerHalfWordMask);
  }

  friend class CageBaseGlobalUpdater;
};

#undef CPPGC_REQUIRE_CONSTANT_INIT
#undef CPPGC_CONST

class V8_TRIVIAL_ABI CompressedPointer final {
 public:
  struct AtomicInitializerTag {};

  using IntegralType = uint32_t;
  static constexpr auto kWriteBarrierSlotType =
      WriteBarrierSlotType::kCompressed;

  V8_INLINE CompressedPointer() : value_(0u) {}
  V8_INLINE explicit CompressedPointer(const void* value,
                                       AtomicInitializerTag) {
    StoreAtomic(value);
  }
  V8_INLINE explicit CompressedPointer(const void* ptr)
      : value_(Compress(ptr)) {}
  V8_INLINE explicit CompressedPointer(std::nullptr_t) : value_(0u) {}
  V8_INLINE explicit CompressedPointer(SentinelPointer)
      : value_(kCompressedSentinel) {}

  V8_INLINE const void* Load() const { return Decompress(value_); }
  V8_INLINE const void* LoadAtomic() const {
    return Decompress(
        reinterpret_cast<const std::atomic<IntegralType>&>(value_).load(
            std::memory_order_relaxed));
  }

  V8_INLINE void Store(const void* ptr) { value_ = Compress(ptr); }
  V8_INLINE void StoreAtomic(const void* value) {
    reinterpret_cast<std::atomic<IntegralType>&>(value_).store(
        Compress(value), std::memory_order_relaxed);
  }

  V8_INLINE void Clear() { value_ = 0u; }
  V8_INLINE bool IsCleared() const { return !value_; }

  V8_INLINE bool IsSentinel() const { return value_ == kCompressedSentinel; }

  V8_INLINE uint32_t GetAsInteger() const { return value_; }

  V8_INLINE friend bool operator==(CompressedPointer a, CompressedPointer b) {
    return a.value_ == b.value_;
  }
  V8_INLINE friend bool operator!=(CompressedPointer a, CompressedPointer b) {
    return a.value_ != b.value_;
  }
  V8_INLINE friend bool operator<(CompressedPointer a, CompressedPointer b) {
    return a.value_ < b.value_;
  }
  V8_INLINE friend bool operator<=(CompressedPointer a, CompressedPointer b) {
    return a.value_ <= b.value_;
  }
  V8_INLINE friend bool operator>(CompressedPointer a, CompressedPointer b) {
    return a.value_ > b.value_;
  }
  V8_INLINE friend bool operator>=(CompressedPointer a, CompressedPointer b) {
    return a.value_ >= b.value_;
  }

  static V8_INLINE IntegralType Compress(const void* ptr) {
    static_assert(SentinelPointer::kSentinelValue ==
                      1 << api_constants::kPointerCompressionShift,
                  "The compression scheme relies on the sentinel encoded as 1 "
                  "<< kPointerCompressionShift");
    static constexpr size_t kGigaCageMask =
        ~(api_constants::kCagedHeapReservationAlignment - 1);
    static constexpr size_t kPointerCompressionShiftMask =
        (1 << api_constants::kPointerCompressionShift) - 1;

    CPPGC_DCHECK(CageBaseGlobal::IsSet());
    const uintptr_t base = CageBaseGlobal::Get();
    CPPGC_DCHECK(!ptr || ptr == kSentinelPointer ||
                 (base & kGigaCageMask) ==
                     (reinterpret_cast<uintptr_t>(ptr) & kGigaCageMask));
    CPPGC_DCHECK(
        (reinterpret_cast<uintptr_t>(ptr) & kPointerCompressionShiftMask) == 0);

    const auto uptr = reinterpret_cast<uintptr_t>(ptr);
    // Shift the pointer and truncate.
    auto compressed = static_cast<IntegralType>(
        uptr >> api_constants::kPointerCompressionShift);
    // Normal compressed pointers must have the MSB set. This is guaranteed by
    // the cage alignment.
    CPPGC_DCHECK((!compressed || compressed == kCompressedSentinel) ||
                 (compressed & (1 << 31)));

    // Tell the compiler that decompressing this compressed pointer
    // is a no-op. Note that Clang is unable to inline Decompress()
    // into an assume, so we need to write it out fully.
    [[assume(((static_cast<uint64_t>(static_cast<int32_t>(compressed))
               << api_constants::kPointerCompressionShift) &
              base) == uptr)]];

    return compressed;
  }
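
  // Worked example for Compress() (illustrative values, assuming
  // kPointerCompressionShift == 1 and the cage base from above):
  //   ptr        = 0x0000'4501'2345'6788   (base + offset 0x2345'6788)
  //   ptr >> 1   = 0x0000'2280'91A2'B3C4
  //   compressed =           0x91A2'B3C4   (truncated to 32 bits; MSB set)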

  static V8_INLINE void* Decompress(IntegralType ptr) {
    CPPGC_DCHECK(CageBaseGlobal::IsSet());
    const uintptr_t base = CageBaseGlobal::Get();
    return Decompress(ptr, base);
  }

  static V8_INLINE void* Decompress(IntegralType ptr, uintptr_t base) {
    CPPGC_DCHECK(CageBaseGlobal::IsSet());
    CPPGC_DCHECK(base == CageBaseGlobal::Get());
    // Sign-extend the compressed pointer to full width. This ensures that
    // normal pointers have only 1s in the base part of the address. It's also
    // important to shift the unsigned value, as otherwise it would result in
    // undefined behavior.
    const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(ptr))
                          << api_constants::kPointerCompressionShift;
    // Set the base part of the address for normal compressed pointers. Note
    // that nullptr and the sentinel value do not have 1s in the base part and
    // remain as-is in this operation.
    return reinterpret_cast<void*>(mask & base);
  }
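
  // Continuing the example above (g_base_.base == 0x0000'4501'FFFF'FFFF):
  //   compressed     =           0x91A2'B3C4
  //   sign-extended  = 0xFFFF'FFFF'91A2'B3C4
  //   mask (<< 1)    = 0xFFFF'FFFF'2345'6788
  //   mask & base    = 0x0000'4501'2345'6788   (the original pointer)
  // The ones from sign extension select the base's upper halfword; the ones
  // stored in the base's lower halfword pass the offset bits through.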

  // For a given memory `address`, this method iterates all possible pointers
  // that can be reasonably recovered with the current compression scheme and
  // passes them to `callback`.
  template <typename Callback>
  static V8_INLINE void VisitPossiblePointers(const void* address,
                                              Callback callback);

 private:
  static constexpr IntegralType kCompressedSentinel =
      SentinelPointer::kSentinelValue >>
      api_constants::kPointerCompressionShift;
  // All constructors initialize `value_`. Do not add a default value here as it
  // results in a non-atomic write on some builds, even when the atomic version
  // of the constructor is used.
  IntegralType value_;
};

template <typename Callback>
// static
void CompressedPointer::VisitPossiblePointers(const void* address,
                                              Callback callback) {
  const uintptr_t base = CageBaseGlobal::Get();
  CPPGC_DCHECK(base);
  // We may have random compressed pointers on stack (e.g. due to inlined
  // collections). These could be present in both halfwords.
  const uint32_t compressed_low =
      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(address));
  callback(CompressedPointer::Decompress(compressed_low, base));
  const uint32_t compressed_high = static_cast<uint32_t>(
      reinterpret_cast<uintptr_t>(address) >> (sizeof(uint32_t) * CHAR_BIT));
  callback(CompressedPointer::Decompress(compressed_high, base));
  // Iterate possible intermediate values, see `Decompress()`. The intermediate
  // value of decompressing is a 64-bit value where 35 bits are the offset. We
  // don't assume sign extension is stored and recover that part.
  //
  // Note that this case conveniently also recovers the full pointer.
  static constexpr uintptr_t kBitForIntermediateValue =
      (sizeof(uint32_t) * CHAR_BIT) + api_constants::kPointerCompressionShift;
  static constexpr uintptr_t kSignExtensionMask =
      ~((uintptr_t{1} << kBitForIntermediateValue) - 1);
  const uintptr_t intermediate_sign_extended =
      reinterpret_cast<uintptr_t>(address) | kSignExtensionMask;
  callback(reinterpret_cast<void*>(intermediate_sign_extended & base));
}
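
// Usage sketch (hypothetical caller, not part of this header): conservative
// stack scanning can expand every on-stack word into its pointer candidates
// along these lines.
//
//   void ScanWord(uintptr_t word) {
//     CompressedPointer::VisitPossiblePointers(
//         reinterpret_cast<void*>(word), [](void* candidate) {
//           // A real collector would check `candidate` against heap
//           // metadata before treating it as a reference.
//         });
//   }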

#endif  // defined(CPPGC_POINTER_COMPRESSION)

class V8_TRIVIAL_ABI RawPointer final {
 public:
  struct AtomicInitializerTag {};

  using IntegralType = uintptr_t;
  static constexpr auto kWriteBarrierSlotType =
      WriteBarrierSlotType::kUncompressed;

  V8_INLINE RawPointer() : ptr_(nullptr) {}
  V8_INLINE explicit RawPointer(const void* ptr, AtomicInitializerTag) {
    StoreAtomic(ptr);
  }
  V8_INLINE explicit RawPointer(const void* ptr) : ptr_(ptr) {}

  V8_INLINE const void* Load() const { return ptr_; }
  V8_INLINE const void* LoadAtomic() const {
    return reinterpret_cast<const std::atomic<const void*>&>(ptr_).load(
        std::memory_order_relaxed);
  }

  V8_INLINE void Store(const void* ptr) { ptr_ = ptr; }
  V8_INLINE void StoreAtomic(const void* ptr) {
    reinterpret_cast<std::atomic<const void*>&>(ptr_).store(
        ptr, std::memory_order_relaxed);
  }

  V8_INLINE void Clear() { ptr_ = nullptr; }
  V8_INLINE bool IsCleared() const { return !ptr_; }

  V8_INLINE bool IsSentinel() const { return ptr_ == kSentinelPointer; }

  V8_INLINE uintptr_t GetAsInteger() const {
    return reinterpret_cast<uintptr_t>(ptr_);
  }

  V8_INLINE friend bool operator==(RawPointer a, RawPointer b) {
    return a.ptr_ == b.ptr_;
  }
  V8_INLINE friend bool operator!=(RawPointer a, RawPointer b) {
    return a.ptr_ != b.ptr_;
  }
  V8_INLINE friend bool operator<(RawPointer a, RawPointer b) {
    return a.ptr_ < b.ptr_;
  }
  V8_INLINE friend bool operator<=(RawPointer a, RawPointer b) {
    return a.ptr_ <= b.ptr_;
  }
  V8_INLINE friend bool operator>(RawPointer a, RawPointer b) {
    return a.ptr_ > b.ptr_;
  }
  V8_INLINE friend bool operator>=(RawPointer a, RawPointer b) {
    return a.ptr_ >= b.ptr_;
  }

  template <typename Callback>
  static V8_INLINE void VisitPossiblePointers(const void* address,
                                              Callback callback) {
    // Pass along the full pointer.
    return callback(const_cast<void*>(address));
  }

 private:
  // All constructors initialize `ptr_`. Do not add a default value here as it
  // results in a non-atomic write on some builds, even when the atomic version
  // of the constructor is used.
  const void* ptr_;
};

#if defined(CPPGC_POINTER_COMPRESSION)
using DefaultMemberStorage = CompressedPointer;
#else  // !defined(CPPGC_POINTER_COMPRESSION)
using DefaultMemberStorage = RawPointer;
#endif  // !defined(CPPGC_POINTER_COMPRESSION)
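
// Both storage types expose the same interface (Load()/Store(), atomic
// variants, Clear()/IsCleared(), IsSentinel(), GetAsInteger()), so callers
// can be written generically against the storage type. A minimal sketch
// (hypothetical, not part of this header):
//
//   template <typename Storage = DefaultMemberStorage>
//   class ExampleSlot {
//    public:
//     void Set(const void* ptr) { storage_.Store(ptr); }
//     const void* Get() const { return storage_.Load(); }
//
//    private:
//     Storage storage_;
//   };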

}  // namespace internal
}  // namespace cppgc

#endif  // INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_