wasmtime/runtime/vm/gc/gc_ref.rs
use crate::prelude::*;
use crate::runtime::vm::{GcHeap, GcStore, I31};
use core::fmt;
use core::marker;
use core::num::NonZeroU32;
use wasmtime_environ::packed_option::ReservedValue;
use wasmtime_environ::{VMGcKind, VMSharedTypeIndex};

/// The common header for all objects allocated in a GC heap.
///
/// This header is shared across all collectors, although particular collectors
/// may always add their own trailing fields to this header for all of their own
/// GC objects.
///
/// This is a bit-packed structure that logically has the following fields:
///
/// ```ignore
/// struct VMGcHeader {
///     // Highest 5 bits.
///     kind: VMGcKind,
///
///     // 27 bits available for the `GcRuntime` to make use of however it sees fit.
///     reserved: u27,
///
///     // The `VMSharedTypeIndex` for this GC object, if it isn't an
///     // `externref` (or an `externref` re-wrapped as an `anyref`). `None` is
///     // represented with `VMSharedTypeIndex::reserved_value()`.
///     ty: Option<VMSharedTypeIndex>,
/// }
/// ```
#[repr(C, align(8))]
#[derive(Debug, Clone, Copy)]
pub struct VMGcHeader {
    /// The object's `VMGcKind` and 27 bits of space reserved for however the GC
    /// sees fit to use it.
    kind: u32,

    /// The object's type index.
    ty: VMSharedTypeIndex,
}

unsafe impl GcHeapObject for VMGcHeader {
    #[inline]
    fn is(_: &VMGcHeader) -> bool {
        true
    }
}

const _: () = {
    use core::mem::offset_of;
    use wasmtime_environ::*;
    assert!((VM_GC_HEADER_SIZE as usize) == core::mem::size_of::<VMGcHeader>());
    assert!((VM_GC_HEADER_ALIGN as usize) == core::mem::align_of::<VMGcHeader>());
    assert!((VM_GC_HEADER_KIND_OFFSET as usize) == offset_of!(VMGcHeader, kind));
    assert!((VM_GC_HEADER_TYPE_INDEX_OFFSET as usize) == offset_of!(VMGcHeader, ty));
};

impl VMGcHeader {
    /// Create the header for an `externref`.
    pub fn externref() -> Self {
        Self::from_kind_and_index(VMGcKind::ExternRef, VMSharedTypeIndex::reserved_value())
    }

    /// Create the header for the given kind and type index.
    pub fn from_kind_and_index(kind: VMGcKind, ty: VMSharedTypeIndex) -> Self {
        let kind = kind.as_u32();
        Self { kind, ty }
    }

    /// Get the kind of GC object that this is.
    pub fn kind(&self) -> VMGcKind {
        VMGcKind::from_high_bits_of_u32(self.kind)
    }

    /// Get the reserved 27 bits in this header.
    ///
    /// These bits are reserved for `GcRuntime` implementations to make use
    /// of however they see fit.
    pub fn reserved_u27(&self) -> u32 {
        self.kind & VMGcKind::UNUSED_MASK
    }

    /// Set the 27-bit reserved value.
    ///
    /// # Panics
    ///
    /// Panics if the given `value` has any of the upper 5 bits set.
    pub fn set_reserved_u27(&mut self, value: u32) {
        assert!(
            VMGcKind::value_fits_in_unused_bits(value),
            "VMGcHeader::set_reserved_u27 with value using more than 27 bits"
        );
        self.kind |= value;
    }

    /// Set the 27-bit reserved value.
    ///
    /// # Safety
    ///
    /// The given `value` must only use the lower 27 bits; its upper 5 bits must
    /// be unset.
    pub unsafe fn unchecked_set_reserved_u27(&mut self, value: u32) {
        debug_assert_eq!(value & VMGcKind::MASK, 0);
        self.kind |= value;
    }

    /// Get this object's specific concrete type.
    pub fn ty(&self) -> Option<VMSharedTypeIndex> {
        if self.ty.is_reserved_value() {
            None
        } else {
            Some(self.ty)
        }
    }
}
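
// Illustrative sketch (not part of this module's API surface): how the
// bit-packed `kind` word behaves. The high 5 bits hold the `VMGcKind`, the
// low 27 bits are collector-private, and the type index lives in its own
// field.
//
//     let mut header = VMGcHeader::externref();
//     assert_eq!(header.kind(), VMGcKind::ExternRef); // high 5 bits
//     assert_eq!(header.ty(), None);                  // externrefs carry no type index
//     assert_eq!(header.reserved_u27(), 0);           // low 27 bits start clear
//     header.set_reserved_u27(0b101);                 // collector-private data
//     assert_eq!(header.reserved_u27(), 0b101);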

/// A raw, unrooted GC reference.
///
/// A `VMGcRef` is either:
///
/// * A reference to some kind of object on the GC heap, but we don't know
///   exactly which kind without further reflection. Furthermore, this is not
///   actually a pointer, but a compact index into a Wasm GC heap.
///
/// * An `i31ref`: it doesn't actually reference an object in the GC heap, but
///   is instead an inline, unboxed 31-bit integer.
///
/// ## `VMGcRef` and GC Barriers
///
/// Depending on the garbage collector in use, cloning, writing, and dropping a
/// `VMGcRef` may require invoking GC barriers (little snippets of code provided
/// by the collector to ensure it is correctly tracking all GC references).
///
/// Therefore, to encourage correct usage of GC barriers, this type does *NOT*
/// implement `Clone` or `Copy`. Use `GcStore::clone_gc_ref`,
/// `GcStore::write_gc_ref`, and `GcStore::drop_gc_ref` to clone, write, and
/// drop `VMGcRef`s respectively.
///
/// As an escape hatch, if you really need to copy a `VMGcRef` without invoking
/// GC barriers and you understand why that will not lead to GC bugs in this
/// particular case, you can use the `unchecked_copy` method.
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct VMGcRef(NonZeroU32);
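
// Minimal usage sketch (illustrative; assumes a `gc_store: &mut GcStore` and a
// live `gc_ref: VMGcRef` are in scope): clones and drops are routed through
// the store so the collector's barriers run.
//
//     let copy = gc_store.clone_gc_ref(&gc_ref); // clone barrier
//     // ... use `copy` ...
//     gc_store.drop_gc_ref(copy);                // drop barrier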

impl<T> From<TypedGcRef<T>> for VMGcRef {
    #[inline]
    fn from(value: TypedGcRef<T>) -> Self {
        value.gc_ref
    }
}

impl fmt::LowerHex for VMGcRef {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::UpperHex for VMGcRef {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::Pointer for VMGcRef {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{self:#x}")
    }
}

impl VMGcRef {
    /// If this bit is set on a GC reference, then the GC reference is actually an
    /// unboxed `i31`.
    ///
    /// Must be kept in sync with `wasmtime_cranelift::I31_REF_DISCRIMINANT`.
    pub const I31_REF_DISCRIMINANT: u32 = 1;

    /// Create a new `VMGcRef` from the given raw u32 value.
    ///
    /// Does not discriminate between indices into a GC heap and `i31ref`s.
    ///
    /// Returns `None` for zero values.
    ///
    /// The given index should point to a valid GC-managed object within this
    /// reference's associated heap. Failure to uphold this will be memory safe,
    /// but will lead to general failures such as panics or incorrect results.
    pub fn from_raw_u32(raw: u32) -> Option<Self> {
        Some(Self::from_raw_non_zero_u32(NonZeroU32::new(raw)?))
    }

    /// Create a new `VMGcRef` from the given index into a GC heap.
    ///
    /// The given index should point to a valid GC-managed object within this
    /// reference's associated heap. Failure to uphold this will be memory safe,
    /// but will lead to general failures such as panics or incorrect results.
    ///
    /// Returns `None` when the index is not 2-byte aligned and therefore
    /// conflicts with the `i31ref` discriminant.
    pub fn from_heap_index(index: NonZeroU32) -> Option<Self> {
        if (index.get() & Self::I31_REF_DISCRIMINANT) == 0 {
            Some(Self::from_raw_non_zero_u32(index))
        } else {
            None
        }
    }

    /// Create a new `VMGcRef` from the given raw value.
    ///
    /// Does not discriminate between indices into a GC heap and `i31ref`s.
    pub fn from_raw_non_zero_u32(raw: NonZeroU32) -> Self {
        VMGcRef(raw)
    }

    /// Construct a new `VMGcRef` from an unboxed 31-bit integer.
    #[inline]
    pub fn from_i31(val: I31) -> Self {
        let val = (val.get_u32() << 1) | Self::I31_REF_DISCRIMINANT;
        debug_assert_ne!(val, 0);
        let non_zero = unsafe { NonZeroU32::new_unchecked(val) };
        VMGcRef::from_raw_non_zero_u32(non_zero)
    }
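
    // Sketch of the tagging scheme (illustrative only): the 31-bit payload is
    // shifted left by one and the low discriminant bit is set, so an `i31ref`
    // can never collide with a (2-byte-aligned) GC heap index.
    //
    //     let r = VMGcRef::from_i31(I31::wrapping_u32(42));
    //     assert!(r.is_i31());
    //     assert_eq!(r.as_heap_index(), None);
    //     assert_eq!(r.unwrap_i31().get_u32(), 42);
    //     assert_eq!(r.as_raw_u32(), (42 << 1) | VMGcRef::I31_REF_DISCRIMINANT);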

    /// Copy this `VMGcRef` without running the GC's clone barriers.
    ///
    /// Prefer calling `clone(&mut GcStore)` instead! This is mostly an internal
    /// escape hatch for collector implementations.
    ///
    /// Failure to run GC barriers when they would otherwise be necessary can
    /// lead to leaks, panics, and wrong results. It cannot lead to memory
    /// unsafety, however.
    pub fn unchecked_copy(&self) -> Self {
        VMGcRef(self.0)
    }

    /// Get this GC reference as a u32 index into its GC heap.
    ///
    /// Returns `None` for `i31ref`s.
    pub fn as_heap_index(&self) -> Option<NonZeroU32> {
        if self.is_i31() {
            None
        } else {
            Some(self.0)
        }
    }

    /// Get this GC reference as a raw u32 value, regardless of whether it is
    /// actually a reference to a GC object or is an `i31ref`.
    pub fn as_raw_u32(&self) -> u32 {
        self.0.get()
    }

    /// Creates a typed GC reference from `self`, checking that `self` actually
    /// is a `T`.
    ///
    /// If this is not a GC reference to a `T`, then `Err(self)` is returned.
    pub fn into_typed<T>(self, gc_heap: &impl GcHeap) -> Result<TypedGcRef<T>, Self>
    where
        T: GcHeapObject,
    {
        if self.is_i31() {
            return Err(self);
        }
        if T::is(gc_heap.header(&self)) {
            Ok(TypedGcRef {
                gc_ref: self,
                _phantom: marker::PhantomData,
            })
        } else {
            Err(self)
        }
    }
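
    // Hedged sketch of the checked downcast (assumes a `gc_heap: &impl GcHeap`
    // and some hypothetical `VMExampleObj: GcHeapObject` type are in scope):
    //
    //     match gc_ref.into_typed::<VMExampleObj>(gc_heap) {
    //         Ok(typed) => { /* have a `TypedGcRef<VMExampleObj>` */ }
    //         Err(gc_ref) => { /* not a `VMExampleObj`; original ref handed back */ }
    //     }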

    /// Creates a typed GC reference without actually checking that `self` is a
    /// `T`.
    ///
    /// `self` should point to a `T` object. Failure to uphold this invariant is
    /// memory safe, but will lead to general incorrectness such as panics or
    /// wrong results.
    pub fn into_typed_unchecked<T>(self) -> TypedGcRef<T>
    where
        T: GcHeapObject,
    {
        debug_assert!(!self.is_i31());
        TypedGcRef {
            gc_ref: self,
            _phantom: marker::PhantomData,
        }
    }

    /// Is this GC reference pointing to a `T`?
    pub fn is_typed<T>(&self, gc_heap: &impl GcHeap) -> bool
    where
        T: GcHeapObject,
    {
        if self.is_i31() {
            return false;
        }
        T::is(gc_heap.header(&self))
    }

    /// Borrow `self` as a typed GC reference, checking that `self` actually is
    /// a `T`.
    pub fn as_typed<T>(&self, gc_heap: &impl GcHeap) -> Option<&TypedGcRef<T>>
    where
        T: GcHeapObject,
    {
        if self.is_i31() {
            return None;
        }
        if T::is(gc_heap.header(&self)) {
            let ptr = self as *const VMGcRef;
            let ret = unsafe { &*ptr.cast() };
            assert!(matches!(
                ret,
                TypedGcRef {
                    gc_ref: VMGcRef(_),
                    _phantom
                }
            ));
            Some(ret)
        } else {
            None
        }
    }

    /// Creates a typed GC reference without actually checking that `self` is a
    /// `T`.
    ///
    /// `self` should point to a `T` object. Failure to uphold this invariant is
    /// memory safe, but will lead to general incorrectness such as panics or
    /// wrong results.
    pub fn as_typed_unchecked<T>(&self) -> &TypedGcRef<T>
    where
        T: GcHeapObject,
    {
        debug_assert!(!self.is_i31());
        let ptr = self as *const VMGcRef;
        let ret = unsafe { &*ptr.cast() };
        assert!(matches!(
            ret,
            TypedGcRef {
                gc_ref: VMGcRef(_),
                _phantom
            }
        ));
        ret
    }

    /// Get a reference to the GC header that this GC reference is pointing to.
    ///
    /// Returns `None` when this is an `i31ref` and doesn't actually point to a
    /// GC header.
    pub fn gc_header<'a>(&self, gc_heap: &'a (impl GcHeap + ?Sized)) -> Option<&'a VMGcHeader> {
        if self.is_i31() {
            None
        } else {
            Some(gc_heap.header(self))
        }
    }

    /// Is this `VMGcRef` actually an unboxed 31-bit integer, and not actually a
    /// GC reference?
    #[inline]
    pub fn is_i31(&self) -> bool {
        let val = self.0.get();
        (val & Self::I31_REF_DISCRIMINANT) != 0
    }

    /// Get the underlying `i31`, if any.
    #[inline]
    pub fn as_i31(&self) -> Option<I31> {
        if self.is_i31() {
            let val = self.0.get();
            Some(I31::wrapping_u32(val >> 1))
        } else {
            None
        }
    }

    /// Get the underlying `i31`, panicking if this is not an `i31`.
    #[inline]
    pub fn unwrap_i31(&self) -> I31 {
        self.as_i31().unwrap()
    }

    /// Is this `VMGcRef` a `VMExternRef`?
    #[inline]
    pub fn is_extern_ref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> bool {
        self.gc_header(gc_heap)
            .map_or(false, |h| h.kind().matches(VMGcKind::ExternRef))
    }

    /// Is this `VMGcRef` an `anyref`?
    #[inline]
    pub fn is_any_ref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> bool {
        self.is_i31()
            || self
                .gc_header(gc_heap)
                .map_or(false, |h| h.kind().matches(VMGcKind::AnyRef))
    }
}

/// A trait implemented by all objects allocated inside a GC heap.
///
/// # Safety
///
/// All implementations must:
///
/// * Be `repr(C)` or `repr(transparent)`
///
/// * Begin with a `VMGcHeader` as their first field
///
/// * Not have `Drop` implementations (aka, `std::mem::needs_drop::<Self>()`
///   should return `false`).
///
/// * Be memory safe to transmute to from an arbitrary byte sequence (that is,
///   it is okay if some bit patterns are invalid with regards to correctness,
///   so long as these invalid bit patterns cannot lead to memory unsafety).
pub unsafe trait GcHeapObject: Send + Sync {
    /// Check whether the GC object with the given header is an instance of
    /// `Self`.
    fn is(header: &VMGcHeader) -> bool;
}
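
// Illustrative sketch of a conforming `GcHeapObject` implementation for a
// hypothetical `VMExampleObj` type (not part of this module): `repr(C)`, the
// header comes first, there is no `Drop` impl, and any bit pattern is memory
// safe to transmute from.
//
//     #[repr(C)]
//     struct VMExampleObj {
//         header: VMGcHeader,
//         payload: u32,
//     }
//
//     unsafe impl GcHeapObject for VMExampleObj {
//         #[inline]
//         fn is(header: &VMGcHeader) -> bool {
//             // A real implementation would inspect the header's kind and/or
//             // type index to decide whether the object is a `VMExampleObj`.
//             header.kind().matches(VMGcKind::ExternRef)
//         }
//     }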

/// A GC reference to a heap object of concrete type `T`.
///
/// Create typed GC refs via `VMGcRef::into_typed` and `VMGcRef::as_typed`, as
/// well as via their unchecked equivalents `VMGcRef::into_typed_unchecked` and
/// `VMGcRef::as_typed_unchecked`.
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct TypedGcRef<T> {
    gc_ref: VMGcRef,
    _phantom: marker::PhantomData<*mut T>,
}

impl<T> TypedGcRef<T>
where
    T: GcHeapObject,
{
    /// Clone this `VMGcRef`, running any GC barriers as necessary.
    pub fn clone(&self, gc_store: &mut GcStore) -> Self {
        Self {
            gc_ref: gc_store.clone_gc_ref(&self.gc_ref),
            _phantom: marker::PhantomData,
        }
    }

    /// Explicitly drop this GC reference, running any GC barriers as necessary.
    pub fn drop(self, gc_store: &mut GcStore) {
        gc_store.drop_gc_ref(self.gc_ref);
    }

    /// Copy this GC reference without running the GC's clone barriers.
    ///
    /// Prefer calling `clone(&mut GcStore)` instead! This is mostly an internal
    /// escape hatch for collector implementations.
    ///
    /// Failure to run GC barriers when they would otherwise be necessary can
    /// lead to leaks, panics, and wrong results. It cannot lead to memory
    /// unsafety, however.
    pub fn unchecked_copy(&self) -> Self {
        Self {
            gc_ref: self.gc_ref.unchecked_copy(),
            _phantom: marker::PhantomData,
        }
    }
}
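
// Minimal sketch of barrier-aware cloning and dropping of a typed reference
// (illustrative; assumes a `gc_store: &mut GcStore` and a hypothetical
// `typed: TypedGcRef<VMExampleObj>` are in scope):
//
//     let copy = typed.clone(gc_store); // runs the collector's clone barrier
//     copy.drop(gc_store);              // runs the collector's drop barrier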

impl<T> TypedGcRef<T> {
    /// Get the untyped version of this GC reference.
    pub fn as_untyped(&self) -> &VMGcRef {
        &self.gc_ref
    }
}