wasmtime/runtime/vm/gc/
gc_ref.rs

1use crate::prelude::*;
2use crate::runtime::vm::{GcHeap, GcStore, I31};
3use core::fmt;
4use core::marker;
5use core::num::NonZeroU32;
6use wasmtime_environ::packed_option::ReservedValue;
7use wasmtime_environ::{VMGcKind, VMSharedTypeIndex};
8
/// The common header for all objects allocated in a GC heap.
///
/// This header is shared across all collectors, although particular collectors
/// may always add their own trailing fields to this header for all of their own
/// GC objects.
///
/// This is a bit-packed structure that logically has the following fields:
///
/// ```ignore
/// struct VMGcHeader {
///     // Highest 5 bits.
///     kind: VMGcKind,
///
///     // 27 bits available for the `GcRuntime` to make use of however it sees fit.
///     reserved: u27,
///
///     // The `VMSharedTypeIndex` for this GC object, if it isn't an
///     // `externref` (or an `externref` re-wrapped as an `anyref`). `None` is
///     // represented with `VMSharedTypeIndex::reserved_value()`.
///     ty: Option<VMSharedTypeIndex>,
/// }
/// ```
// NOTE: this layout (size, alignment, and field offsets) is mirrored by the
// `VM_GC_HEADER_*` constants in `wasmtime_environ`; the `const` block below
// statically asserts that the two stay in sync.
#[repr(C, align(8))]
#[derive(Debug, Clone, Copy)]
pub struct VMGcHeader {
    /// The object's `VMGcKind` and 27 bits of space reserved for however the GC
    /// sees fit to use it.
    kind: u32,

    /// The object's type index.
    ///
    /// `VMSharedTypeIndex::reserved_value()` is the sentinel for "no type
    /// index" (used by `externref`s); `VMGcHeader::ty` reports it as `None`.
    ty: VMSharedTypeIndex,
}
41
// SAFETY: `VMGcHeader` is `repr(C)`, trivially "begins with" a `VMGcHeader`,
// has no `Drop` implementation, and both of its fields are plain integers, so
// any bit pattern is memory safe — satisfying the `GcHeapObject` contract.
unsafe impl GcHeapObject for VMGcHeader {
    #[inline]
    fn is(_: &VMGcHeader) -> bool {
        // Every GC object begins with a `VMGcHeader`, so any header "is" one.
        true
    }
}
48
// Statically assert that `VMGcHeader`'s Rust layout matches the layout that
// `wasmtime_environ` (and therefore compiled Wasm code) assumes. A mismatch
// here fails the build rather than corrupting GC objects at runtime.
const _: () = {
    use core::mem::offset_of;
    use wasmtime_environ::*;
    assert!((VM_GC_HEADER_SIZE as usize) == core::mem::size_of::<VMGcHeader>());
    assert!((VM_GC_HEADER_ALIGN as usize) == core::mem::align_of::<VMGcHeader>());
    assert!((VM_GC_HEADER_KIND_OFFSET as usize) == offset_of!(VMGcHeader, kind));
    assert!((VM_GC_HEADER_TYPE_INDEX_OFFSET as usize) == offset_of!(VMGcHeader, ty));
};
57
impl VMGcHeader {
    /// Create the header for an `externref`.
    ///
    /// `externref`s carry no concrete Wasm type, so the type-index slot is
    /// filled with the `VMSharedTypeIndex::reserved_value()` sentinel, which
    /// `Self::ty` reports as `None`.
    pub fn externref() -> Self {
        Self::from_kind_and_index(VMGcKind::ExternRef, VMSharedTypeIndex::reserved_value())
    }

    /// Create the header for the given kind and type index.
    ///
    /// The 27 reserved bits start out as zero.
    pub fn from_kind_and_index(kind: VMGcKind, ty: VMSharedTypeIndex) -> Self {
        let kind = kind.as_u32();
        Self { kind, ty }
    }

    /// Get the kind of GC object that this is.
    pub fn kind(&self) -> VMGcKind {
        // Only the high bits of the word encode the kind; the low 27 bits are
        // the collector-reserved bits.
        VMGcKind::from_high_bits_of_u32(self.kind)
    }

    /// Get the reserved 27 bits in this header.
    ///
    /// These bits are reserved for `GcRuntime` implementations to make use
    /// of however they see fit.
    pub fn reserved_u27(&self) -> u32 {
        self.kind & VMGcKind::UNUSED_MASK
    }

    /// Set the 27-bit reserved value.
    ///
    /// # Panics
    ///
    /// Panics if the given `value` has any of the upper 5 bits (the
    /// `VMGcKind` bits) set.
    pub fn set_reserved_u27(&mut self, value: u32) {
        assert!(
            VMGcKind::value_fits_in_unused_bits(value),
            "VMGcHeader::set_reserved_u27 with value using more than 27 bits: \
             {value:#034b} ({value}, {value:#010x})"
        );
        // Clear the old reserved bits (preserving the kind bits) and install
        // the new value.
        self.kind &= VMGcKind::MASK;
        self.kind |= value;
    }

    /// Set the 27-bit reserved value.
    ///
    /// # Safety
    ///
    /// The given `value` must only use the lower 27 bits; its upper 5 bits must
    /// be unset.
    pub unsafe fn unchecked_set_reserved_u27(&mut self, value: u32) {
        // Catch contract violations early in debug builds.
        debug_assert_eq!(value & VMGcKind::MASK, 0);
        self.kind &= VMGcKind::MASK;
        self.kind |= value;
    }

    /// Get this object's specific concrete type.
    ///
    /// Returns `None` when the stored index is the reserved sentinel, i.e. for
    /// `externref`s, which have no concrete Wasm type.
    pub fn ty(&self) -> Option<VMSharedTypeIndex> {
        if self.ty.is_reserved_value() {
            None
        } else {
            Some(self.ty)
        }
    }
}
119
/// A raw, unrooted GC reference.
///
/// A `VMGcRef` is either:
///
/// * A reference to some kind of object on the GC heap, but we don't know
///   exactly which kind without further reflection. Furthermore, this is not
///   actually a pointer, but a compact index into a Wasm GC heap.
///
/// * An `i31ref`: it doesn't actually reference an object in the GC heap, but
///   is instead an inline, unboxed 31-bit integer.
///
/// ## `VMGcRef` and GC Barriers
///
/// Depending on the garbage collector in use, cloning, writing, and dropping a
/// `VMGcRef` may require invoking GC barriers (little snippets of code provided
/// by the collector to ensure it is correctly tracking all GC references).
///
/// Therefore, to encourage correct usage of GC barriers, this type does *NOT*
/// implement `Clone` or `Copy`. Use `GcStore::clone_gc_ref`,
/// `GcStore::write_gc_ref`, and `GcStore::drop_gc_ref` to clone, write, and
/// drop `VMGcRef`s respectively.
///
/// As an escape hatch, if you really need to copy a `VMGcRef` without invoking
/// GC barriers and you understand why that will not lead to GC bugs in this
/// particular case, you can use the `unchecked_copy` method.
// Implementation note: the wrapped `NonZeroU32` is either an index into the GC
// heap (low `I31_REF_DISCRIMINANT` bit clear) or an unboxed `i31` payload
// shifted left by one with that low bit set. Wrapping a `NonZeroU32` also
// gives `Option<VMGcRef>` a free niche, so it is the same size as `u32`.
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct VMGcRef(NonZeroU32);
148
149impl<T> From<TypedGcRef<T>> for VMGcRef {
150    #[inline]
151    fn from(value: TypedGcRef<T>) -> Self {
152        value.gc_ref
153    }
154}
155
// Hex formatting delegates to the wrapped raw value, so `{:x}`/`{:X}` print
// the raw heap index (or, for an `i31ref`, the shifted payload with its low
// discriminant bit set).
impl fmt::LowerHex for VMGcRef {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::UpperHex for VMGcRef {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}

// `{:p}` renders the reference pointer-style: the raw value in lower hex with
// a leading `0x`, even though it is an index rather than an actual pointer.
impl fmt::Pointer for VMGcRef {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{self:#x}")
    }
}
173
impl VMGcRef {
    /// If this bit is set on a GC reference, then the GC reference is actually an
    /// unboxed `i31`.
    ///
    /// Must be kept in sync with `wasmtime_cranelift::I31_REF_DISCRIMINANT`.
    pub const I31_REF_DISCRIMINANT: u32 = 1;

    /// Create a new `VMGcRef` from the given raw u32 value.
    ///
    /// Does not discriminate between indices into a GC heap and `i31ref`s.
    ///
    /// Returns `None` for zero values.
    ///
    /// The given index should point to a valid GC-managed object within this
    /// reference's associated heap. Failure to uphold this will be memory safe,
    /// but will lead to general failures such as panics or incorrect results.
    pub fn from_raw_u32(raw: u32) -> Option<Self> {
        Some(Self::from_raw_non_zero_u32(NonZeroU32::new(raw)?))
    }

    /// Create a new `VMGcRef` from the given index into a GC heap.
    ///
    /// The given index should point to a valid GC-managed object within this
    /// reference's associated heap. Failure to uphold this will be memory safe,
    /// but will lead to general failures such as panics or incorrect results.
    ///
    /// Returns `None` when the index is not 2-byte aligned and therefore
    /// conflicts with the `i31ref` discriminant.
    pub fn from_heap_index(index: NonZeroU32) -> Option<Self> {
        // A set low bit would make this index indistinguishable from an
        // `i31ref`, so reject it.
        if (index.get() & Self::I31_REF_DISCRIMINANT) == 0 {
            Some(Self::from_raw_non_zero_u32(index))
        } else {
            None
        }
    }

    /// Create a new `VMGcRef` from the given raw value.
    ///
    /// Does not discriminate between indices into a GC heap and `i31ref`s.
    pub fn from_raw_non_zero_u32(raw: NonZeroU32) -> Self {
        VMGcRef(raw)
    }

    /// Construct a new `VMGcRef` from an unboxed 31-bit integer.
    #[inline]
    pub fn from_i31(val: I31) -> Self {
        // Shift the 31-bit payload up by one and tag the low bit so the value
        // is distinguishable from a (2-byte-aligned) heap index.
        let val = (val.get_u32() << 1) | Self::I31_REF_DISCRIMINANT;
        debug_assert_ne!(val, 0);
        // SAFETY: the discriminant bit was just OR'd in, so `val` is non-zero.
        let non_zero = unsafe { NonZeroU32::new_unchecked(val) };
        VMGcRef::from_raw_non_zero_u32(non_zero)
    }

    /// Copy this `VMGcRef` without running the GC's clone barriers.
    ///
    /// Prefer calling `clone(&mut GcStore)` instead! This is mostly an internal
    /// escape hatch for collector implementations.
    ///
    /// Failure to run GC barriers when they would otherwise be necessary can
    /// lead to leaks, panics, and wrong results. It cannot lead to memory
    /// unsafety, however.
    pub fn unchecked_copy(&self) -> Self {
        VMGcRef(self.0)
    }

    /// Copy this `i31` GC reference, which never requires any GC barriers.
    ///
    /// Panics if this is not an `i31`.
    pub fn copy_i31(&self) -> Self {
        // `i31`s are unboxed values, not heap objects, so no barriers apply
        // and the unchecked copy is always safe to use here.
        assert!(self.is_i31());
        self.unchecked_copy()
    }

    /// Get this GC reference as a u32 index into its GC heap.
    ///
    /// Returns `None` for `i31ref`s.
    pub fn as_heap_index(&self) -> Option<NonZeroU32> {
        if self.is_i31() { None } else { Some(self.0) }
    }

    /// Get this GC reference as a raw, non-zero u32 value, regardless whether
    /// it is actually a reference to a GC object or is an `i31ref`.
    pub fn as_raw_non_zero_u32(&self) -> NonZeroU32 {
        self.0
    }

    /// Get this GC reference as a raw u32 value, regardless whether it is
    /// actually a reference to a GC object or is an `i31ref`.
    pub fn as_raw_u32(&self) -> u32 {
        self.0.get()
    }

    /// Creates a typed GC reference from `self`, checking that `self` actually
    /// is a `T`.
    ///
    /// If this is not a GC reference to a `T`, then `Err(self)` is returned.
    pub fn into_typed<T>(self, gc_heap: &impl GcHeap) -> Result<TypedGcRef<T>, Self>
    where
        T: GcHeapObject,
    {
        // An `i31ref` has no heap object and therefore no header to inspect.
        if self.is_i31() {
            return Err(self);
        }
        if T::is(gc_heap.header(&self)) {
            Ok(TypedGcRef {
                gc_ref: self,
                _phantom: marker::PhantomData,
            })
        } else {
            Err(self)
        }
    }

    /// Creates a typed GC reference without actually checking that `self` is a
    /// `T`.
    ///
    /// `self` should point to a `T` object. Failure to uphold this invariant is
    /// memory safe, but will lead to general incorrectness such as panics or
    /// wrong results.
    pub fn into_typed_unchecked<T>(self) -> TypedGcRef<T>
    where
        T: GcHeapObject,
    {
        debug_assert!(!self.is_i31());
        TypedGcRef {
            gc_ref: self,
            _phantom: marker::PhantomData,
        }
    }

    /// Is this GC reference pointing to a `T`?
    ///
    /// Always `false` for `i31ref`s, which have no heap object at all.
    pub fn is_typed<T>(&self, gc_heap: &impl GcHeap) -> bool
    where
        T: GcHeapObject,
    {
        if self.is_i31() {
            return false;
        }
        T::is(gc_heap.header(&self))
    }

    /// Borrow `self` as a typed GC reference, checking that `self` actually is
    /// a `T`.
    #[inline]
    pub fn as_typed<T>(&self, gc_heap: &impl GcHeap) -> Option<&TypedGcRef<T>>
    where
        T: GcHeapObject,
    {
        if self.is_i31() {
            return None;
        }
        if T::is(gc_heap.header(&self)) {
            let ptr = self as *const VMGcRef;
            // SAFETY: `TypedGcRef<T>` is `repr(transparent)` over `VMGcRef`
            // (its only other field is a zero-sized `PhantomData`), so the two
            // types have identical layout and the reference cast is valid.
            let ret = unsafe { &*ptr.cast() };
            // Always-true pattern match: it stops compiling if `TypedGcRef`'s
            // field structure ever changes, keeping the cast above honest.
            assert!(matches!(
                ret,
                TypedGcRef {
                    gc_ref: VMGcRef(_),
                    _phantom
                }
            ));
            Some(ret)
        } else {
            None
        }
    }

    /// Creates a typed GC reference without actually checking that `self` is a
    /// `T`.
    ///
    /// `self` should point to a `T` object. Failure to uphold this invariant is
    /// memory safe, but will lead to general incorrectness such as panics or
    /// wrong results.
    pub fn as_typed_unchecked<T>(&self) -> &TypedGcRef<T>
    where
        T: GcHeapObject,
    {
        debug_assert!(!self.is_i31());
        let ptr = self as *const VMGcRef;
        // SAFETY: `TypedGcRef<T>` is `repr(transparent)` over `VMGcRef` (its
        // only other field is a zero-sized `PhantomData`), so the two types
        // have identical layout and the reference cast is valid.
        let ret = unsafe { &*ptr.cast() };
        // Always-true pattern match; see `as_typed` for why this exists.
        assert!(matches!(
            ret,
            TypedGcRef {
                gc_ref: VMGcRef(_),
                _phantom
            }
        ));
        ret
    }

    /// Get a reference to the GC header that this GC reference is pointing to.
    ///
    /// Returns `None` when this is an `i31ref` and doesn't actually point to a
    /// GC header.
    pub fn gc_header<'a>(&self, gc_heap: &'a (impl GcHeap + ?Sized)) -> Option<&'a VMGcHeader> {
        if self.is_i31() {
            None
        } else {
            Some(gc_heap.header(self))
        }
    }

    /// Is this `VMGcRef` actually an unboxed 31-bit integer, and not actually a
    /// GC reference?
    #[inline]
    pub fn is_i31(&self) -> bool {
        // `i31ref`s are tagged with the low discriminant bit; heap indices
        // always have it clear (see `from_heap_index`).
        let val = self.0.get();
        (val & Self::I31_REF_DISCRIMINANT) != 0
    }

    /// Get the underlying `i31`, if any.
    #[inline]
    pub fn as_i31(&self) -> Option<I31> {
        if self.is_i31() {
            // Shift the discriminant bit back out to recover the 31-bit
            // payload stored by `from_i31`.
            let val = self.0.get();
            Some(I31::wrapping_u32(val >> 1))
        } else {
            None
        }
    }

    /// Get the underlying `i31`, panicking if this is not an `i31`.
    #[inline]
    pub fn unwrap_i31(&self) -> I31 {
        self.as_i31().unwrap()
    }

    /// Is this `VMGcRef` a `VMExternRef`?
    ///
    /// Always `false` for `i31ref`s (which have no header and are `anyref`s).
    #[inline]
    pub fn is_extern_ref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> bool {
        self.gc_header(gc_heap)
            .map_or(false, |h| h.kind().matches(VMGcKind::ExternRef))
    }

    /// Is this `VMGcRef` an `anyref`?
    ///
    /// Note that every `i31ref` is an `anyref`.
    #[inline]
    pub fn is_any_ref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> bool {
        self.is_i31()
            || self
                .gc_header(gc_heap)
                .map_or(false, |h| h.kind().matches(VMGcKind::AnyRef))
    }
}
416
/// A trait implemented by all objects allocated inside a GC heap.
///
/// # Safety
///
/// All implementations must:
///
/// * Be `repr(C)` or `repr(transparent)`
///
/// * Begin with a `VMGcHeader` as their first field
///
/// * Not have `Drop` implementations (aka, `std::mem::needs_drop::<Self>()`
///   should return `false`).
///
/// * Be memory safe to transmute to from an arbitrary byte sequence (that is,
///   it is okay if some bit patterns are invalid with regards to correctness,
///   so long as these invalid bit patterns cannot lead to memory unsafety).
pub unsafe trait GcHeapObject: Send + Sync {
    /// Check whether the GC object with the given header is an instance of
    /// `Self`.
    ///
    /// Only the header is available for inspection, so implementations must
    /// decide based on the header's kind and/or type index alone.
    fn is(header: &VMGcHeader) -> bool;
}
438
/// A GC reference to a heap object of concrete type `T`.
///
/// Create typed GC refs via `VMGcRef::into_typed` and `VMGcRef::as_typed`, as
/// well as via their unchecked equivalents `VMGcRef::into_typed_unchecked` and
/// `VMGcRef::as_typed_unchecked`.
// NOTE: `repr(transparent)` is load-bearing — `VMGcRef::as_typed[_unchecked]`
// cast `&VMGcRef` to `&TypedGcRef<T>` and rely on the two having identical
// layout. The `PhantomData<*mut T>` is zero-sized and keeps `T` out of the
// stored data while still tying the type parameter to the struct.
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct TypedGcRef<T> {
    // The underlying untyped reference; the `T`-ness is a static claim only.
    gc_ref: VMGcRef,
    _phantom: marker::PhantomData<*mut T>,
}
450
451impl<T> TypedGcRef<T>
452where
453    T: GcHeapObject,
454{
455    /// Clone this `VMGcRef`, running any GC barriers as necessary.
456    pub fn clone(&self, gc_store: &mut GcStore) -> Self {
457        Self {
458            gc_ref: gc_store.clone_gc_ref(&self.gc_ref),
459            _phantom: marker::PhantomData,
460        }
461    }
462
463    /// Explicitly drop this GC reference, running any GC barriers as necessary.
464    pub fn drop(self, gc_store: &mut GcStore) {
465        gc_store.drop_gc_ref(self.gc_ref);
466    }
467
468    /// Copy this GC reference without running the GC's clone barriers.
469    ///
470    /// Prefer calling `clone(&mut GcStore)` instead! This is mostly an internal
471    /// escape hatch for collector implementations.
472    ///
473    /// Failure to run GC barriers when they would otherwise be necessary can
474    /// lead to leaks, panics, and wrong results. It cannot lead to memory
475    /// unsafety, however.
476    pub fn unchecked_copy(&self) -> Self {
477        Self {
478            gc_ref: self.gc_ref.unchecked_copy(),
479            _phantom: marker::PhantomData,
480        }
481    }
482}
483
impl<T> TypedGcRef<T> {
    /// Get the untyped version of this GC reference.
    ///
    /// This only borrows the underlying `VMGcRef`; no GC barriers run.
    pub fn as_untyped(&self) -> &VMGcRef {
        &self.gc_ref
    }
}
490
#[cfg(test)]
mod tests {
    use super::*;

    /// Exercise the reserved-bits accessors: writing the reserved 27 bits
    /// must round-trip and must never disturb the kind or type index, and
    /// writing a 28-bit value must panic.
    #[test]
    fn reserved_bits() {
        let kind = VMGcKind::StructRef;
        let ty = VMSharedTypeIndex::new(1234);
        let mut header = VMGcHeader::from_kind_and_index(kind, ty);

        // Check that the reserved bits hold `reserved` and that the kind and
        // type index are untouched.
        let assert_state = |h: &VMGcHeader, reserved: u32| {
            assert_eq!(h.reserved_u27(), reserved);
            assert_eq!(h.kind(), kind);
            assert_eq!(h.ty(), Some(ty));
        };

        // Freshly-created headers have zeroed reserved bits.
        assert_state(&header, 0);

        header.set_reserved_u27(36);
        assert_state(&header, 36);

        // The largest value that fits in 27 bits.
        let max = (1 << 27) - 1;
        header.set_reserved_u27(max);
        assert_state(&header, max);

        header.set_reserved_u27(0);
        assert_state(&header, 0);

        // One past the maximum uses a 28th bit and must panic.
        let result = std::panic::catch_unwind(move || header.set_reserved_u27(max + 1));
        assert!(result.is_err());
    }
}