wasmtime/runtime/vm/gc/enabled/externref.rs
use crate::runtime::vm::{GcHeap, GcStore, VMGcRef};
use core::fmt;
use wasmtime_environ::VMGcKind;

/// A `VMGcRef` that we know points to an `externref`.
///
/// Create a `VMExternRef` via `VMGcRef::into_externref` and
/// `VMGcRef::as_externref`, or their unchecked equivalents
/// `VMGcRef::into_externref_unchecked` and `VMGcRef::as_externref_unchecked`.
///
/// Note: This is not a `TypedGcRef<_>` because each collector can have a
/// different concrete representation of `externref` that it allocates inside
/// its heap.
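///
/// Converting back to an untyped `VMGcRef` goes through the `From` impl
/// below. A minimal sketch (illustrative only; `externref` is an assumed
/// `VMExternRef` already in scope, so this is not a compiled doctest):
///
/// ```ignore
/// let gc_ref: VMGcRef = externref.into();
/// ```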
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct VMExternRef(VMGcRef);

impl fmt::Pointer for VMExternRef {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.0, f)
    }
}

impl From<VMExternRef> for VMGcRef {
    #[inline]
    fn from(x: VMExternRef) -> Self {
        x.0
    }
}

impl VMGcRef {
    /// Create a new `VMExternRef` from this GC reference.
    ///
    /// If this is not a GC reference to an `externref`, `Err(self)` is
    /// returned.
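    ///
    /// An illustrative sketch of the checked conversion (not a compiled
    /// doctest): `heap` is an assumed `&impl GcHeap`, `gc_ref` an assumed
    /// `VMGcRef`, and `use_externref`/`use_other` are hypothetical callees.
    ///
    /// ```ignore
    /// match gc_ref.into_externref(heap) {
    ///     // The reference points at an `externref`: we now hold the typed
    ///     // wrapper and have given up the untyped `VMGcRef`.
    ///     Ok(externref) => use_externref(externref),
    ///     // Not an `externref` (for example an `i31`); the original
    ///     // `VMGcRef` is handed back unchanged.
    ///     Err(gc_ref) => use_other(gc_ref),
    /// }
    /// ```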
    pub fn into_externref(self, gc_heap: &impl GcHeap) -> Result<VMExternRef, VMGcRef> {
        // `i31` references are unboxed values, not heap allocations, so they
        // have no header to inspect and cannot be `externref`s.
        if self.is_i31() {
            return Err(self);
        }
        if gc_heap.header(&self).kind() == VMGcKind::ExternRef {
            Ok(VMExternRef(self))
        } else {
            Err(self)
        }
    }

    /// Create a new `VMExternRef` from `self` without actually checking that
    /// `self` is an `externref`.
    ///
    /// This method does not check that `self` is actually an `externref`, so
    /// callers must ensure that it is. Failure to uphold this invariant is
    /// memory safe but will result in general incorrectness down the line,
    /// such as panics or wrong results.
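    ///
    /// A minimal sketch of the intended pattern (illustrative only, not a
    /// compiled doctest; assumes the caller has already verified the kind
    /// against an in-scope `gc_heap`):
    ///
    /// ```ignore
    /// debug_assert_eq!(gc_heap.header(&gc_ref).kind(), VMGcKind::ExternRef);
    /// let externref = gc_ref.into_externref_unchecked();
    /// ```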
    #[inline]
    pub fn into_externref_unchecked(self) -> VMExternRef {
        debug_assert!(!self.is_i31());
        VMExternRef(self)
    }

    /// Get this GC reference as an `externref` reference, if it actually is
    /// an `externref` reference.
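    ///
    /// A sketch of borrowing without consuming the reference (illustrative
    /// only; `heap` and `gc_ref` are assumed to be in scope):
    ///
    /// ```ignore
    /// if let Some(externref) = gc_ref.as_externref(heap) {
    ///     // `externref: &VMExternRef` borrows from `gc_ref`, which we
    ///     // still own once the borrow ends.
    /// }
    /// ```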
    pub fn as_externref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> Option<&VMExternRef> {
        if self.is_i31() {
            return None;
        }
        if gc_heap.header(self).kind() == VMGcKind::ExternRef {
            // Safety: `VMExternRef` is a `#[repr(transparent)]` wrapper
            // around `VMGcRef`, so casting `&VMGcRef` to `&VMExternRef` is
            // valid; the `assert!` pins the cast's target type.
            let ptr = self as *const VMGcRef;
            let ret = unsafe { &*ptr.cast() };
            assert!(matches!(ret, VMExternRef(VMGcRef { .. })));
            Some(ret)
        } else {
            None
        }
    }
}

impl VMExternRef {
    /// Get the underlying `VMGcRef`.
    pub fn as_gc_ref(&self) -> &VMGcRef {
        &self.0
    }

    /// Clone this `VMExternRef`, running any GC barriers as necessary.
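    ///
    /// A sketch of the expected lifecycle (illustrative only; `gc_store` is
    /// an assumed `&mut GcStore`). Every clone is an independent reference
    /// that must eventually be dropped through the store so that its drop
    /// barriers run:
    ///
    /// ```ignore
    /// let copy = externref.clone(gc_store);
    /// // ... use `copy` ...
    /// copy.drop(gc_store);
    /// externref.drop(gc_store);
    /// ```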
    pub fn clone(&self, gc_store: &mut GcStore) -> Self {
        Self(gc_store.clone_gc_ref(&self.0))
    }

    /// Explicitly drop this `externref`, running GC drop barriers as
    /// necessary.
    pub fn drop(self, gc_store: &mut GcStore) {
        gc_store.drop_gc_ref(self.0);
    }

    /// Copy this `VMExternRef` without running the GC's clone barriers.
    ///
    /// Prefer calling `clone(&mut GcStore)` instead! This is mostly an
    /// internal escape hatch for collector implementations.
    ///
    /// Failure to run GC barriers when they would otherwise be necessary can
    /// lead to leaks, panics, and wrong results. It cannot lead to memory
    /// unsafety, however.
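    ///
    /// A hedged sketch of the collector-internal situation this is meant for
    /// (illustrative only; the surrounding collector code, not shown here, is
    /// assumed to uphold the barrier discipline itself):
    ///
    /// ```ignore
    /// // Inside a collector that already tracks `externref`, a raw copy
    /// // avoids re-running the clone barrier.
    /// let raw_copy = externref.unchecked_copy();
    /// ```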
    pub fn unchecked_copy(&self) -> Self {
        Self(self.0.unchecked_copy())
    }
}
103}