wasmtime/runtime/gc/enabled/
structref.rs

1//! Working with GC `struct` objects.
2
3use crate::runtime::vm::VMGcRef;
4use crate::store::StoreId;
5use crate::vm::{VMGcHeader, VMStructRef};
6use crate::{AnyRef, FieldType};
7use crate::{
8    AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
9    ManuallyRooted, RefType, Rooted, StructType, Val, ValRaw, ValType, WasmTy,
10    prelude::*,
11    store::{AutoAssertNoGc, StoreContextMut, StoreOpaque},
12};
13use core::mem::{self, MaybeUninit};
14use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
15
/// An allocator for a particular Wasm GC struct type.
///
/// Every `StructRefPre` is associated with a particular
/// [`Store`][crate::Store] and a particular [StructType][crate::StructType].
///
/// Reusing an allocator across many allocations amortizes some per-type runtime
/// overheads inside Wasmtime. A `StructRefPre` is to `StructRef`s as an
/// `InstancePre` is to `Instance`s.
///
/// # Example
///
/// ```
/// use wasmtime::*;
///
/// # fn foo() -> Result<()> {
/// let mut config = Config::new();
/// config.wasm_function_references(true);
/// config.wasm_gc(true);
///
/// let engine = Engine::new(&config)?;
/// let mut store = Store::new(&engine, ());
///
/// // Define a struct type.
/// let struct_ty = StructType::new(
///    store.engine(),
///    [FieldType::new(Mutability::Var, StorageType::I8)],
/// )?;
///
/// // Create an allocator for the struct type.
/// let allocator = StructRefPre::new(&mut store, struct_ty);
///
/// {
///     let mut scope = RootScope::new(&mut store);
///
///     // Allocate a bunch of instances of our struct type using the same
///     // allocator! This is faster than creating a new allocator for each
///     // instance we want to allocate.
///     for i in 0..10 {
///         StructRef::new(&mut scope, &allocator, &[Val::I32(i)])?;
///     }
/// }
/// # Ok(())
/// # }
/// # foo().unwrap();
/// ```
pub struct StructRefPre {
    // The store this allocator is tied to; allocation asserts that the
    // store passed in matches this id.
    store_id: StoreId,
    // The struct type that this allocator creates instances of.
    ty: StructType,
}
65
66impl StructRefPre {
67    /// Create a new `StructRefPre` that is associated with the given store
68    /// and type.
69    pub fn new(mut store: impl AsContextMut, ty: StructType) -> Self {
70        Self::_new(store.as_context_mut().0, ty)
71    }
72
73    pub(crate) fn _new(store: &mut StoreOpaque, ty: StructType) -> Self {
74        store.insert_gc_host_alloc_type(ty.registered_type().clone());
75        let store_id = store.id();
76
77        StructRefPre { store_id, ty }
78    }
79
80    pub(crate) fn layout(&self) -> &GcStructLayout {
81        self.ty
82            .registered_type()
83            .layout()
84            .expect("struct types have a layout")
85            .unwrap_struct()
86    }
87
88    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
89        self.ty.registered_type().index()
90    }
91}
92
/// A reference to a GC-managed `struct` instance.
///
/// WebAssembly `struct`s are static, fixed-length, ordered sequences of
/// fields. Fields are named by index, not by identifier; in this way, they are
/// similar to Rust's tuples. Each field is mutable or constant and stores
/// unpacked [`Val`][crate::Val]s or packed 8-/16-bit integers.
///
/// Like all WebAssembly references, these are opaque and unforgeable to Wasm:
/// they cannot be faked and Wasm cannot, for example, cast the integer
/// `0x12345678` into a reference, pretend it is a valid `structref`, and trick
/// the host into dereferencing it and segfaulting or worse.
///
/// Note that you can also use `Rooted<StructRef>` and
/// `ManuallyRooted<StructRef>` as a type parameter with
/// [`Func::typed`][crate::Func::typed]- and
/// [`Func::wrap`][crate::Func::wrap]-style APIs.
///
/// # Example
///
/// ```
/// use wasmtime::*;
///
/// # fn foo() -> Result<()> {
/// let mut config = Config::new();
/// config.wasm_function_references(true);
/// config.wasm_gc(true);
///
/// let engine = Engine::new(&config)?;
/// let mut store = Store::new(&engine, ());
///
/// // Define a struct type.
/// let struct_ty = StructType::new(
///    store.engine(),
///    [FieldType::new(Mutability::Var, StorageType::I8)],
/// )?;
///
/// // Create an allocator for the struct type.
/// let allocator = StructRefPre::new(&mut store, struct_ty);
///
/// {
///     let mut scope = RootScope::new(&mut store);
///
///     // Allocate an instance of the struct type.
///     let my_struct = StructRef::new(&mut scope, &allocator, &[Val::I32(42)])?;
///
///     // That instance's field should have the expected value.
///     let val = my_struct.field(&mut scope, 0)?.unwrap_i32();
///     assert_eq!(val, 42);
///
///     // And we can update the field's value because it is a mutable field.
///     my_struct.set_field(&mut scope, 0, Val::I32(36))?;
///     let new_val = my_struct.field(&mut scope, 0)?.unwrap_i32();
///     assert_eq!(new_val, 36);
/// }
/// # Ok(())
/// # }
/// # foo().unwrap();
/// ```
#[derive(Debug)]
#[repr(transparent)]
pub struct StructRef {
    // The underlying rooted GC reference index. The `repr(transparent)`
    // layout is relied upon by `GcRefImpl::transmute_ref`.
    pub(super) inner: GcRootIndex,
}
156
157unsafe impl GcRefImpl for StructRef {
158    fn transmute_ref(index: &GcRootIndex) -> &Self {
159        // Safety: `StructRef` is a newtype of a `GcRootIndex`.
160        let me: &Self = unsafe { mem::transmute(index) };
161
162        // Assert we really are just a newtype of a `GcRootIndex`.
163        assert!(matches!(
164            me,
165            Self {
166                inner: GcRootIndex { .. },
167            }
168        ));
169
170        me
171    }
172}
173
impl Rooted<StructRef> {
    /// Upcast this `structref` into an `anyref`.
    ///
    /// This is an infallible, zero-cost cast: every `structref` is an
    /// `anyref` in the Wasm GC type hierarchy.
    #[inline]
    pub fn to_anyref(self) -> Rooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcast this `structref` into an `eqref`.
    ///
    /// This is an infallible, zero-cost cast: every `structref` is an
    /// `eqref` in the Wasm GC type hierarchy.
    #[inline]
    pub fn to_eqref(self) -> Rooted<EqRef> {
        self.unchecked_cast()
    }
}
187
impl ManuallyRooted<StructRef> {
    /// Upcast this `structref` into an `anyref`.
    ///
    /// This is an infallible, zero-cost cast: every `structref` is an
    /// `anyref` in the Wasm GC type hierarchy.
    #[inline]
    pub fn to_anyref(self) -> ManuallyRooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcast this `structref` into an `eqref`.
    ///
    /// This is an infallible, zero-cost cast: every `structref` is an
    /// `eqref` in the Wasm GC type hierarchy.
    #[inline]
    pub fn to_eqref(self) -> ManuallyRooted<EqRef> {
        self.unchecked_cast()
    }
}
201
impl StructRef {
    /// Synchronously allocate a new `struct` and get a reference to it.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new struct, then this method will automatically trigger a synchronous
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `fields` values' types do not match the field types of the
    /// `allocator`'s struct type, an error is returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if your engine is configured for async; use
    /// [`StructRef::new_async`][crate::StructRef::new_async] to perform
    /// synchronous allocation instead.
    ///
    /// Panics if the allocator, or any of the field values, is not associated
    /// with the given store.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::_new(store.as_context_mut().0, allocator, fields)
    }

    pub(crate) fn _new(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert!(
            !store.async_support(),
            "use `StructRef::new_async` with asynchronous stores"
        );
        // Type check once, up front, so that the GC-and-retry loop below does
        // not repeat the checks on every attempt.
        Self::type_check_fields(store, allocator, fields)?;
        store.retry_after_gc((), |store, ()| {
            Self::new_unchecked(store, allocator, fields)
        })
    }

    /// Asynchronously allocate a new `struct` and get a reference to it.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new struct, then this method will automatically trigger a collection in
    /// an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `fields` values' types do not match the field types of the
    /// `allocator`'s struct type, an error is returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if your engine is not configured for async; use
    /// [`StructRef::new`][crate::StructRef::new] to perform synchronous
    /// allocation instead.
    ///
    /// Panics if the allocator, or any of the field values, is not associated
    /// with the given store.
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::_new_async(store.as_context_mut().0, allocator, fields).await
    }

    #[cfg(feature = "async")]
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert!(
            store.async_support(),
            "use `StructRef::new` with synchronous stores"
        );
        // As in `_new`: type check once, before the GC-and-retry loop.
        Self::type_check_fields(store, allocator, fields)?;
        store
            .retry_after_gc_async((), |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
            .await
    }

    /// Like `Self::new` but callers must ensure that if the store is
    /// configured for async, this is only ever called from on a fiber stack.
    pub(crate) unsafe fn new_maybe_async(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::type_check_fields(store, allocator, fields)?;
        unsafe {
            store.retry_after_gc_maybe_async((), |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
        }
    }

    /// Type check the field values before allocating a new struct.
    ///
    /// Checks that `fields` has exactly as many values as the allocator's
    /// struct type has fields and that each value matches its (unpacked)
    /// field type. Panics if any value belongs to a different store.
    fn type_check_fields(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<(), Error> {
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            // Packed (i8/i16) fields are checked against their unpacked
            // (i32) representation, since `Val` has no packed variants.
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    /// Given that the field values have already been type checked, allocate a
    /// new struct.
    ///
    /// Does not attempt GC+retry on OOM, that is the caller's responsibility.
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `StructRefPre` with the wrong store"
        );

        // Allocate the struct and write each field value into the appropriate
        // offset.
        let structref = store
            .gc_store_mut()?
            .alloc_uninit_struct(allocator.type_index(), &allocator.layout())
            .context("unrecoverable error when allocating new `structref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // From this point on, if we get any errors, then the struct is not
        // fully initialized, so we need to eagerly deallocate it before the
        // next GC where the collector might try to interpret one of the
        // uninitialized fields as a GC reference.
        let mut store = AutoAssertNoGc::new(store);
        match (|| {
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                structref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, structref.into())),
            Err(e) => {
                store.gc_store_mut()?.dealloc_uninit_struct(structref);
                Err(e)
            }
        }
    }

    /// Does this reference belong to the given store?
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Get this `structref`'s type.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn ty(&self, store: impl AsContext) -> Result<StructType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<StructType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(StructType::from_shared_type_index(store.engine(), index))
    }

    /// Does this `structref` match the given type?
    ///
    /// That is, is this struct's type a subtype of the given type?
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store or if the
    /// type is not associated with the store's engine.
    pub fn matches_ty(&self, store: impl AsContext, ty: &StructType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(self._ty(store)?.matches(ty))
    }

    /// Like `_matches_ty`, but returns a descriptive error instead of `false`
    /// when the type does not match.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Get the values of this struct's fields.
    ///
    /// Note that `i8` and `i16` field values are zero-extended into
    /// `Val::I32(_)`s.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        // The returned iterator owns this `AutoAssertNoGc`, which keeps GC
        // from happening for as long as the iterator is alive.
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));

        let index = header.ty().expect("structrefs should have concrete types");
        let ty = StructType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            structref: self,
            store,
            index: 0,
            len,
        });

        // A lazy iterator over the struct's fields, reading one field per
        // `next` call.
        struct Fields<'a, 'b> {
            structref: &'a StructRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                Some(self.structref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    /// Borrow this struct's GC header, or error if unrooted.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(&store)?;
        Ok(store.gc_store()?.header(gc_ref))
    }

    /// Borrow the underlying `VMStructRef`, or error if unrooted.
    fn structref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMStructRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::StructRef));
        Ok(gc_ref.as_structref_unchecked())
    }

    /// Look up this struct's GC layout from its registered type.
    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcStructLayout> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("struct types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            GcLayout::Array(_) => unreachable!(),
            GcLayout::Exception(_) => unreachable!(),
        }
    }

    /// Get the type of the `field`th field, or error if out of bounds.
    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: struct only has {len} fields")
            }
        }
    }

    /// Get this struct's `index`th field.
    ///
    /// Note that `i8` and `i16` field values are zero-extended into
    /// `Val::I32(_)`s.
    ///
    /// # Errors
    ///
    /// Returns an `Err(_)` if the index is out of bounds or this reference has
    /// been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let structref = self.structref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(structref.read_field(store, &layout, field_ty.element_type(), index))
    }

    /// Set this struct's `index`th field.
    ///
    /// # Errors
    ///
    /// Returns an error in the following scenarios:
    ///
    /// * When given a value of the wrong type, such as trying to set an `f32`
    ///   field to an `i64` value.
    ///
    /// * When the field is not mutable.
    ///
    /// * When this struct does not have an `index`th field, i.e. `index` is out
    ///   of bounds.
    ///
    /// * When `value` is a GC reference that has since been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn set_field(&self, mut store: impl AsContextMut, index: usize, value: Val) -> Result<()> {
        self._set_field(store.as_context_mut().0, index, value)
    }

    pub(crate) fn _set_field(
        &self,
        store: &mut StoreOpaque,
        index: usize,
        value: Val,
    ) -> Result<()> {
        assert!(self.comes_from_same_store(store));
        let mut store = AutoAssertNoGc::new(store);

        // Reject writes to immutable fields before touching the heap.
        let field_ty = self.field_ty(&store, index)?;
        ensure!(
            field_ty.mutability().is_var(),
            "cannot set field {index}: field is not mutable"
        );

        value
            .ensure_matches_ty(&store, &field_ty.element_type().unpack())
            .with_context(|| format!("cannot set field {index}: type mismatch"))?;

        let layout = self.layout(&store)?;
        let structref = self.structref(&store)?.unchecked_copy();

        structref.write_field(&mut store, &layout, field_ty.element_type(), index, value)
    }

    /// Get the concrete, shared type index of this struct's type.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));
        Ok(header.ty().expect("structrefs should have concrete types"))
    }

    /// Create a new `Rooted<StructRef>` from the given GC reference.
    ///
    /// `gc_ref` should point to a valid `structref` and should belong to the
    /// store's GC heap. Failure to uphold these invariants is memory safe but
    /// will lead to general incorrectness such as panics or wrong results.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(gc_ref.is_structref(&*store.unwrap_gc_store().gc_heap));
        Rooted::new(store, gc_ref)
    }
}
659
unsafe impl WasmTy for Rooted<StructRef> {
    #[inline]
    fn valtype() -> ValType {
        // Non-nullable `(ref struct)`: a bare `Rooted<StructRef>` can never
        // be null.
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        // Nullability is irrelevant here: this value is never null.
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Abstract supertypes of every structref: always fine.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // Concrete struct types require a subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // All remaining heap types are disjoint from structrefs.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        // Structrefs cross the ABI boundary in `ValRaw`'s `anyref`
        // representation.
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
711
712unsafe impl WasmTy for Option<Rooted<StructRef>> {
713    #[inline]
714    fn valtype() -> ValType {
715        ValType::STRUCTREF
716    }
717
718    #[inline]
719    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
720        self.map_or(true, |x| x.comes_from_same_store(store))
721    }
722
723    #[inline]
724    fn dynamic_concrete_type_check(
725        &self,
726        store: &StoreOpaque,
727        nullable: bool,
728        ty: &HeapType,
729    ) -> Result<()> {
730        match self {
731            Some(s) => Rooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty),
732            None => {
733                ensure!(
734                    nullable,
735                    "expected a non-null reference, but found a null reference"
736                );
737                Ok(())
738            }
739        }
740    }
741
742    #[inline]
743    fn is_vmgcref_and_points_to_object(&self) -> bool {
744        self.is_some()
745    }
746
747    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
748        <Rooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
749    }
750
751    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
752        <Rooted<StructRef>>::wasm_ty_option_load(
753            store,
754            ptr.get_anyref(),
755            StructRef::from_cloned_gc_ref,
756        )
757    }
758}
759
unsafe impl WasmTy for ManuallyRooted<StructRef> {
    #[inline]
    fn valtype() -> ValType {
        // Non-nullable `(ref struct)`: a `ManuallyRooted<StructRef>` can
        // never be null.
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        // Nullability is irrelevant here: this value is never null.
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Abstract supertypes of every structref: always fine.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // Concrete struct types require a subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // All remaining heap types are disjoint from structrefs.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        // Structrefs cross the ABI boundary in `ValRaw`'s `anyref`
        // representation.
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
811
812unsafe impl WasmTy for Option<ManuallyRooted<StructRef>> {
813    #[inline]
814    fn valtype() -> ValType {
815        ValType::STRUCTREF
816    }
817
818    #[inline]
819    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
820        self.as_ref()
821            .map_or(true, |x| x.comes_from_same_store(store))
822    }
823
824    #[inline]
825    fn dynamic_concrete_type_check(
826        &self,
827        store: &StoreOpaque,
828        nullable: bool,
829        ty: &HeapType,
830    ) -> Result<()> {
831        match self {
832            Some(s) => {
833                ManuallyRooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty)
834            }
835            None => {
836                ensure!(
837                    nullable,
838                    "expected a non-null reference, but found a null reference"
839                );
840                Ok(())
841            }
842        }
843    }
844
845    #[inline]
846    fn is_vmgcref_and_points_to_object(&self) -> bool {
847        self.is_some()
848    }
849
850    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
851        <ManuallyRooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
852    }
853
854    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
855        <ManuallyRooted<StructRef>>::wasm_ty_option_load(
856            store,
857            ptr.get_anyref(),
858            StructRef::from_cloned_gc_ref,
859        )
860    }
861}