// Source: wasmtime — runtime/gc/enabled/structref.rs
1//! Working with GC `struct` objects.
2
3use crate::runtime::vm::VMGcRef;
4use crate::store::{Asyncness, StoreId};
5#[cfg(feature = "async")]
6use crate::vm::VMStore;
7use crate::vm::{self, VMGcHeader, VMStructRef};
8use crate::{AnyRef, FieldType};
9use crate::{
10    AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
11    OwnedRooted, RefType, Rooted, StructType, Val, ValRaw, ValType, WasmTy,
12    prelude::*,
13    store::{AutoAssertNoGc, StoreContextMut, StoreOpaque, StoreResourceLimiter},
14};
15use core::mem::{self, MaybeUninit};
16use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
17
/// An allocator for a particular Wasm GC struct type.
///
/// Every `StructRefPre` is associated with a particular
/// [`Store`][crate::Store] and a particular [StructType][crate::StructType].
///
/// Reusing an allocator across many allocations amortizes some per-type runtime
/// overheads inside Wasmtime. A `StructRefPre` is to `StructRef`s as an
/// `InstancePre` is to `Instance`s.
///
/// # Example
///
/// ```
/// use wasmtime::*;
///
/// # fn foo() -> Result<()> {
/// let mut config = Config::new();
/// config.wasm_function_references(true);
/// config.wasm_gc(true);
///
/// let engine = Engine::new(&config)?;
/// let mut store = Store::new(&engine, ());
///
/// // Define a struct type.
/// let struct_ty = StructType::new(
///    store.engine(),
///    [FieldType::new(Mutability::Var, StorageType::I8)],
/// )?;
///
/// // Create an allocator for the struct type.
/// let allocator = StructRefPre::new(&mut store, struct_ty);
///
/// {
///     let mut scope = RootScope::new(&mut store);
///
///     // Allocate a bunch of instances of our struct type using the same
///     // allocator! This is faster than creating a new allocator for each
///     // instance we want to allocate.
///     for i in 0..10 {
///         StructRef::new(&mut scope, &allocator, &[Val::I32(i)])?;
///     }
/// }
/// # Ok(())
/// # }
/// # foo().unwrap();
/// ```
pub struct StructRefPre {
    // The id of the store this allocator was created for; checked against the
    // store actually used at allocation time (see `StructRef::new_unchecked`).
    store_id: StoreId,
    // The struct type this allocator creates instances of.
    ty: StructType,
}
67
68impl StructRefPre {
69    /// Create a new `StructRefPre` that is associated with the given store
70    /// and type.
71    pub fn new(mut store: impl AsContextMut, ty: StructType) -> Self {
72        Self::_new(store.as_context_mut().0, ty)
73    }
74
75    pub(crate) fn _new(store: &mut StoreOpaque, ty: StructType) -> Self {
76        store.insert_gc_host_alloc_type(ty.registered_type().clone());
77        let store_id = store.id();
78
79        StructRefPre { store_id, ty }
80    }
81
82    pub(crate) fn layout(&self) -> &GcStructLayout {
83        self.ty
84            .registered_type()
85            .layout()
86            .expect("struct types have a layout")
87            .unwrap_struct()
88    }
89
90    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
91        self.ty.registered_type().index()
92    }
93}
94
/// A reference to a GC-managed `struct` instance.
///
/// WebAssembly `struct`s are static, fixed-length, ordered sequences of
/// fields. Fields are named by index, not by identifier; in this way, they are
/// similar to Rust's tuples. Each field is mutable or constant and stores
/// unpacked [`Val`][crate::Val]s or packed 8-/16-bit integers.
///
/// Like all WebAssembly references, these are opaque and unforgeable to Wasm:
/// they cannot be faked and Wasm cannot, for example, cast the integer
/// `0x12345678` into a reference, pretend it is a valid `structref`, and trick
/// the host into dereferencing it and segfaulting or worse.
///
/// Note that you can also use `Rooted<StructRef>` and
/// `OwnedRooted<StructRef>` as a type parameter with
/// [`Func::typed`][crate::Func::typed]- and
/// [`Func::wrap`][crate::Func::wrap]-style APIs.
///
/// # Example
///
/// ```
/// use wasmtime::*;
///
/// # fn foo() -> Result<()> {
/// let mut config = Config::new();
/// config.wasm_function_references(true);
/// config.wasm_gc(true);
///
/// let engine = Engine::new(&config)?;
/// let mut store = Store::new(&engine, ());
///
/// // Define a struct type.
/// let struct_ty = StructType::new(
///    store.engine(),
///    [FieldType::new(Mutability::Var, StorageType::I8)],
/// )?;
///
/// // Create an allocator for the struct type.
/// let allocator = StructRefPre::new(&mut store, struct_ty);
///
/// {
///     let mut scope = RootScope::new(&mut store);
///
///     // Allocate an instance of the struct type.
///     let my_struct = StructRef::new(&mut scope, &allocator, &[Val::I32(42)])?;
///
///     // That instance's field should have the expected value.
///     let val = my_struct.field(&mut scope, 0)?.unwrap_i32();
///     assert_eq!(val, 42);
///
///     // And we can update the field's value because it is a mutable field.
///     my_struct.set_field(&mut scope, 0, Val::I32(36))?;
///     let new_val = my_struct.field(&mut scope, 0)?.unwrap_i32();
///     assert_eq!(new_val, 36);
/// }
/// # Ok(())
/// # }
/// # foo().unwrap();
/// ```
#[derive(Debug)]
// `repr(transparent)` over `GcRootIndex` is what makes the transmute in
// `GcRefImpl::transmute_ref` sound.
#[repr(transparent)]
pub struct StructRef {
    // The rooted GC reference this `StructRef` is a newtype of.
    pub(super) inner: GcRootIndex,
}
158
unsafe impl GcRefImpl for StructRef {
    // Reinterpret a borrowed `GcRootIndex` as a borrowed `StructRef`.
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // Safety: `StructRef` is a newtype of a `GcRootIndex`, and is declared
        // `#[repr(transparent)]`, so the two have identical layout.
        let me: &Self = unsafe { mem::transmute(index) };

        // Assert we really are just a newtype of a `GcRootIndex`.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}
175
176impl Rooted<StructRef> {
177    /// Upcast this `structref` into an `anyref`.
178    #[inline]
179    pub fn to_anyref(self) -> Rooted<AnyRef> {
180        self.unchecked_cast()
181    }
182
183    /// Upcast this `structref` into an `eqref`.
184    #[inline]
185    pub fn to_eqref(self) -> Rooted<EqRef> {
186        self.unchecked_cast()
187    }
188}
189
190impl OwnedRooted<StructRef> {
191    /// Upcast this `structref` into an `anyref`.
192    #[inline]
193    pub fn to_anyref(self) -> OwnedRooted<AnyRef> {
194        self.unchecked_cast()
195    }
196
197    /// Upcast this `structref` into an `eqref`.
198    #[inline]
199    pub fn to_eqref(self) -> OwnedRooted<EqRef> {
200        self.unchecked_cast()
201    }
202}
203
204impl StructRef {
205    /// Synchronously allocate a new `struct` and get a reference to it.
206    ///
207    /// # Automatic Garbage Collection
208    ///
209    /// If the GC heap is at capacity, and there isn't room for allocating this
210    /// new struct, then this method will automatically trigger a synchronous
211    /// collection in an attempt to free up space in the GC heap.
212    ///
213    /// # Errors
214    ///
215    /// If the given `fields` values' types do not match the field types of the
216    /// `allocator`'s struct type, an error is returned.
217    ///
218    /// If the allocation cannot be satisfied because the GC heap is currently
219    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
220    /// error is returned. The allocation might succeed on a second attempt if
221    /// you drop some rooted GC references and try again.
222    ///
223    /// If `store` is configured with a
224    /// [`ResourceLimiterAsync`](crate::ResourceLimiterAsync) then an error
225    /// will be returned because [`StructRef::new_async`] should be used
226    /// instead.
227    ///
228    /// # Panics
229    ///
230    /// Panics if the allocator, or any of the field values, is not associated
231    /// with the given store.
232    pub fn new(
233        mut store: impl AsContextMut,
234        allocator: &StructRefPre,
235        fields: &[Val],
236    ) -> Result<Rooted<StructRef>> {
237        let (mut limiter, store) = store
238            .as_context_mut()
239            .0
240            .validate_sync_resource_limiter_and_store_opaque()?;
241        vm::assert_ready(Self::_new_async(
242            store,
243            limiter.as_mut(),
244            allocator,
245            fields,
246            Asyncness::No,
247        ))
248    }
249
250    /// Asynchronously allocate a new `struct` and get a reference to it.
251    ///
252    /// # Automatic Garbage Collection
253    ///
254    /// If the GC heap is at capacity, and there isn't room for allocating this
255    /// new struct, then this method will automatically trigger a synchronous
256    /// collection in an attempt to free up space in the GC heap.
257    ///
258    /// # Errors
259    ///
260    /// If the given `fields` values' types do not match the field types of the
261    /// `allocator`'s struct type, an error is returned.
262    ///
263    /// If the allocation cannot be satisfied because the GC heap is currently
264    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
265    /// error is returned. The allocation might succeed on a second attempt if
266    /// you drop some rooted GC references and try again.
267    ///
268    /// # Panics
269    ///
270    /// Panics if the allocator, or any of the field values, is not associated
271    /// with the given store.
272    #[cfg(feature = "async")]
273    pub async fn new_async(
274        mut store: impl AsContextMut,
275        allocator: &StructRefPre,
276        fields: &[Val],
277    ) -> Result<Rooted<StructRef>> {
278        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
279        Self::_new_async(store, limiter.as_mut(), allocator, fields, Asyncness::Yes).await
280    }
281
282    pub(crate) async fn _new_async(
283        store: &mut StoreOpaque,
284        limiter: Option<&mut StoreResourceLimiter<'_>>,
285        allocator: &StructRefPre,
286        fields: &[Val],
287        asyncness: Asyncness,
288    ) -> Result<Rooted<StructRef>> {
289        Self::type_check_fields(store, allocator, fields)?;
290        store
291            .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
292                Self::new_unchecked(store, allocator, fields)
293            })
294            .await
295    }
296
297    /// Type check the field values before allocating a new struct.
298    fn type_check_fields(
299        store: &mut StoreOpaque,
300        allocator: &StructRefPre,
301        fields: &[Val],
302    ) -> Result<(), Error> {
303        let expected_len = allocator.ty.fields().len();
304        let actual_len = fields.len();
305        ensure!(
306            actual_len == expected_len,
307            "expected {expected_len} fields, got {actual_len}"
308        );
309        for (ty, val) in allocator.ty.fields().zip(fields) {
310            assert!(
311                val.comes_from_same_store(store),
312                "field value comes from the wrong store",
313            );
314            let ty = ty.element_type().unpack();
315            val.ensure_matches_ty(store, ty)
316                .context("field type mismatch")?;
317        }
318        Ok(())
319    }
320
321    /// Given that the field values have already been type checked, allocate a
322    /// new struct.
323    ///
324    /// Does not attempt GC+retry on OOM, that is the caller's responsibility.
325    fn new_unchecked(
326        store: &mut StoreOpaque,
327        allocator: &StructRefPre,
328        fields: &[Val],
329    ) -> Result<Rooted<StructRef>> {
330        assert_eq!(
331            store.id(),
332            allocator.store_id,
333            "attempted to use a `StructRefPre` with the wrong store"
334        );
335
336        // Allocate the struct and write each field value into the appropriate
337        // offset.
338        let structref = store
339            .require_gc_store_mut()?
340            .alloc_uninit_struct(allocator.type_index(), &allocator.layout())
341            .context("unrecoverable error when allocating new `structref`")?
342            .map_err(|n| GcHeapOutOfMemory::new((), n))?;
343
344        // From this point on, if we get any errors, then the struct is not
345        // fully initialized, so we need to eagerly deallocate it before the
346        // next GC where the collector might try to interpret one of the
347        // uninitialized fields as a GC reference.
348        let mut store = AutoAssertNoGc::new(store);
349        match (|| {
350            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
351                structref.initialize_field(
352                    &mut store,
353                    allocator.layout(),
354                    ty.element_type(),
355                    index,
356                    *val,
357                )?;
358            }
359            Ok(())
360        })() {
361            Ok(()) => Ok(Rooted::new(&mut store, structref.into())),
362            Err(e) => {
363                store
364                    .require_gc_store_mut()?
365                    .dealloc_uninit_struct(structref);
366                Err(e)
367            }
368        }
369    }
370
371    #[inline]
372    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
373        self.inner.comes_from_same_store(store)
374    }
375
376    /// Get this `structref`'s type.
377    ///
378    /// # Errors
379    ///
380    /// Return an error if this reference has been unrooted.
381    ///
382    /// # Panics
383    ///
384    /// Panics if this reference is associated with a different store.
385    pub fn ty(&self, store: impl AsContext) -> Result<StructType> {
386        self._ty(store.as_context().0)
387    }
388
389    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<StructType> {
390        assert!(self.comes_from_same_store(store));
391        let index = self.type_index(store)?;
392        Ok(StructType::from_shared_type_index(store.engine(), index))
393    }
394
395    /// Does this `structref` match the given type?
396    ///
397    /// That is, is this struct's type a subtype of the given type?
398    ///
399    /// # Errors
400    ///
401    /// Return an error if this reference has been unrooted.
402    ///
403    /// # Panics
404    ///
405    /// Panics if this reference is associated with a different store or if the
406    /// type is not associated with the store's engine.
407    pub fn matches_ty(&self, store: impl AsContext, ty: &StructType) -> Result<bool> {
408        self._matches_ty(store.as_context().0, ty)
409    }
410
411    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<bool> {
412        assert!(self.comes_from_same_store(store));
413        Ok(self._ty(store)?.matches(ty))
414    }
415
416    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<()> {
417        if !self.comes_from_same_store(store) {
418            bail!("function used with wrong store");
419        }
420        if self._matches_ty(store, ty)? {
421            Ok(())
422        } else {
423            let actual_ty = self._ty(store)?;
424            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
425        }
426    }
427
428    /// Get the values of this struct's fields.
429    ///
430    /// Note that `i8` and `i16` field values are zero-extended into
431    /// `Val::I32(_)`s.
432    ///
433    /// # Errors
434    ///
435    /// Return an error if this reference has been unrooted.
436    ///
437    /// # Panics
438    ///
439    /// Panics if this reference is associated with a different store.
440    pub fn fields<'a, T: 'static>(
441        &'a self,
442        store: impl Into<StoreContextMut<'a, T>>,
443    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
444        self._fields(store.into().0)
445    }
446
447    pub(crate) fn _fields<'a>(
448        &'a self,
449        store: &'a mut StoreOpaque,
450    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
451        assert!(self.comes_from_same_store(store));
452        let store = AutoAssertNoGc::new(store);
453
454        let gc_ref = self.inner.try_gc_ref(&store)?;
455        let header = store.require_gc_store()?.header(gc_ref);
456        debug_assert!(header.kind().matches(VMGcKind::StructRef));
457
458        let index = header.ty().expect("structrefs should have concrete types");
459        let ty = StructType::from_shared_type_index(store.engine(), index);
460        let len = ty.fields().len();
461
462        return Ok(Fields {
463            structref: self,
464            store,
465            index: 0,
466            len,
467        });
468
469        struct Fields<'a, 'b> {
470            structref: &'a StructRef,
471            store: AutoAssertNoGc<'b>,
472            index: usize,
473            len: usize,
474        }
475
476        impl Iterator for Fields<'_, '_> {
477            type Item = Val;
478
479            #[inline]
480            fn next(&mut self) -> Option<Self::Item> {
481                let i = self.index;
482                debug_assert!(i <= self.len);
483                if i >= self.len {
484                    return None;
485                }
486                self.index += 1;
487                Some(self.structref._field(&mut self.store, i).unwrap())
488            }
489
490            #[inline]
491            fn size_hint(&self) -> (usize, Option<usize>) {
492                let len = self.len - self.index;
493                (len, Some(len))
494            }
495        }
496
497        impl ExactSizeIterator for Fields<'_, '_> {
498            #[inline]
499            fn len(&self) -> usize {
500                self.len - self.index
501            }
502        }
503    }
504
505    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
506        assert!(self.comes_from_same_store(&store));
507        let gc_ref = self.inner.try_gc_ref(store)?;
508        Ok(store.require_gc_store()?.header(gc_ref))
509    }
510
511    fn structref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMStructRef> {
512        assert!(self.comes_from_same_store(&store));
513        let gc_ref = self.inner.try_gc_ref(store)?;
514        debug_assert!(self.header(store)?.kind().matches(VMGcKind::StructRef));
515        Ok(gc_ref.as_structref_unchecked())
516    }
517
518    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcStructLayout> {
519        assert!(self.comes_from_same_store(&store));
520        let type_index = self.type_index(store)?;
521        let layout = store
522            .engine()
523            .signatures()
524            .layout(type_index)
525            .expect("struct types should have GC layouts");
526        match layout {
527            GcLayout::Struct(s) => Ok(s),
528            GcLayout::Array(_) => unreachable!(),
529        }
530    }
531
532    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
533        let ty = self._ty(store)?;
534        match ty.field(field) {
535            Some(f) => Ok(f),
536            None => {
537                let len = ty.fields().len();
538                bail!("cannot access field {field}: struct only has {len} fields")
539            }
540        }
541    }
542
543    /// Get this struct's `index`th field.
544    ///
545    /// Note that `i8` and `i16` field values are zero-extended into
546    /// `Val::I32(_)`s.
547    ///
548    /// # Errors
549    ///
550    /// Returns an `Err(_)` if the index is out of bounds or this reference has
551    /// been unrooted.
552    ///
553    /// # Panics
554    ///
555    /// Panics if this reference is associated with a different store.
556    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
557        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
558        self._field(&mut store, index)
559    }
560
561    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
562        assert!(self.comes_from_same_store(store));
563        let structref = self.structref(store)?.unchecked_copy();
564        let field_ty = self.field_ty(store, index)?;
565        let layout = self.layout(store)?;
566        Ok(structref.read_field(store, &layout, field_ty.element_type(), index))
567    }
568
569    /// Set this struct's `index`th field.
570    ///
571    /// # Errors
572    ///
573    /// Returns an error in the following scenarios:
574    ///
575    /// * When given a value of the wrong type, such as trying to set an `f32`
576    ///   field to an `i64` value.
577    ///
578    /// * When the field is not mutable.
579    ///
580    /// * When this struct does not have an `index`th field, i.e. `index` is out
581    ///   of bounds.
582    ///
583    /// * When `value` is a GC reference that has since been unrooted.
584    ///
585    /// # Panics
586    ///
587    /// Panics if this reference is associated with a different store.
588    pub fn set_field(&self, mut store: impl AsContextMut, index: usize, value: Val) -> Result<()> {
589        self._set_field(store.as_context_mut().0, index, value)
590    }
591
592    pub(crate) fn _set_field(
593        &self,
594        store: &mut StoreOpaque,
595        index: usize,
596        value: Val,
597    ) -> Result<()> {
598        assert!(self.comes_from_same_store(store));
599        let mut store = AutoAssertNoGc::new(store);
600
601        let field_ty = self.field_ty(&store, index)?;
602        ensure!(
603            field_ty.mutability().is_var(),
604            "cannot set field {index}: field is not mutable"
605        );
606
607        value
608            .ensure_matches_ty(&store, &field_ty.element_type().unpack())
609            .with_context(|| format!("cannot set field {index}: type mismatch"))?;
610
611        let layout = self.layout(&store)?;
612        let structref = self.structref(&store)?.unchecked_copy();
613
614        structref.write_field(&mut store, &layout, field_ty.element_type(), index, value)
615    }
616
617    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
618        let gc_ref = self.inner.try_gc_ref(store)?;
619        let header = store.require_gc_store()?.header(gc_ref);
620        debug_assert!(header.kind().matches(VMGcKind::StructRef));
621        Ok(header.ty().expect("structrefs should have concrete types"))
622    }
623
624    /// Create a new `Rooted<StructRef>` from the given GC reference.
625    ///
626    /// `gc_ref` should point to a valid `structref` and should belong to the
627    /// store's GC heap. Failure to uphold these invariants is memory safe but
628    /// will lead to general incorrectness such as panics or wrong results.
629    pub(crate) fn from_cloned_gc_ref(
630        store: &mut AutoAssertNoGc<'_>,
631        gc_ref: VMGcRef,
632    ) -> Rooted<Self> {
633        debug_assert!(gc_ref.is_structref(&*store.unwrap_gc_store().gc_heap));
634        Rooted::new(store, gc_ref)
635    }
636}
637
unsafe impl WasmTy for Rooted<StructRef> {
    // A rooted `structref` is a non-nullable `(ref struct)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    // `_nullable` is ignored: a `Rooted<StructRef>` is never null, so it
    // satisfies both nullable and non-nullable reference types.
    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Upcasts along `struct <: eq <: any` always succeed.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // Concrete struct types require a dynamic subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // All other heap types are disjoint from `struct`.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    // `structref`s are passed to and from Wasm in the `anyref` representation.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
689
unsafe impl WasmTy for Option<Rooted<StructRef>> {
    // An optional `structref` is the nullable `structref` value type, with
    // `None` standing in for the null reference.
    #[inline]
    fn valtype() -> ValType {
        ValType::STRUCTREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        // `None` (null) belongs to every store.
        self.map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            // Non-null values defer to the non-optional check.
            Some(s) => Rooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty),
            // Null only matches nullable reference types.
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        // Only a non-null reference actually points at a GC object.
        self.is_some()
    }

    // `structref`s are passed to and from Wasm in the `anyref` representation.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <Rooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <Rooted<StructRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            StructRef::from_cloned_gc_ref,
        )
    }
}
737
unsafe impl WasmTy for OwnedRooted<StructRef> {
    // An owned-rooted `structref` is a non-nullable `(ref struct)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    // Nullability is ignored: an `OwnedRooted<StructRef>` is never null.
    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Upcasts along `struct <: eq <: any` always succeed.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // Concrete struct types require a dynamic subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // All other heap types are disjoint from `struct`.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    // `structref`s are passed to and from Wasm in the `anyref` representation.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
789
unsafe impl WasmTy for Option<OwnedRooted<StructRef>> {
    // An optional owned-rooted `structref` is the nullable `structref` value
    // type, with `None` standing in for the null reference.
    #[inline]
    fn valtype() -> ValType {
        ValType::STRUCTREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        // `None` (null) belongs to every store.
        self.as_ref()
            .map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            // Non-null values defer to the non-optional check.
            Some(s) => {
                OwnedRooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty)
            }
            // Null only matches nullable reference types.
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        // Only a non-null reference actually points at a GC object.
        self.is_some()
    }

    // `structref`s are passed to and from Wasm in the `anyref` representation.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <OwnedRooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <OwnedRooted<StructRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            StructRef::from_cloned_gc_ref,
        )
    }
}