// wasmtime/runtime/gc/enabled/structref.rs
//! Working with GC `struct` objects.
#![cfg(feature = "gc")]

use crate::runtime::vm::VMGcRef;
use crate::store::{Asyncness, StoreId};
#[cfg(feature = "async")]
use crate::vm::VMStore;
use crate::vm::{self, VMGcHeader, VMStructRef};
use crate::{AnyRef, FieldType};
use crate::{
    AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
    OwnedRooted, RefType, Rooted, StructType, Val, ValRaw, ValType, WasmTy,
    prelude::*,
    store::{AutoAssertNoGc, StoreContextMut, StoreOpaque, StoreResourceLimiter},
};
use alloc::sync::Arc;
use core::mem::{self, MaybeUninit};
use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
19
/// An allocator for a particular Wasm GC struct type.
///
/// Every `StructRefPre` is associated with a particular
/// [`Store`][crate::Store] and a particular [StructType][crate::StructType].
///
/// Reusing an allocator across many allocations amortizes some per-type runtime
/// overheads inside Wasmtime. A `StructRefPre` is to `StructRef`s as an
/// `InstancePre` is to `Instance`s.
///
/// # Example
///
/// ```
/// use wasmtime::*;
///
/// # fn foo() -> Result<()> {
/// let mut config = Config::new();
/// config.wasm_function_references(true);
/// config.wasm_gc(true);
///
/// let engine = Engine::new(&config)?;
/// let mut store = Store::new(&engine, ());
///
/// // Define a struct type.
/// let struct_ty = StructType::new(
///    store.engine(),
///    [FieldType::new(Mutability::Var, StorageType::I8)],
/// )?;
///
/// // Create an allocator for the struct type.
/// let allocator = StructRefPre::new(&mut store, struct_ty);
///
/// {
///     let mut scope = RootScope::new(&mut store);
///
///     // Allocate a bunch of instances of our struct type using the same
///     // allocator! This is faster than creating a new allocator for each
///     // instance we want to allocate.
///     for i in 0..10 {
///         StructRef::new(&mut scope, &allocator, &[Val::I32(i)])?;
///     }
/// }
/// # Ok(())
/// # }
/// # foo().unwrap();
/// ```
pub struct StructRefPre {
    /// The ID of the store this allocator was created for; checked at
    /// allocation time so a `StructRefPre` cannot be used with a
    /// different store.
    store_id: StoreId,
    /// The struct type that this allocator creates instances of.
    ty: StructType,
}
69
70impl StructRefPre {
71    /// Create a new `StructRefPre` that is associated with the given store
72    /// and type.
73    pub fn new(mut store: impl AsContextMut, ty: StructType) -> Self {
74        Self::_new(store.as_context_mut().0, ty)
75    }
76
77    pub(crate) fn _new(store: &mut StoreOpaque, ty: StructType) -> Self {
78        store.insert_gc_host_alloc_type(ty.registered_type().clone());
79        let store_id = store.id();
80
81        StructRefPre { store_id, ty }
82    }
83
84    pub(crate) fn layout(&self) -> &GcStructLayout {
85        self.ty
86            .registered_type()
87            .layout()
88            .expect("struct types have a layout")
89            .unwrap_struct()
90    }
91
92    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
93        self.ty.registered_type().index()
94    }
95}
96
/// A reference to a GC-managed `struct` instance.
///
/// WebAssembly `struct`s are static, fixed-length, ordered sequences of
/// fields. Fields are named by index, not by identifier; in this way, they are
/// similar to Rust's tuples. Each field is mutable or constant and stores
/// unpacked [`Val`][crate::Val]s or packed 8-/16-bit integers.
///
/// Like all WebAssembly references, these are opaque and unforgeable to Wasm:
/// they cannot be faked and Wasm cannot, for example, cast the integer
/// `0x12345678` into a reference, pretend it is a valid `structref`, and trick
/// the host into dereferencing it and segfaulting or worse.
///
/// Note that you can also use `Rooted<StructRef>` and
/// `OwnedRooted<StructRef>` as a type parameter with
/// [`Func::typed`][crate::Func::typed]- and
/// [`Func::wrap`][crate::Func::wrap]-style APIs.
///
/// # Example
///
/// ```
/// use wasmtime::*;
///
/// # fn foo() -> Result<()> {
/// let mut config = Config::new();
/// config.wasm_function_references(true);
/// config.wasm_gc(true);
///
/// let engine = Engine::new(&config)?;
/// let mut store = Store::new(&engine, ());
///
/// // Define a struct type.
/// let struct_ty = StructType::new(
///    store.engine(),
///    [FieldType::new(Mutability::Var, StorageType::I8)],
/// )?;
///
/// // Create an allocator for the struct type.
/// let allocator = StructRefPre::new(&mut store, struct_ty);
///
/// {
///     let mut scope = RootScope::new(&mut store);
///
///     // Allocate an instance of the struct type.
///     let my_struct = StructRef::new(&mut scope, &allocator, &[Val::I32(42)])?;
///
///     // That instance's field should have the expected value.
///     let val = my_struct.field(&mut scope, 0)?.unwrap_i32();
///     assert_eq!(val, 42);
///
///     // And we can update the field's value because it is a mutable field.
///     my_struct.set_field(&mut scope, 0, Val::I32(36))?;
///     let new_val = my_struct.field(&mut scope, 0)?.unwrap_i32();
///     assert_eq!(new_val, 36);
/// }
/// # Ok(())
/// # }
/// # foo().unwrap();
/// ```
#[derive(Debug)]
#[repr(transparent)]
pub struct StructRef {
    // The index of this reference's root in the store. `StructRef` must stay
    // `#[repr(transparent)]` over `GcRootIndex` because
    // `GcRefImpl::transmute_ref` relies on that layout.
    pub(super) inner: GcRootIndex,
}
160
// SAFETY: `StructRef` is `#[repr(transparent)]` over `GcRootIndex`, so viewing
// a `&GcRootIndex` as a `&StructRef` is sound.
unsafe impl GcRefImpl for StructRef {
    /// Reinterpret a `&GcRootIndex` as a `&StructRef` (zero-cost newtype view).
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // Safety: `StructRef` is a newtype of a `GcRootIndex`.
        let me: &Self = unsafe { mem::transmute(index) };

        // Assert we really are just a newtype of a `GcRootIndex`.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}
177
impl Rooted<StructRef> {
    /// Upcast this `structref` into an `anyref`.
    #[inline]
    pub fn to_anyref(self) -> Rooted<AnyRef> {
        // `struct` is a subtype of `any`, so this upcast is always valid.
        self.unchecked_cast()
    }

    /// Upcast this `structref` into an `eqref`.
    #[inline]
    pub fn to_eqref(self) -> Rooted<EqRef> {
        // `struct` is a subtype of `eq`, so this upcast is always valid.
        self.unchecked_cast()
    }
}
191
impl OwnedRooted<StructRef> {
    /// Upcast this `structref` into an `anyref`.
    #[inline]
    pub fn to_anyref(self) -> OwnedRooted<AnyRef> {
        // `struct` is a subtype of `any`, so this upcast is always valid.
        self.unchecked_cast()
    }

    /// Upcast this `structref` into an `eqref`.
    #[inline]
    pub fn to_eqref(self) -> OwnedRooted<EqRef> {
        // `struct` is a subtype of `eq`, so this upcast is always valid.
        self.unchecked_cast()
    }
}
205
impl StructRef {
    /// Synchronously allocate a new `struct` and get a reference to it.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new struct, then this method will automatically trigger a synchronous
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `fields` values' types do not match the field types of the
    /// `allocator`'s struct type, an error is returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// If `store` is configured with a
    /// [`ResourceLimiterAsync`](crate::ResourceLimiterAsync) then an error
    /// will be returned because [`StructRef::new_async`] should be used
    /// instead.
    ///
    /// # Panics
    ///
    /// Panics if the allocator, or any of the field values, is not associated
    /// with the given store.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        // Reject async-only resource limiters up front; the shared async
        // implementation below is then expected to complete immediately,
        // which `vm::assert_ready` enforces.
        let (mut limiter, store) = store
            .as_context_mut()
            .0
            .validate_sync_resource_limiter_and_store_opaque()?;
        vm::assert_ready(Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            fields,
            Asyncness::No,
        ))
    }

    /// Asynchronously allocate a new `struct` and get a reference to it.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new struct, then this method will automatically trigger a synchronous
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `fields` values' types do not match the field types of the
    /// `allocator`'s struct type, an error is returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if the allocator, or any of the field values, is not associated
    /// with the given store.
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_async(store, limiter.as_mut(), allocator, fields, Asyncness::Yes).await
    }

    /// Shared allocation path for both `new` and `new_async`.
    ///
    /// Type-checks the field values, then allocates, retrying once after a GC
    /// if the heap is out of memory (`retry_after_gc_async`). `asyncness`
    /// selects whether that retry machinery may suspend.
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &StructRefPre,
        fields: &[Val],
        asyncness: Asyncness,
    ) -> Result<Rooted<StructRef>> {
        Self::type_check_fields(store, allocator, fields)?;
        store
            .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
            .await
    }

    /// Type check the field values before allocating a new struct.
    fn type_check_fields(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<(), Error> {
        // Arity must match exactly; extra or missing fields are an error.
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            // Cross-store values are a caller bug, hence a panic rather than
            // an `Err` (see the `# Panics` docs on `new`/`new_async`).
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            // Packed i8/i16 fields are checked against their unpacked form.
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    /// Given that the field values have already been type checked, allocate a
    /// new struct.
    ///
    /// Does not attempt GC+retry on OOM, that is the caller's responsibility.
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `StructRefPre` with the wrong store"
        );

        // Allocate the struct and write each field value into the appropriate
        // offset.
        let structref = store
            .require_gc_store_mut()?
            .alloc_uninit_struct(allocator.type_index(), &allocator.layout())
            .context("unrecoverable error when allocating new `structref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // From this point on, if we get any errors, then the struct is not
        // fully initialized, so we need to eagerly deallocate it before the
        // next GC where the collector might try to interpret one of the
        // uninitialized fields as a GC reference.
        let mut store = AutoAssertNoGc::new(store);
        // Immediately-invoked closure so that `?` inside it aborts field
        // initialization without returning from `new_unchecked`, letting the
        // `Err` arm below deallocate the half-initialized struct.
        match (|| {
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                structref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, structref.into())),
            Err(e) => {
                store
                    .require_gc_store_mut()?
                    .dealloc_uninit_struct(structref);
                Err(e)
            }
        }
    }

    /// Does this reference belong to `store`'s root set?
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Get this `structref`'s type.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn ty(&self, store: impl AsContext) -> Result<StructType> {
        self._ty(store.as_context().0)
    }

    /// Internal `ty` taking the raw store; asserts same-store.
    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<StructType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(StructType::from_shared_type_index(store.engine(), index))
    }

    /// Does this `structref` match the given type?
    ///
    /// That is, is this struct's type a subtype of the given type?
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store or if the
    /// type is not associated with the store's engine.
    pub fn matches_ty(&self, store: impl AsContext, ty: &StructType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    /// Internal `matches_ty` taking the raw store; asserts same-store.
    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(self._ty(store)?.matches(ty))
    }

    /// Like `_matches_ty` but returns a descriptive type-mismatch error
    /// instead of `false`, and an error (not a panic) on cross-store use.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Get the values of this struct's fields.
    ///
    /// Note that `i8` and `i16` field values are zero-extended into
    /// `Val::I32(_)`s.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    /// Internal `fields` taking the raw store.
    ///
    /// Returns a lazy iterator that holds an `AutoAssertNoGc` guard for its
    /// whole lifetime, so no GC can move the struct while iterating.
    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        let store = AutoAssertNoGc::new(store);

        // Read the concrete type out of the object header to learn how many
        // fields there are.
        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));

        let index = header.ty().expect("structrefs should have concrete types");
        let ty = StructType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        // Explicit `return` so the iterator's item definitions can follow it
        // inside this function body.
        return Ok(Fields {
            structref: self,
            store,
            index: 0,
            len,
        });

        // Iterator over a struct's field values; owns the no-GC guard.
        struct Fields<'a, 'b> {
            structref: &'a StructRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                // In-bounds index on a live struct, so `_field` cannot fail.
                Some(self.structref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    /// Borrow this struct's GC object header. Errors if unrooted.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.require_gc_store()?.header(gc_ref))
    }

    /// Borrow the underlying `VMStructRef`. Errors if unrooted.
    fn structref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMStructRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        // The kind check justifies the unchecked downcast below.
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::StructRef));
        Ok(gc_ref.as_structref_unchecked())
    }

    /// Look up this struct's GC heap layout from its registered type.
    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<Arc<GcStructLayout>> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("struct types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            GcLayout::Array(_) => unreachable!(),
        }
    }

    /// Get the type of the `field`th field, or a descriptive error when the
    /// index is out of bounds.
    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: struct only has {len} fields")
            }
        }
    }

    /// Get this struct's `index`th field.
    ///
    /// Note that `i8` and `i16` field values are zero-extended into
    /// `Val::I32(_)`s.
    ///
    /// # Errors
    ///
    /// Returns an `Err(_)` if the index is out of bounds or this reference has
    /// been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    /// Internal `field` operating under an existing no-GC guard.
    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let structref = self.structref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(structref.read_field(store, &layout, field_ty.element_type(), index))
    }

    /// Set this struct's `index`th field.
    ///
    /// # Errors
    ///
    /// Returns an error in the following scenarios:
    ///
    /// * When given a value of the wrong type, such as trying to set an `f32`
    ///   field to an `i64` value.
    ///
    /// * When the field is not mutable.
    ///
    /// * When this struct does not have an `index`th field, i.e. `index` is out
    ///   of bounds.
    ///
    /// * When `value` is a GC reference that has since been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn set_field(&self, mut store: impl AsContextMut, index: usize, value: Val) -> Result<()> {
        self._set_field(store.as_context_mut().0, index, value)
    }

    /// Internal `set_field`: checks mutability and value type before writing.
    pub(crate) fn _set_field(
        &self,
        store: &mut StoreOpaque,
        index: usize,
        value: Val,
    ) -> Result<()> {
        assert!(self.comes_from_same_store(store));
        let mut store = AutoAssertNoGc::new(store);

        let field_ty = self.field_ty(&store, index)?;
        ensure!(
            field_ty.mutability().is_var(),
            "cannot set field {index}: field is not mutable"
        );

        value
            .ensure_matches_ty(&store, &field_ty.element_type().unpack())
            .with_context(|| format!("cannot set field {index}: type mismatch"))?;

        let layout = self.layout(&store)?;
        let structref = self.structref(&store)?.unchecked_copy();

        structref.write_field(&mut store, &layout, field_ty.element_type(), index, value)
    }

    /// Read this struct's concrete shared type index from its GC header.
    /// Errors if the reference has been unrooted.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));
        Ok(header.ty().expect("structrefs should have concrete types"))
    }

    /// Create a new `Rooted<StructRef>` from the given GC reference.
    ///
    /// `gc_ref` should point to a valid `structref` and should belong to the
    /// store's GC heap. Failure to uphold these invariants is memory safe but
    /// will lead to general incorrectness such as panics or wrong results.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(gc_ref.is_structref(&*store.unwrap_gc_store().gc_heap));
        Rooted::new(store, gc_ref)
    }
}
639
640unsafe impl WasmTy for Rooted<StructRef> {
641    #[inline]
642    fn valtype() -> ValType {
643        ValType::Ref(RefType::new(false, HeapType::Struct))
644    }
645
646    #[inline]
647    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
648        self.comes_from_same_store(store)
649    }
650
651    #[inline]
652    fn dynamic_concrete_type_check(
653        &self,
654        store: &StoreOpaque,
655        _nullable: bool,
656        ty: &HeapType,
657    ) -> Result<()> {
658        match ty {
659            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
660            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),
661
662            HeapType::Extern
663            | HeapType::NoExtern
664            | HeapType::Func
665            | HeapType::ConcreteFunc(_)
666            | HeapType::NoFunc
667            | HeapType::I31
668            | HeapType::Array
669            | HeapType::ConcreteArray(_)
670            | HeapType::None
671            | HeapType::NoCont
672            | HeapType::Cont
673            | HeapType::ConcreteCont(_)
674            | HeapType::NoExn
675            | HeapType::Exn
676            | HeapType::ConcreteExn(_) => bail!(
677                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
678                self._ty(store)?,
679            ),
680        }
681    }
682
683    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
684        self.wasm_ty_store(store, ptr, ValRaw::anyref)
685    }
686
687    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
688        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
689    }
690}
691
692unsafe impl WasmTy for Option<Rooted<StructRef>> {
693    #[inline]
694    fn valtype() -> ValType {
695        ValType::STRUCTREF
696    }
697
698    #[inline]
699    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
700        self.map_or(true, |x| x.comes_from_same_store(store))
701    }
702
703    #[inline]
704    fn dynamic_concrete_type_check(
705        &self,
706        store: &StoreOpaque,
707        nullable: bool,
708        ty: &HeapType,
709    ) -> Result<()> {
710        match self {
711            Some(s) => Rooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty),
712            None => {
713                ensure!(
714                    nullable,
715                    "expected a non-null reference, but found a null reference"
716                );
717                Ok(())
718            }
719        }
720    }
721
722    #[inline]
723    fn is_vmgcref_and_points_to_object(&self) -> bool {
724        self.is_some()
725    }
726
727    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
728        <Rooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
729    }
730
731    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
732        <Rooted<StructRef>>::wasm_ty_option_load(
733            store,
734            ptr.get_anyref(),
735            StructRef::from_cloned_gc_ref,
736        )
737    }
738}
739
740unsafe impl WasmTy for OwnedRooted<StructRef> {
741    #[inline]
742    fn valtype() -> ValType {
743        ValType::Ref(RefType::new(false, HeapType::Struct))
744    }
745
746    #[inline]
747    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
748        self.comes_from_same_store(store)
749    }
750
751    #[inline]
752    fn dynamic_concrete_type_check(
753        &self,
754        store: &StoreOpaque,
755        _: bool,
756        ty: &HeapType,
757    ) -> Result<()> {
758        match ty {
759            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
760            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),
761
762            HeapType::Extern
763            | HeapType::NoExtern
764            | HeapType::Func
765            | HeapType::ConcreteFunc(_)
766            | HeapType::NoFunc
767            | HeapType::I31
768            | HeapType::Array
769            | HeapType::ConcreteArray(_)
770            | HeapType::None
771            | HeapType::NoCont
772            | HeapType::Cont
773            | HeapType::ConcreteCont(_)
774            | HeapType::NoExn
775            | HeapType::Exn
776            | HeapType::ConcreteExn(_) => bail!(
777                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
778                self._ty(store)?,
779            ),
780        }
781    }
782
783    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
784        self.wasm_ty_store(store, ptr, ValRaw::anyref)
785    }
786
787    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
788        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
789    }
790}
791
792unsafe impl WasmTy for Option<OwnedRooted<StructRef>> {
793    #[inline]
794    fn valtype() -> ValType {
795        ValType::STRUCTREF
796    }
797
798    #[inline]
799    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
800        self.as_ref()
801            .map_or(true, |x| x.comes_from_same_store(store))
802    }
803
804    #[inline]
805    fn dynamic_concrete_type_check(
806        &self,
807        store: &StoreOpaque,
808        nullable: bool,
809        ty: &HeapType,
810    ) -> Result<()> {
811        match self {
812            Some(s) => {
813                OwnedRooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty)
814            }
815            None => {
816                ensure!(
817                    nullable,
818                    "expected a non-null reference, but found a null reference"
819                );
820                Ok(())
821            }
822        }
823    }
824
825    #[inline]
826    fn is_vmgcref_and_points_to_object(&self) -> bool {
827        self.is_some()
828    }
829
830    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
831        <OwnedRooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
832    }
833
834    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
835        <OwnedRooted<StructRef>>::wasm_ty_option_load(
836            store,
837            ptr.get_anyref(),
838            StructRef::from_cloned_gc_ref,
839        )
840    }
841}