// wasmtime/runtime/gc/enabled/structref.rs
1//! Working with GC `struct` objects.
2
3use crate::runtime::vm::VMGcRef;
4use crate::store::{Asyncness, StoreId};
5#[cfg(feature = "async")]
6use crate::vm::VMStore;
7use crate::vm::{self, VMGcHeader, VMStructRef};
8use crate::{AnyRef, FieldType};
9use crate::{
10    AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
11    OwnedRooted, RefType, Rooted, StructType, Val, ValRaw, ValType, WasmTy,
12    prelude::*,
13    store::{AutoAssertNoGc, StoreContextMut, StoreOpaque, StoreResourceLimiter},
14};
15use alloc::sync::Arc;
16use core::mem::{self, MaybeUninit};
17use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
18
/// An allocator for a particular Wasm GC struct type.
///
/// Every `StructRefPre` is associated with a particular
/// [`Store`][crate::Store] and a particular [StructType][crate::StructType].
///
/// Reusing an allocator across many allocations amortizes some per-type runtime
/// overheads inside Wasmtime. A `StructRefPre` is to `StructRef`s as an
/// `InstancePre` is to `Instance`s.
///
/// # Example
///
/// ```
/// use wasmtime::*;
///
/// # fn foo() -> Result<()> {
/// let mut config = Config::new();
/// config.wasm_function_references(true);
/// config.wasm_gc(true);
///
/// let engine = Engine::new(&config)?;
/// let mut store = Store::new(&engine, ());
///
/// // Define a struct type.
/// let struct_ty = StructType::new(
///    store.engine(),
///    [FieldType::new(Mutability::Var, StorageType::I8)],
/// )?;
///
/// // Create an allocator for the struct type.
/// let allocator = StructRefPre::new(&mut store, struct_ty);
///
/// {
///     let mut scope = RootScope::new(&mut store);
///
///     // Allocate a bunch of instances of our struct type using the same
///     // allocator! This is faster than creating a new allocator for each
///     // instance we want to allocate.
///     for i in 0..10 {
///         StructRef::new(&mut scope, &allocator, &[Val::I32(i)])?;
///     }
/// }
/// # Ok(())
/// # }
/// # foo().unwrap();
/// ```
pub struct StructRefPre {
    // The id of the store this allocator was created for; allocation asserts
    // that the same store is used, so a `StructRefPre` cannot be mixed across
    // stores.
    store_id: StoreId,
    // The struct type that every allocation through this allocator produces.
    ty: StructType,
}
68
69impl StructRefPre {
70    /// Create a new `StructRefPre` that is associated with the given store
71    /// and type.
72    pub fn new(mut store: impl AsContextMut, ty: StructType) -> Self {
73        Self::_new(store.as_context_mut().0, ty)
74    }
75
76    pub(crate) fn _new(store: &mut StoreOpaque, ty: StructType) -> Self {
77        store.insert_gc_host_alloc_type(ty.registered_type().clone());
78        let store_id = store.id();
79
80        StructRefPre { store_id, ty }
81    }
82
83    pub(crate) fn layout(&self) -> &GcStructLayout {
84        self.ty
85            .registered_type()
86            .layout()
87            .expect("struct types have a layout")
88            .unwrap_struct()
89    }
90
91    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
92        self.ty.registered_type().index()
93    }
94}
95
/// A reference to a GC-managed `struct` instance.
///
/// WebAssembly `struct`s are static, fixed-length, ordered sequences of
/// fields. Fields are named by index, not by identifier; in this way, they are
/// similar to Rust's tuples. Each field is mutable or constant and stores
/// unpacked [`Val`][crate::Val]s or packed 8-/16-bit integers.
///
/// Like all WebAssembly references, these are opaque and unforgeable to Wasm:
/// they cannot be faked and Wasm cannot, for example, cast the integer
/// `0x12345678` into a reference, pretend it is a valid `structref`, and trick
/// the host into dereferencing it and segfaulting or worse.
///
/// Note that you can also use `Rooted<StructRef>` and
/// `OwnedRooted<StructRef>` as a type parameter with
/// [`Func::typed`][crate::Func::typed]- and
/// [`Func::wrap`][crate::Func::wrap]-style APIs.
///
/// # Example
///
/// ```
/// use wasmtime::*;
///
/// # fn foo() -> Result<()> {
/// let mut config = Config::new();
/// config.wasm_function_references(true);
/// config.wasm_gc(true);
///
/// let engine = Engine::new(&config)?;
/// let mut store = Store::new(&engine, ());
///
/// // Define a struct type.
/// let struct_ty = StructType::new(
///    store.engine(),
///    [FieldType::new(Mutability::Var, StorageType::I8)],
/// )?;
///
/// // Create an allocator for the struct type.
/// let allocator = StructRefPre::new(&mut store, struct_ty);
///
/// {
///     let mut scope = RootScope::new(&mut store);
///
///     // Allocate an instance of the struct type.
///     let my_struct = StructRef::new(&mut scope, &allocator, &[Val::I32(42)])?;
///
///     // That instance's field should have the expected value.
///     let val = my_struct.field(&mut scope, 0)?.unwrap_i32();
///     assert_eq!(val, 42);
///
///     // And we can update the field's value because it is a mutable field.
///     my_struct.set_field(&mut scope, 0, Val::I32(36))?;
///     let new_val = my_struct.field(&mut scope, 0)?.unwrap_i32();
///     assert_eq!(new_val, 36);
/// }
/// # Ok(())
/// # }
/// # foo().unwrap();
/// ```
#[derive(Debug)]
// `repr(transparent)` guarantees `StructRef` has exactly the layout of
// `GcRootIndex`, which `GcRefImpl::transmute_ref` below relies on.
#[repr(transparent)]
pub struct StructRef {
    // Handle into the store's GC root set (see `GcRootIndex`).
    pub(super) inner: GcRootIndex,
}
159
unsafe impl GcRefImpl for StructRef {
    // Reinterpret a borrowed `GcRootIndex` as a borrowed `StructRef` without
    // copying.
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // Safety: `StructRef` is a newtype of a `GcRootIndex`, and is
        // `#[repr(transparent)]`, so the two references have identical
        // layout.
        let me: &Self = unsafe { mem::transmute(index) };

        // Assert we really are just a newtype of a `GcRootIndex`.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}
176
177impl Rooted<StructRef> {
178    /// Upcast this `structref` into an `anyref`.
179    #[inline]
180    pub fn to_anyref(self) -> Rooted<AnyRef> {
181        self.unchecked_cast()
182    }
183
184    /// Upcast this `structref` into an `eqref`.
185    #[inline]
186    pub fn to_eqref(self) -> Rooted<EqRef> {
187        self.unchecked_cast()
188    }
189}
190
191impl OwnedRooted<StructRef> {
192    /// Upcast this `structref` into an `anyref`.
193    #[inline]
194    pub fn to_anyref(self) -> OwnedRooted<AnyRef> {
195        self.unchecked_cast()
196    }
197
198    /// Upcast this `structref` into an `eqref`.
199    #[inline]
200    pub fn to_eqref(self) -> OwnedRooted<EqRef> {
201        self.unchecked_cast()
202    }
203}
204
impl StructRef {
    /// Synchronously allocate a new `struct` and get a reference to it.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new struct, then this method will automatically trigger a synchronous
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `fields` values' types do not match the field types of the
    /// `allocator`'s struct type, an error is returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// If `store` is configured with a
    /// [`ResourceLimiterAsync`](crate::ResourceLimiterAsync) then an error
    /// will be returned because [`StructRef::new_async`] should be used
    /// instead.
    ///
    /// # Panics
    ///
    /// Panics if the allocator, or any of the field values, is not associated
    /// with the given store.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        let (mut limiter, store) = store
            .as_context_mut()
            .0
            .validate_sync_resource_limiter_and_store_opaque()?;
        // With `Asyncness::No` the shared implementation should never
        // actually suspend, so asserting the future is immediately ready is
        // expected to hold here.
        vm::assert_ready(Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            fields,
            Asyncness::No,
        ))
    }

    /// Asynchronously allocate a new `struct` and get a reference to it.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new struct, then this method will automatically trigger a synchronous
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `fields` values' types do not match the field types of the
    /// `allocator`'s struct type, an error is returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if the allocator, or any of the field values, is not associated
    /// with the given store.
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_async(store, limiter.as_mut(), allocator, fields, Asyncness::Yes).await
    }

    // Shared implementation behind both `new` and `new_async`: type-check the
    // field values, then allocate, retrying after a GC when the heap is full.
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &StructRefPre,
        fields: &[Val],
        asyncness: Asyncness,
    ) -> Result<Rooted<StructRef>> {
        Self::type_check_fields(store, allocator, fields)?;
        store
            .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
            .await
    }

    /// Type check the field values before allocating a new struct.
    ///
    /// Checks arity first, then each value's store ownership (panics on
    /// mismatch) and its type against the declared field type (errors on
    /// mismatch).
    fn type_check_fields(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<(), Error> {
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            // `unpack` widens packed i8/i16 storage types to their unpacked
            // value type before comparing against the `Val`.
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    /// Given that the field values have already been type checked, allocate a
    /// new struct.
    ///
    /// Does not attempt GC+retry on OOM, that is the caller's responsibility.
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `StructRefPre` with the wrong store"
        );

        // Allocate the struct and write each field value into the appropriate
        // offset.
        let structref = store
            .require_gc_store_mut()?
            .alloc_uninit_struct(allocator.type_index(), &allocator.layout())
            .context("unrecoverable error when allocating new `structref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // From this point on, if we get any errors, then the struct is not
        // fully initialized, so we need to eagerly deallocate it before the
        // next GC where the collector might try to interpret one of the
        // uninitialized fields as a GC reference.
        let mut store = AutoAssertNoGc::new(store);
        // The immediately-invoked closure lets `?` abort field
        // initialization early while still funneling both the success and
        // error cases through the single `match` below.
        match (|| {
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                structref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, structref.into())),
            Err(e) => {
                // Deallocate the partially-initialized struct so the
                // collector never observes its uninitialized fields.
                store
                    .require_gc_store_mut()?
                    .dealloc_uninit_struct(structref);
                Err(e)
            }
        }
    }

    // Whether this reference belongs to the given store.
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Get this `structref`'s type.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn ty(&self, store: impl AsContext) -> Result<StructType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<StructType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(StructType::from_shared_type_index(store.engine(), index))
    }

    /// Does this `structref` match the given type?
    ///
    /// That is, is this struct's type a subtype of the given type?
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store or if the
    /// type is not associated with the store's engine.
    pub fn matches_ty(&self, store: impl AsContext, ty: &StructType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(self._ty(store)?.matches(ty))
    }

    // Like `_matches_ty` but returns a descriptive error instead of `false`,
    // and reports wrong-store usage as an error rather than panicking.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Get the values of this struct's fields.
    ///
    /// Note that `i8` and `i16` field values are zero-extended into
    /// `Val::I32(_)`s.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        // Holding `AutoAssertNoGc` for the iterator's lifetime ensures no GC
        // can move or reclaim the struct while we are reading its fields.
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));

        // Compute the field count up front so the iterator below can be
        // exact-sized without touching the heap again.
        let index = header.ty().expect("structrefs should have concrete types");
        let ty = StructType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            structref: self,
            store,
            index: 0,
            len,
        });

        // Private iterator type over a struct's field values; owns the
        // `AutoAssertNoGc` guard for as long as iteration continues.
        struct Fields<'a, 'b> {
            structref: &'a StructRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                // `i` is in bounds by construction and the struct remains
                // rooted while we hold the store, so `_field` is not
                // expected to fail here.
                Some(self.structref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    // Borrow this struct's GC header out of the store's GC heap.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.require_gc_store()?.header(gc_ref))
    }

    // View this rooted reference as a raw `VMStructRef`.
    fn structref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMStructRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        // The debug assertion justifies skipping the dynamic kind check in
        // `as_structref_unchecked` below.
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::StructRef));
        Ok(gc_ref.as_structref_unchecked())
    }

    // Look up the GC layout for this struct's concrete type.
    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<Arc<GcStructLayout>> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("struct types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            // A structref's type index can only ever name a struct layout.
            GcLayout::Array(_) => unreachable!(),
        }
    }

    // Get the type of the `field`th field, or a descriptive out-of-bounds
    // error.
    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: struct only has {len} fields")
            }
        }
    }

    /// Get this struct's `index`th field.
    ///
    /// Note that `i8` and `i16` field values are zero-extended into
    /// `Val::I32(_)`s.
    ///
    /// # Errors
    ///
    /// Returns an `Err(_)` if the index is out of bounds or this reference has
    /// been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let structref = self.structref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(structref.read_field(store, &layout, field_ty.element_type(), index))
    }

    /// Set this struct's `index`th field.
    ///
    /// # Errors
    ///
    /// Returns an error in the following scenarios:
    ///
    /// * When given a value of the wrong type, such as trying to set an `f32`
    ///   field to an `i64` value.
    ///
    /// * When the field is not mutable.
    ///
    /// * When this struct does not have an `index`th field, i.e. `index` is out
    ///   of bounds.
    ///
    /// * When `value` is a GC reference that has since been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn set_field(&self, mut store: impl AsContextMut, index: usize, value: Val) -> Result<()> {
        self._set_field(store.as_context_mut().0, index, value)
    }

    pub(crate) fn _set_field(
        &self,
        store: &mut StoreOpaque,
        index: usize,
        value: Val,
    ) -> Result<()> {
        assert!(self.comes_from_same_store(store));
        let mut store = AutoAssertNoGc::new(store);

        // Validate mutability and value type before touching the heap.
        let field_ty = self.field_ty(&store, index)?;
        ensure!(
            field_ty.mutability().is_var(),
            "cannot set field {index}: field is not mutable"
        );

        value
            .ensure_matches_ty(&store, &field_ty.element_type().unpack())
            .with_context(|| format!("cannot set field {index}: type mismatch"))?;

        let layout = self.layout(&store)?;
        let structref = self.structref(&store)?.unchecked_copy();

        structref.write_field(&mut store, &layout, field_ty.element_type(), index, value)
    }

    // Read this struct's concrete shared type index out of its GC header.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));
        Ok(header.ty().expect("structrefs should have concrete types"))
    }

    /// Create a new `Rooted<StructRef>` from the given GC reference.
    ///
    /// `gc_ref` should point to a valid `structref` and should belong to the
    /// store's GC heap. Failure to uphold these invariants is memory safe but
    /// will lead to general incorrectness such as panics or wrong results.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(gc_ref.is_structref(&*store.unwrap_gc_store().gc_heap));
        Rooted::new(store, gc_ref)
    }
}
638
unsafe impl WasmTy for Rooted<StructRef> {
    #[inline]
    fn valtype() -> ValType {
        // A `Rooted<StructRef>` is always present, so it maps to the
        // non-nullable `(ref struct)` type.
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Supertypes of every struct type: always fine.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // Concrete struct types require a subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // All remaining heap types are disjoint from struct types, so a
            // structref can never match them. Listed exhaustively (no `_`) so
            // that adding a heap type forces revisiting this check.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        // Structrefs travel through the `anyref` slot of `ValRaw` at the
        // Wasm boundary.
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
690
691unsafe impl WasmTy for Option<Rooted<StructRef>> {
692    #[inline]
693    fn valtype() -> ValType {
694        ValType::STRUCTREF
695    }
696
697    #[inline]
698    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
699        self.map_or(true, |x| x.comes_from_same_store(store))
700    }
701
702    #[inline]
703    fn dynamic_concrete_type_check(
704        &self,
705        store: &StoreOpaque,
706        nullable: bool,
707        ty: &HeapType,
708    ) -> Result<()> {
709        match self {
710            Some(s) => Rooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty),
711            None => {
712                ensure!(
713                    nullable,
714                    "expected a non-null reference, but found a null reference"
715                );
716                Ok(())
717            }
718        }
719    }
720
721    #[inline]
722    fn is_vmgcref_and_points_to_object(&self) -> bool {
723        self.is_some()
724    }
725
726    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
727        <Rooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
728    }
729
730    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
731        <Rooted<StructRef>>::wasm_ty_option_load(
732            store,
733            ptr.get_anyref(),
734            StructRef::from_cloned_gc_ref,
735        )
736    }
737}
738
unsafe impl WasmTy for OwnedRooted<StructRef> {
    #[inline]
    fn valtype() -> ValType {
        // An `OwnedRooted<StructRef>` is always present, so it maps to the
        // non-nullable `(ref struct)` type.
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Supertypes of every struct type: always fine.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // Concrete struct types require a subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // All remaining heap types are disjoint from struct types, so a
            // structref can never match them. Listed exhaustively (no `_`) so
            // that adding a heap type forces revisiting this check.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        // Structrefs travel through the `anyref` slot of `ValRaw` at the
        // Wasm boundary.
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
790
791unsafe impl WasmTy for Option<OwnedRooted<StructRef>> {
792    #[inline]
793    fn valtype() -> ValType {
794        ValType::STRUCTREF
795    }
796
797    #[inline]
798    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
799        self.as_ref()
800            .map_or(true, |x| x.comes_from_same_store(store))
801    }
802
803    #[inline]
804    fn dynamic_concrete_type_check(
805        &self,
806        store: &StoreOpaque,
807        nullable: bool,
808        ty: &HeapType,
809    ) -> Result<()> {
810        match self {
811            Some(s) => {
812                OwnedRooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty)
813            }
814            None => {
815                ensure!(
816                    nullable,
817                    "expected a non-null reference, but found a null reference"
818                );
819                Ok(())
820            }
821        }
822    }
823
824    #[inline]
825    fn is_vmgcref_and_points_to_object(&self) -> bool {
826        self.is_some()
827    }
828
829    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
830        <OwnedRooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
831    }
832
833    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
834        <OwnedRooted<StructRef>>::wasm_ty_option_load(
835            store,
836            ptr.get_anyref(),
837            StructRef::from_cloned_gc_ref,
838        )
839    }
840}