wasmtime/runtime/gc/enabled/
structref.rs

1//! Working with GC `struct` objects.
2
3use crate::runtime::vm::VMGcRef;
4use crate::store::StoreId;
5use crate::vm::{VMGcHeader, VMStructRef};
6use crate::{
7    prelude::*,
8    store::{AutoAssertNoGc, StoreContextMut, StoreOpaque},
9    AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
10    ManuallyRooted, RefType, Rooted, StructType, Val, ValRaw, ValType, WasmTy,
11};
12use crate::{AnyRef, FieldType};
13use core::mem::{self, MaybeUninit};
14use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
15
16/// An allocator for a particular Wasm GC struct type.
17///
18/// Every `StructRefPre` is associated with a particular
19/// [`Store`][crate::Store] and a particular [StructType][crate::StructType].
20///
21/// Reusing an allocator across many allocations amortizes some per-type runtime
22/// overheads inside Wasmtime. A `StructRefPre` is to `StructRef`s as an
23/// `InstancePre` is to `Instance`s.
24///
25/// # Example
26///
27/// ```
28/// use wasmtime::*;
29///
30/// # fn foo() -> Result<()> {
31/// let mut config = Config::new();
32/// config.wasm_function_references(true);
33/// config.wasm_gc(true);
34///
35/// let engine = Engine::new(&config)?;
36/// let mut store = Store::new(&engine, ());
37///
38/// // Define a struct type.
39/// let struct_ty = StructType::new(
40///    store.engine(),
41///    [FieldType::new(Mutability::Var, StorageType::I8)],
42/// )?;
43///
44/// // Create an allocator for the struct type.
45/// let allocator = StructRefPre::new(&mut store, struct_ty);
46///
47/// {
48///     let mut scope = RootScope::new(&mut store);
49///
50///     // Allocate a bunch of instances of our struct type using the same
51///     // allocator! This is faster than creating a new allocator for each
52///     // instance we want to allocate.
53///     for i in 0..10 {
54///         StructRef::new(&mut scope, &allocator, &[Val::I32(i)])?;
55///     }
56/// }
57/// # Ok(())
58/// # }
59/// # foo().unwrap();
60/// ```
pub struct StructRefPre {
    /// The ID of the store this allocator was created within. Allocation
    /// asserts that the same store is used (see `StructRef::new_unchecked`).
    store_id: StoreId,
    /// The struct type that this allocator allocates instances of.
    ty: StructType,
}
65
66impl StructRefPre {
67    /// Create a new `StructRefPre` that is associated with the given store
68    /// and type.
69    pub fn new(mut store: impl AsContextMut, ty: StructType) -> Self {
70        Self::_new(store.as_context_mut().0, ty)
71    }
72
73    pub(crate) fn _new(store: &mut StoreOpaque, ty: StructType) -> Self {
74        store.insert_gc_host_alloc_type(ty.registered_type().clone());
75        let store_id = store.id();
76
77        StructRefPre { store_id, ty }
78    }
79
80    pub(crate) fn layout(&self) -> &GcStructLayout {
81        self.ty
82            .registered_type()
83            .layout()
84            .expect("struct types have a layout")
85            .unwrap_struct()
86    }
87
88    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
89        self.ty.registered_type().index()
90    }
91}
92
93/// A reference to a GC-managed `struct` instance.
94///
95/// WebAssembly `struct`s are static, fixed-length, ordered sequences of
96/// fields. Fields are named by index, not by identifier; in this way, they are
97/// similar to Rust's tuples. Each field is mutable or constant and stores
98/// unpacked [`Val`][crate::Val]s or packed 8-/16-bit integers.
99///
100/// Like all WebAssembly references, these are opaque and unforgeable to Wasm:
101/// they cannot be faked and Wasm cannot, for example, cast the integer
102/// `0x12345678` into a reference, pretend it is a valid `structref`, and trick
103/// the host into dereferencing it and segfaulting or worse.
104///
105/// Note that you can also use `Rooted<StructRef>` and
106/// `ManuallyRooted<StructRef>` as a type parameter with
107/// [`Func::typed`][crate::Func::typed]- and
108/// [`Func::wrap`][crate::Func::wrap]-style APIs.
109///
110/// # Example
111///
112/// ```
113/// use wasmtime::*;
114///
115/// # fn foo() -> Result<()> {
116/// let mut config = Config::new();
117/// config.wasm_function_references(true);
118/// config.wasm_gc(true);
119///
120/// let engine = Engine::new(&config)?;
121/// let mut store = Store::new(&engine, ());
122///
123/// // Define a struct type.
124/// let struct_ty = StructType::new(
125///    store.engine(),
126///    [FieldType::new(Mutability::Var, StorageType::I8)],
127/// )?;
128///
129/// // Create an allocator for the struct type.
130/// let allocator = StructRefPre::new(&mut store, struct_ty);
131///
132/// {
133///     let mut scope = RootScope::new(&mut store);
134///
135///     // Allocate an instance of the struct type.
136///     let my_struct = StructRef::new(&mut scope, &allocator, &[Val::I32(42)])?;
137///
138///     // That instance's field should have the expected value.
139///     let val = my_struct.field(&mut scope, 0)?.unwrap_i32();
140///     assert_eq!(val, 42);
141///
142///     // And we can update the field's value because it is a mutable field.
143///     my_struct.set_field(&mut scope, 0, Val::I32(36))?;
144///     let new_val = my_struct.field(&mut scope, 0)?.unwrap_i32();
145///     assert_eq!(new_val, 36);
146/// }
147/// # Ok(())
148/// # }
149/// # foo().unwrap();
150/// ```
#[derive(Debug)]
// `repr(transparent)` over a single `GcRootIndex` field: the
// `GcRefImpl::transmute_ref` implementation below relies on this layout to
// soundly reinterpret `&GcRootIndex` as `&StructRef`.
#[repr(transparent)]
pub struct StructRef {
    /// The index of this reference's root in the store's GC root set.
    pub(super) inner: GcRootIndex,
}
156
unsafe impl GcRefImpl for StructRef {
    #[allow(private_interfaces)]
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // Safety: `StructRef` is a `#[repr(transparent)]` newtype of a
        // `GcRootIndex`, so the two references have identical layout.
        let me: &Self = unsafe { mem::transmute(index) };

        // Assert we really are just a newtype of a `GcRootIndex`.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}
174
impl Rooted<StructRef> {
    /// Upcast this `structref` into an `anyref`.
    ///
    /// This is infallible: every `structref` is also an `anyref` in the Wasm
    /// GC type hierarchy.
    #[inline]
    pub fn to_anyref(self) -> Rooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcast this `structref` into an `eqref`.
    ///
    /// This is infallible: every `structref` is also an `eqref` in the Wasm
    /// GC type hierarchy.
    #[inline]
    pub fn to_eqref(self) -> Rooted<EqRef> {
        self.unchecked_cast()
    }
}
188
impl ManuallyRooted<StructRef> {
    /// Upcast this `structref` into an `anyref`.
    ///
    /// This is infallible: every `structref` is also an `anyref` in the Wasm
    /// GC type hierarchy.
    #[inline]
    pub fn to_anyref(self) -> ManuallyRooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcast this `structref` into an `eqref`.
    ///
    /// This is infallible: every `structref` is also an `eqref` in the Wasm
    /// GC type hierarchy.
    #[inline]
    pub fn to_eqref(self) -> ManuallyRooted<EqRef> {
        self.unchecked_cast()
    }
}
202
203impl StructRef {
    /// Synchronously allocate a new `struct` and get a reference to it.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new struct, then this method will automatically trigger a synchronous
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `fields` values' types do not match the field types of the
    /// `allocator`'s struct type, an error is returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if your engine is configured for async; use
    /// [`StructRef::new_async`][crate::StructRef::new_async] to perform
    /// asynchronous allocation instead.
    ///
    /// Panics if the allocator, or any of the field values, is not associated
    /// with the given store.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::_new(store.as_context_mut().0, allocator, fields)
    }
237
    /// Implementation of [`StructRef::new`] over a raw `StoreOpaque`.
    ///
    /// # Panics
    ///
    /// Panics if `store` is configured for async support.
    pub(crate) fn _new(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert!(
            !store.async_support(),
            "use `StructRef::new_async` with asynchronous stores"
        );
        Self::type_check_fields(store, allocator, fields)?;
        // On GC-heap OOM, `retry_after_gc` triggers a collection and
        // re-attempts the allocation; `new_unchecked` itself never retries.
        store.retry_after_gc((), |store, ()| {
            Self::new_unchecked(store, allocator, fields)
        })
    }
252
    /// Asynchronously allocate a new `struct` and get a reference to it.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new struct, then this method will automatically trigger an asynchronous
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `fields` values' types do not match the field types of the
    /// `allocator`'s struct type, an error is returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if your engine is not configured for async; use
    /// [`StructRef::new`][crate::StructRef::new] to perform synchronous
    /// allocation instead.
    ///
    /// Panics if the allocator, or any of the field values, is not associated
    /// with the given store.
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::_new_async(store.as_context_mut().0, allocator, fields).await
    }
287
    /// Implementation of [`StructRef::new_async`] over a raw `StoreOpaque`.
    ///
    /// # Panics
    ///
    /// Panics if `store` is not configured for async support.
    #[cfg(feature = "async")]
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert!(
            store.async_support(),
            "use `StructRef::new` with synchronous stores"
        );
        Self::type_check_fields(store, allocator, fields)?;
        // On GC-heap OOM, `retry_after_gc_async` triggers a collection and
        // re-attempts the allocation; `new_unchecked` itself never retries.
        store
            .retry_after_gc_async((), |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
            .await
    }
305
    /// Like `Self::new` but callers must ensure that if the store is
    /// configured for async, this is only ever called on a fiber stack.
    pub(crate) unsafe fn new_maybe_async(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::type_check_fields(store, allocator, fields)?;
        // SAFETY: the caller promises that, for async stores, we are already
        // running on a fiber stack, which is the contract
        // `retry_after_gc_maybe_async` requires.
        unsafe {
            store.retry_after_gc_maybe_async((), |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
        }
    }
320
321    /// Type check the field values before allocating a new struct.
322    fn type_check_fields(
323        store: &mut StoreOpaque,
324        allocator: &StructRefPre,
325        fields: &[Val],
326    ) -> Result<(), Error> {
327        let expected_len = allocator.ty.fields().len();
328        let actual_len = fields.len();
329        ensure!(
330            actual_len == expected_len,
331            "expected {expected_len} fields, got {actual_len}"
332        );
333        for (ty, val) in allocator.ty.fields().zip(fields) {
334            assert!(
335                val.comes_from_same_store(store),
336                "field value comes from the wrong store",
337            );
338            let ty = ty.element_type().unpack();
339            val.ensure_matches_ty(store, ty)
340                .context("field type mismatch")?;
341        }
342        Ok(())
343    }
344
345    /// Given that the field values have already been type checked, allocate a
346    /// new struct.
347    ///
348    /// Does not attempt GC+retry on OOM, that is the caller's responsibility.
349    fn new_unchecked(
350        store: &mut StoreOpaque,
351        allocator: &StructRefPre,
352        fields: &[Val],
353    ) -> Result<Rooted<StructRef>> {
354        assert_eq!(
355            store.id(),
356            allocator.store_id,
357            "attempted to use a `StructRefPre` with the wrong store"
358        );
359
360        // Allocate the struct and write each field value into the appropriate
361        // offset.
362        let structref = store
363            .gc_store_mut()?
364            .alloc_uninit_struct(allocator.type_index(), &allocator.layout())
365            .context("unrecoverable error when allocating new `structref`")?
366            .map_err(|n| GcHeapOutOfMemory::new((), n))?;
367
368        // From this point on, if we get any errors, then the struct is not
369        // fully initialized, so we need to eagerly deallocate it before the
370        // next GC where the collector might try to interpret one of the
371        // uninitialized fields as a GC reference.
372        let mut store = AutoAssertNoGc::new(store);
373        match (|| {
374            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
375                structref.initialize_field(
376                    &mut store,
377                    allocator.layout(),
378                    ty.element_type(),
379                    index,
380                    *val,
381                )?;
382            }
383            Ok(())
384        })() {
385            Ok(()) => Ok(Rooted::new(&mut store, structref.into())),
386            Err(e) => {
387                store.gc_store_mut()?.dealloc_uninit_struct(structref);
388                Err(e)
389            }
390        }
391    }
392
    /// Does this reference belong to the given store?
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Get this `structref`'s type.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn ty(&self, store: impl AsContext) -> Result<StructType> {
        self._ty(store.as_context().0)
    }

    /// Implementation of [`StructRef::ty`] over a raw `StoreOpaque`.
    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<StructType> {
        assert!(self.comes_from_same_store(store));
        // Read the concrete type index out of the object's GC header and
        // convert it into a `StructType`.
        let index = self.type_index(store)?;
        Ok(StructType::from_shared_type_index(store.engine(), index))
    }
416
417    /// Does this `structref` match the given type?
418    ///
419    /// That is, is this struct's type a subtype of the given type?
420    ///
421    /// # Errors
422    ///
423    /// Return an error if this reference has been unrooted.
424    ///
425    /// # Panics
426    ///
427    /// Panics if this reference is associated with a different store or if the
428    /// type is not associated with the store's engine.
429    pub fn matches_ty(&self, store: impl AsContext, ty: &StructType) -> Result<bool> {
430        self._matches_ty(store.as_context().0, ty)
431    }
432
433    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<bool> {
434        assert!(self.comes_from_same_store(store));
435        Ok(self._ty(store)?.matches(ty))
436    }
437
438    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<()> {
439        if !self.comes_from_same_store(store) {
440            bail!("function used with wrong store");
441        }
442        if self._matches_ty(store, ty)? {
443            Ok(())
444        } else {
445            let actual_ty = self._ty(store)?;
446            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
447        }
448    }
449
    /// Get the values of this struct's fields.
    ///
    /// Note that `i8` and `i16` field values are zero-extended into
    /// `Val::I32(_)`s.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn fields<'a, T: 'a>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    /// Implementation of [`StructRef::fields`] over a raw `StoreOpaque`.
    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        // The returned iterator owns this `AutoAssertNoGc`, so no GC may occur
        // for as long as the iterator is alive.
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));

        // Determine the field count up front from the struct's concrete type;
        // the iterator below then reads one field per `next` call.
        let index = header.ty().expect("structrefs should have concrete types");
        let ty = StructType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            structref: self,
            store,
            index: 0,
            len,
        });

        // A lazy iterator over the struct's fields, in index order.
        struct Fields<'a, 'b> {
            structref: &'a StructRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                // This `unwrap` is expected to never fail: `i` is in bounds,
                // and the reference was successfully resolved via
                // `try_gc_ref` above while we have held the store (and its
                // `AutoAssertNoGc`) the whole time.
                Some(self.structref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }
526
    /// Get a shared borrow of this struct's GC header.
    ///
    /// Returns an error if this reference has been unrooted.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.gc_store()?.header(gc_ref))
    }

    /// Get a typed view of this reference's underlying `VMGcRef` as a raw
    /// `VMStructRef`.
    ///
    /// Returns an error if this reference has been unrooted.
    fn structref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMStructRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        // In debug builds, double-check the header kind before the unchecked
        // downcast below.
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::StructRef));
        Ok(gc_ref.as_structref_unchecked())
    }
539
540    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcStructLayout> {
541        assert!(self.comes_from_same_store(&store));
542        let type_index = self.type_index(store)?;
543        let layout = store
544            .engine()
545            .signatures()
546            .layout(type_index)
547            .expect("struct types should have GC layouts");
548        match layout {
549            GcLayout::Struct(s) => Ok(s),
550            GcLayout::Array(_) => unreachable!(),
551        }
552    }
553
554    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
555        let ty = self._ty(store)?;
556        match ty.field(field) {
557            Some(f) => Ok(f),
558            None => {
559                let len = ty.fields().len();
560                bail!("cannot access field {field}: struct only has {len} fields")
561            }
562        }
563    }
564
565    /// Get this struct's `index`th field.
566    ///
567    /// Note that `i8` and `i16` field values are zero-extended into
568    /// `Val::I32(_)`s.
569    ///
570    /// # Errors
571    ///
572    /// Returns an `Err(_)` if the index is out of bounds or this reference has
573    /// been unrooted.
574    ///
575    /// # Panics
576    ///
577    /// Panics if this reference is associated with a different store.
578    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
579        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
580        self._field(&mut store, index)
581    }
582
583    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
584        assert!(self.comes_from_same_store(store));
585        let structref = self.structref(store)?.unchecked_copy();
586        let field_ty = self.field_ty(store, index)?;
587        let layout = self.layout(store)?;
588        Ok(structref.read_field(store, &layout, field_ty.element_type(), index))
589    }
590
    /// Set this struct's `index`th field.
    ///
    /// # Errors
    ///
    /// Returns an error in the following scenarios:
    ///
    /// * When given a value of the wrong type, such as trying to set an `f32`
    ///   field to an `i64` value.
    ///
    /// * When the field is not mutable.
    ///
    /// * When this struct does not have an `index`th field, i.e. `index` is out
    ///   of bounds.
    ///
    /// * When `value` is a GC reference that has since been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn set_field(&self, mut store: impl AsContextMut, index: usize, value: Val) -> Result<()> {
        self._set_field(store.as_context_mut().0, index, value)
    }

    /// Implementation of [`StructRef::set_field`] over a raw `StoreOpaque`.
    pub(crate) fn _set_field(
        &self,
        store: &mut StoreOpaque,
        index: usize,
        value: Val,
    ) -> Result<()> {
        assert!(self.comes_from_same_store(store));
        // `AutoAssertNoGc` asserts that no GC occurs between resolving the
        // raw struct reference below and writing the field into it.
        let mut store = AutoAssertNoGc::new(store);

        // Perform all mutability and type checks *before* writing anything,
        // so a failed `set_field` leaves the struct unmodified.
        let field_ty = self.field_ty(&store, index)?;
        ensure!(
            field_ty.mutability().is_var(),
            "cannot set field {index}: field is not mutable"
        );

        value
            .ensure_matches_ty(&store, &field_ty.element_type().unpack())
            .with_context(|| format!("cannot set field {index}: type mismatch"))?;

        let layout = self.layout(&store)?;
        let structref = self.structref(&store)?.unchecked_copy();

        structref.write_field(&mut store, &layout, field_ty.element_type(), index, value)
    }
638
    /// Get the engine-level shared type index of this struct's concrete type,
    /// as recorded in its GC header.
    ///
    /// Returns an error if this reference has been unrooted.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));
        Ok(header.ty().expect("structrefs should have concrete types"))
    }
645
    /// Create a new `Rooted<StructRef>` from the given GC reference.
    ///
    /// `gc_ref` should point to a valid `structref` and should belong to the
    /// store's GC heap. Failure to uphold these invariants is memory safe but
    /// will lead to general incorrectness such as panics or wrong results.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        // In debug builds, verify that the reference really is a `structref`
        // in this store's heap before rooting it.
        debug_assert!(gc_ref.is_structref(&*store.unwrap_gc_store().gc_heap));
        Rooted::new(store, gc_ref)
    }
658}
659
unsafe impl WasmTy for Rooted<StructRef> {
    #[inline]
    fn valtype() -> ValType {
        // A non-null `(ref struct)`.
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Every `structref` is a subtype of these heap types, so no
            // further checking is required.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // A concrete struct type requires a dynamic subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // No `structref` can match any of these heap types.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        // `structref`s are passed to and from Wasm in the `anyref` raw
        // representation.
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
705
706unsafe impl WasmTy for Option<Rooted<StructRef>> {
707    #[inline]
708    fn valtype() -> ValType {
709        ValType::STRUCTREF
710    }
711
712    #[inline]
713    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
714        self.map_or(true, |x| x.comes_from_same_store(store))
715    }
716
717    #[inline]
718    fn dynamic_concrete_type_check(
719        &self,
720        store: &StoreOpaque,
721        nullable: bool,
722        ty: &HeapType,
723    ) -> Result<()> {
724        match self {
725            Some(s) => Rooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty),
726            None => {
727                ensure!(
728                    nullable,
729                    "expected a non-null reference, but found a null reference"
730                );
731                Ok(())
732            }
733        }
734    }
735
736    #[inline]
737    fn is_vmgcref_and_points_to_object(&self) -> bool {
738        self.is_some()
739    }
740
741    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
742        <Rooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
743    }
744
745    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
746        <Rooted<StructRef>>::wasm_ty_option_load(
747            store,
748            ptr.get_anyref(),
749            StructRef::from_cloned_gc_ref,
750        )
751    }
752}
753
unsafe impl WasmTy for ManuallyRooted<StructRef> {
    #[inline]
    fn valtype() -> ValType {
        // A non-null `(ref struct)`.
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Every `structref` is a subtype of these heap types, so no
            // further checking is required.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // A concrete struct type requires a dynamic subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // No `structref` can match any of these heap types.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        // `structref`s are passed to and from Wasm in the `anyref` raw
        // representation.
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
799
800unsafe impl WasmTy for Option<ManuallyRooted<StructRef>> {
801    #[inline]
802    fn valtype() -> ValType {
803        ValType::STRUCTREF
804    }
805
806    #[inline]
807    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
808        self.as_ref()
809            .map_or(true, |x| x.comes_from_same_store(store))
810    }
811
812    #[inline]
813    fn dynamic_concrete_type_check(
814        &self,
815        store: &StoreOpaque,
816        nullable: bool,
817        ty: &HeapType,
818    ) -> Result<()> {
819        match self {
820            Some(s) => {
821                ManuallyRooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty)
822            }
823            None => {
824                ensure!(
825                    nullable,
826                    "expected a non-null reference, but found a null reference"
827                );
828                Ok(())
829            }
830        }
831    }
832
833    #[inline]
834    fn is_vmgcref_and_points_to_object(&self) -> bool {
835        self.is_some()
836    }
837
838    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
839        <ManuallyRooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
840    }
841
842    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
843        <ManuallyRooted<StructRef>>::wasm_ty_option_load(
844            store,
845            ptr.get_anyref(),
846            StructRef::from_cloned_gc_ref,
847        )
848    }
849}