// wasmtime/runtime/gc/enabled/exnref.rs

1//! Implementation of `exnref` in Wasmtime.
2
3use crate::runtime::vm::{VMGcRef, VMStore};
4use crate::store::{StoreId, StoreResourceLimiter};
5use crate::vm::{self, VMExnRef, VMGcHeader};
6use crate::{
7    AsContext, AsContextMut, GcRefImpl, GcRootIndex, HeapType, OwnedRooted, RefType, Result,
8    Rooted, Val, ValRaw, ValType, WasmTy,
9    store::{AutoAssertNoGc, StoreOpaque},
10};
11use crate::{ExnType, FieldType, GcHeapOutOfMemory, StoreContextMut, Tag, prelude::*};
12use core::mem;
13use core::mem::MaybeUninit;
14use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
15
16/// An allocator for a particular Wasm GC exception type.
17///
18/// Every `ExnRefPre` is associated with a particular
19/// [`Store`][crate::Store] and a particular
20/// [ExnType][crate::ExnType].
21///
22/// Reusing an allocator across many allocations amortizes some
23/// per-type runtime overheads inside Wasmtime. An `ExnRefPre` is to
24/// `ExnRef`s as an `InstancePre` is to `Instance`s.
25///
26/// # Example
27///
28/// ```
29/// use wasmtime::*;
30///
31/// # fn foo() -> Result<()> {
32/// let mut config = Config::new();
33/// config.wasm_function_references(true);
34/// config.wasm_gc(true);
35///
36/// let engine = Engine::new(&config)?;
37/// let mut store = Store::new(&engine, ());
38///
39/// // Define a exn type.
40/// let exn_ty = ExnType::new(
41///    store.engine(),
42///    [ValType::I32],
43/// )?;
44///
45/// // Create an allocator for the exn type.
46/// let allocator = ExnRefPre::new(&mut store, exn_ty.clone());
47///
48/// // Create a tag instance to associate with our exception objects.
49/// let tag = Tag::new(&mut store, &exn_ty.tag_type()).unwrap();
50///
51/// {
52///     let mut scope = RootScope::new(&mut store);
53///
54///     // Allocate a bunch of instances of our exception type using the same
55///     // allocator! This is faster than creating a new allocator for each
56///     // instance we want to allocate.
57///     for i in 0..10 {
58///         ExnRef::new(&mut scope, &allocator, &tag, &[Val::I32(i)])?;
59///     }
60/// }
61/// # Ok(())
62/// # }
63/// # foo().unwrap();
64/// ```
pub struct ExnRefPre {
    // The store this allocator is tied to; checked against the store passed
    // to `ExnRef::new` so a pre-allocator can't be used cross-store.
    store_id: StoreId,
    // The exception type (and, transitively, its registered GC layout and
    // tag signature) that this allocator creates instances of.
    ty: ExnType,
}
69
70impl ExnRefPre {
71    /// Create a new `ExnRefPre` that is associated with the given store
72    /// and type.
73    pub fn new(mut store: impl AsContextMut, ty: ExnType) -> Self {
74        Self::_new(store.as_context_mut().0, ty)
75    }
76
77    pub(crate) fn _new(store: &mut StoreOpaque, ty: ExnType) -> Self {
78        store.insert_gc_host_alloc_type(ty.registered_type().clone());
79        let store_id = store.id();
80
81        ExnRefPre { store_id, ty }
82    }
83
84    pub(crate) fn layout(&self) -> &GcStructLayout {
85        self.ty
86            .registered_type()
87            .layout()
88            .expect("exn types have a layout")
89            .unwrap_struct()
90    }
91
92    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
93        self.ty.registered_type().index()
94    }
95}
96
97/// An `exnref` GC reference.
98///
99/// The `ExnRef` type represents WebAssembly `exnref` values. These
100/// are references to exception objects created either by catching a
101/// thrown exception in WebAssembly with a `catch_ref` clause of a
102/// `try_table`, or by allocating via the host API.
103///
104/// Note that you can also use `Rooted<ExnRef>` and `OwnedRooted<ExnRef>` as
105/// a type parameter with [`Func::typed`][crate::Func::typed]- and
106/// [`Func::wrap`][crate::Func::wrap]-style APIs.
#[derive(Debug)]
// `repr(transparent)` over `GcRootIndex` is relied upon by
// `GcRefImpl::transmute_ref` below, which transmutes `&GcRootIndex` into
// `&ExnRef`.
#[repr(transparent)]
pub struct ExnRef {
    // Index of this reference's root in the store's GC root set.
    pub(super) inner: GcRootIndex,
}
112
unsafe impl GcRefImpl for ExnRef {
    /// Reinterpret a `&GcRootIndex` as a `&ExnRef`.
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // Safety: `ExnRef` is a newtype of a `GcRootIndex` (see the
        // `#[repr(transparent)]` on the struct definition), so the two
        // references have identical layout.
        let me: &Self = unsafe { mem::transmute(index) };

        // Assert we really are just a newtype of a `GcRootIndex`.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}
129
impl ExnRef {
    /// Creates a new strongly-owned [`ExnRef`] from the raw value provided.
    ///
    /// This is intended to be used in conjunction with [`Func::new_unchecked`],
    /// [`Func::call_unchecked`], and [`ValRaw`] with its `anyref` field.
    ///
    /// This function assumes that `raw` is an `exnref` value which is currently
    /// rooted within the [`Store`].
    ///
    /// # Correctness
    ///
    /// This function is tricky to get right because `raw` not only must be a
    /// valid `exnref` value produced prior by [`ExnRef::to_raw`] but it must
    /// also be correctly rooted within the store. When arguments are provided
    /// to a callback with [`Func::new_unchecked`], for example, or returned via
    /// [`Func::call_unchecked`], if a GC is performed within the store then
    /// floating `exnref` values are not rooted and will be GC'd, meaning that
    /// this function will no longer be correct to call with the values cleaned
    /// up. This function must be invoked *before* possible GC operations can
    /// happen (such as calling Wasm).
    ///
    /// When in doubt try to not use this. Instead use the Rust APIs of
    /// [`TypedFunc`] and friends. Note though that this function is not
    /// `unsafe` as any value can be passed in. Incorrect values can result in
    /// runtime panics, however, so care must still be taken with this method.
    ///
    /// [`Func::call_unchecked`]: crate::Func::call_unchecked
    /// [`Func::new_unchecked`]: crate::Func::new_unchecked
    /// [`Store`]: crate::Store
    /// [`TypedFunc`]: crate::TypedFunc
    /// [`ValRaw`]: crate::ValRaw
    pub fn from_raw(mut store: impl AsContextMut, raw: u32) -> Option<Rooted<Self>> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        Self::_from_raw(&mut store, raw)
    }

    // (Not actually memory unsafe since we have indexed GC heaps.)
    pub(crate) fn _from_raw(store: &mut AutoAssertNoGc, raw: u32) -> Option<Rooted<Self>> {
        // A raw value of zero is the null reference; return `None` for it.
        let gc_ref = VMGcRef::from_raw_u32(raw)?;
        // Clone the reference (bumping whatever bookkeeping the GC store
        // requires) before rooting it.
        let gc_ref = store.clone_gc_ref(&gc_ref);
        Some(Self::from_cloned_gc_ref(store, gc_ref))
    }

    /// Synchronously allocate a new exception object and get a
    /// reference to it.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for
    /// allocating this new exception object, then this method will
    /// automatically trigger a synchronous collection in an attempt
    /// to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `fields` values' types do not match the field
    /// types of the `allocator`'s exception type, an error is
    /// returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if your engine is configured for async; use
    /// [`ExnRef::new_async`][crate::ExnRef::new_async] to perform
    /// asynchronous allocation instead.
    ///
    /// Panics if the allocator, or any of the field values, is not associated
    /// with the given store.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        assert!(!store.async_support());
        // With async support disabled, the shared async implementation never
        // actually suspends, so we can drive it to completion immediately.
        vm::assert_ready(Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            tag,
            fields,
        ))
    }

    /// Asynchronously allocate a new exception object and get a
    /// reference to it.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new exn, then this method will automatically trigger a
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `fields` values' types do not match the field
    /// types of the `allocator`'s exception type, an error is
    /// returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if your engine is not configured for async; use
    /// [`ExnRef::new`][crate::ExnRef::new] to perform synchronous
    /// allocation instead.
    ///
    /// Panics if the allocator, or any of the field values, is not associated
    /// with the given store.
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_async(store, limiter.as_mut(), allocator, tag, fields).await
    }

    /// Shared implementation of `new` and `new_async`: type check, then
    /// allocate, retrying once after a GC on heap exhaustion.
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        Self::type_check_tag_and_fields(store, allocator, tag, fields)?;
        store
            .retry_after_gc_async(limiter, (), |store, ()| {
                Self::new_unchecked(store, allocator, tag, fields)
            })
            .await
    }

    /// Type check the tag instance and field values before allocating
    /// a new exception object.
    fn type_check_tag_and_fields(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<(), Error> {
        assert!(
            tag.comes_from_same_store(store),
            "tag comes from the wrong store"
        );
        // The tag's signature must be exactly the signature the allocator's
        // exception type was declared with.
        ensure!(
            tag.wasmtime_ty(store).signature.unwrap_engine_type_index()
                == allocator.ty.tag_type().ty().type_index(),
            "incorrect signature for tag when creating exception object"
        );
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        // Check each provided value against the corresponding declared field
        // type (unpacked, since i8/i16 fields are passed as i32 `Val`s).
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    /// Given that the field values have already been type checked, allocate a
    /// new exn.
    ///
    /// Does not attempt GC+retry on OOM, that is the caller's responsibility.
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `ExnRefPre` with the wrong store"
        );

        // Allocate the exn and write each field value into the appropriate
        // offset.
        let exnref = store
            .require_gc_store_mut()?
            .alloc_uninit_exn(allocator.type_index(), &allocator.layout())
            .context("unrecoverable error when allocating new `exnref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // From this point on, if we get any errors, then the exn is not
        // fully initialized, so we need to eagerly deallocate it before the
        // next GC where the collector might try to interpret one of the
        // uninitialized fields as a GC reference.
        let mut store = AutoAssertNoGc::new(store);
        match (|| {
            // Initialize the tag first, then each field in declaration order.
            let (instance, index) = tag.to_raw_indices();
            exnref.initialize_tag(&mut store, instance, index)?;
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                exnref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            // Fully initialized: root it and hand it out.
            Ok(()) => Ok(Rooted::new(&mut store, exnref.into())),
            // Partially initialized: deallocate eagerly (see above).
            Err(e) => {
                store.require_gc_store_mut()?.dealloc_uninit_exn(exnref);
                Err(e)
            }
        }
    }

    /// Read this exception object's concrete type index out of its GC header.
    ///
    /// Returns an error if this reference has been unrooted.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ExnRef));
        Ok(header.ty().expect("exnrefs should have concrete types"))
    }

    /// Create a new `Rooted<ExnRef>` from the given GC reference.
    ///
    /// `gc_ref` should point to a valid `exnref` and should belong to
    /// the store's GC heap. Failure to uphold these invariants is
    /// memory safe but will lead to general incorrectness such as
    /// panics or wrong results.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(
            store
                .unwrap_gc_store()
                .header(&gc_ref)
                .kind()
                .matches(VMGcKind::ExnRef)
        );
        Rooted::new(store, gc_ref)
    }

    /// Whether this reference is rooted in the given store.
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Converts this [`ExnRef`] to a raw value suitable to store within a
    /// [`ValRaw`].
    ///
    /// Returns an error if this `exnref` has been unrooted.
    ///
    /// # Correctness
    ///
    /// Produces a raw value which is only valid to pass into a store if a GC
    /// doesn't happen between when the value is produce and when it's passed
    /// into the store.
    ///
    /// [`ValRaw`]: crate::ValRaw
    pub fn to_raw(&self, mut store: impl AsContextMut) -> Result<u32> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._to_raw(&mut store)
    }

    pub(crate) fn _to_raw(&self, store: &mut AutoAssertNoGc<'_>) -> Result<u32> {
        let gc_ref = self.inner.try_clone_gc_ref(store)?;
        let raw = if gc_ref.is_i31() {
            // i31s are immediate values, not heap objects, so there is
            // nothing to expose to Wasm. NOTE(review): this mirrors the other
            // reference types; presumably an `exnref` is never actually an
            // i31 — confirm.
            gc_ref.as_raw_non_zero_u32()
        } else {
            store.require_gc_store_mut()?.expose_gc_ref_to_wasm(gc_ref)
        };
        Ok(raw.get())
    }

    /// Get the type of this reference.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn ty(&self, store: impl AsContext) -> Result<ExnType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<ExnType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(ExnType::from_shared_type_index(store.engine(), index))
    }

    /// Does this `exnref` match the given type?
    ///
    /// That is, is this object's type a subtype of the given type?
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn matches_ty(&self, store: impl AsContext, ty: &HeapType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &HeapType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(HeapType::from(self._ty(store)?).matches(ty))
    }

    /// Like `_matches_ty`, but returns a descriptive error on mismatch
    /// instead of `false`.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &HeapType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Get the values of this exception object's fields.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        // The returned iterator owns this `AutoAssertNoGc`, so no GC can
        // happen while it is still alive.
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ExnRef));

        // Determine the field count up front from the concrete type.
        let index = header.ty().expect("exnrefs should have concrete types");
        let ty = ExnType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            exnref: self,
            store,
            index: 0,
            len,
        });

        // Lazy iterator that reads one field per `next` call.
        struct Fields<'a, 'b> {
            exnref: &'a ExnRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                // Unwrap is OK: `i < len` was checked above, and the exnref
                // stays rooted for the iterator's lifetime.
                Some(self.exnref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    /// Borrow this exception object's GC header from the store's GC heap.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.require_gc_store()?.header(gc_ref))
    }

    /// View this rooted reference as a typed `VMExnRef`.
    fn exnref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMExnRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        // The unchecked cast below is guarded by this kind check in debug
        // builds.
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::ExnRef));
        Ok(gc_ref.as_exnref_unchecked())
    }

    /// Look up this exception object's GC layout from the engine's type
    /// registry. Exceptions are laid out like structs, never arrays.
    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcStructLayout> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("exn types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            GcLayout::Array(_) => unreachable!(),
        }
    }

    /// The declared type of field `field`, or an error if it is out of
    /// bounds.
    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: exn only has {len} fields")
            }
        }
    }

    /// Get this exception object's `index`th field.
    ///
    /// # Errors
    ///
    /// Returns an `Err(_)` if the index is out of bounds or this reference has
    /// been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let exnref = self.exnref(store)?.unchecked_copy();
        // Bounds-check the index (via `field_ty`) before reading.
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(exnref.read_field(store, &layout, field_ty.element_type(), index))
    }

    /// Get this exception object's associated tag.
    ///
    /// # Errors
    ///
    /// Returns an `Err(_)` if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn tag(&self, mut store: impl AsContextMut) -> Result<Tag> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        assert!(self.comes_from_same_store(&store));
        let exnref = self.exnref(&store)?.unchecked_copy();
        // The tag is stored as raw (instance, index) indices; reconstruct
        // the public `Tag` handle from them.
        let (instance, index) = exnref.tag(&mut store)?;
        Ok(Tag::from_raw_indices(&*store, instance, index))
    }
}
621
unsafe impl WasmTy for Rooted<ExnRef> {
    /// A `Rooted<ExnRef>` is always non-null, so its Wasm type is the
    /// non-nullable `(ref exn)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Exn))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        _store: &StoreOpaque,
        _nullable: bool,
        _ty: &HeapType,
    ) -> Result<()> {
        // Wasm can't specify a concrete exn type, so there are no
        // dynamic checks.
        Ok(())
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        // `exnref` values travel through `ValRaw`'s `anyref` field.
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
    }
}
653
unsafe impl WasmTy for Option<Rooted<ExnRef>> {
    /// `None` maps to a null reference, so the Wasm type is the nullable
    /// `exnref`.
    #[inline]
    fn valtype() -> ValType {
        ValType::EXNREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        // `None` (null) is valid in any store.
        self.map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            Some(a) => a.ensure_matches_ty(store, ty),
            // Null only matches nullable reference types.
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        // `exnref` values travel through `ValRaw`'s `anyref` field.
        <Rooted<ExnRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <Rooted<ExnRef>>::wasm_ty_option_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
    }
}
697
unsafe impl WasmTy for OwnedRooted<ExnRef> {
    /// An `OwnedRooted<ExnRef>` is always non-null, so its Wasm type is the
    /// non-nullable `(ref exn)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Exn))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        self.ensure_matches_ty(store, ty)
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        // `exnref` values travel through `ValRaw`'s `anyref` field.
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
    }
}
727
unsafe impl WasmTy for Option<OwnedRooted<ExnRef>> {
    /// `None` maps to a null reference, so the Wasm type is the nullable
    /// `exnref`.
    #[inline]
    fn valtype() -> ValType {
        ValType::EXNREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        // `None` (null) is valid in any store.
        self.as_ref()
            .map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            Some(a) => a.ensure_matches_ty(store, ty),
            // Null only matches nullable reference types.
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        // `exnref` values travel through `ValRaw`'s `anyref` field.
        <OwnedRooted<ExnRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <OwnedRooted<ExnRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            ExnRef::from_cloned_gc_ref,
        )
    }
}