//! Implementation of `exnref` in Wasmtime.

use crate::runtime::vm::VMGcRef;
use crate::store::{Asyncness, StoreId, StoreResourceLimiter};
#[cfg(feature = "async")]
use crate::vm::VMStore;
use crate::vm::{self, VMExnRef, VMGcHeader};
use crate::{
    AsContext, AsContextMut, GcRefImpl, GcRootIndex, HeapType, OwnedRooted, RefType, Result,
    Rooted, Val, ValRaw, ValType, WasmTy,
    store::{AutoAssertNoGc, StoreOpaque},
};
use crate::{ExnType, FieldType, GcHeapOutOfMemory, StoreContextMut, Tag, prelude::*};
use core::mem;
use core::mem::MaybeUninit;
use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};

/// An allocator for a particular Wasm GC exception type.
///
/// Every `ExnRefPre` is associated with a particular
/// [`Store`][crate::Store] and a particular
/// [`ExnType`][crate::ExnType].
///
/// Reusing an allocator across many allocations amortizes some
/// per-type runtime overheads inside Wasmtime. An `ExnRefPre` is to
/// `ExnRef`s as an `InstancePre` is to `Instance`s.
///
/// # Example
///
/// ```
/// use wasmtime::*;
///
/// # fn foo() -> Result<()> {
/// let mut config = Config::new();
/// config.wasm_function_references(true);
/// config.wasm_gc(true);
///
/// let engine = Engine::new(&config)?;
/// let mut store = Store::new(&engine, ());
///
/// // Define an exn type.
/// let exn_ty = ExnType::new(
///     store.engine(),
///     [ValType::I32],
/// )?;
///
/// // Create an allocator for the exn type.
/// let allocator = ExnRefPre::new(&mut store, exn_ty.clone());
///
/// // Create a tag instance to associate with our exception objects.
/// let tag = Tag::new(&mut store, &exn_ty.tag_type()).unwrap();
///
/// {
///     let mut scope = RootScope::new(&mut store);
///
///     // Allocate a bunch of instances of our exception type using the same
///     // allocator! This is faster than creating a new allocator for each
///     // instance we want to allocate.
///     for i in 0..10 {
///         ExnRef::new(&mut scope, &allocator, &tag, &[Val::I32(i)])?;
///     }
/// }
/// # Ok(())
/// # }
/// # foo().unwrap();
/// ```
pub struct ExnRefPre {
    store_id: StoreId,
    ty: ExnType,
}

impl ExnRefPre {
    /// Create a new `ExnRefPre` that is associated with the given store
    /// and type.
    pub fn new(mut store: impl AsContextMut, ty: ExnType) -> Self {
        Self::_new(store.as_context_mut().0, ty)
    }

    pub(crate) fn _new(store: &mut StoreOpaque, ty: ExnType) -> Self {
        store.insert_gc_host_alloc_type(ty.registered_type().clone());
        let store_id = store.id();

        ExnRefPre { store_id, ty }
    }

    pub(crate) fn layout(&self) -> &GcStructLayout {
        self.ty
            .registered_type()
            .layout()
            .expect("exn types have a layout")
            .unwrap_struct()
    }

    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
        self.ty.registered_type().index()
    }
}

/// An `exnref` GC reference.
///
/// The `ExnRef` type represents WebAssembly `exnref` values. These
/// are references to exception objects created either by catching a
/// thrown exception in WebAssembly with a `catch_ref` clause of a
/// `try_table`, or by allocating via the host API.
///
/// Note that you can also use `Rooted<ExnRef>` and `OwnedRooted<ExnRef>` as
/// a type parameter with [`Func::typed`][crate::Func::typed]- and
/// [`Func::wrap`][crate::Func::wrap]-style APIs.
#[derive(Debug)]
#[repr(transparent)]
pub struct ExnRef {
    pub(super) inner: GcRootIndex,
}

unsafe impl GcRefImpl for ExnRef {
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // Safety: `ExnRef` is a newtype of a `GcRootIndex`.
        let me: &Self = unsafe { mem::transmute(index) };

        // Assert we really are just a newtype of a `GcRootIndex`.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}

impl ExnRef {
    /// Creates a new strongly-owned [`ExnRef`] from the raw value provided.
    ///
    /// This is intended to be used in conjunction with [`Func::new_unchecked`],
    /// [`Func::call_unchecked`], and [`ValRaw`] with its `anyref` field.
    ///
    /// This function assumes that `raw` is an `exnref` value which is currently
    /// rooted within the [`Store`].
    ///
    /// # Correctness
    ///
    /// This function is tricky to get right because `raw` must not only be a
    /// valid `exnref` value previously produced by [`ExnRef::to_raw`], it must
    /// also still be correctly rooted within the store. When arguments are
    /// provided to a callback with [`Func::new_unchecked`], for example, or
    /// returned via [`Func::call_unchecked`], a GC within the store collects
    /// any floating `exnref` values that are not rooted, and calling this
    /// function with such collected values is no longer correct. This function
    /// must therefore be invoked *before* any possible GC operation (such as
    /// calling Wasm).
    ///
    /// When in doubt, avoid this method and use the Rust APIs of
    /// [`TypedFunc`] and friends instead. Note though that this function is not
    /// `unsafe` as any value can be passed in. Incorrect values can result in
    /// runtime panics, however, so care must still be taken with this method.
    ///
    /// [`Func::call_unchecked`]: crate::Func::call_unchecked
    /// [`Func::new_unchecked`]: crate::Func::new_unchecked
    /// [`Store`]: crate::Store
    /// [`TypedFunc`]: crate::TypedFunc
    /// [`ValRaw`]: crate::ValRaw
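    ///
    /// # Example
    ///
    /// A minimal round-trip sketch, assuming `exnref` is still rooted in the
    /// store and no GC happens between the `to_raw` and `from_raw` calls:
    ///
    /// ```no_run
    /// # use wasmtime::*;
    /// # fn example(mut store: Store<()>, exnref: Rooted<ExnRef>) -> Result<()> {
    /// // Produce a raw value; it is only valid until the next GC in this store.
    /// let raw = exnref.to_raw(&mut store)?;
    /// // Convert back into a rooted `ExnRef` before any GC can happen.
    /// let roundtripped = ExnRef::from_raw(&mut store, raw);
    /// assert!(roundtripped.is_some());
    /// # Ok(())
    /// # }
    /// ```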
    pub fn from_raw(mut store: impl AsContextMut, raw: u32) -> Option<Rooted<Self>> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        Self::_from_raw(&mut store, raw)
    }

    // (Not actually memory unsafe since we have indexed GC heaps.)
    pub(crate) fn _from_raw(store: &mut AutoAssertNoGc, raw: u32) -> Option<Rooted<Self>> {
        let gc_ref = VMGcRef::from_raw_u32(raw)?;
        let gc_ref = store.clone_gc_ref(&gc_ref);
        Some(Self::from_cloned_gc_ref(store, gc_ref))
    }

    /// Synchronously allocate a new exception object and get a
    /// reference to it.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for
    /// allocating this new exception object, then this method will
    /// automatically trigger a synchronous collection in an attempt
    /// to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `fields` values' types do not match the field
    /// types of the `allocator`'s exception type, an error is
    /// returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// If `store` is configured with a
    /// [`ResourceLimiterAsync`](crate::ResourceLimiterAsync) then an error
    /// will be returned because [`ExnRef::new_async`] should be used instead.
    ///
    /// # Panics
    ///
    /// Panics if the allocator, or any of the field values, is not associated
    /// with the given store.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        let (mut limiter, store) = store
            .as_context_mut()
            .0
            .validate_sync_resource_limiter_and_store_opaque()?;
        vm::assert_ready(Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            tag,
            fields,
            Asyncness::No,
        ))
    }

    /// Asynchronously allocate a new exception object and get a
    /// reference to it.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new exception object, then this method will automatically trigger an
    /// asynchronous collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `fields` values' types do not match the field
    /// types of the `allocator`'s exception type, an error is
    /// returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if the allocator, or any of the field values, is not associated
    /// with the given store.
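    ///
    /// # Example
    ///
    /// A minimal sketch, assuming an async-configured store and the same
    /// `allocator` and `tag` setup as in the [`ExnRefPre`] example:
    ///
    /// ```ignore
    /// let exn = ExnRef::new_async(&mut store, &allocator, &tag, &[Val::I32(36)]).await?;
    /// ```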
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            tag,
            fields,
            Asyncness::Yes,
        )
        .await
    }

    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
        asyncness: Asyncness,
    ) -> Result<Rooted<ExnRef>> {
        Self::type_check_tag_and_fields(store, allocator, tag, fields)?;
        store
            .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
                Self::new_unchecked(store, allocator, tag, fields)
            })
            .await
    }

    /// Type check the tag instance and field values before allocating
    /// a new exception object.
    fn type_check_tag_and_fields(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<(), Error> {
        assert!(
            tag.comes_from_same_store(store),
            "tag comes from the wrong store"
        );
        ensure!(
            tag.wasmtime_ty(store).signature.unwrap_engine_type_index()
                == allocator.ty.tag_type().ty().type_index(),
            "incorrect signature for tag when creating exception object"
        );
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    /// Given that the field values have already been type checked, allocate a
    /// new exn.
    ///
    /// Does not attempt GC+retry on OOM; that is the caller's responsibility.
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use an `ExnRefPre` with the wrong store"
        );

        // Allocate the exn and write each field value into the appropriate
        // offset.
        let exnref = store
            .require_gc_store_mut()?
            .alloc_uninit_exn(allocator.type_index(), &allocator.layout())
            .context("unrecoverable error when allocating new `exnref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // From this point on, if we get any errors, then the exn is not
        // fully initialized, so we need to eagerly deallocate it before the
        // next GC where the collector might try to interpret one of the
        // uninitialized fields as a GC reference.
        let mut store = AutoAssertNoGc::new(store);
        match (|| {
            let (instance, index) = tag.to_raw_indices();
            exnref.initialize_tag(&mut store, instance, index)?;
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                exnref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, exnref.into())),
            Err(e) => {
                store.require_gc_store_mut()?.dealloc_uninit_exn(exnref);
                Err(e)
            }
        }
    }

    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ExnRef));
        Ok(header.ty().expect("exnrefs should have concrete types"))
    }

    /// Create a new `Rooted<ExnRef>` from the given GC reference.
    ///
    /// `gc_ref` should point to a valid `exnref` and should belong to
    /// the store's GC heap. Failure to uphold these invariants is
    /// memory safe but will lead to general incorrectness such as
    /// panics or wrong results.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(
            store
                .unwrap_gc_store()
                .header(&gc_ref)
                .kind()
                .matches(VMGcKind::ExnRef)
        );
        Rooted::new(store, gc_ref)
    }

    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Converts this [`ExnRef`] to a raw value suitable to store within a
    /// [`ValRaw`].
    ///
    /// Returns an error if this `exnref` has been unrooted.
    ///
    /// # Correctness
    ///
    /// Produces a raw value which is only valid to pass into a store if a GC
    /// doesn't happen between when the value is produced and when it's passed
    /// into the store.
    ///
    /// [`ValRaw`]: crate::ValRaw
    pub fn to_raw(&self, mut store: impl AsContextMut) -> Result<u32> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._to_raw(&mut store)
    }

    pub(crate) fn _to_raw(&self, store: &mut AutoAssertNoGc<'_>) -> Result<u32> {
        let gc_ref = self.inner.try_clone_gc_ref(store)?;
        let raw = if gc_ref.is_i31() {
            gc_ref.as_raw_non_zero_u32()
        } else {
            store.require_gc_store_mut()?.expose_gc_ref_to_wasm(gc_ref)
        };
        Ok(raw.get())
    }

    /// Get the type of this reference.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn ty(&self, store: impl AsContext) -> Result<ExnType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<ExnType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(ExnType::from_shared_type_index(store.engine(), index))
    }

    /// Does this `exnref` match the given type?
    ///
    /// That is, is this object's type a subtype of the given type?
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
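    ///
    /// # Example
    ///
    /// A minimal sketch, assuming `exnref` is rooted in `store`:
    ///
    /// ```no_run
    /// # use wasmtime::*;
    /// # fn example(store: &Store<()>, exnref: Rooted<ExnRef>) -> Result<()> {
    /// // Every exception object matches the top `exn` heap type.
    /// assert!(exnref.matches_ty(store, &HeapType::Exn)?);
    /// # Ok(())
    /// # }
    /// ```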
    pub fn matches_ty(&self, store: impl AsContext, ty: &HeapType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &HeapType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(HeapType::from(self._ty(store)?).matches(ty))
    }

    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &HeapType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("exnref used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Get the values of this exception object's fields.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
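    ///
    /// # Example
    ///
    /// A minimal sketch, assuming `exnref` was allocated from an exception
    /// type with a single `i32` payload field (as in the [`ExnRefPre`]
    /// example):
    ///
    /// ```no_run
    /// # use wasmtime::*;
    /// # fn example(mut store: Store<()>, exnref: Rooted<ExnRef>) -> Result<()> {
    /// for field in exnref.fields(&mut store)? {
    ///     println!("field = {field:?}");
    /// }
    /// # Ok(())
    /// # }
    /// ```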
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ExnRef));

        let index = header.ty().expect("exnrefs should have concrete types");
        let ty = ExnType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            exnref: self,
            store,
            index: 0,
            len,
        });

        struct Fields<'a, 'b> {
            exnref: &'a ExnRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                Some(self.exnref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.require_gc_store()?.header(gc_ref))
    }

    fn exnref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMExnRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::ExnRef));
        Ok(gc_ref.as_exnref_unchecked())
    }

    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcStructLayout> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("exn types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            GcLayout::Array(_) => unreachable!(),
        }
    }

    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: exn only has {len} fields")
            }
        }
    }

    /// Get this exception object's `index`th field.
    ///
    /// # Errors
    ///
    /// Returns an `Err(_)` if the index is out of bounds or this reference has
    /// been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
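    ///
    /// # Example
    ///
    /// A minimal sketch, assuming `exnref` has at least one field:
    ///
    /// ```no_run
    /// # use wasmtime::*;
    /// # fn example(mut store: Store<()>, exnref: Rooted<ExnRef>) -> Result<()> {
    /// let first = exnref.field(&mut store, 0)?;
    /// println!("first field = {first:?}");
    /// # Ok(())
    /// # }
    /// ```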
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let exnref = self.exnref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(exnref.read_field(store, &layout, field_ty.element_type(), index))
    }

    /// Get this exception object's associated tag.
    ///
    /// # Errors
    ///
    /// Returns an `Err(_)` if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
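    ///
    /// # Example
    ///
    /// A minimal sketch, assuming `exnref` is rooted in `store`:
    ///
    /// ```no_run
    /// # use wasmtime::*;
    /// # fn example(mut store: Store<()>, exnref: Rooted<ExnRef>) -> Result<()> {
    /// // The returned `Tag` is the tag instance this exception was created with.
    /// let exn_tag = exnref.tag(&mut store)?;
    /// # let _ = exn_tag;
    /// # Ok(())
    /// # }
    /// ```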
    pub fn tag(&self, mut store: impl AsContextMut) -> Result<Tag> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        assert!(self.comes_from_same_store(&store));
        let exnref = self.exnref(&store)?.unchecked_copy();
        let (instance, index) = exnref.tag(&mut store)?;
        Ok(Tag::from_raw_indices(&*store, instance, index))
    }
}

unsafe impl WasmTy for Rooted<ExnRef> {
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Exn))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        _store: &StoreOpaque,
        _nullable: bool,
        _ty: &HeapType,
    ) -> Result<()> {
        // Wasm can't specify a concrete exn type, so there are no
        // dynamic checks.
        Ok(())
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
    }
}

unsafe impl WasmTy for Option<Rooted<ExnRef>> {
    #[inline]
    fn valtype() -> ValType {
        ValType::EXNREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            Some(a) => a.ensure_matches_ty(store, ty),
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <Rooted<ExnRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <Rooted<ExnRef>>::wasm_ty_option_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
    }
}

unsafe impl WasmTy for OwnedRooted<ExnRef> {
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Exn))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        self.ensure_matches_ty(store, ty)
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
    }
}

unsafe impl WasmTy for Option<OwnedRooted<ExnRef>> {
    #[inline]
    fn valtype() -> ValType {
        ValType::EXNREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.as_ref()
            .map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            Some(a) => a.ensure_matches_ty(store, ty),
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <OwnedRooted<ExnRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <OwnedRooted<ExnRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            ExnRef::from_cloned_gc_ref,
        )
    }
}