// wasmtime/runtime/gc/enabled/exnref.rs
1//! Implementation of `exnref` in Wasmtime.
2
3use crate::runtime::vm::VMGcRef;
4use crate::store::{Asyncness, StoreId, StoreResourceLimiter};
5#[cfg(feature = "async")]
6use crate::vm::VMStore;
7use crate::vm::{self, VMExnRef, VMGcHeader};
8use crate::{
9    AsContext, AsContextMut, GcRefImpl, GcRootIndex, HeapType, OwnedRooted, RefType, Result,
10    Rooted, Val, ValRaw, ValType, WasmTy,
11    store::{AutoAssertNoGc, StoreOpaque},
12};
13use crate::{ExnType, FieldType, GcHeapOutOfMemory, StoreContextMut, Tag, prelude::*};
14use alloc::sync::Arc;
15use core::mem;
16use core::mem::MaybeUninit;
17use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
18
19/// An allocator for a particular Wasm GC exception type.
20///
21/// Every `ExnRefPre` is associated with a particular
22/// [`Store`][crate::Store] and a particular
23/// [ExnType][crate::ExnType].
24///
25/// Reusing an allocator across many allocations amortizes some
26/// per-type runtime overheads inside Wasmtime. An `ExnRefPre` is to
27/// `ExnRef`s as an `InstancePre` is to `Instance`s.
28///
29/// # Example
30///
31/// ```
32/// use wasmtime::*;
33///
34/// # fn foo() -> Result<()> {
35/// let mut config = Config::new();
36/// config.wasm_function_references(true);
37/// config.wasm_gc(true);
38///
39/// let engine = Engine::new(&config)?;
40/// let mut store = Store::new(&engine, ());
41///
/// // Define an exn type.
43/// let exn_ty = ExnType::new(
44///    store.engine(),
45///    [ValType::I32],
46/// )?;
47///
48/// // Create an allocator for the exn type.
49/// let allocator = ExnRefPre::new(&mut store, exn_ty.clone());
50///
51/// // Create a tag instance to associate with our exception objects.
52/// let tag = Tag::new(&mut store, &exn_ty.tag_type()).unwrap();
53///
54/// {
55///     let mut scope = RootScope::new(&mut store);
56///
57///     // Allocate a bunch of instances of our exception type using the same
58///     // allocator! This is faster than creating a new allocator for each
59///     // instance we want to allocate.
60///     for i in 0..10 {
61///         ExnRef::new(&mut scope, &allocator, &tag, &[Val::I32(i)])?;
62///     }
63/// }
64/// # Ok(())
65/// # }
66/// # foo().unwrap();
67/// ```
pub struct ExnRefPre {
    // The id of the store this allocator was created for. Checked against
    // the store actually used at allocation time (see `ExnRef::new_unchecked`,
    // which asserts `store.id() == allocator.store_id`).
    store_id: StoreId,
    // The exception type that every object allocated through this `ExnRefPre`
    // will have.
    ty: ExnType,
}
72
impl ExnRefPre {
    /// Create a new `ExnRefPre` that is associated with the given store
    /// and type.
    pub fn new(mut store: impl AsContextMut, ty: ExnType) -> Self {
        Self::_new(store.as_context_mut().0, ty)
    }

    /// Internal constructor: registers `ty`'s registered type with the store
    /// (presumably so the type outlives host allocations made with this
    /// allocator — see `insert_gc_host_alloc_type`) and records the store's id
    /// for later validation.
    pub(crate) fn _new(store: &mut StoreOpaque, ty: ExnType) -> Self {
        store.insert_gc_host_alloc_type(ty.registered_type().clone());
        let store_id = store.id();

        ExnRefPre { store_id, ty }
    }

    /// The GC layout of this allocator's exception type.
    ///
    /// Exception objects use a struct layout, hence `unwrap_struct`.
    pub(crate) fn layout(&self) -> &GcStructLayout {
        self.ty
            .registered_type()
            .layout()
            .expect("exn types have a layout")
            .unwrap_struct()
    }

    /// The engine-wide shared type index of this allocator's exception type.
    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
        self.ty.registered_type().index()
    }
}
99
100/// An `exnref` GC reference.
101///
102/// The `ExnRef` type represents WebAssembly `exnref` values. These
103/// are references to exception objects created either by catching a
104/// thrown exception in WebAssembly with a `catch_ref` clause of a
105/// `try_table`, or by allocating via the host API.
106///
107/// Note that you can also use `Rooted<ExnRef>` and `OwnedRooted<ExnRef>` as
108/// a type parameter with [`Func::typed`][crate::Func::typed]- and
109/// [`Func::wrap`][crate::Func::wrap]-style APIs.
#[derive(Debug)]
#[repr(transparent)]
pub struct ExnRef {
    // The store-scoped GC root index backing this reference.
    //
    // NOTE: the `repr(transparent)` layout over this single field is relied
    // upon by `GcRefImpl::transmute_ref` below.
    pub(super) inner: GcRootIndex,
}
115
116unsafe impl GcRefImpl for ExnRef {
117    fn transmute_ref(index: &GcRootIndex) -> &Self {
118        // Safety: `ExnRef` is a newtype of a `GcRootIndex`.
119        let me: &Self = unsafe { mem::transmute(index) };
120
121        // Assert we really are just a newtype of a `GcRootIndex`.
122        assert!(matches!(
123            me,
124            Self {
125                inner: GcRootIndex { .. },
126            }
127        ));
128
129        me
130    }
131}
132
impl ExnRef {
    /// Creates a new strongly-owned [`ExnRef`] from the raw value provided.
    ///
    /// This is intended to be used in conjunction with [`Func::new_unchecked`],
    /// [`Func::call_unchecked`], and [`ValRaw`] with its `anyref` field.
    ///
    /// This function assumes that `raw` is an `exnref` value which is currently
    /// rooted within the [`Store`].
    ///
    /// # Correctness
    ///
    /// This function is tricky to get right because `raw` not only must be a
    /// valid `exnref` value produced prior by [`ExnRef::to_raw`] but it must
    /// also be correctly rooted within the store. When arguments are provided
    /// to a callback with [`Func::new_unchecked`], for example, or returned via
    /// [`Func::call_unchecked`], if a GC is performed within the store then
    /// floating `exnref` values are not rooted and will be GC'd, meaning that
    /// this function will no longer be correct to call with the values cleaned
    /// up. This function must be invoked *before* possible GC operations can
    /// happen (such as calling Wasm).
    ///
    /// When in doubt try to not use this. Instead use the Rust APIs of
    /// [`TypedFunc`] and friends. Note though that this function is not
    /// `unsafe` as any value can be passed in. Incorrect values can result in
    /// runtime panics, however, so care must still be taken with this method.
    ///
    /// [`Func::call_unchecked`]: crate::Func::call_unchecked
    /// [`Func::new_unchecked`]: crate::Func::new_unchecked
    /// [`Store`]: crate::Store
    /// [`TypedFunc`]: crate::TypedFunc
    /// [`ValRaw`]: crate::ValRaw
    pub fn from_raw(mut store: impl AsContextMut, raw: u32) -> Option<Rooted<Self>> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        Self::_from_raw(&mut store, raw)
    }

    /// Implementation of `from_raw` in terms of an `AutoAssertNoGc` scope.
    ///
    /// Returns `None` when `raw` does not encode a GC reference (i.e. it is
    /// the null representation).
    // (Not actually memory unsafe since we have indexed GC heaps.)
    pub(crate) fn _from_raw(store: &mut AutoAssertNoGc, raw: u32) -> Option<Rooted<Self>> {
        let gc_ref = VMGcRef::from_raw_u32(raw)?;
        // Clone the reference within the store so the `Rooted` we hand back
        // owns its own GC root.
        let gc_ref = store.clone_gc_ref(&gc_ref);
        Some(Self::from_cloned_gc_ref(store, gc_ref))
    }

    /// Synchronously allocate a new exception object and get a
    /// reference to it.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for
    /// allocating this new exception object, then this method will
    /// automatically trigger a synchronous collection in an attempt
    /// to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `fields` values' types do not match the field
    /// types of the `allocator`'s exception type, an error is
    /// returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// If `store` is configured with a
    /// [`ResourceLimiterAsync`](crate::ResourceLimiterAsync) then an error
    /// will be returned because [`ExnRef::new_async`] should be used instead.
    ///
    /// # Panics
    ///
    /// Panics if the allocator, or any of the field values, is not associated
    /// with the given store.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        // Errors out for stores configured with an async resource limiter
        // (see the doc comment above); such stores must use `new_async`.
        let (mut limiter, store) = store
            .as_context_mut()
            .0
            .validate_sync_resource_limiter_and_store_opaque()?;
        // With `Asyncness::No` the shared async path never actually suspends,
        // so it is safe to assert the future is immediately ready.
        vm::assert_ready(Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            tag,
            fields,
            Asyncness::No,
        ))
    }

    /// Asynchronously allocate a new exception object and get a
    /// reference to it.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new exn, then this method will automatically trigger a synchronous
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `fields` values' types do not match the field
    /// types of the `allocator`'s exception type, an error is
    /// returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if the allocator, or any of the field values, is not associated
    /// with the given store.
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            tag,
            fields,
            Asyncness::Yes,
        )
        .await
    }

    /// Shared allocation path for [`ExnRef::new`] and [`ExnRef::new_async`]:
    /// type check the tag and field values up front, then allocate, triggering
    /// a GC and retrying once if the heap is out of memory.
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
        asyncness: Asyncness,
    ) -> Result<Rooted<ExnRef>> {
        Self::type_check_tag_and_fields(store, allocator, tag, fields)?;
        store
            .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
                Self::new_unchecked(store, allocator, tag, fields)
            })
            .await
    }

    /// Type check the tag instance and field values before allocating
    /// a new exception object.
    fn type_check_tag_and_fields(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<(), Error> {
        // Cross-store usage is a caller bug, hence a panic rather than an
        // `Err` (matching the documented panics of `new`/`new_async`).
        assert!(
            tag.comes_from_same_store(store),
            "tag comes from the wrong store"
        );
        // The tag's signature must be exactly the allocator type's tag type.
        ensure!(
            tag.wasmtime_ty(store).signature.unwrap_engine_type_index()
                == allocator.ty.tag_type().ty().type_index(),
            "incorrect signature for tag when creating exception object"
        );
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        // Check each provided value against the corresponding field type.
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    /// Given that the field values have already been type checked, allocate a
    /// new exn.
    ///
    /// Does not attempt GC+retry on OOM, that is the caller's responsibility.
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `ExnRefPre` with the wrong store"
        );

        // Allocate the exn and write each field value into the appropriate
        // offset.
        let exnref = store
            .require_gc_store_mut()?
            .alloc_uninit_exn(allocator.type_index(), &allocator.layout())
            .context("unrecoverable error when allocating new `exnref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // From this point on, if we get any errors, then the exn is not
        // fully initialized, so we need to eagerly deallocate it before the
        // next GC where the collector might try to interpret one of the
        // uninitialized fields as a GC reference.
        let mut store = AutoAssertNoGc::new(store);
        // The immediately-invoked closure funnels every `?` during
        // initialization into one place so the error path below can
        // deallocate the partially-initialized exn.
        match (|| {
            let (instance, index) = tag.to_raw_indices();
            exnref.initialize_tag(&mut store, instance, index)?;
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                exnref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, exnref.into())),
            Err(e) => {
                store.require_gc_store_mut()?.dealloc_uninit_exn(exnref);
                Err(e)
            }
        }
    }

    /// Get the engine-level shared type index of this exception object's
    /// concrete type, read out of its GC header.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ExnRef));
        Ok(header.ty().expect("exnrefs should have concrete types"))
    }

    /// Create a new `Rooted<ExnRef>` from the given GC reference.
    ///
    /// `gc_ref` should point to a valid `exnref` and should belong to
    /// the store's GC heap. Failure to uphold these invariants is
    /// memory safe but will lead to general incorrectness such as
    /// panics or wrong results.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(
            store
                .unwrap_gc_store()
                .header(&gc_ref)
                .kind()
                .matches(VMGcKind::ExnRef)
        );
        Rooted::new(store, gc_ref)
    }

    /// Whether this reference's root belongs to the given store.
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Converts this [`ExnRef`] to a raw value suitable to store within a
    /// [`ValRaw`].
    ///
    /// Returns an error if this `exnref` has been unrooted.
    ///
    /// # Correctness
    ///
    /// Produces a raw value which is only valid to pass into a store if a GC
    /// doesn't happen between when the value is produce and when it's passed
    /// into the store.
    ///
    /// [`ValRaw`]: crate::ValRaw
    pub fn to_raw(&self, mut store: impl AsContextMut) -> Result<u32> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._to_raw(&mut store)
    }

    /// Implementation of `to_raw` in terms of an `AutoAssertNoGc` scope.
    pub(crate) fn _to_raw(&self, store: &mut AutoAssertNoGc<'_>) -> Result<u32> {
        let gc_ref = self.inner.try_clone_gc_ref(store)?;
        let raw = if gc_ref.is_i31() {
            // `i31` references are immediates whose raw bits can be handed
            // out directly without involving the GC store.
            gc_ref.as_raw_non_zero_u32()
        } else {
            // Heap references must be exposed to Wasm via the GC store.
            store.require_gc_store_mut()?.expose_gc_ref_to_wasm(gc_ref)
        };
        Ok(raw.get())
    }

    /// Get the type of this reference.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn ty(&self, store: impl AsContext) -> Result<ExnType> {
        self._ty(store.as_context().0)
    }

    /// Implementation of `ty` in terms of a `StoreOpaque`.
    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<ExnType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(ExnType::from_shared_type_index(store.engine(), index))
    }

    /// Does this `exnref` match the given type?
    ///
    /// That is, is this object's type a subtype of the given type?
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn matches_ty(&self, store: impl AsContext, ty: &HeapType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    /// Implementation of `matches_ty` in terms of a `StoreOpaque`.
    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &HeapType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(HeapType::from(self._ty(store)?).matches(ty))
    }

    /// Like `_matches_ty` but returns a descriptive error on mismatch or
    /// cross-store usage instead of a boolean.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &HeapType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            // NOTE(review): this message says "function" but the reference is
            // an exnref; likely copied from the funcref code — consider
            // updating the wording upstream.
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Get the values of this exception object's fields.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    /// Implementation of `fields`: returns a lazy iterator that reads one
    /// field per `next` call.
    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ExnRef));

        // Determine up front how many fields this object's concrete type has
        // so the iterator can be exact-size.
        let index = header.ty().expect("exnrefs should have concrete types");
        let ty = ExnType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            exnref: self,
            store,
            index: 0,
            len,
        });

        // Iterator over an exception object's fields. Owning the
        // `AutoAssertNoGc` for the iterator's lifetime keeps GC from
        // happening while fields are being read out.
        struct Fields<'a, 'b> {
            exnref: &'a ExnRef,
            store: AutoAssertNoGc<'b>,
            index: usize, // next field to yield
            len: usize,   // total field count, fixed at creation
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                // `unwrap` is justified: `i < len` was checked above and the
                // reference was rooted when the iterator was created.
                Some(self.exnref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    /// Borrow this reference's GC header out of the store's GC heap.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.require_gc_store()?.header(gc_ref))
    }

    /// View this reference as a typed `VMExnRef`, after a debug-only check
    /// that the header kind really is an exnref.
    fn exnref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMExnRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::ExnRef));
        Ok(gc_ref.as_exnref_unchecked())
    }

    /// Look up the GC struct layout for this exnref's concrete type.
    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<Arc<GcStructLayout>> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("exn types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            // Exception objects always use struct layouts.
            GcLayout::Array(_) => unreachable!(),
        }
    }

    /// Get the type of field `field`, or a descriptive error if the index is
    /// out of bounds.
    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: exn only has {len} fields")
            }
        }
    }

    /// Get this exception object's `index`th field.
    ///
    /// # Errors
    ///
    /// Returns an `Err(_)` if the index is out of bounds or this reference has
    /// been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    /// Implementation of `field` in terms of an `AutoAssertNoGc` scope.
    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        // Copy out the raw `VMExnRef` so the shared borrow of `store` ends
        // before `store` is borrowed again below.
        let exnref = self.exnref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(exnref.read_field(store, &layout, field_ty.element_type(), index))
    }

    /// Get this exception object's associated tag.
    ///
    /// # Errors
    ///
    /// Returns an `Err(_)` if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn tag(&self, mut store: impl AsContextMut) -> Result<Tag> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        assert!(self.comes_from_same_store(&store));
        let exnref = self.exnref(&store)?.unchecked_copy();
        let (instance, index) = exnref.tag(&mut store)?;
        Ok(Tag::from_raw_indices(&*store, instance, index))
    }
}
632
633unsafe impl WasmTy for Rooted<ExnRef> {
634    #[inline]
635    fn valtype() -> ValType {
636        ValType::Ref(RefType::new(false, HeapType::Exn))
637    }
638
639    #[inline]
640    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
641        self.comes_from_same_store(store)
642    }
643
644    #[inline]
645    fn dynamic_concrete_type_check(
646        &self,
647        _store: &StoreOpaque,
648        _nullable: bool,
649        _ty: &HeapType,
650    ) -> Result<()> {
651        // Wasm can't specify a concrete exn type, so there are no
652        // dynamic checks.
653        Ok(())
654    }
655
656    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
657        self.wasm_ty_store(store, ptr, ValRaw::anyref)
658    }
659
660    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
661        Self::wasm_ty_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
662    }
663}
664
665unsafe impl WasmTy for Option<Rooted<ExnRef>> {
666    #[inline]
667    fn valtype() -> ValType {
668        ValType::EXNREF
669    }
670
671    #[inline]
672    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
673        self.map_or(true, |x| x.comes_from_same_store(store))
674    }
675
676    #[inline]
677    fn dynamic_concrete_type_check(
678        &self,
679        store: &StoreOpaque,
680        nullable: bool,
681        ty: &HeapType,
682    ) -> Result<()> {
683        match self {
684            Some(a) => a.ensure_matches_ty(store, ty),
685            None => {
686                ensure!(
687                    nullable,
688                    "expected a non-null reference, but found a null reference"
689                );
690                Ok(())
691            }
692        }
693    }
694
695    #[inline]
696    fn is_vmgcref_and_points_to_object(&self) -> bool {
697        self.is_some()
698    }
699
700    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
701        <Rooted<ExnRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
702    }
703
704    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
705        <Rooted<ExnRef>>::wasm_ty_option_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
706    }
707}
708
709unsafe impl WasmTy for OwnedRooted<ExnRef> {
710    #[inline]
711    fn valtype() -> ValType {
712        ValType::Ref(RefType::new(false, HeapType::Exn))
713    }
714
715    #[inline]
716    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
717        self.comes_from_same_store(store)
718    }
719
720    #[inline]
721    fn dynamic_concrete_type_check(
722        &self,
723        store: &StoreOpaque,
724        _nullable: bool,
725        ty: &HeapType,
726    ) -> Result<()> {
727        self.ensure_matches_ty(store, ty)
728    }
729
730    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
731        self.wasm_ty_store(store, ptr, ValRaw::anyref)
732    }
733
734    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
735        Self::wasm_ty_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
736    }
737}
738
739unsafe impl WasmTy for Option<OwnedRooted<ExnRef>> {
740    #[inline]
741    fn valtype() -> ValType {
742        ValType::EXNREF
743    }
744
745    #[inline]
746    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
747        self.as_ref()
748            .map_or(true, |x| x.comes_from_same_store(store))
749    }
750
751    #[inline]
752    fn dynamic_concrete_type_check(
753        &self,
754        store: &StoreOpaque,
755        nullable: bool,
756        ty: &HeapType,
757    ) -> Result<()> {
758        match self {
759            Some(a) => a.ensure_matches_ty(store, ty),
760            None => {
761                ensure!(
762                    nullable,
763                    "expected a non-null reference, but found a null reference"
764                );
765                Ok(())
766            }
767        }
768    }
769
770    #[inline]
771    fn is_vmgcref_and_points_to_object(&self) -> bool {
772        self.is_some()
773    }
774
775    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
776        <OwnedRooted<ExnRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
777    }
778
779    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
780        <OwnedRooted<ExnRef>>::wasm_ty_option_load(
781            store,
782            ptr.get_anyref(),
783            ExnRef::from_cloned_gc_ref,
784        )
785    }
786}