1use crate::runtime::vm::{VMGcRef, VMStore};
4use crate::store::{StoreId, StoreResourceLimiter};
5use crate::vm::{self, VMExnRef, VMGcHeader};
6use crate::{
7    AsContext, AsContextMut, GcRefImpl, GcRootIndex, HeapType, OwnedRooted, RefType, Result,
8    Rooted, Val, ValRaw, ValType, WasmTy,
9    store::{AutoAssertNoGc, StoreOpaque},
10};
11use crate::{ExnType, FieldType, GcHeapOutOfMemory, StoreContextMut, Tag, prelude::*};
12use core::mem;
13use core::mem::MaybeUninit;
14use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
15
/// An allocator for a particular Wasm exception type, pre-registered with a
/// store so that repeated `ExnRef` allocations of that type are cheap.
pub struct ExnRefPre {
    /// ID of the store this allocator was created for; checked at
    /// allocation time so it cannot be used with a different store.
    store_id: StoreId,
    /// The registered exception type that instances will be allocated with.
    ty: ExnType,
}
69
70impl ExnRefPre {
71    pub fn new(mut store: impl AsContextMut, ty: ExnType) -> Self {
74        Self::_new(store.as_context_mut().0, ty)
75    }
76
77    pub(crate) fn _new(store: &mut StoreOpaque, ty: ExnType) -> Self {
78        store.insert_gc_host_alloc_type(ty.registered_type().clone());
79        let store_id = store.id();
80
81        ExnRefPre { store_id, ty }
82    }
83
84    pub(crate) fn layout(&self) -> &GcStructLayout {
85        self.ty
86            .registered_type()
87            .layout()
88            .expect("exn types have a layout")
89            .unwrap_struct()
90    }
91
92    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
93        self.ty.registered_type().index()
94    }
95}
96
/// A reference to a GC-managed exception object.
///
/// `repr(transparent)` over `GcRootIndex` — `GcRefImpl::transmute_ref`
/// relies on this layout guarantee.
#[derive(Debug)]
#[repr(transparent)]
pub struct ExnRef {
    /// Index of this reference's root in the store's GC root set.
    pub(super) inner: GcRootIndex,
}
112
unsafe impl GcRefImpl for ExnRef {
    /// Reinterprets a `&GcRootIndex` as a `&ExnRef`.
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // SAFETY: `ExnRef` is a `repr(transparent)` newtype over
        // `GcRootIndex`, so the two references have identical layout.
        let me: &Self = unsafe { mem::transmute(index) };

        // Assert that we really are just a transparent newtype: this
        // pattern-match stops compiling if the struct's shape ever changes.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}
129
impl ExnRef {
    /// Creates a new rooted [`ExnRef`] from `raw`, the representation of an
    /// `exnref` passed to or received from Wasm.
    ///
    /// Returns `None` when `raw` is `0`, the null-reference encoding.
    pub fn from_raw(mut store: impl AsContextMut, raw: u32) -> Option<Rooted<Self>> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        Self::_from_raw(&mut store, raw)
    }

    pub(crate) fn _from_raw(store: &mut AutoAssertNoGc, raw: u32) -> Option<Rooted<Self>> {
        // `from_raw_u32` yields `None` for the zero/null encoding.
        let gc_ref = VMGcRef::from_raw_u32(raw)?;
        // Clone the reference through the store so the new copy is properly
        // accounted for before being rooted below.
        let gc_ref = store.clone_gc_ref(&gc_ref);
        Some(Self::from_cloned_gc_ref(store, gc_ref))
    }

    /// Synchronously allocates a new exception object of `allocator`'s type
    /// in `store`, carrying `tag` and the given `fields`.
    ///
    /// # Errors
    ///
    /// Returns an error if `tag`'s signature or any field value fails to
    /// type-check against `allocator`'s exception type, or if the GC heap
    /// remains out of memory after collection.
    ///
    /// # Panics
    ///
    /// Panics if `store` has async support enabled (use
    /// [`ExnRef::new_async`] instead), or if `tag`/`fields` come from a
    /// different store.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        assert!(!store.async_support());
        // With async support disabled (asserted above) the future below is
        // expected to complete immediately, so readiness may be asserted.
        vm::assert_ready(Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            tag,
            fields,
        ))
    }

    /// Asynchronously allocates a new exception object of `allocator`'s
    /// type in `store`; the async form permits a suspending GC when the
    /// heap is out of memory.
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_async(store, limiter.as_mut(), allocator, tag, fields).await
    }

    /// Shared allocation path for `new` and `new_async`: type-check the
    /// inputs once, then attempt allocation, triggering a GC and retrying
    /// when the heap reports out-of-memory.
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        Self::type_check_tag_and_fields(store, allocator, tag, fields)?;
        store
            .retry_after_gc_async(limiter, (), |store, ()| {
                Self::new_unchecked(store, allocator, tag, fields)
            })
            .await
    }

    /// Validates that `tag` and `fields` are compatible with `allocator`'s
    /// exception type: the tag's signature must equal the type's tag
    /// signature, the field count must match, and every value must match
    /// its declared field type.
    ///
    /// # Panics
    ///
    /// Panics if `tag` or any field value belongs to a different store.
    fn type_check_tag_and_fields(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<(), Error> {
        assert!(
            tag.comes_from_same_store(store),
            "tag comes from the wrong store"
        );
        ensure!(
            tag.wasmtime_ty(store).signature.unwrap_engine_type_index()
                == allocator.ty.tag_type().ty().type_index(),
            "incorrect signature for tag when creating exception object"
        );
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            // Unpack the storage type to its value type before matching.
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    /// Allocates and initializes the exception object, assuming
    /// `type_check_tag_and_fields` has already succeeded.
    ///
    /// On any initialization error the uninitialized allocation is handed
    /// back to the GC store so the collector never observes a
    /// partially-initialized object.
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `ExnRefPre` with the wrong store"
        );

        // NOTE(review): `allocator.layout()` already returns a reference,
        // so `&allocator.layout()` is `&&GcStructLayout` and relies on
        // deref coercion; harmless, but the extra borrow is redundant.
        let exnref = store
            .require_gc_store_mut()?
            .alloc_uninit_exn(allocator.type_index(), &allocator.layout())
            .context("unrecoverable error when allocating new `exnref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // The fresh allocation is not yet reachable from any root, so no GC
        // may run until it is either rooted or deallocated below.
        let mut store = AutoAssertNoGc::new(store);
        // Initialization runs in an immediately-invoked closure so that any
        // early `?` return funnels into the single dealloc-on-error arm.
        match (|| {
            let (instance, index) = tag.to_raw_indices();
            exnref.initialize_tag(&mut store, instance, index)?;
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                exnref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, exnref.into())),
            Err(e) => {
                store.require_gc_store_mut()?.dealloc_uninit_exn(exnref);
                Err(e)
            }
        }
    }

    /// Returns the concrete, registered type index of this exception
    /// object, read from its GC header.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ExnRef));
        Ok(header.ty().expect("exnrefs should have concrete types"))
    }

    /// Roots a GC reference that was already cloned for this store,
    /// producing a `Rooted<ExnRef>`.
    ///
    /// The reference must point at an exception object; this is verified in
    /// debug builds only.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(
            store
                .unwrap_gc_store()
                .header(&gc_ref)
                .kind()
                .matches(VMGcKind::ExnRef)
        );
        Rooted::new(store, gc_ref)
    }

    /// Whether this reference is rooted in (and therefore usable with)
    /// `store`.
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Converts this reference into its raw `u32` representation for
    /// passing to Wasm, exposing it to the Wasm-visible GC roots.
    pub fn to_raw(&self, mut store: impl AsContextMut) -> Result<u32> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._to_raw(&mut store)
    }

    pub(crate) fn _to_raw(&self, store: &mut AutoAssertNoGc<'_>) -> Result<u32> {
        let gc_ref = self.inner.try_clone_gc_ref(store)?;
        let raw = if gc_ref.is_i31() {
            // i31 values are unboxed, so no GC-store bookkeeping is needed.
            // NOTE(review): an exnref is presumably never an i31 — confirm
            // whether this branch is actually reachable here.
            gc_ref.as_raw_non_zero_u32()
        } else {
            store.require_gc_store_mut()?.expose_gc_ref_to_wasm(gc_ref)
        };
        Ok(raw.get())
    }

    /// Returns the [`ExnType`] of this exception object.
    pub fn ty(&self, store: impl AsContext) -> Result<ExnType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<ExnType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(ExnType::from_shared_type_index(store.engine(), index))
    }

    /// Whether this exception object's type matches the heap type `ty`.
    pub fn matches_ty(&self, store: impl AsContext, ty: &HeapType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &HeapType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(HeapType::from(self._ty(store)?).matches(ty))
    }

    /// Like `_matches_ty`, but returns a descriptive error on a store or
    /// type mismatch instead of a boolean.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &HeapType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Returns an iterator over this exception object's field values, in
    /// declaration order.
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        // The no-GC guard is moved into the returned iterator below, so no
        // collection can run while fields are being read.
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ExnRef));

        // Determine the field count up front from the concrete type.
        let index = header.ty().expect("exnrefs should have concrete types");
        let ty = ExnType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            exnref: self,
            store,
            index: 0,
            len,
        });

        /// Lazy iterator over an exception's fields; owns the no-GC guard
        /// for the duration of iteration.
        struct Fields<'a, 'b> {
            exnref: &'a ExnRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                // `_field` is expected to be infallible for an in-range
                // index on a same-store exnref, hence the unwrap.
                Some(self.exnref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    /// Borrows this object's GC header out of `store`.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.require_gc_store()?.header(gc_ref))
    }

    /// Borrows the typed `VMExnRef` view of this object's GC reference.
    fn exnref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMExnRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        // The kind is verified in debug builds only; the cast below is
        // unchecked.
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::ExnRef));
        Ok(gc_ref.as_exnref_unchecked())
    }

    /// Looks up the GC layout for this object's concrete type; exception
    /// types always use a struct layout (the array arm is unreachable).
    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcStructLayout> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("exn types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            GcLayout::Array(_) => unreachable!(),
        }
    }

    /// Returns the declared type of field `field`, or an error when the
    /// index is out of range.
    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: exn only has {len} fields")
            }
        }
    }

    /// Reads the value of the field at `index`.
    ///
    /// # Errors
    ///
    /// Returns an error when `index` is out of range or the reference has
    /// been unrooted.
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let exnref = self.exnref(store)?.unchecked_copy();
        // Resolve the field's declared type and the object's layout before
        // reading the raw field data.
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(exnref.read_field(store, &layout, field_ty.element_type(), index))
    }

    /// Returns the [`Tag`] this exception object was created with.
    pub fn tag(&self, mut store: impl AsContextMut) -> Result<Tag> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        assert!(self.comes_from_same_store(&store));
        let exnref = self.exnref(&store)?.unchecked_copy();
        // Reconstruct the public `Tag` handle from the raw instance/index
        // pair stored in the object.
        let (instance, index) = exnref.tag(&mut store)?;
        Ok(Tag::from_raw_indices(&*store, instance, index))
    }
}
621
// `Rooted<ExnRef>` crosses the Wasm ABI boundary through the `anyref` slot
// of `ValRaw`, delegating rooting to the shared `wasm_ty_store`/
// `wasm_ty_load` helpers.
unsafe impl WasmTy for Rooted<ExnRef> {
    /// Non-nullable `(ref exn)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Exn))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    // NOTE(review): unconditionally `Ok(())` — unlike the
    // `OwnedRooted<ExnRef>` impl below, which calls `ensure_matches_ty`.
    // Confirm this asymmetry is intentional.
    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        _store: &StoreOpaque,
        _nullable: bool,
        _ty: &HeapType,
    ) -> Result<()> {
        Ok(())
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
    }
}
653
654unsafe impl WasmTy for Option<Rooted<ExnRef>> {
655    #[inline]
656    fn valtype() -> ValType {
657        ValType::EXNREF
658    }
659
660    #[inline]
661    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
662        self.map_or(true, |x| x.comes_from_same_store(store))
663    }
664
665    #[inline]
666    fn dynamic_concrete_type_check(
667        &self,
668        store: &StoreOpaque,
669        nullable: bool,
670        ty: &HeapType,
671    ) -> Result<()> {
672        match self {
673            Some(a) => a.ensure_matches_ty(store, ty),
674            None => {
675                ensure!(
676                    nullable,
677                    "expected a non-null reference, but found a null reference"
678                );
679                Ok(())
680            }
681        }
682    }
683
684    #[inline]
685    fn is_vmgcref_and_points_to_object(&self) -> bool {
686        self.is_some()
687    }
688
689    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
690        <Rooted<ExnRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
691    }
692
693    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
694        <Rooted<ExnRef>>::wasm_ty_option_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
695    }
696}
697
// `OwnedRooted<ExnRef>` uses the same `anyref`-slot ABI as the
// `Rooted<ExnRef>` impl above, but performs a dynamic type check.
unsafe impl WasmTy for OwnedRooted<ExnRef> {
    /// Non-nullable `(ref exn)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Exn))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    // Unlike the `Rooted<ExnRef>` impl, this one verifies the concrete
    // type against `ty`; nullability is irrelevant for a non-null value.
    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        self.ensure_matches_ty(store, ty)
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
    }
}
727
728unsafe impl WasmTy for Option<OwnedRooted<ExnRef>> {
729    #[inline]
730    fn valtype() -> ValType {
731        ValType::EXNREF
732    }
733
734    #[inline]
735    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
736        self.as_ref()
737            .map_or(true, |x| x.comes_from_same_store(store))
738    }
739
740    #[inline]
741    fn dynamic_concrete_type_check(
742        &self,
743        store: &StoreOpaque,
744        nullable: bool,
745        ty: &HeapType,
746    ) -> Result<()> {
747        match self {
748            Some(a) => a.ensure_matches_ty(store, ty),
749            None => {
750                ensure!(
751                    nullable,
752                    "expected a non-null reference, but found a null reference"
753                );
754                Ok(())
755            }
756        }
757    }
758
759    #[inline]
760    fn is_vmgcref_and_points_to_object(&self) -> bool {
761        self.is_some()
762    }
763
764    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
765        <OwnedRooted<ExnRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
766    }
767
768    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
769        <OwnedRooted<ExnRef>>::wasm_ty_option_load(
770            store,
771            ptr.get_anyref(),
772            ExnRef::from_cloned_gc_ref,
773        )
774    }
775}