1use crate::runtime::vm::VMGcRef;
4use crate::store::StoreId;
5use crate::vm::{VMExnRef, VMGcHeader};
6use crate::{
7 AsContext, AsContextMut, GcRefImpl, GcRootIndex, HeapType, ManuallyRooted, RefType, Result,
8 Rooted, Val, ValRaw, ValType, WasmTy,
9 store::{AutoAssertNoGc, StoreOpaque},
10};
11use crate::{ExnType, FieldType, GcHeapOutOfMemory, StoreContextMut, Tag, prelude::*};
12use core::mem;
13use core::mem::MaybeUninit;
14use wasmtime_environ::{GcExceptionLayout, GcLayout, VMGcKind, VMSharedTypeIndex};
15
/// An allocator for Wasm GC exception objects of one particular exception
/// type, tied to one particular store.
///
/// Created via [`ExnRefPre::new`] and consumed by [`ExnRef::new`] /
/// [`ExnRef::new_async`], which assert at allocation time that the store
/// matches `store_id`.
pub struct ExnRefPre {
    /// The store this pre-allocator is valid for; checked in
    /// `ExnRef::new_unchecked`.
    store_id: StoreId,
    /// The exception type that allocated objects will have.
    ty: ExnType,
}
69
70impl ExnRefPre {
71 pub fn new(mut store: impl AsContextMut, ty: ExnType) -> Self {
74 Self::_new(store.as_context_mut().0, ty)
75 }
76
77 pub(crate) fn _new(store: &mut StoreOpaque, ty: ExnType) -> Self {
78 store.insert_gc_host_alloc_type(ty.registered_type().clone());
79 let store_id = store.id();
80
81 ExnRefPre { store_id, ty }
82 }
83
84 pub(crate) fn layout(&self) -> &GcExceptionLayout {
85 self.ty
86 .registered_type()
87 .layout()
88 .expect("exn types have a layout")
89 .unwrap_exception()
90 }
91
92 pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
93 self.ty.registered_type().index()
94 }
95}
96
/// A reference to a GC-managed Wasm exception object, used through
/// `Rooted<ExnRef>` or `ManuallyRooted<ExnRef>`.
///
/// `#[repr(transparent)]` over a single `GcRootIndex`, which is what allows
/// `GcRefImpl::transmute_ref` to reinterpret `&GcRootIndex` as `&ExnRef`.
#[derive(Debug)]
#[repr(transparent)]
pub struct ExnRef {
    pub(super) inner: GcRootIndex,
}
112
unsafe impl GcRefImpl for ExnRef {
    /// Reinterprets a `&GcRootIndex` as a `&ExnRef`.
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // SAFETY: `ExnRef` is `#[repr(transparent)]` over its sole
        // `GcRootIndex` field, so the two reference types have identical
        // layout.
        let me: &Self = unsafe { mem::transmute(index) };

        // Assert we really are just a newtype over `GcRootIndex`: the
        // pattern is irrefutable, so this exists purely as a structural
        // sanity check that fails to compile if the shape changes.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}
129
impl ExnRef {
    /// Converts a raw `u32` GC reference, as passed to or from Wasm via
    /// `ValRaw`, back into a rooted `ExnRef`.
    ///
    /// Returns `None` when `raw` is the null (zero) encoding.
    ///
    /// NOTE(review): non-zero `raw` values are presumably required to be
    /// valid exnrefs belonging to this store — confirm against callers.
    pub fn from_raw(mut store: impl AsContextMut, raw: u32) -> Option<Rooted<Self>> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        Self::_from_raw(&mut store, raw)
    }

    /// Implementation of `from_raw`: clone the GC reference out of the raw
    /// bits and root the clone in the store.
    pub(crate) fn _from_raw(store: &mut AutoAssertNoGc, raw: u32) -> Option<Rooted<Self>> {
        // `from_raw_u32` yields `None` for the zero (null) encoding.
        let gc_ref = VMGcRef::from_raw_u32(raw)?;
        let gc_ref = store.unwrap_gc_store_mut().clone_gc_ref(&gc_ref);
        Some(Self::from_cloned_gc_ref(store, gc_ref))
    }

    /// Allocates a new exception object of the type described by
    /// `allocator`, with the given `tag` and field values.
    ///
    /// # Errors
    ///
    /// Fails if the tag's signature or the field values do not match the
    /// allocator's exception type, or on GC-heap exhaustion.
    ///
    /// # Panics
    ///
    /// Panics when `store` has async support enabled; use
    /// [`ExnRef::new_async`] with asynchronous stores. Also panics if `tag`
    /// or any field value belongs to a different store.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        Self::_new(store.as_context_mut().0, allocator, tag, fields)
    }

    /// Synchronous implementation of [`ExnRef::new`].
    pub(crate) fn _new(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        assert!(
            !store.async_support(),
            "use `ExnRef::new_async` with asynchronous stores"
        );
        // Validate everything up front so `new_unchecked` need not re-check
        // on each allocation attempt.
        Self::type_check_tag_and_fields(store, allocator, tag, fields)?;
        // Allocation can fail with `GcHeapOutOfMemory` (see
        // `new_unchecked`); `retry_after_gc` gives it another attempt after
        // a collection in that case.
        store.retry_after_gc((), |store, ()| {
            Self::new_unchecked(store, allocator, tag, fields)
        })
    }

    /// Asynchronous variant of [`ExnRef::new`], for stores with async
    /// support enabled.
    ///
    /// # Panics
    ///
    /// Panics when `store` does not have async support enabled.
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        Self::_new_async(store.as_context_mut().0, allocator, tag, fields).await
    }

    /// Asynchronous implementation of [`ExnRef::new_async`]; mirrors `_new`
    /// but retries allocation via the async GC path.
    #[cfg(feature = "async")]
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        assert!(
            store.async_support(),
            "use `ExnRef::new` with synchronous stores"
        );
        Self::type_check_tag_and_fields(store, allocator, tag, fields)?;
        store
            .retry_after_gc_async((), |store, ()| {
                Self::new_unchecked(store, allocator, tag, fields)
            })
            .await
    }

    /// Checks that `tag` and `fields` are valid for allocating an exception
    /// of `allocator`'s type: the tag's signature must match the type's
    /// tag signature, the field count must match, and every field value
    /// must match its declared field type.
    ///
    /// # Panics
    ///
    /// Panics if `tag` or any field value comes from a different store.
    fn type_check_tag_and_fields(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<(), Error> {
        assert!(
            tag.comes_from_same_store(store),
            "tag comes from the wrong store"
        );
        ensure!(
            tag.wasmtime_ty(store).signature.unwrap_engine_type_index()
                == allocator.ty.tag_type().ty().type_index(),
            "incorrect signature for tag when creating exception object"
        );
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            // Unpack the field's storage type into the value type that the
            // provided `Val` is checked against.
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    /// Allocates and initializes a new exception object, assuming `tag` and
    /// `fields` were already validated by `type_check_tag_and_fields`.
    ///
    /// # Panics
    ///
    /// Panics if `allocator` was created for a different store.
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `ExnRefPre` with the wrong store"
        );

        // Allocate an uninitialized exception object. The outer `Result` is
        // an unrecoverable allocator error; the inner `Err(n)` is turned
        // into a `GcHeapOutOfMemory` so callers can GC and retry.
        let exnref = store
            .gc_store_mut()?
            .alloc_uninit_exn(allocator.type_index(), &allocator.layout())
            .context("unrecoverable error when allocating new `exnref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // Hold off GC until the new object is initialized and rooted (the
        // `AutoAssertNoGc` guard enforces this), presumably so the collector
        // never observes the still-uninitialized allocation.
        let mut store = AutoAssertNoGc::new(store);
        // Initialization runs inside a closure so that any error can be
        // caught below and the uninitialized object deallocated.
        match (|| {
            let (instance, index) = tag.to_raw_indices();
            exnref.initialize_tag(&mut store, allocator.layout(), instance, index)?;
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                exnref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, exnref.into())),
            Err(e) => {
                // Initialization failed: return the uninitialized object to
                // the GC heap rather than leaking it.
                store.gc_store_mut()?.dealloc_uninit_exn(exnref);
                Err(e)
            }
        }
    }

    /// The concrete, engine-shared type index of this exception object,
    /// read from its GC header.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ExnRef));
        Ok(header.ty().expect("exnrefs should have concrete types"))
    }

    /// Creates a rooted `ExnRef` from a GC reference that has already been
    /// cloned for this store's GC heap.
    ///
    /// The reference must point at an exception object; this is checked in
    /// debug builds via the header's kind bits.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(
            store
                .unwrap_gc_store()
                .header(&gc_ref)
                .kind()
                .matches(VMGcKind::ExnRef)
        );
        Rooted::new(store, gc_ref)
    }

    /// Whether this reference is associated with the given store.
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Converts this `ExnRef` into the raw `u32` encoding used by `ValRaw`.
    ///
    /// # Errors
    ///
    /// Fails when the underlying GC reference cannot be cloned (see
    /// `try_clone_gc_ref`), e.g. the root is no longer live.
    pub fn to_raw(&self, mut store: impl AsContextMut) -> Result<u32> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._to_raw(&mut store)
    }

    /// Implementation of `to_raw` operating on an `AutoAssertNoGc` guard.
    pub(crate) fn _to_raw(&self, store: &mut AutoAssertNoGc<'_>) -> Result<u32> {
        let gc_ref = self.inner.try_clone_gc_ref(store)?;
        let raw = if gc_ref.is_i31() {
            // i31 references are unboxed: their raw bits can be handed out
            // directly without going through the GC store.
            gc_ref.as_raw_non_zero_u32()
        } else {
            store.gc_store_mut()?.expose_gc_ref_to_wasm(gc_ref)
        };
        Ok(raw.get())
    }

    /// Returns the concrete `ExnType` of this exception object.
    pub fn ty(&self, store: impl AsContext) -> Result<ExnType> {
        self._ty(store.as_context().0)
    }

    /// Implementation of `ty`; panics if `self` is from a different store.
    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<ExnType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(ExnType::from_shared_type_index(store.engine(), index))
    }

    /// Whether this exception object's type matches `ty`, per
    /// `HeapType::matches`.
    pub fn matches_ty(&self, store: impl AsContext, ty: &HeapType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    /// Implementation of `matches_ty`; panics if `self` is from a different
    /// store.
    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &HeapType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(HeapType::from(self._ty(store)?).matches(ty))
    }

    /// Like `matches_ty`, but returns a descriptive error instead of
    /// `false` on mismatch, and an error (rather than panicking) when the
    /// reference belongs to a different store.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &HeapType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Returns an iterator over the values of this exception object's
    /// fields, in index order.
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    /// Implementation of `fields`; panics if `self` is from a different
    /// store.
    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        let store = AutoAssertNoGc::new(store);

        // Determine the field count from the object's concrete type.
        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ExnRef));

        let index = header.ty().expect("exnrefs should have concrete types");
        let ty = ExnType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            exnref: self,
            store,
            index: 0,
            len,
        });

        /// Lazy iterator that reads one field per `next` call. It owns the
        /// `AutoAssertNoGc` guard for as long as it is alive.
        struct Fields<'a, 'b> {
            exnref: &'a ExnRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                // `i` is in bounds and the store was validated when the
                // iterator was created, so `_field` is not expected to
                // fail here — hence the `unwrap`.
                Some(self.exnref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    /// Borrows the GC header of this exception object.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.gc_store()?.header(gc_ref))
    }

    /// Borrows the underlying `VMExnRef`, verifying the header's kind bits
    /// in debug builds before the unchecked cast.
    fn exnref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMExnRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::ExnRef));
        Ok(gc_ref.as_exnref_unchecked())
    }

    /// Looks up the GC layout for this object's concrete exception type.
    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcExceptionLayout> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("exn types should have GC layouts");
        match layout {
            // An exnref's type index always resolves to an exception layout.
            GcLayout::Struct(_) => unreachable!(),
            GcLayout::Array(_) => unreachable!(),
            GcLayout::Exception(e) => Ok(e),
        }
    }

    /// Returns the type of field `field`, or an error when the index is out
    /// of bounds.
    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: exn only has {len} fields")
            }
        }
    }

    /// Reads the value of the field at `index`.
    ///
    /// # Errors
    ///
    /// Fails when `index` is out of bounds for this exception's type.
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    /// Implementation of `field`; panics if `self` is from a different
    /// store.
    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let exnref = self.exnref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(exnref.read_field(store, &layout, field_ty.element_type(), index))
    }

    /// Returns the `Tag` this exception object was created with.
    ///
    /// # Panics
    ///
    /// Panics if `self` is from a different store.
    pub fn tag(&self, mut store: impl AsContextMut) -> Result<Tag> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        assert!(self.comes_from_same_store(&store));
        let exnref = self.exnref(&store)?.unchecked_copy();
        let layout = self.layout(&store)?;
        // Recover the host-side `Tag` from the raw (instance, index) pair
        // stored in the object.
        let (instance, index) = exnref.tag(&mut store, &layout)?;
        Ok(Tag::from_raw_indices(&*store, instance, index))
    }
}
634
unsafe impl WasmTy for Rooted<ExnRef> {
    // The static Wasm type of a rooted, non-null exnref: `(ref exn)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Exn))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        _store: &StoreOpaque,
        _nullable: bool,
        _ty: &HeapType,
    ) -> Result<()> {
        // No runtime check is performed here. NOTE(review): presumably no
        // dynamic check is needed for a plain `(ref exn)` — confirm against
        // the `ManuallyRooted<ExnRef>` impl below, which does delegate to
        // `ensure_matches_ty`.
        Ok(())
    }

    // Exnrefs travel to/from Wasm through the `anyref` slot of `ValRaw`.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
    }
}
666
667unsafe impl WasmTy for Option<Rooted<ExnRef>> {
668 #[inline]
669 fn valtype() -> ValType {
670 ValType::EXNREF
671 }
672
673 #[inline]
674 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
675 self.map_or(true, |x| x.comes_from_same_store(store))
676 }
677
678 #[inline]
679 fn dynamic_concrete_type_check(
680 &self,
681 store: &StoreOpaque,
682 nullable: bool,
683 ty: &HeapType,
684 ) -> Result<()> {
685 match self {
686 Some(a) => a.ensure_matches_ty(store, ty),
687 None => {
688 ensure!(
689 nullable,
690 "expected a non-null reference, but found a null reference"
691 );
692 Ok(())
693 }
694 }
695 }
696
697 #[inline]
698 fn is_vmgcref_and_points_to_object(&self) -> bool {
699 self.is_some()
700 }
701
702 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
703 <Rooted<ExnRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
704 }
705
706 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
707 <Rooted<ExnRef>>::wasm_ty_option_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
708 }
709}
710
unsafe impl WasmTy for ManuallyRooted<ExnRef> {
    // Same static Wasm type as `Rooted<ExnRef>`: a non-null `(ref exn)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Exn))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    // Unlike the `Rooted<ExnRef>` impl in this file, this one delegates the
    // dynamic check to `ensure_matches_ty`.
    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        self.ensure_matches_ty(store, ty)
    }

    // Exnrefs travel to/from Wasm through the `anyref` slot of `ValRaw`.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
    }
}
740
741unsafe impl WasmTy for Option<ManuallyRooted<ExnRef>> {
742 #[inline]
743 fn valtype() -> ValType {
744 ValType::EXNREF
745 }
746
747 #[inline]
748 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
749 self.as_ref()
750 .map_or(true, |x| x.comes_from_same_store(store))
751 }
752
753 #[inline]
754 fn dynamic_concrete_type_check(
755 &self,
756 store: &StoreOpaque,
757 nullable: bool,
758 ty: &HeapType,
759 ) -> Result<()> {
760 match self {
761 Some(a) => a.ensure_matches_ty(store, ty),
762 None => {
763 ensure!(
764 nullable,
765 "expected a non-null reference, but found a null reference"
766 );
767 Ok(())
768 }
769 }
770 }
771
772 #[inline]
773 fn is_vmgcref_and_points_to_object(&self) -> bool {
774 self.is_some()
775 }
776
777 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
778 <ManuallyRooted<ExnRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
779 }
780
781 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
782 <ManuallyRooted<ExnRef>>::wasm_ty_option_load(
783 store,
784 ptr.get_anyref(),
785 ExnRef::from_cloned_gc_ref,
786 )
787 }
788}