use crate::runtime::vm::VMGcRef;
use crate::store::StoreId;
use crate::vm::{VMExnRef, VMGcHeader};
use crate::{
    AsContext, AsContextMut, GcRefImpl, GcRootIndex, HeapType, ManuallyRooted, RefType, Result,
    Rooted, Val, ValRaw, ValType, WasmTy,
    store::{AutoAssertNoGc, StoreOpaque},
};
use crate::{ExnType, FieldType, GcHeapOutOfMemory, StoreContextMut, Tag, prelude::*};
use core::mem;
use core::mem::MaybeUninit;
use wasmtime_environ::{GcExceptionLayout, GcLayout, VMGcKind, VMSharedTypeIndex};

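/// An allocator for a particular Wasm GC exception type.
///
/// Every `ExnRefPre` is associated with a particular
/// [`Store`][crate::Store] and a particular [`ExnType`], and it can be
/// reused across many allocations of that type.
///
/// A minimal usage sketch (marked `ignore` since it assumes a `store`, a
/// matching `tag`, and an `exn_ty: ExnType` whose construction is out of
/// scope here):
///
/// ```ignore
/// let pre = ExnRefPre::new(&mut store, exn_ty);
/// let exn = ExnRef::new(&mut store, &pre, &tag, &[Val::I32(42)])?;
/// ```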
pub struct ExnRefPre {
    store_id: StoreId,
    ty: ExnType,
}

impl ExnRefPre {
    /// Creates a new `ExnRefPre` associated with the given store and type.
    pub fn new(mut store: impl AsContextMut, ty: ExnType) -> Self {
        Self::_new(store.as_context_mut().0, ty)
    }

    pub(crate) fn _new(store: &mut StoreOpaque, ty: ExnType) -> Self {
        store.insert_gc_host_alloc_type(ty.registered_type().clone());
        let store_id = store.id();

        ExnRefPre { store_id, ty }
    }

    pub(crate) fn layout(&self) -> &GcExceptionLayout {
        self.ty
            .registered_type()
            .layout()
            .expect("exn types have a layout")
            .unwrap_exception()
    }

    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
        self.ty.registered_type().index()
    }
}

/// A reference to a GC-managed exception object.
#[derive(Debug)]
#[repr(transparent)]
pub struct ExnRef {
    pub(super) inner: GcRootIndex,
}

unsafe impl GcRefImpl for ExnRef {
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // Safety: `ExnRef` is a `repr(transparent)` newtype over
        // `GcRootIndex`.
        let me: &Self = unsafe { mem::transmute(index) };

        // Assert that we really are just a newtype of a `GcRootIndex`.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}

impl ExnRef {
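    /// Creates a new strongly-owned [`ExnRef`] from the raw `u32` value
    /// provided, rooting it in this store. A `raw` value of zero represents
    /// null and yields `None`.
    ///
    /// # Safety
    ///
    /// `raw` must be either zero or a valid `exnref` value for this store,
    /// such as one previously produced by [`ExnRef::to_raw`], that is still
    /// rooted in the store's GC heap.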
    pub unsafe fn from_raw(mut store: impl AsContextMut, raw: u32) -> Option<Rooted<Self>> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        Self::_from_raw(&mut store, raw)
    }

    pub(crate) fn _from_raw(store: &mut AutoAssertNoGc, raw: u32) -> Option<Rooted<Self>> {
        let gc_ref = VMGcRef::from_raw_u32(raw)?;
        let gc_ref = store.unwrap_gc_store_mut().clone_gc_ref(&gc_ref);
        Some(Self::from_cloned_gc_ref(store, gc_ref))
    }

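    /// Synchronously allocates a new exception object with the given tag and
    /// field values, using the type and store that `allocator` was created
    /// with.
    ///
    /// Returns an error if the tag's signature or the field values do not
    /// match `allocator`'s exception type, or if the GC heap is out of
    /// memory even after collection.
    ///
    /// # Panics
    ///
    /// Panics if the store is configured for async support (use
    /// [`ExnRef::new_async`] instead) or if `tag`, any of the `fields`, or
    /// `allocator` belong to a different store.
    ///
    /// A minimal sketch (marked `ignore` since it assumes the `store`,
    /// `tag`, and `pre: ExnRefPre` from the example on [`ExnRefPre`]):
    ///
    /// ```ignore
    /// let exn = ExnRef::new(&mut store, &pre, &tag, &[Val::I32(42)])?;
    /// assert!(exn.matches_ty(&store, &HeapType::Exn)?);
    /// ```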
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        Self::_new(store.as_context_mut().0, allocator, tag, fields)
    }

    pub(crate) fn _new(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        assert!(
            !store.async_support(),
            "use `ExnRef::new_async` with asynchronous stores"
        );
        Self::type_check_tag_and_fields(store, allocator, tag, fields)?;
        store.retry_after_gc((), |store, ()| {
            Self::new_unchecked(store, allocator, tag, fields)
        })
    }

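    /// Like [`ExnRef::new`], but for use with stores configured for async
    /// support; allocation may yield to the async executor if a GC is
    /// required to free up space.
    ///
    /// # Panics
    ///
    /// Panics if the store is not configured for async support, or on the
    /// same wrong-store conditions as [`ExnRef::new`].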
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        Self::_new_async(store.as_context_mut().0, allocator, tag, fields).await
    }

    #[cfg(feature = "async")]
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        assert!(
            store.async_support(),
            "use `ExnRef::new` with synchronous stores"
        );
        Self::type_check_tag_and_fields(store, allocator, tag, fields)?;
        store
            .retry_after_gc_async((), |store, ()| {
                Self::new_unchecked(store, allocator, tag, fields)
            })
            .await
    }

    /// Type-checks `tag` and `fields` against `allocator`'s exception type.
    fn type_check_tag_and_fields(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<(), Error> {
        assert!(
            tag.comes_from_same_store(store),
            "tag comes from the wrong store"
        );
        ensure!(
            tag.wasmtime_ty(store).signature.unwrap_engine_type_index()
                == allocator.ty.tag_type().ty().type_index(),
            "incorrect signature for tag when creating exception object"
        );
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    /// Allocates and initializes a new exception object without re-checking
    /// the tag and field types.
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use an `ExnRefPre` with the wrong store"
        );

        // Allocate the uninitialized exception object.
        let exnref = store
            .gc_store_mut()?
            .alloc_uninit_exn(allocator.type_index(), allocator.layout())
            .context("unrecoverable error when allocating new `exnref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // From this point on, if initialization fails, the uninitialized
        // object must be deallocated before the error is propagated, so
        // perform initialization inside a closure and inspect its result.
        let mut store = AutoAssertNoGc::new(store);
        match (|| {
            let (instance, index) = tag.to_raw_indices();
            exnref.initialize_tag(&mut store, allocator.layout(), instance, index)?;
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                exnref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, exnref.into())),
            Err(e) => {
                store.gc_store_mut()?.dealloc_uninit_exn(exnref);
                Err(e)
            }
        }
    }

    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ExnRef));
        Ok(header.ty().expect("exnrefs should have concrete types"))
    }

    /// Creates a new `Rooted<ExnRef>` from the given GC reference.
    ///
    /// `gc_ref` should point to a valid exception object in `store`'s GC
    /// heap; this is checked via a debug assertion.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(
            store
                .unwrap_gc_store()
                .header(&gc_ref)
                .kind()
                .matches(VMGcKind::ExnRef)
        );
        Rooted::new(store, gc_ref)
    }

    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

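    /// Converts this [`ExnRef`] into a raw `u32` suitable for passing to
    /// Wasm, e.g. via a [`ValRaw`]'s `anyref` field.
    ///
    /// # Safety
    ///
    /// The returned raw value is only valid while this reference is rooted
    /// in the store; callers must not use it after the underlying object
    /// could have been collected.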
    pub unsafe fn to_raw(&self, mut store: impl AsContextMut) -> Result<u32> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._to_raw(&mut store)
    }

    pub(crate) unsafe fn _to_raw(&self, store: &mut AutoAssertNoGc<'_>) -> Result<u32> {
        let gc_ref = self.inner.try_clone_gc_ref(store)?;
        let raw = if gc_ref.is_i31() {
            gc_ref.as_raw_non_zero_u32()
        } else {
            store.gc_store_mut()?.expose_gc_ref_to_wasm(gc_ref)
        };
        Ok(raw.get())
    }

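    /// Returns the [`ExnType`] of this exception object.
    ///
    /// Returns an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference belongs to a different store.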
    pub fn ty(&self, store: impl AsContext) -> Result<ExnType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<ExnType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(ExnType::from_shared_type_index(store.engine(), index))
    }

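    /// Returns whether this exception object's type matches (i.e. is a
    /// subtype of) the given [`HeapType`].
    ///
    /// # Panics
    ///
    /// Panics if this reference belongs to a different store.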
    pub fn matches_ty(&self, store: impl AsContext, ty: &HeapType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &HeapType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(HeapType::from(self._ty(store)?).matches(ty))
    }

    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &HeapType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

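    /// Returns an iterator over the field values of this exception object,
    /// in field-definition order.
    ///
    /// A sketch of typical usage (marked `ignore`; assumes an `exn` and
    /// `store` as in the examples above):
    ///
    /// ```ignore
    /// for field in exn.fields(&mut store)? {
    ///     println!("field = {field:?}");
    /// }
    /// ```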
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ExnRef));

        let index = header.ty().expect("exnrefs should have concrete types");
        let ty = ExnType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            exnref: self,
            store,
            index: 0,
            len,
        });

        struct Fields<'a, 'b> {
            exnref: &'a ExnRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                Some(self.exnref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.gc_store()?.header(gc_ref))
    }

    fn exnref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMExnRef> {
        assert!(self.comes_from_same_store(store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::ExnRef));
        Ok(gc_ref.as_exnref_unchecked())
    }

    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcExceptionLayout> {
        assert!(self.comes_from_same_store(store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("exn types should have GC layouts");
        match layout {
            GcLayout::Struct(_) => unreachable!(),
            GcLayout::Array(_) => unreachable!(),
            GcLayout::Exception(e) => Ok(e),
        }
    }

    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: exn only has {len} fields")
            }
        }
    }

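    /// Returns the value of this exception object's `index`th field.
    ///
    /// Returns an error if `index` is out of bounds or if this reference has
    /// been unrooted.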
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let exnref = self.exnref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(exnref.read_field(store, &layout, field_ty.element_type(), index))
    }

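    /// Returns the [`Tag`] that this exception object was created with.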
    pub fn tag(&self, mut store: impl AsContextMut) -> Result<Tag> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        assert!(self.comes_from_same_store(&store));
        let exnref = self.exnref(&store)?.unchecked_copy();
        let layout = self.layout(&store)?;
        let (instance, index) = exnref.tag(&mut store, &layout)?;
        Ok(Tag::from_raw_indices(&*store, instance, index))
    }
}

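// The `WasmTy` implementations below let exception references appear
// directly in typed host-function and typed Wasm-function signatures. A
// minimal sketch (assumes a `Store` is in scope; `Func::wrap` is
// wasmtime's typed host-function constructor):
//
//     let f = Func::wrap(&mut store, |exn: Option<Rooted<ExnRef>>| {
//         // `exn` is `None` when Wasm passes a null `exnref`.
//     });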
unsafe impl WasmTy for Rooted<ExnRef> {
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Exn))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        _store: &StoreOpaque,
        _nullable: bool,
        _ty: &HeapType,
    ) -> Result<()> {
        Ok(())
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
    }
}

unsafe impl WasmTy for Option<Rooted<ExnRef>> {
    #[inline]
    fn valtype() -> ValType {
        ValType::EXNREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            Some(a) => a.ensure_matches_ty(store, ty),
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <Rooted<ExnRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <Rooted<ExnRef>>::wasm_ty_option_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
    }
}

unsafe impl WasmTy for ManuallyRooted<ExnRef> {
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Exn))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        self.ensure_matches_ty(store, ty)
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
    }
}

unsafe impl WasmTy for Option<ManuallyRooted<ExnRef>> {
    #[inline]
    fn valtype() -> ValType {
        ValType::EXNREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.as_ref()
            .map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            Some(a) => a.ensure_matches_ty(store, ty),
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <ManuallyRooted<ExnRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <ManuallyRooted<ExnRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            ExnRef::from_cloned_gc_ref,
        )
    }
}