1use crate::runtime::vm::VMGcRef;
4use crate::store::{Asyncness, StoreId, StoreResourceLimiter};
5#[cfg(feature = "async")]
6use crate::vm::VMStore;
7use crate::vm::{self, VMExnRef, VMGcHeader};
8use crate::{
9 AsContext, AsContextMut, GcRefImpl, GcRootIndex, HeapType, OwnedRooted, RefType, Result,
10 Rooted, Val, ValRaw, ValType, WasmTy,
11 store::{AutoAssertNoGc, StoreOpaque},
12};
13use crate::{ExnType, FieldType, GcHeapOutOfMemory, StoreContextMut, Tag, prelude::*};
14use alloc::sync::Arc;
15use core::mem;
16use core::mem::MaybeUninit;
17use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
18
/// An allocator for a particular Wasm GC exception-object type.
///
/// An `ExnRefPre` is created for a specific store and a specific [`ExnType`];
/// registering the type with the store up front (see `ExnRefPre::_new`) lets
/// repeated allocations of the same exception type skip that work.
pub struct ExnRefPre {
    // The ID of the store this pre-allocator was created for. Checked at
    // allocation time (`ExnRef::new_unchecked`) so an `ExnRefPre` cannot be
    // used with a different store.
    store_id: StoreId,
    // The concrete exception type that this pre-allocator creates instances of.
    ty: ExnType,
}
72
73impl ExnRefPre {
74 pub fn new(mut store: impl AsContextMut, ty: ExnType) -> Self {
77 Self::_new(store.as_context_mut().0, ty)
78 }
79
80 pub(crate) fn _new(store: &mut StoreOpaque, ty: ExnType) -> Self {
81 store.insert_gc_host_alloc_type(ty.registered_type().clone());
82 let store_id = store.id();
83
84 ExnRefPre { store_id, ty }
85 }
86
87 pub(crate) fn layout(&self) -> &GcStructLayout {
88 self.ty
89 .registered_type()
90 .layout()
91 .expect("exn types have a layout")
92 .unwrap_struct()
93 }
94
95 pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
96 self.ty.registered_type().index()
97 }
98}
99
/// A reference to a GC-managed `exn` (exception) object.
#[derive(Debug)]
#[repr(transparent)]
pub struct ExnRef {
    // The index of the GC root that keeps the underlying exception object
    // alive.
    //
    // NB: the `repr(transparent)` wrapping of this single field is relied
    // upon by `GcRefImpl::transmute_ref` below.
    pub(super) inner: GcRootIndex,
}
115
unsafe impl GcRefImpl for ExnRef {
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // SAFETY: `ExnRef` is a `repr(transparent)` newtype over
        // `GcRootIndex`, so `&GcRootIndex` and `&ExnRef` have identical
        // layout.
        let me: &Self = unsafe { mem::transmute(index) };

        // Assert we really are just a newtype of a `GcRootIndex`. The
        // pattern is irrefutable at runtime, but it stops compiling if a
        // field is ever added to `ExnRef`, guarding the transmute above.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}
132
impl ExnRef {
    /// Creates a new, rooted `ExnRef` from its raw `u32` representation as
    /// passed to/from Wasm, returning `None` for the null reference
    /// (raw value `0`).
    pub fn from_raw(mut store: impl AsContextMut, raw: u32) -> Option<Rooted<Self>> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        Self::_from_raw(&mut store, raw)
    }

    pub(crate) fn _from_raw(store: &mut AutoAssertNoGc, raw: u32) -> Option<Rooted<Self>> {
        // `from_raw_u32` returns `None` for the null (zero) reference.
        let gc_ref = VMGcRef::from_raw_u32(raw)?;
        // Clone under the store so the GC ref is properly accounted for
        // before being rooted.
        let gc_ref = store.clone_gc_ref(&gc_ref);
        Some(Self::from_cloned_gc_ref(store, gc_ref))
    }

    /// Synchronously allocates a new exception object with the given tag and
    /// field values.
    ///
    /// Panics/errors mirror `_new_async`; this wrapper validates that the
    /// store is configured for synchronous resource limiting and then drives
    /// the (always-ready, because `Asyncness::No`) future to completion.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        let (mut limiter, store) = store
            .as_context_mut()
            .0
            .validate_sync_resource_limiter_and_store_opaque()?;
        // With `Asyncness::No` the future below never suspends, so
        // `assert_ready` is sound here.
        vm::assert_ready(Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            tag,
            fields,
            Asyncness::No,
        ))
    }

    /// Asynchronously allocates a new exception object with the given tag and
    /// field values, awaiting any async resource-limiter callbacks or GCs
    /// needed to make room.
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            tag,
            fields,
            Asyncness::Yes,
        )
        .await
    }

    /// Shared allocation path for `new`/`new_async`: type-check the inputs,
    /// then allocate, retrying after a GC if the heap is out of memory.
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
        asyncness: Asyncness,
    ) -> Result<Rooted<ExnRef>> {
        Self::type_check_tag_and_fields(store, allocator, tag, fields)?;
        store
            .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
                Self::new_unchecked(store, allocator, tag, fields)
            })
            .await
    }

    /// Validates that `tag` and `fields` belong to `store` and match the
    /// exception type of `allocator`.
    ///
    /// Panics (rather than erroring) on cross-store values, matching the
    /// crate-wide convention that mixing stores is a caller bug.
    fn type_check_tag_and_fields(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<(), Error> {
        assert!(
            tag.comes_from_same_store(store),
            "tag comes from the wrong store"
        );
        ensure!(
            tag.wasmtime_ty(store).signature.unwrap_engine_type_index()
                == allocator.ty.tag_type().ty().type_index(),
            "incorrect signature for tag when creating exception object"
        );
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    /// Allocates and initializes an exception object without re-running the
    /// type checks (callers must have called `type_check_tag_and_fields`).
    ///
    /// Allocation is two-phase: an *uninitialized* exn object is allocated
    /// first, then the tag and fields are written in; on any initialization
    /// failure the uninitialized object is explicitly deallocated so the heap
    /// never sees a partially-initialized exn.
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `ExnRefPre` with the wrong store"
        );

        // Allocate the uninitialized object. The outer `Result` is an
        // unrecoverable error; the inner one is a recoverable heap-OOM that
        // `retry_after_gc_async` may retry after a collection.
        let exnref = store
            .require_gc_store_mut()?
            .alloc_uninit_exn(allocator.type_index(), &allocator.layout())
            .context("unrecoverable error when allocating new `exnref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // No GC may happen between allocation and rooting, otherwise the
        // not-yet-rooted `exnref` could be collected or moved.
        let mut store = AutoAssertNoGc::new(store);
        match (|| {
            let (instance, index) = tag.to_raw_indices();
            exnref.initialize_tag(&mut store, instance, index)?;
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                exnref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, exnref.into())),
            Err(e) => {
                // Roll back the uninitialized allocation so the GC heap
                // never contains a half-initialized exn object.
                store.require_gc_store_mut()?.dealloc_uninit_exn(exnref);
                Err(e)
            }
        }
    }

    /// The engine-shared type index of this exception object's concrete type.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ExnRef));
        Ok(header.ty().expect("exnrefs should have concrete types"))
    }

    /// Roots a GC reference that has already been cloned into this store,
    /// returning it as a `Rooted<ExnRef>`.
    ///
    /// Debug-asserts that the reference really is an exn object.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(
            store
                .unwrap_gc_store()
                .header(&gc_ref)
                .kind()
                .matches(VMGcKind::ExnRef)
        );
        Rooted::new(store, gc_ref)
    }

    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Converts this reference into its raw `u32` representation suitable for
    /// passing to Wasm.
    pub fn to_raw(&self, mut store: impl AsContextMut) -> Result<u32> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._to_raw(&mut store)
    }

    pub(crate) fn _to_raw(&self, store: &mut AutoAssertNoGc<'_>) -> Result<u32> {
        let gc_ref = self.inner.try_clone_gc_ref(store)?;
        let raw = if gc_ref.is_i31() {
            // i31s are unboxed and need no heap bookkeeping.
            gc_ref.as_raw_non_zero_u32()
        } else {
            // Heap references must be exposed to Wasm so the collector knows
            // Wasm may hold them.
            store.require_gc_store_mut()?.expose_gc_ref_to_wasm(gc_ref)
        };
        Ok(raw.get())
    }

    /// The concrete [`ExnType`] of this exception object.
    pub fn ty(&self, store: impl AsContext) -> Result<ExnType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<ExnType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(ExnType::from_shared_type_index(store.engine(), index))
    }

    /// Whether this object's type matches (is a subtype of) `ty`.
    pub fn matches_ty(&self, store: impl AsContext, ty: &HeapType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &HeapType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(HeapType::from(self._ty(store)?).matches(ty))
    }

    /// Like `_matches_ty` but returns a descriptive error on mismatch, and an
    /// error (not a panic) on a cross-store reference.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &HeapType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// An exact-size iterator over this exception object's field values.
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        // The returned iterator owns this `AutoAssertNoGc`, so no GC can
        // happen while it is alive and the field reads below stay valid.
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ExnRef));

        let index = header.ty().expect("exnrefs should have concrete types");
        let ty = ExnType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            exnref: self,
            store,
            index: 0,
            len,
        });

        // Lazily reads one field per `next` call via `ExnRef::_field`.
        struct Fields<'a, 'b> {
            exnref: &'a ExnRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                // `i < self.len` was checked above, so the field read cannot
                // fail on an out-of-bounds index.
                Some(self.exnref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    /// Borrows this object's GC header from the store.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.require_gc_store()?.header(gc_ref))
    }

    /// Borrows the underlying `VMExnRef`, debug-asserting the GC kind first.
    fn exnref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMExnRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::ExnRef));
        Ok(gc_ref.as_exnref_unchecked())
    }

    /// Looks up this object's GC struct layout from the engine's type
    /// registry.
    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<Arc<GcStructLayout>> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("exn types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            // Exn objects are laid out as structs, never arrays.
            GcLayout::Array(_) => unreachable!(),
        }
    }

    /// The type of field `field`, or an error if the index is out of bounds.
    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: exn only has {len} fields")
            }
        }
    }

    /// Reads the value of field `index`.
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let exnref = self.exnref(store)?.unchecked_copy();
        // `field_ty` also bounds-checks `index` before the raw read below.
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(exnref.read_field(store, &layout, field_ty.element_type(), index))
    }

    /// The [`Tag`] this exception object was created with.
    pub fn tag(&self, mut store: impl AsContextMut) -> Result<Tag> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        assert!(self.comes_from_same_store(&store));
        let exnref = self.exnref(&store)?.unchecked_copy();
        let (instance, index) = exnref.tag(&mut store)?;
        Ok(Tag::from_raw_indices(&*store, instance, index))
    }
}
632
unsafe impl WasmTy for Rooted<ExnRef> {
    #[inline]
    fn valtype() -> ValType {
        // Non-nullable `(ref exn)`: a bare `Rooted<ExnRef>` is never null.
        ValType::Ref(RefType::new(false, HeapType::Exn))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        _store: &StoreOpaque,
        _nullable: bool,
        _ty: &HeapType,
    ) -> Result<()> {
        // `exn` has no concrete subtypes to check against here, so the
        // static `valtype` check above is sufficient.
        Ok(())
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        // NOTE(review): exnrefs travel in the `anyref` slot of `ValRaw` —
        // mirrored by `load` below and the other `ExnRef` impls in this file.
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
    }
}
664
665unsafe impl WasmTy for Option<Rooted<ExnRef>> {
666 #[inline]
667 fn valtype() -> ValType {
668 ValType::EXNREF
669 }
670
671 #[inline]
672 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
673 self.map_or(true, |x| x.comes_from_same_store(store))
674 }
675
676 #[inline]
677 fn dynamic_concrete_type_check(
678 &self,
679 store: &StoreOpaque,
680 nullable: bool,
681 ty: &HeapType,
682 ) -> Result<()> {
683 match self {
684 Some(a) => a.ensure_matches_ty(store, ty),
685 None => {
686 ensure!(
687 nullable,
688 "expected a non-null reference, but found a null reference"
689 );
690 Ok(())
691 }
692 }
693 }
694
695 #[inline]
696 fn is_vmgcref_and_points_to_object(&self) -> bool {
697 self.is_some()
698 }
699
700 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
701 <Rooted<ExnRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
702 }
703
704 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
705 <Rooted<ExnRef>>::wasm_ty_option_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
706 }
707}
708
unsafe impl WasmTy for OwnedRooted<ExnRef> {
    #[inline]
    fn valtype() -> ValType {
        // Non-nullable `(ref exn)`: an `OwnedRooted<ExnRef>` is never null.
        ValType::Ref(RefType::new(false, HeapType::Exn))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        self.ensure_matches_ty(store, ty)
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        // NOTE(review): exnrefs travel in the `anyref` slot of `ValRaw` —
        // mirrored by `load` below and the other `ExnRef` impls in this file.
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
    }
}
738
739unsafe impl WasmTy for Option<OwnedRooted<ExnRef>> {
740 #[inline]
741 fn valtype() -> ValType {
742 ValType::EXNREF
743 }
744
745 #[inline]
746 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
747 self.as_ref()
748 .map_or(true, |x| x.comes_from_same_store(store))
749 }
750
751 #[inline]
752 fn dynamic_concrete_type_check(
753 &self,
754 store: &StoreOpaque,
755 nullable: bool,
756 ty: &HeapType,
757 ) -> Result<()> {
758 match self {
759 Some(a) => a.ensure_matches_ty(store, ty),
760 None => {
761 ensure!(
762 nullable,
763 "expected a non-null reference, but found a null reference"
764 );
765 Ok(())
766 }
767 }
768 }
769
770 #[inline]
771 fn is_vmgcref_and_points_to_object(&self) -> bool {
772 self.is_some()
773 }
774
775 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
776 <OwnedRooted<ExnRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
777 }
778
779 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
780 <OwnedRooted<ExnRef>>::wasm_ty_option_load(
781 store,
782 ptr.get_anyref(),
783 ExnRef::from_cloned_gc_ref,
784 )
785 }
786}