1use crate::runtime::vm::VMGcRef;
4use crate::store::{Asyncness, StoreId, StoreResourceLimiter};
5#[cfg(feature = "async")]
6use crate::vm::VMStore;
7use crate::vm::{self, VMExnRef, VMGcHeader};
8use crate::{
9 AsContext, AsContextMut, GcRefImpl, GcRootIndex, HeapType, OwnedRooted, RefType, Result,
10 Rooted, Val, ValRaw, ValType, WasmTy,
11 store::{AutoAssertNoGc, StoreOpaque},
12};
13use crate::{ExnType, FieldType, GcHeapOutOfMemory, StoreContextMut, Tag, prelude::*};
14use core::mem;
15use core::mem::MaybeUninit;
16use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
17
/// An allocator for Wasm GC exception objects of one particular [`ExnType`].
///
/// An `ExnRefPre` is tied to the store it was created in:
/// `ExnRef::new_unchecked` asserts that allocation happens in a store with a
/// matching [`StoreId`].
pub struct ExnRefPre {
    // The id of the store this pre-allocator was created for.
    store_id: StoreId,
    // The concrete exception type that instances will be allocated with.
    ty: ExnType,
}
71
72impl ExnRefPre {
73 pub fn new(mut store: impl AsContextMut, ty: ExnType) -> Self {
76 Self::_new(store.as_context_mut().0, ty)
77 }
78
79 pub(crate) fn _new(store: &mut StoreOpaque, ty: ExnType) -> Self {
80 store.insert_gc_host_alloc_type(ty.registered_type().clone());
81 let store_id = store.id();
82
83 ExnRefPre { store_id, ty }
84 }
85
86 pub(crate) fn layout(&self) -> &GcStructLayout {
87 self.ty
88 .registered_type()
89 .layout()
90 .expect("exn types have a layout")
91 .unwrap_struct()
92 }
93
94 pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
95 self.ty.registered_type().index()
96 }
97}
98
/// A rooted reference to a GC-managed Wasm exception object.
///
/// Internally this is just an index into the store's GC root set. The
/// `#[repr(transparent)]` wrapper around [`GcRootIndex`] is a layout
/// invariant that `GcRefImpl::transmute_ref` relies on.
#[derive(Debug)]
#[repr(transparent)]
pub struct ExnRef {
    pub(super) inner: GcRootIndex,
}
114
unsafe impl GcRefImpl for ExnRef {
    /// Reinterpret a `&GcRootIndex` as a `&ExnRef`.
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // SAFETY: `ExnRef` is a `#[repr(transparent)]` wrapper around
        // `GcRootIndex`, so the two references share the same layout.
        let me: &Self = unsafe { mem::transmute(index) };

        // Assert we really are just a transparent wrapper of a
        // `GcRootIndex` (this `matches!` fails to compile if the struct
        // shape ever changes).
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}
131
impl ExnRef {
    /// Creates a new strongly-owned [`ExnRef`] from the raw `u32` value
    /// passed across the Wasm ABI boundary.
    ///
    /// Returns `None` if `raw` does not encode a GC reference (i.e. it is
    /// the null representation).
    pub fn from_raw(mut store: impl AsContextMut, raw: u32) -> Option<Rooted<Self>> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        Self::_from_raw(&mut store, raw)
    }

    pub(crate) fn _from_raw(store: &mut AutoAssertNoGc, raw: u32) -> Option<Rooted<Self>> {
        let gc_ref = VMGcRef::from_raw_u32(raw)?;
        // Clone the reference so that the root we create below owns its own
        // copy of the GC reference.
        let gc_ref = store.clone_gc_ref(&gc_ref);
        Some(Self::from_cloned_gc_ref(store, gc_ref))
    }

    /// Synchronously allocate a new exception object in `store` as described
    /// by `allocator`, with the given `tag` and field values.
    ///
    /// This validates that the store's resource limiter may be used
    /// synchronously, then drives the internally-async allocation path with
    /// `Asyncness::No`; `vm::assert_ready` enforces that the future
    /// completes immediately in that mode.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        let (mut limiter, store) = store
            .as_context_mut()
            .0
            .validate_sync_resource_limiter_and_store_opaque()?;
        vm::assert_ready(Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            tag,
            fields,
            Asyncness::No,
        ))
    }

    /// Asynchronous version of [`ExnRef::new`].
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            tag,
            fields,
            Asyncness::Yes,
        )
        .await
    }

    /// Shared implementation of `new`/`new_async`: type-check the tag and
    /// field values up front, then allocate, retrying after a GC when the
    /// heap is out of memory.
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
        asyncness: Asyncness,
    ) -> Result<Rooted<ExnRef>> {
        Self::type_check_tag_and_fields(store, allocator, tag, fields)?;
        store
            .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
                Self::new_unchecked(store, allocator, tag, fields)
            })
            .await
    }

    /// Validate that `tag` and `fields` are compatible with the exception
    /// type described by `allocator`.
    ///
    /// Checks that the tag's signature matches the allocator's tag type,
    /// that the number of field values matches the type's field count, and
    /// that each value matches its field's element type.
    ///
    /// # Panics
    ///
    /// Panics if `tag` or any field value comes from a different store than
    /// `store`.
    fn type_check_tag_and_fields(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<(), Error> {
        assert!(
            tag.comes_from_same_store(store),
            "tag comes from the wrong store"
        );
        ensure!(
            tag.wasmtime_ty(store).signature.unwrap_engine_type_index()
                == allocator.ty.tag_type().ty().type_index(),
            "incorrect signature for tag when creating exception object"
        );
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    /// Allocate and initialize a new exception object without re-checking
    /// the tag or field types; callers must have already done so (see
    /// `type_check_tag_and_fields`).
    ///
    /// # Panics
    ///
    /// Panics if `allocator` was created for a different store.
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &ExnRefPre,
        tag: &Tag,
        fields: &[Val],
    ) -> Result<Rooted<ExnRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `ExnRefPre` with the wrong store"
        );

        // Allocate the uninitialized object. This can fail either
        // unrecoverably or with a GC-heap-OOM (`GcHeapOutOfMemory`) that the
        // caller may recover from by collecting garbage and retrying.
        let exnref = store
            .require_gc_store_mut()?
            .alloc_uninit_exn(allocator.type_index(), &allocator.layout())
            .context("unrecoverable error when allocating new `exnref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // From here until the object is fully initialized (or deallocated on
        // error) no GC may run, hence the `AutoAssertNoGc` scope.
        let mut store = AutoAssertNoGc::new(store);
        // Initialize the tag and every field; if any step fails, the
        // partially-initialized object must be deallocated below rather than
        // left visible to the collector.
        match (|| {
            let (instance, index) = tag.to_raw_indices();
            exnref.initialize_tag(&mut store, instance, index)?;
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                exnref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, exnref.into())),
            Err(e) => {
                store.require_gc_store_mut()?.dealloc_uninit_exn(exnref);
                Err(e)
            }
        }
    }

    /// The engine-shared type index of this exception object's concrete
    /// type, read from its GC header.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ExnRef));
        Ok(header.ty().expect("exnrefs should have concrete types"))
    }

    /// Root an already-cloned GC reference as an `ExnRef`.
    ///
    /// The caller transfers ownership of `gc_ref` to the new root; the
    /// reference must point at an exception object (checked in debug builds
    /// only).
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(
            store
                .unwrap_gc_store()
                .header(&gc_ref)
                .kind()
                .matches(VMGcKind::ExnRef)
        );
        Rooted::new(store, gc_ref)
    }

    /// Whether this reference belongs to the given store.
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Convert this reference into its raw `u32` representation suitable for
    /// passing across the Wasm ABI boundary.
    pub fn to_raw(&self, mut store: impl AsContextMut) -> Result<u32> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._to_raw(&mut store)
    }

    pub(crate) fn _to_raw(&self, store: &mut AutoAssertNoGc<'_>) -> Result<u32> {
        let gc_ref = self.inner.try_clone_gc_ref(store)?;
        let raw = if gc_ref.is_i31() {
            // i31s are immediate values, not heap objects, so they can be
            // handed out directly.
            gc_ref.as_raw_non_zero_u32()
        } else {
            // Heap references must be routed through the GC store so that
            // the reference is tracked while exposed to Wasm.
            store.require_gc_store_mut()?.expose_gc_ref_to_wasm(gc_ref)
        };
        Ok(raw.get())
    }

    /// The concrete [`ExnType`] of this exception object.
    pub fn ty(&self, store: impl AsContext) -> Result<ExnType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<ExnType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(ExnType::from_shared_type_index(store.engine(), index))
    }

    /// Whether this object's type matches (is a subtype of) `ty`.
    pub fn matches_ty(&self, store: impl AsContext, ty: &HeapType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &HeapType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(HeapType::from(self._ty(store)?).matches(ty))
    }

    /// Like `_matches_ty` but returns a descriptive error on mismatch or
    /// store mismatch instead of a boolean.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &HeapType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// An iterator over this exception object's field values, in field
    /// order.
    ///
    /// The iterator borrows the store for its whole lifetime.
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        let store = AutoAssertNoGc::new(store);

        // Read the concrete type once up front so the iterator only needs
        // the field count.
        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ExnRef));

        let index = header.ty().expect("exnrefs should have concrete types");
        let ty = ExnType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            exnref: self,
            store,
            index: 0,
            len,
        });

        // Private iterator over the fields of an exception object; holds the
        // `AutoAssertNoGc` store borrow so no GC can move the object while
        // iterating.
        struct Fields<'a, 'b> {
            exnref: &'a ExnRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                // `unwrap` here: `i < len` was just checked, so `_field`
                // should not fail on the index.
                Some(self.exnref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    /// This object's GC header (for kind/type inspection).
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.require_gc_store()?.header(gc_ref))
    }

    /// View this reference as a typed `VMExnRef`.
    fn exnref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMExnRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        // The debug assert justifies the unchecked cast on the next line.
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::ExnRef));
        Ok(gc_ref.as_exnref_unchecked())
    }

    /// Look up this object's GC struct layout from the engine's type
    /// registry.
    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcStructLayout> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("exn types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            // Exception objects use struct layouts, never array layouts.
            GcLayout::Array(_) => unreachable!(),
        }
    }

    /// The type of the `field`th field, or an error if the index is out of
    /// bounds.
    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: exn only has {len} fields")
            }
        }
    }

    /// Read the value of the `index`th field of this exception object.
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let exnref = self.exnref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(exnref.read_field(store, &layout, field_ty.element_type(), index))
    }

    /// The [`Tag`] this exception object was created with.
    pub fn tag(&self, mut store: impl AsContextMut) -> Result<Tag> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        assert!(self.comes_from_same_store(&store));
        let exnref = self.exnref(&store)?.unchecked_copy();
        let (instance, index) = exnref.tag(&mut store)?;
        Ok(Tag::from_raw_indices(&*store, instance, index))
    }
}
631
632unsafe impl WasmTy for Rooted<ExnRef> {
633 #[inline]
634 fn valtype() -> ValType {
635 ValType::Ref(RefType::new(false, HeapType::Exn))
636 }
637
638 #[inline]
639 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
640 self.comes_from_same_store(store)
641 }
642
643 #[inline]
644 fn dynamic_concrete_type_check(
645 &self,
646 _store: &StoreOpaque,
647 _nullable: bool,
648 _ty: &HeapType,
649 ) -> Result<()> {
650 Ok(())
653 }
654
655 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
656 self.wasm_ty_store(store, ptr, ValRaw::anyref)
657 }
658
659 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
660 Self::wasm_ty_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
661 }
662}
663
664unsafe impl WasmTy for Option<Rooted<ExnRef>> {
665 #[inline]
666 fn valtype() -> ValType {
667 ValType::EXNREF
668 }
669
670 #[inline]
671 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
672 self.map_or(true, |x| x.comes_from_same_store(store))
673 }
674
675 #[inline]
676 fn dynamic_concrete_type_check(
677 &self,
678 store: &StoreOpaque,
679 nullable: bool,
680 ty: &HeapType,
681 ) -> Result<()> {
682 match self {
683 Some(a) => a.ensure_matches_ty(store, ty),
684 None => {
685 ensure!(
686 nullable,
687 "expected a non-null reference, but found a null reference"
688 );
689 Ok(())
690 }
691 }
692 }
693
694 #[inline]
695 fn is_vmgcref_and_points_to_object(&self) -> bool {
696 self.is_some()
697 }
698
699 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
700 <Rooted<ExnRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
701 }
702
703 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
704 <Rooted<ExnRef>>::wasm_ty_option_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
705 }
706}
707
708unsafe impl WasmTy for OwnedRooted<ExnRef> {
709 #[inline]
710 fn valtype() -> ValType {
711 ValType::Ref(RefType::new(false, HeapType::Exn))
712 }
713
714 #[inline]
715 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
716 self.comes_from_same_store(store)
717 }
718
719 #[inline]
720 fn dynamic_concrete_type_check(
721 &self,
722 store: &StoreOpaque,
723 _nullable: bool,
724 ty: &HeapType,
725 ) -> Result<()> {
726 self.ensure_matches_ty(store, ty)
727 }
728
729 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
730 self.wasm_ty_store(store, ptr, ValRaw::anyref)
731 }
732
733 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
734 Self::wasm_ty_load(store, ptr.get_anyref(), ExnRef::from_cloned_gc_ref)
735 }
736}
737
738unsafe impl WasmTy for Option<OwnedRooted<ExnRef>> {
739 #[inline]
740 fn valtype() -> ValType {
741 ValType::EXNREF
742 }
743
744 #[inline]
745 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
746 self.as_ref()
747 .map_or(true, |x| x.comes_from_same_store(store))
748 }
749
750 #[inline]
751 fn dynamic_concrete_type_check(
752 &self,
753 store: &StoreOpaque,
754 nullable: bool,
755 ty: &HeapType,
756 ) -> Result<()> {
757 match self {
758 Some(a) => a.ensure_matches_ty(store, ty),
759 None => {
760 ensure!(
761 nullable,
762 "expected a non-null reference, but found a null reference"
763 );
764 Ok(())
765 }
766 }
767 }
768
769 #[inline]
770 fn is_vmgcref_and_points_to_object(&self) -> bool {
771 self.is_some()
772 }
773
774 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
775 <OwnedRooted<ExnRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
776 }
777
778 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
779 <OwnedRooted<ExnRef>>::wasm_ty_option_load(
780 store,
781 ptr.get_anyref(),
782 ExnRef::from_cloned_gc_ref,
783 )
784 }
785}