use crate::runtime::vm::VMGcRef;
use crate::store::{Asyncness, StoreId};
#[cfg(feature = "async")]
use crate::vm::VMStore;
use crate::vm::{self, VMGcHeader, VMStructRef};
use crate::{AnyRef, FieldType};
use crate::{
    AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
    OwnedRooted, RefType, Rooted, StructType, Val, ValRaw, ValType, WasmTy,
    prelude::*,
    store::{AutoAssertNoGc, StoreContextMut, StoreOpaque, StoreResourceLimiter},
};
use core::mem::{self, MaybeUninit};
use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};

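/// An allocator for a particular Wasm GC struct type.
///
/// Creating a `StructRefPre` registers its struct type with the store ahead
/// of time, so repeated allocations via `StructRef::new` with the same
/// allocator avoid re-registering the type on every call.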
pub struct StructRefPre {
    store_id: StoreId,
    ty: StructType,
}

impl StructRefPre {
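    /// Creates a new `StructRefPre` that is associated with the given store
    /// and type.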
    pub fn new(mut store: impl AsContextMut, ty: StructType) -> Self {
        Self::_new(store.as_context_mut().0, ty)
    }

    pub(crate) fn _new(store: &mut StoreOpaque, ty: StructType) -> Self {
        store.insert_gc_host_alloc_type(ty.registered_type().clone());
        let store_id = store.id();

        StructRefPre { store_id, ty }
    }

    pub(crate) fn layout(&self) -> &GcStructLayout {
        self.ty
            .registered_type()
            .layout()
            .expect("struct types have a layout")
            .unwrap_struct()
    }

    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
        self.ty.registered_type().index()
    }
}

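/// A reference to a GC-managed `struct` instance allocated in a store's GC
/// heap.
///
/// `StructRef`s are only accessed through `Rooted<StructRef>` or
/// `OwnedRooted<StructRef>` handles, which keep the referenced struct from
/// being reclaimed by the collector.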
#[derive(Debug)]
#[repr(transparent)]
pub struct StructRef {
    pub(super) inner: GcRootIndex,
}

unsafe impl GcRefImpl for StructRef {
    fn transmute_ref(index: &GcRootIndex) -> &Self {
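        // Safety: `StructRef` is a `#[repr(transparent)]` wrapper around
        // `GcRootIndex`, so transmuting between the two reference types is
        // sound.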
        let me: &Self = unsafe { mem::transmute(index) };

        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}

impl Rooted<StructRef> {
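    /// Upcast this `structref` into an `anyref`.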
    #[inline]
    pub fn to_anyref(self) -> Rooted<AnyRef> {
        self.unchecked_cast()
    }

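    /// Upcast this `structref` into an `eqref`.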
    #[inline]
    pub fn to_eqref(self) -> Rooted<EqRef> {
        self.unchecked_cast()
    }
}

impl OwnedRooted<StructRef> {
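    /// Upcast this `structref` into an `anyref`.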
    #[inline]
    pub fn to_anyref(self) -> OwnedRooted<AnyRef> {
        self.unchecked_cast()
    }

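    /// Upcast this `structref` into an `eqref`.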
    #[inline]
    pub fn to_eqref(self) -> OwnedRooted<EqRef> {
        self.unchecked_cast()
    }
}

impl StructRef {
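    /// Synchronously allocate a new struct of the type associated with the
    /// given allocator, initializing its fields with the given values.
    ///
    /// If the GC heap is at capacity, a collection is triggered and the
    /// allocation is retried; if there is still no room, a
    /// `GcHeapOutOfMemory` error is returned.
    ///
    /// A minimal usage sketch (illustrative only, hence `ignore`; it assumes
    /// an engine with GC support and propagates errors with `?`):
    ///
    /// ```ignore
    /// use wasmtime::*;
    ///
    /// let engine = Engine::default();
    /// let mut store = Store::new(&engine, ());
    ///
    /// // A struct type with a single mutable `i32` field.
    /// let field = FieldType::new(Mutability::Var, StorageType::ValType(ValType::I32));
    /// let ty = StructType::new(&engine, [field])?;
    ///
    /// // Allocate an instance and read its field back.
    /// let pre = StructRefPre::new(&mut store, ty);
    /// let s = StructRef::new(&mut store, &pre, &[Val::I32(42)])?;
    /// let val = s.field(&mut store, 0)?;
    /// ```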
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        let (mut limiter, store) = store
            .as_context_mut()
            .0
            .validate_sync_resource_limiter_and_store_opaque()?;
        vm::assert_ready(Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            fields,
            Asyncness::No,
        ))
    }

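    /// Asynchronously allocate a new struct of the type associated with the
    /// given allocator.
    ///
    /// This is the asynchronous counterpart to [`StructRef::new`]: any
    /// garbage collection required to satisfy the allocation is performed
    /// asynchronously.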
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_async(store, limiter.as_mut(), allocator, fields, Asyncness::Yes).await
    }

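    /// Type check the field values, then allocate, retrying the allocation
    /// after a GC if the heap is initially out of room.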
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &StructRefPre,
        fields: &[Val],
        asyncness: Asyncness,
    ) -> Result<Rooted<StructRef>> {
        Self::type_check_fields(store, allocator, fields)?;
        store
            .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
            .await
    }

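    /// Check that the given field values match the struct type being
    /// allocated, both in arity and in type.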
    fn type_check_fields(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<(), Error> {
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

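    /// Allocate the struct and initialize its fields, assuming the field
    /// values have already been type checked via `type_check_fields`.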
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `StructRefPre` with the wrong store"
        );

        let structref = store
            .require_gc_store_mut()?
            .alloc_uninit_struct(allocator.type_index(), allocator.layout())
            .context("unrecoverable error when allocating new `structref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        let mut store = AutoAssertNoGc::new(store);
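
        // From this point on, if we get any errors, then the struct is not
        // fully initialized, so we need to eagerly deallocate it before the
        // next GC, where the collector might otherwise attempt to interpret
        // an uninitialized field as a GC reference.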
        match (|| {
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                structref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, structref.into())),
            Err(e) => {
                store
                    .require_gc_store_mut()?
                    .dealloc_uninit_struct(structref);
                Err(e)
            }
        }
    }

    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

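    /// Get this `structref`'s type.
    ///
    /// Returns an error if this reference has been unrooted.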
    pub fn ty(&self, store: impl AsContext) -> Result<StructType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<StructType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(StructType::from_shared_type_index(store.engine(), index))
    }

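    /// Does this `structref` match the given type?
    ///
    /// That is, is this struct's type a subtype of the given type?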
    pub fn matches_ty(&self, store: impl AsContext, ty: &StructType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(self._ty(store)?.matches(ty))
    }

    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

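    /// Get an iterator over the values of this struct's fields.
    ///
    /// Note that `i8` and `i16` field values are zero-extended into
    /// `Val::I32(_)`s.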
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));

        let index = header.ty().expect("structrefs should have concrete types");
        let ty = StructType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            structref: self,
            store,
            index: 0,
            len,
        });

        struct Fields<'a, 'b> {
            structref: &'a StructRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                Some(self.structref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.require_gc_store()?.header(gc_ref))
    }

    fn structref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMStructRef> {
        assert!(self.comes_from_same_store(store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::StructRef));
        Ok(gc_ref.as_structref_unchecked())
    }

    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcStructLayout> {
        assert!(self.comes_from_same_store(store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("struct types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            GcLayout::Array(_) => unreachable!(),
        }
    }

    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: struct only has {len} fields")
            }
        }
    }

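    /// Get the value of this struct's `index`th field.
    ///
    /// Returns an error if this reference has been unrooted or if `index` is
    /// out of bounds.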
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let structref = self.structref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(structref.read_field(store, &layout, field_ty.element_type(), index))
    }

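    /// Set this struct's `index`th field to the given value.
    ///
    /// Returns an error if the field is not mutable, if the value's type
    /// does not match the field's type, if `index` is out of bounds, or if
    /// this reference has been unrooted.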
    pub fn set_field(&self, mut store: impl AsContextMut, index: usize, value: Val) -> Result<()> {
        self._set_field(store.as_context_mut().0, index, value)
    }

    pub(crate) fn _set_field(
        &self,
        store: &mut StoreOpaque,
        index: usize,
        value: Val,
    ) -> Result<()> {
        assert!(self.comes_from_same_store(store));
        let mut store = AutoAssertNoGc::new(store);

        let field_ty = self.field_ty(&store, index)?;
        ensure!(
            field_ty.mutability().is_var(),
            "cannot set field {index}: field is not mutable"
        );

        value
            .ensure_matches_ty(&store, &field_ty.element_type().unpack())
            .with_context(|| format!("cannot set field {index}: type mismatch"))?;

        let layout = self.layout(&store)?;
        let structref = self.structref(&store)?.unchecked_copy();

        structref.write_field(&mut store, &layout, field_ty.element_type(), index, value)
    }

    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));
        Ok(header.ty().expect("structrefs should have concrete types"))
    }

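    /// Create a new `Rooted<StructRef>` from the given GC reference, which
    /// must point to a `structref` within this store's GC heap.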
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(gc_ref.is_structref(&*store.unwrap_gc_store().gc_heap));
        Rooted::new(store, gc_ref)
    }
}

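// The `WasmTy` implementations below let `StructRef` handles flow in and out
// of typed Wasm functions. The non-`Option` handles correspond to the
// non-nullable `(ref struct)` type, while `Option<...>` corresponds to the
// nullable `structref` (i.e. `(ref null struct)`) type.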
unsafe impl WasmTy for Rooted<StructRef> {
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}

unsafe impl WasmTy for Option<Rooted<StructRef>> {
    #[inline]
    fn valtype() -> ValType {
        ValType::STRUCTREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            Some(s) => Rooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty),
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <Rooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <Rooted<StructRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            StructRef::from_cloned_gc_ref,
        )
    }
}

unsafe impl WasmTy for OwnedRooted<StructRef> {
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}

unsafe impl WasmTy for Option<OwnedRooted<StructRef>> {
    #[inline]
    fn valtype() -> ValType {
        ValType::STRUCTREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.as_ref()
            .map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            Some(s) => {
                OwnedRooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty)
            }
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <OwnedRooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <OwnedRooted<StructRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            StructRef::from_cloned_gc_ref,
        )
    }
}