1use crate::runtime::vm::VMGcRef;
4use crate::store::{Asyncness, StoreId};
5#[cfg(feature = "async")]
6use crate::vm::VMStore;
7use crate::vm::{self, VMGcHeader, VMStructRef};
8use crate::{AnyRef, FieldType};
9use crate::{
10 AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
11 OwnedRooted, RefType, Rooted, StructType, Val, ValRaw, ValType, WasmTy,
12 prelude::*,
13 store::{AutoAssertNoGc, StoreContextMut, StoreOpaque, StoreResourceLimiter},
14};
15use alloc::sync::Arc;
16use core::mem::{self, MaybeUninit};
17use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
18
/// An allocator for a particular Wasm GC struct type.
///
/// Created via [`StructRefPre::new`] and then passed to `StructRef::new` (and
/// friends) to amortize the per-allocation cost of looking up and registering
/// the struct type: the type is registered with the store once, here, rather
/// than on every allocation.
pub struct StructRefPre {
    // The id of the store this pre-allocator was created for; allocation
    // asserts that it is only used with that same store.
    store_id: StoreId,
    // The struct type that this pre-allocator creates instances of.
    ty: StructType,
}
68
69impl StructRefPre {
70 pub fn new(mut store: impl AsContextMut, ty: StructType) -> Self {
73 Self::_new(store.as_context_mut().0, ty)
74 }
75
76 pub(crate) fn _new(store: &mut StoreOpaque, ty: StructType) -> Self {
77 store.insert_gc_host_alloc_type(ty.registered_type().clone());
78 let store_id = store.id();
79
80 StructRefPre { store_id, ty }
81 }
82
83 pub(crate) fn layout(&self) -> &GcStructLayout {
84 self.ty
85 .registered_type()
86 .layout()
87 .expect("struct types have a layout")
88 .unwrap_struct()
89 }
90
91 pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
92 self.ty.registered_type().index()
93 }
94}
95
/// A reference to a GC-managed `struct` value allocated in a store's GC heap.
///
/// `#[repr(transparent)]` guarantees this type has exactly the layout of its
/// single `GcRootIndex` field; `GcRefImpl::transmute_ref` below relies on
/// that guarantee.
#[derive(Debug)]
#[repr(transparent)]
pub struct StructRef {
    // Index of the rooted GC reference within the store's root set.
    pub(super) inner: GcRootIndex,
}
159
unsafe impl GcRefImpl for StructRef {
    /// Reinterpret a `&GcRootIndex` as a `&StructRef`.
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // SAFETY: `StructRef` is `#[repr(transparent)]` over its single
        // `GcRootIndex` field, so the two references have identical layout.
        let me: &Self = unsafe { mem::transmute(index) };

        // Statically double-check the field structure assumed by the
        // transmute above: this `matches!` fails to compile if `StructRef`
        // ever grows additional fields.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}
176
177impl Rooted<StructRef> {
178 #[inline]
180 pub fn to_anyref(self) -> Rooted<AnyRef> {
181 self.unchecked_cast()
182 }
183
184 #[inline]
186 pub fn to_eqref(self) -> Rooted<EqRef> {
187 self.unchecked_cast()
188 }
189}
190
191impl OwnedRooted<StructRef> {
192 #[inline]
194 pub fn to_anyref(self) -> OwnedRooted<AnyRef> {
195 self.unchecked_cast()
196 }
197
198 #[inline]
200 pub fn to_eqref(self) -> OwnedRooted<EqRef> {
201 self.unchecked_cast()
202 }
203}
204
impl StructRef {
    /// Synchronously allocate a new struct of `allocator`'s type in `store`,
    /// initializing it with `fields`.
    ///
    /// # Errors
    ///
    /// Fails if the sync resource-limiter lookup fails, if `fields` do not
    /// match the struct type, or if the GC heap is out of memory
    /// (`GcHeapOutOfMemory`).
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        let (mut limiter, store) = store
            .as_context_mut()
            .0
            .validate_sync_resource_limiter_and_store_opaque()?;
        // With `Asyncness::No` the allocation/GC-retry future must complete
        // immediately; `assert_ready` unwraps it without an executor.
        vm::assert_ready(Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            fields,
            Asyncness::No,
        ))
    }

    /// Asynchronous variant of [`StructRef::new`]; may suspend while a GC
    /// runs to free up space for the allocation.
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_async(store, limiter.as_mut(), allocator, fields, Asyncness::Yes).await
    }

    /// Shared implementation of `new`/`new_async`: type-check the fields up
    /// front, then allocate, retrying after a GC if the heap is full.
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &StructRefPre,
        fields: &[Val],
        asyncness: Asyncness,
    ) -> Result<Rooted<StructRef>> {
        Self::type_check_fields(store, allocator, fields)?;
        store
            .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
            .await
    }

    /// Check that `fields` has the right arity and that each value matches
    /// the corresponding field's (unpacked) element type.
    ///
    /// Panics (rather than errors) if a field value comes from a different
    /// store — a cross-store value is a caller bug, not a recoverable
    /// condition.
    fn type_check_fields(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<(), Error> {
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    /// Allocate and initialize the struct, assuming `fields` were already
    /// type-checked by `type_check_fields`.
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `StructRefPre` with the wrong store"
        );

        // Allocate an uninitialized struct. The outer `?` propagates
        // unrecoverable errors; the inner `map_err` turns a failed allocation
        // of `n` bytes into `GcHeapOutOfMemory` so the retry-after-GC driver
        // can catch it.
        // NOTE(review): `allocator.layout()` already returns
        // `&GcStructLayout`; the extra `&` here is a needless borrow (it
        // compiles via deref coercion, but clippy would flag it).
        let structref = store
            .require_gc_store_mut()?
            .alloc_uninit_struct(allocator.type_index(), &allocator.layout())
            .context("unrecoverable error when allocating new `structref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // From here until the struct is fully initialized, no GC may run:
        // `structref` is not yet rooted and its fields are uninitialized.
        let mut store = AutoAssertNoGc::new(store);
        match (|| {
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                structref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, structref.into())),
            // On failure, hand the half-initialized object back to the GC
            // store so the allocation is not leaked.
            Err(e) => {
                store
                    .require_gc_store_mut()?
                    .dealloc_uninit_struct(structref);
                Err(e)
            }
        }
    }

    /// Whether this reference is rooted in the given store.
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// The concrete struct type of this value.
    pub fn ty(&self, store: impl AsContext) -> Result<StructType> {
        self._ty(store.as_context().0)
    }

    /// Internal variant of [`StructRef::ty`] operating on a `StoreOpaque`.
    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<StructType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(StructType::from_shared_type_index(store.engine(), index))
    }

    /// Does this struct's type match (is it a subtype of) `ty`?
    pub fn matches_ty(&self, store: impl AsContext, ty: &StructType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    /// Internal variant of [`StructRef::matches_ty`].
    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(self._ty(store)?.matches(ty))
    }

    /// Like `_matches_ty` but returns a descriptive error on mismatch or
    /// when used with the wrong store.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Iterate over this struct's field values, in definition order.
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    /// Internal variant of [`StructRef::fields`]. Returns a lazy iterator
    /// that holds the `AutoAssertNoGc` guard for its whole lifetime, so no GC
    /// can move the object while fields are being read.
    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));

        // The field count comes from the object's concrete type, read out of
        // its GC header.
        let index = header.ty().expect("structrefs should have concrete types");
        let ty = StructType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            structref: self,
            store,
            index: 0,
            len,
        });

        // Private iterator type; defined after the `return` so the happy
        // path reads top-to-bottom.
        struct Fields<'a, 'b> {
            structref: &'a StructRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                // `unwrap` is justified: `i < self.len` was checked above and
                // the object was validated when the iterator was constructed.
                Some(self.structref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    /// Borrow this object's GC header from the store's GC heap.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.require_gc_store()?.header(gc_ref))
    }

    /// Borrow the underlying `VMStructRef`, checking (in debug builds) that
    /// the header kind really is a structref before the unchecked cast.
    fn structref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMStructRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::StructRef));
        Ok(gc_ref.as_structref_unchecked())
    }

    /// Look up this object's GC struct layout via the engine's type registry.
    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<Arc<GcStructLayout>> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("struct types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            // A structref's type index can never map to an array layout.
            GcLayout::Array(_) => unreachable!(),
        }
    }

    /// The type of field number `field`, or an error if out of bounds.
    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: struct only has {len} fields")
            }
        }
    }

    /// Read the value of field `index`.
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    /// Internal variant of [`StructRef::field`]; the caller supplies the
    /// no-GC guard.
    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let structref = self.structref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(structref.read_field(store, &layout, field_ty.element_type(), index))
    }

    /// Write `value` into field `index`.
    ///
    /// Errors if the field is immutable, out of bounds, or `value` does not
    /// match the field's type.
    pub fn set_field(&self, mut store: impl AsContextMut, index: usize, value: Val) -> Result<()> {
        self._set_field(store.as_context_mut().0, index, value)
    }

    /// Internal variant of [`StructRef::set_field`].
    pub(crate) fn _set_field(
        &self,
        store: &mut StoreOpaque,
        index: usize,
        value: Val,
    ) -> Result<()> {
        assert!(self.comes_from_same_store(store));
        // No GC may happen between validating `value` and writing it into
        // the object, hence the guard spans the whole method body.
        let mut store = AutoAssertNoGc::new(store);

        let field_ty = self.field_ty(&store, index)?;
        ensure!(
            field_ty.mutability().is_var(),
            "cannot set field {index}: field is not mutable"
        );

        value
            .ensure_matches_ty(&store, &field_ty.element_type().unpack())
            .with_context(|| format!("cannot set field {index}: type mismatch"))?;

        let layout = self.layout(&store)?;
        let structref = self.structref(&store)?.unchecked_copy();

        structref.write_field(&mut store, &layout, field_ty.element_type(), index, value)
    }

    /// The engine-shared type index recorded in this object's GC header.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));
        Ok(header.ty().expect("structrefs should have concrete types"))
    }

    /// Root an already-cloned GC reference as a `StructRef`.
    ///
    /// Debug-asserts that `gc_ref` actually points at a structref; callers
    /// are responsible for only passing struct references here.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(gc_ref.is_structref(&*store.unwrap_gc_store().gc_heap));
        Rooted::new(store, gc_ref)
    }
}
638
unsafe impl WasmTy for Rooted<StructRef> {
    // A bare `Rooted<StructRef>` is a non-nullable `(ref struct)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    // Runtime type check used when this value crosses into Wasm at a
    // concrete (non-top) reference type. `_nullable` is irrelevant here
    // because a `Rooted` is never null.
    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Supertypes of all structs: always fine.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // Concrete struct type: defer to the subtyping check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // Every other heap type is disjoint from structs; kept as an
            // exhaustive list (no `_`) so new variants force a review here.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    // Lower into the raw anyref representation for the Wasm ABI.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    // Lift from the raw anyref representation, re-rooting the GC reference.
    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
690
691unsafe impl WasmTy for Option<Rooted<StructRef>> {
692 #[inline]
693 fn valtype() -> ValType {
694 ValType::STRUCTREF
695 }
696
697 #[inline]
698 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
699 self.map_or(true, |x| x.comes_from_same_store(store))
700 }
701
702 #[inline]
703 fn dynamic_concrete_type_check(
704 &self,
705 store: &StoreOpaque,
706 nullable: bool,
707 ty: &HeapType,
708 ) -> Result<()> {
709 match self {
710 Some(s) => Rooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty),
711 None => {
712 ensure!(
713 nullable,
714 "expected a non-null reference, but found a null reference"
715 );
716 Ok(())
717 }
718 }
719 }
720
721 #[inline]
722 fn is_vmgcref_and_points_to_object(&self) -> bool {
723 self.is_some()
724 }
725
726 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
727 <Rooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
728 }
729
730 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
731 <Rooted<StructRef>>::wasm_ty_option_load(
732 store,
733 ptr.get_anyref(),
734 StructRef::from_cloned_gc_ref,
735 )
736 }
737}
738
unsafe impl WasmTy for OwnedRooted<StructRef> {
    // A bare `OwnedRooted<StructRef>` is a non-nullable `(ref struct)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    // Mirrors `Rooted<StructRef>`'s check; the nullability flag is unused
    // because an owned root is never null.
    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Supertypes of all structs: always fine.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // Concrete struct type: defer to the subtyping check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // All remaining heap types are disjoint from structs; listed
            // exhaustively (no `_`) so new variants force a review here.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    // Lower into the raw anyref representation for the Wasm ABI.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    // Lift from the raw anyref representation, re-rooting the GC reference.
    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
790
791unsafe impl WasmTy for Option<OwnedRooted<StructRef>> {
792 #[inline]
793 fn valtype() -> ValType {
794 ValType::STRUCTREF
795 }
796
797 #[inline]
798 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
799 self.as_ref()
800 .map_or(true, |x| x.comes_from_same_store(store))
801 }
802
803 #[inline]
804 fn dynamic_concrete_type_check(
805 &self,
806 store: &StoreOpaque,
807 nullable: bool,
808 ty: &HeapType,
809 ) -> Result<()> {
810 match self {
811 Some(s) => {
812 OwnedRooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty)
813 }
814 None => {
815 ensure!(
816 nullable,
817 "expected a non-null reference, but found a null reference"
818 );
819 Ok(())
820 }
821 }
822 }
823
824 #[inline]
825 fn is_vmgcref_and_points_to_object(&self) -> bool {
826 self.is_some()
827 }
828
829 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
830 <OwnedRooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
831 }
832
833 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
834 <OwnedRooted<StructRef>>::wasm_ty_option_load(
835 store,
836 ptr.get_anyref(),
837 StructRef::from_cloned_gc_ref,
838 )
839 }
840}