1use crate::runtime::vm::VMGcRef;
4use crate::store::StoreId;
5use crate::vm::{VMGcHeader, VMStructRef};
6use crate::{AnyRef, FieldType};
7use crate::{
8 AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
9 ManuallyRooted, RefType, Rooted, StructType, Val, ValRaw, ValType, WasmTy,
10 prelude::*,
11 store::{AutoAssertNoGc, StoreContextMut, StoreOpaque},
12};
13use core::mem::{self, MaybeUninit};
14use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
15
/// An allocator for a particular Wasm GC `struct` type.
///
/// Create one with [`StructRefPre::new`] and then allocate any number of
/// structs of that type via [`StructRef::new`] (or the async variants).
pub struct StructRefPre {
    // The ID of the store this allocator was created for; checked against the
    // store actually used at allocation time in `StructRef::new_unchecked`.
    store_id: StoreId,
    // The registered struct type that allocations through this `StructRefPre`
    // will have.
    ty: StructType,
}
65
66impl StructRefPre {
67 pub fn new(mut store: impl AsContextMut, ty: StructType) -> Self {
70 Self::_new(store.as_context_mut().0, ty)
71 }
72
73 pub(crate) fn _new(store: &mut StoreOpaque, ty: StructType) -> Self {
74 store.insert_gc_host_alloc_type(ty.registered_type().clone());
75 let store_id = store.id();
76
77 StructRefPre { store_id, ty }
78 }
79
80 pub(crate) fn layout(&self) -> &GcStructLayout {
81 self.ty
82 .registered_type()
83 .layout()
84 .expect("struct types have a layout")
85 .unwrap_struct()
86 }
87
88 pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
89 self.ty.registered_type().index()
90 }
91}
92
/// A reference to a GC-managed Wasm `struct` value.
#[derive(Debug)]
#[repr(transparent)]
pub struct StructRef {
    // The underlying rooted GC reference index. The `repr(transparent)`
    // layout over this single field is relied upon by the `GcRefImpl`
    // implementation's reference transmute.
    pub(super) inner: GcRootIndex,
}
156
unsafe impl GcRefImpl for StructRef {
    // Reinterpret a `&GcRootIndex` as a `&StructRef`.
    #[allow(private_interfaces)]
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // SAFETY: `StructRef` is `#[repr(transparent)]` over a single
        // `GcRootIndex` field, so the two types have identical layout and a
        // reference transmute between them is sound.
        let me: &Self = unsafe { mem::transmute(index) };

        // Statically double-check that `StructRef` really is just a newtype
        // over `GcRootIndex`: this pattern match fails to compile if the
        // struct gains additional fields.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}
174
impl Rooted<StructRef> {
    /// Upcast this `structref` into an `anyref`.
    ///
    /// This is an infallible cast: every struct reference is also an `any`
    /// reference in the Wasm type hierarchy.
    #[inline]
    pub fn to_anyref(self) -> Rooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcast this `structref` into an `eqref`.
    ///
    /// This is an infallible cast.
    #[inline]
    pub fn to_eqref(self) -> Rooted<EqRef> {
        self.unchecked_cast()
    }
}
188
impl ManuallyRooted<StructRef> {
    /// Upcast this `structref` into an `anyref`.
    ///
    /// This is an infallible cast: every struct reference is also an `any`
    /// reference in the Wasm type hierarchy.
    #[inline]
    pub fn to_anyref(self) -> ManuallyRooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcast this `structref` into an `eqref`.
    ///
    /// This is an infallible cast.
    #[inline]
    pub fn to_eqref(self) -> ManuallyRooted<EqRef> {
        self.unchecked_cast()
    }
}
202
impl StructRef {
    /// Synchronously allocate a new struct of the type described by
    /// `allocator`, with its fields initialized to `fields`.
    ///
    /// # Errors
    ///
    /// Returns an error if the field values do not match the struct type, or
    /// if the GC heap is out of memory even after collection.
    ///
    /// # Panics
    ///
    /// Panics if `store` has async support enabled; use
    /// [`StructRef::new_async`] with asynchronous stores.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::_new(store.as_context_mut().0, allocator, fields)
    }

    pub(crate) fn _new(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert!(
            !store.async_support(),
            "use `StructRef::new_async` with asynchronous stores"
        );
        // Type-check up front so that an allocation never has to be rolled
        // back because of invalid field values.
        Self::type_check_fields(store, allocator, fields)?;
        // If the initial allocation attempt fails for lack of heap space,
        // run a GC and retry.
        store.retry_after_gc((), |store, ()| {
            Self::new_unchecked(store, allocator, fields)
        })
    }

    /// Asynchronously allocate a new struct of the type described by
    /// `allocator`, with its fields initialized to `fields`.
    ///
    /// # Panics
    ///
    /// Panics if `store` does not have async support enabled; use
    /// [`StructRef::new`] with synchronous stores.
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::_new_async(store.as_context_mut().0, allocator, fields).await
    }

    #[cfg(feature = "async")]
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert!(
            store.async_support(),
            "use `StructRef::new` with synchronous stores"
        );
        // Same flow as `_new`, but the GC-and-retry step may suspend.
        Self::type_check_fields(store, allocator, fields)?;
        store
            .retry_after_gc_async((), |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
            .await
    }

    /// Allocate a new struct, dispatching to the sync or async retry path as
    /// appropriate for the store.
    ///
    /// # Safety
    ///
    /// NOTE(review): the safety contract is inherited from
    /// `StoreOpaque::retry_after_gc_maybe_async` — presumably the caller must
    /// be in a context where an async GC may be awaited (e.g. on a fiber
    /// stack) when the store is async; confirm against that function's
    /// documentation.
    pub(crate) unsafe fn new_maybe_async(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::type_check_fields(store, allocator, fields)?;
        unsafe {
            store.retry_after_gc_maybe_async((), |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
        }
    }

    /// Check that `fields` has the right length and that each value matches
    /// the corresponding field type of `allocator`'s struct type.
    ///
    /// # Panics
    ///
    /// Panics if any field value comes from a different store.
    fn type_check_fields(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<(), Error> {
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            // Unpack storage types (e.g. packed i8/i16 fields) to the value
            // type used for host `Val`s before matching.
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    /// Allocate and initialize a struct, assuming `fields` has already been
    /// type checked via `type_check_fields`.
    ///
    /// # Panics
    ///
    /// Panics if `allocator` was created for a different store.
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `StructRefPre` with the wrong store"
        );

        // Allocate an uninitialized struct; distinguishes unrecoverable
        // errors (propagated) from out-of-heap (surfaced as
        // `GcHeapOutOfMemory` so callers can GC and retry).
        let structref = store
            .gc_store_mut()?
            .alloc_uninit_struct(allocator.type_index(), &allocator.layout())
            .context("unrecoverable error when allocating new `structref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // No GC may happen while the struct's fields are uninitialized, so a
        // collector never observes the half-built object.
        let mut store = AutoAssertNoGc::new(store);
        match (|| {
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                structref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            // Fully initialized: root it and hand it out.
            Ok(()) => Ok(Rooted::new(&mut store, structref.into())),
            // Initialization failed: return the uninitialized allocation to
            // the GC store before propagating the error.
            Err(e) => {
                store.gc_store_mut()?.dealloc_uninit_struct(structref);
                Err(e)
            }
        }
    }

    /// Whether this reference is associated with the given store.
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Get this struct's concrete type.
    ///
    /// # Errors
    ///
    /// Returns an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is used with a different store.
    pub fn ty(&self, store: impl AsContext) -> Result<StructType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<StructType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(StructType::from_shared_type_index(store.engine(), index))
    }

    /// Whether this struct's type matches (is a subtype of) `ty`.
    ///
    /// # Panics
    ///
    /// Panics if this reference is used with a different store.
    pub fn matches_ty(&self, store: impl AsContext, ty: &StructType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(self._ty(store)?.matches(ty))
    }

    /// Like `_matches_ty`, but returns a descriptive error (rather than
    /// panicking) for cross-store use and for a type mismatch.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Get an iterator over this struct's field values, in field order.
    ///
    /// # Errors
    ///
    /// Returns an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is used with a different store.
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        // Hold the no-GC guard for the whole iteration so the reference
        // remains valid while fields are read lazily.
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));

        // The number of fields comes from the struct's concrete type, read
        // out of its GC header.
        let index = header.ty().expect("structrefs should have concrete types");
        let ty = StructType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            structref: self,
            store,
            index: 0,
            len,
        });

        // Lazy iterator over the struct's fields; owns the no-GC guard for
        // its lifetime and reads each field on demand via `_field`.
        struct Fields<'a, 'b> {
            structref: &'a StructRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                // `unwrap` is justified: `i < len` was checked above and the
                // guard has prevented any GC since `_fields` validated the
                // reference.
                Some(self.structref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    /// Borrow this struct's GC header out of the store's GC heap.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.gc_store()?.header(gc_ref))
    }

    /// View this reference as a typed `VMStructRef`, debug-asserting that the
    /// underlying object really is a struct.
    fn structref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMStructRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::StructRef));
        Ok(gc_ref.as_structref_unchecked())
    }

    /// Look up this struct's GC layout from the engine's type registry.
    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcStructLayout> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("struct types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            // This reference is known to be a struct, so an array layout is
            // impossible here.
            GcLayout::Array(_) => unreachable!(),
        }
    }

    /// The type of the field at index `field`, or an out-of-bounds error.
    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: struct only has {len} fields")
            }
        }
    }

    /// Read the value of the field at the given index.
    ///
    /// # Errors
    ///
    /// Returns an error if this reference has been unrooted or `index` is out
    /// of bounds.
    ///
    /// # Panics
    ///
    /// Panics if this reference is used with a different store.
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let structref = self.structref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(structref.read_field(store, &layout, field_ty.element_type(), index))
    }

    /// Write `value` into the field at the given index.
    ///
    /// # Errors
    ///
    /// Returns an error if this reference has been unrooted, `index` is out
    /// of bounds, the field is immutable, or `value` has the wrong type.
    ///
    /// # Panics
    ///
    /// Panics if this reference is used with a different store.
    pub fn set_field(&self, mut store: impl AsContextMut, index: usize, value: Val) -> Result<()> {
        self._set_field(store.as_context_mut().0, index, value)
    }

    pub(crate) fn _set_field(
        &self,
        store: &mut StoreOpaque,
        index: usize,
        value: Val,
    ) -> Result<()> {
        assert!(self.comes_from_same_store(store));
        let mut store = AutoAssertNoGc::new(store);

        // Reject writes to immutable (`const`) fields.
        let field_ty = self.field_ty(&store, index)?;
        ensure!(
            field_ty.mutability().is_var(),
            "cannot set field {index}: field is not mutable"
        );

        // Type-check the new value against the field's (unpacked) type.
        value
            .ensure_matches_ty(&store, &field_ty.element_type().unpack())
            .with_context(|| format!("cannot set field {index}: type mismatch"))?;

        let layout = self.layout(&store)?;
        let structref = self.structref(&store)?.unchecked_copy();

        structref.write_field(&mut store, &layout, field_ty.element_type(), index, value)
    }

    /// The shared index of this struct's concrete type, read from its GC
    /// header.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));
        Ok(header.ty().expect("structrefs should have concrete types"))
    }

    /// Root an already-cloned GC reference (debug-asserted to point at a
    /// struct) as a `Rooted<StructRef>`.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(gc_ref.is_structref(&*store.unwrap_gc_store().gc_heap));
        Rooted::new(store, gc_ref)
    }
}
659
unsafe impl WasmTy for Rooted<StructRef> {
    // A non-nullable `structref` is the Wasm type `(ref struct)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Supertypes of every struct reference: always fine.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // A concrete struct type requires a dynamic subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // Every remaining heap type is disjoint from `struct`; listed
            // explicitly (no `_` arm) so adding a `HeapType` variant forces a
            // decision here.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    // Struct references are passed to/from Wasm through the `anyref` slot of
    // a `ValRaw`.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
708
709unsafe impl WasmTy for Option<Rooted<StructRef>> {
710 #[inline]
711 fn valtype() -> ValType {
712 ValType::STRUCTREF
713 }
714
715 #[inline]
716 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
717 self.map_or(true, |x| x.comes_from_same_store(store))
718 }
719
720 #[inline]
721 fn dynamic_concrete_type_check(
722 &self,
723 store: &StoreOpaque,
724 nullable: bool,
725 ty: &HeapType,
726 ) -> Result<()> {
727 match self {
728 Some(s) => Rooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty),
729 None => {
730 ensure!(
731 nullable,
732 "expected a non-null reference, but found a null reference"
733 );
734 Ok(())
735 }
736 }
737 }
738
739 #[inline]
740 fn is_vmgcref_and_points_to_object(&self) -> bool {
741 self.is_some()
742 }
743
744 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
745 <Rooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
746 }
747
748 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
749 <Rooted<StructRef>>::wasm_ty_option_load(
750 store,
751 ptr.get_anyref(),
752 StructRef::from_cloned_gc_ref,
753 )
754 }
755}
756
unsafe impl WasmTy for ManuallyRooted<StructRef> {
    // A non-nullable `structref` is the Wasm type `(ref struct)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Supertypes of every struct reference: always fine.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // A concrete struct type requires a dynamic subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // Every remaining heap type is disjoint from `struct`; listed
            // explicitly (no `_` arm) so adding a `HeapType` variant forces a
            // decision here.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    // Struct references are passed to/from Wasm through the `anyref` slot of
    // a `ValRaw`.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
805
806unsafe impl WasmTy for Option<ManuallyRooted<StructRef>> {
807 #[inline]
808 fn valtype() -> ValType {
809 ValType::STRUCTREF
810 }
811
812 #[inline]
813 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
814 self.as_ref()
815 .map_or(true, |x| x.comes_from_same_store(store))
816 }
817
818 #[inline]
819 fn dynamic_concrete_type_check(
820 &self,
821 store: &StoreOpaque,
822 nullable: bool,
823 ty: &HeapType,
824 ) -> Result<()> {
825 match self {
826 Some(s) => {
827 ManuallyRooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty)
828 }
829 None => {
830 ensure!(
831 nullable,
832 "expected a non-null reference, but found a null reference"
833 );
834 Ok(())
835 }
836 }
837 }
838
839 #[inline]
840 fn is_vmgcref_and_points_to_object(&self) -> bool {
841 self.is_some()
842 }
843
844 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
845 <ManuallyRooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
846 }
847
848 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
849 <ManuallyRooted<StructRef>>::wasm_ty_option_load(
850 store,
851 ptr.get_anyref(),
852 StructRef::from_cloned_gc_ref,
853 )
854 }
855}