1use crate::runtime::vm::VMGcRef;
4use crate::store::StoreId;
5use crate::vm::{VMGcHeader, VMStructRef};
6use crate::{
7 prelude::*,
8 store::{AutoAssertNoGc, StoreContextMut, StoreOpaque},
9 AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
10 ManuallyRooted, RefType, Rooted, StructType, Val, ValRaw, ValType, WasmTy,
11};
12use crate::{AnyRef, FieldType};
13use core::mem::{self, MaybeUninit};
14use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
15
/// An allocator for a particular Wasm GC `struct` type.
///
/// Caches the store association and the registered struct type so that
/// repeated allocations of the same concrete struct type are cheap.
pub struct StructRefPre {
    // The id of the store this pre was created for; using it with any other
    // store is a programmer error (asserted at allocation time).
    store_id: StoreId,
    // The concrete struct type that allocated instances will have.
    ty: StructType,
}
65
66impl StructRefPre {
67 pub fn new(mut store: impl AsContextMut, ty: StructType) -> Self {
70 Self::_new(store.as_context_mut().0, ty)
71 }
72
73 pub(crate) fn _new(store: &mut StoreOpaque, ty: StructType) -> Self {
74 store.insert_gc_host_alloc_type(ty.registered_type().clone());
75 let store_id = store.id();
76
77 StructRefPre { store_id, ty }
78 }
79
80 pub(crate) fn layout(&self) -> &GcStructLayout {
81 self.ty
82 .registered_type()
83 .layout()
84 .expect("struct types have a layout")
85 .unwrap_struct()
86 }
87
88 pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
89 self.ty.registered_type().index()
90 }
91}
92
/// A reference to a GC-managed `struct` object.
///
/// `#[repr(transparent)]` over a `GcRootIndex` — this layout guarantee is
/// what makes the `transmute` in `GcRefImpl::transmute_ref` below sound.
#[derive(Debug)]
#[repr(transparent)]
pub struct StructRef {
    pub(super) inner: GcRootIndex,
}
156
unsafe impl GcRefImpl for StructRef {
    /// Reinterpret a `&GcRootIndex` as a `&StructRef`.
    #[allow(private_interfaces)]
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // SAFETY: `StructRef` is `#[repr(transparent)]` over `GcRootIndex`,
        // so the two reference types have identical layout.
        let me: &Self = unsafe { mem::transmute(index) };

        // Exhaustive-field pattern: this stops compiling if `StructRef` ever
        // grows another field, which would invalidate the transmute above.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}
174
175impl Rooted<StructRef> {
176 #[inline]
178 pub fn to_anyref(self) -> Rooted<AnyRef> {
179 self.unchecked_cast()
180 }
181
182 #[inline]
184 pub fn to_eqref(self) -> Rooted<EqRef> {
185 self.unchecked_cast()
186 }
187}
188
189impl ManuallyRooted<StructRef> {
190 #[inline]
192 pub fn to_anyref(self) -> ManuallyRooted<AnyRef> {
193 self.unchecked_cast()
194 }
195
196 #[inline]
198 pub fn to_eqref(self) -> ManuallyRooted<EqRef> {
199 self.unchecked_cast()
200 }
201}
202
impl StructRef {
    /// Allocate a new struct of the type described by `allocator` in the
    /// store's GC heap, initializing its fields with `fields`, and return a
    /// rooted reference to it.
    ///
    /// # Panics
    ///
    /// Panics if the store has async support enabled; use
    /// [`StructRef::new_async`] with asynchronous stores.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::_new(store.as_context_mut().0, allocator, fields)
    }

    pub(crate) fn _new(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert!(
            !store.async_support(),
            "use `StructRef::new_async` with asynchronous stores"
        );
        // Type-check the field values up front; `new_unchecked` assumes this
        // has already been done.
        Self::type_check_fields(store, allocator, fields)?;
        // On GC-heap out-of-memory, run a collection and retry the
        // allocation.
        store.retry_after_gc((), |store, ()| {
            Self::new_unchecked(store, allocator, fields)
        })
    }

    /// Asynchronous version of [`StructRef::new`].
    ///
    /// # Panics
    ///
    /// Panics if the store does not have async support enabled.
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::_new_async(store.as_context_mut().0, allocator, fields).await
    }

    #[cfg(feature = "async")]
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert!(
            store.async_support(),
            "use `StructRef::new` with synchronous stores"
        );
        Self::type_check_fields(store, allocator, fields)?;
        // Same as `_new`, but any required GC may suspend instead of
        // blocking.
        store
            .retry_after_gc_async((), |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
            .await
    }

    /// Allocate a new struct regardless of whether the store is sync or
    /// async.
    ///
    /// # Safety
    ///
    /// Callers must uphold the contract of
    /// `StoreOpaque::retry_after_gc_maybe_async` (presumably that it is
    /// acceptable to block here when an async store must GC — TODO confirm
    /// against that method's documentation).
    pub(crate) unsafe fn new_maybe_async(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::type_check_fields(store, allocator, fields)?;
        unsafe {
            store.retry_after_gc_maybe_async((), |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
        }
    }

    /// Check that `fields` has exactly the arity of the allocator's struct
    /// type and that every value matches its field's type.
    ///
    /// # Panics
    ///
    /// Panics if any field value comes from a different store.
    fn type_check_fields(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<(), Error> {
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            // Values are checked against the unpacked form of the field's
            // element type.
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    /// Allocate and initialize a struct without re-type-checking `fields`;
    /// callers must have already run `type_check_fields`.
    ///
    /// # Panics
    ///
    /// Panics if `allocator` was created for a different store.
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `StructRefPre` with the wrong store"
        );

        // Allocate an uninitialized struct. The outer `Result` is an
        // unrecoverable allocator error; the inner one is a recoverable
        // heap out-of-memory that callers handle by GC-ing and retrying.
        let structref = store
            .gc_store_mut()?
            .alloc_uninit_struct(allocator.type_index(), &allocator.layout())
            .context("unrecoverable error when allocating new `structref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // `AutoAssertNoGc` ensures no collection can run while the struct's
        // fields are still uninitialized.
        let mut store = AutoAssertNoGc::new(store);
        match (|| {
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                structref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, structref.into())),
            Err(e) => {
                // Initialization failed: give the uninitialized object back
                // to the GC heap rather than leaking it.
                store.gc_store_mut()?.dealloc_uninit_struct(structref);
                Err(e)
            }
        }
    }

    /// Whether this reference belongs to the given store.
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Get this struct's concrete type.
    ///
    /// Errors if the underlying GC reference is unavailable (per
    /// `try_gc_ref`, e.g. when this reference has been unrooted).
    ///
    /// # Panics
    ///
    /// Panics if used with a store other than the one this reference
    /// belongs to.
    pub fn ty(&self, store: impl AsContext) -> Result<StructType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<StructType> {
        assert!(self.comes_from_same_store(store));
        // The concrete type is recorded in the object's GC header.
        let index = self.type_index(store)?;
        Ok(StructType::from_shared_type_index(store.engine(), index))
    }

    /// Whether this struct's type matches (is a subtype of) `ty`.
    pub fn matches_ty(&self, store: impl AsContext, ty: &StructType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(self._ty(store)?.matches(ty))
    }

    /// Like `_matches_ty`, but produces a descriptive error on mismatch and
    /// errors (instead of panicking) on a store mismatch.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Iterate over this struct's field values, in field-definition order.
    pub fn fields<'a, T: 'a>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        let store = AutoAssertNoGc::new(store);

        // Read the concrete type out of the GC header to learn how many
        // fields this struct has.
        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));

        let index = header.ty().expect("structrefs should have concrete types");
        let ty = StructType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            structref: self,
            store,
            index: 0,
            len,
        });

        // A lazy iterator that reads one field per `next` call while
        // holding the `AutoAssertNoGc` witness.
        struct Fields<'a, 'b> {
            structref: &'a StructRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                // `i` is in bounds by construction, so `_field` can only
                // fail for non-index reasons; `unwrap` documents that.
                Some(self.structref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    /// Borrow this object's GC header.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.gc_store()?.header(gc_ref))
    }

    /// Borrow the underlying `VMStructRef` for this object.
    fn structref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMStructRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        // The unchecked downcast is justified by the kind recorded in the
        // header, checked here in debug builds.
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::StructRef));
        Ok(gc_ref.as_structref_unchecked())
    }

    /// Look up this struct type's GC layout in the engine's type registry.
    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcStructLayout> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("struct types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            // `type_index` names a struct type, so it can never have an
            // array layout.
            GcLayout::Array(_) => unreachable!(),
        }
    }

    /// The type of the `field`th field, or an error if `field` is out of
    /// bounds.
    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: struct only has {len} fields")
            }
        }
    }

    /// Read the value of this struct's `index`th field.
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let structref = self.structref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(structref.read_field(store, &layout, field_ty.element_type(), index))
    }

    /// Write `value` into this struct's `index`th field.
    ///
    /// Errors if `index` is out of bounds, the field is not mutable, or
    /// `value` does not match the field's type.
    pub fn set_field(&self, mut store: impl AsContextMut, index: usize, value: Val) -> Result<()> {
        self._set_field(store.as_context_mut().0, index, value)
    }

    pub(crate) fn _set_field(
        &self,
        store: &mut StoreOpaque,
        index: usize,
        value: Val,
    ) -> Result<()> {
        assert!(self.comes_from_same_store(store));
        let mut store = AutoAssertNoGc::new(store);

        let field_ty = self.field_ty(&store, index)?;
        ensure!(
            field_ty.mutability().is_var(),
            "cannot set field {index}: field is not mutable"
        );

        value
            .ensure_matches_ty(&store, &field_ty.element_type().unpack())
            .with_context(|| format!("cannot set field {index}: type mismatch"))?;

        let layout = self.layout(&store)?;
        let structref = self.structref(&store)?.unchecked_copy();

        structref.write_field(&mut store, &layout, field_ty.element_type(), index, value)
    }

    /// The engine-shared index of this struct's concrete type, read out of
    /// its GC header.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));
        Ok(header.ty().expect("structrefs should have concrete types"))
    }

    /// Root the given (already-cloned) GC reference as a `StructRef`.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(gc_ref.is_structref(&*store.unwrap_gc_store().gc_heap));
        Rooted::new(store, gc_ref)
    }
}
659
unsafe impl WasmTy for Rooted<StructRef> {
    #[inline]
    fn valtype() -> ValType {
        // Non-nullable `(ref struct)`.
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Every struct is a subtype of these abstract heap types.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // Concrete struct types require a dynamic subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // A struct can never inhabit any of these heap types.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        // Structrefs cross the Wasm boundary in the `anyref` representation.
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
705
unsafe impl WasmTy for Option<Rooted<StructRef>> {
    #[inline]
    fn valtype() -> ValType {
        // Nullable `(ref null struct)`; `None` is the null reference.
        ValType::STRUCTREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        // Null is compatible with every store.
        self.map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            // Non-null: defer to the non-optional impl.
            Some(s) => Rooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty),
            // Null only matches nullable reference types.
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        // A null reference does not point at a GC object.
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <Rooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <Rooted<StructRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            StructRef::from_cloned_gc_ref,
        )
    }
}
753
unsafe impl WasmTy for ManuallyRooted<StructRef> {
    #[inline]
    fn valtype() -> ValType {
        // Non-nullable `(ref struct)`.
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Every struct is a subtype of these abstract heap types.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // Concrete struct types require a dynamic subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // A struct can never inhabit any of these heap types.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        // Structrefs cross the Wasm boundary in the `anyref` representation.
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
799
unsafe impl WasmTy for Option<ManuallyRooted<StructRef>> {
    #[inline]
    fn valtype() -> ValType {
        // Nullable `(ref null struct)`; `None` is the null reference.
        ValType::STRUCTREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        // Null is compatible with every store. `ManuallyRooted` is not
        // `Copy`, hence `as_ref` here (unlike the `Rooted` impl).
        self.as_ref()
            .map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            // Non-null: defer to the non-optional impl.
            Some(s) => {
                ManuallyRooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty)
            }
            // Null only matches nullable reference types.
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        // A null reference does not point at a GC object.
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <ManuallyRooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <ManuallyRooted<StructRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            StructRef::from_cloned_gc_ref,
        )
    }
}