1use crate::runtime::vm::VMGcRef;
4use crate::store::StoreId;
5use crate::vm::{VMGcHeader, VMStructRef};
6use crate::{AnyRef, FieldType};
7use crate::{
8 AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
9 ManuallyRooted, RefType, Rooted, StructType, Val, ValRaw, ValType, WasmTy,
10 prelude::*,
11 store::{AutoAssertNoGc, StoreContextMut, StoreOpaque},
12};
13use core::mem::{self, MaybeUninit};
14use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
15
/// An allocator for a particular Wasm GC `struct` type.
///
/// Caches the per-type state (store identity and registered type) needed to
/// allocate `StructRef`s repeatedly without re-registering the type on every
/// allocation.
pub struct StructRefPre {
    // Identity of the store this allocator is tied to; `StructRef::new_unchecked`
    // asserts that allocations happen in this same store.
    store_id: StoreId,
    // The concrete struct type this allocator creates instances of.
    ty: StructType,
}
65
66impl StructRefPre {
67 pub fn new(mut store: impl AsContextMut, ty: StructType) -> Self {
70 Self::_new(store.as_context_mut().0, ty)
71 }
72
73 pub(crate) fn _new(store: &mut StoreOpaque, ty: StructType) -> Self {
74 store.insert_gc_host_alloc_type(ty.registered_type().clone());
75 let store_id = store.id();
76
77 StructRefPre { store_id, ty }
78 }
79
80 pub(crate) fn layout(&self) -> &GcStructLayout {
81 self.ty
82 .registered_type()
83 .layout()
84 .expect("struct types have a layout")
85 .unwrap_struct()
86 }
87
88 pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
89 self.ty.registered_type().index()
90 }
91}
92
/// A reference to an allocated Wasm GC struct.
///
/// This is a `#[repr(transparent)]` wrapper around a `GcRootIndex`; the
/// `GcRefImpl::transmute_ref` implementation relies on that exact layout,
/// so no other fields may ever be added here.
#[derive(Debug)]
#[repr(transparent)]
pub struct StructRef {
    pub(super) inner: GcRootIndex,
}
156
unsafe impl GcRefImpl for StructRef {
    // Reinterpret a root index as a `StructRef` without copying.
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // SAFETY: `StructRef` is `#[repr(transparent)]` over `GcRootIndex`,
        // so `&GcRootIndex` and `&StructRef` have identical layout.
        let me: &Self = unsafe { mem::transmute(index) };

        // Shape check: this `matches!` can never fail at runtime, but it
        // stops compiling if a field is ever added to `StructRef`, which
        // would silently break the transmute above.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}
173
impl Rooted<StructRef> {
    /// Upcast this `structref` into an `anyref`.
    ///
    /// Infallible and zero-cost: every struct reference is also an `any`
    /// reference in the WasmGC type hierarchy (see the `HeapType::Any`
    /// acceptance in this type's `dynamic_concrete_type_check`).
    #[inline]
    pub fn to_anyref(self) -> Rooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcast this `structref` into an `eqref`.
    ///
    /// Infallible and zero-cost, like [`Self::to_anyref`].
    #[inline]
    pub fn to_eqref(self) -> Rooted<EqRef> {
        self.unchecked_cast()
    }
}
187
impl ManuallyRooted<StructRef> {
    /// Upcast this `structref` into an `anyref`.
    ///
    /// Infallible and zero-cost; consumes `self` so the manual root is
    /// transferred, not duplicated.
    #[inline]
    pub fn to_anyref(self) -> ManuallyRooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcast this `structref` into an `eqref`.
    ///
    /// Infallible and zero-cost; consumes `self` so the manual root is
    /// transferred, not duplicated.
    #[inline]
    pub fn to_eqref(self) -> ManuallyRooted<EqRef> {
        self.unchecked_cast()
    }
}
201
impl StructRef {
    /// Synchronously allocate a new struct of `allocator`'s type, initialized
    /// with the given `fields`, returning a LIFO-rooted reference to it.
    ///
    /// # Errors
    ///
    /// Returns an error if `fields` has the wrong length or types, or if the
    /// GC heap is out of memory even after collection.
    ///
    /// # Panics
    ///
    /// Panics (via `_new`) if the store is configured for async support; use
    /// [`StructRef::new_async`] with async stores.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::_new(store.as_context_mut().0, allocator, fields)
    }

    pub(crate) fn _new(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert!(
            !store.async_support(),
            "use `StructRef::new_async` with asynchronous stores"
        );
        // Type-check once up front so the retry closure below only has to
        // deal with allocation failures, not validation failures.
        Self::type_check_fields(store, allocator, fields)?;
        // If the first allocation attempt reports out-of-memory, the store
        // runs a GC and retries the closure.
        store.retry_after_gc((), |store, ()| {
            Self::new_unchecked(store, allocator, fields)
        })
    }

    /// Asynchronously allocate a new struct of `allocator`'s type, initialized
    /// with the given `fields`.
    ///
    /// Same as [`StructRef::new`] but for stores with async support, where a
    /// triggered GC may itself suspend.
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::_new_async(store.as_context_mut().0, allocator, fields).await
    }

    #[cfg(feature = "async")]
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert!(
            store.async_support(),
            "use `StructRef::new` with synchronous stores"
        );
        Self::type_check_fields(store, allocator, fields)?;
        // Async flavor of the GC-and-retry loop used in `_new`.
        store
            .retry_after_gc_async((), |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
            .await
    }

    /// Allocate a new struct, dispatching to sync or async GC-retry behavior
    /// as appropriate for the store.
    ///
    /// # Safety
    ///
    /// Callers must uphold whatever contract
    /// `StoreOpaque::retry_after_gc_maybe_async` requires (not visible in
    /// this file — presumably that it is safe to block/suspend here for a
    /// possible GC; TODO confirm against that method's docs).
    pub(crate) unsafe fn new_maybe_async(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::type_check_fields(store, allocator, fields)?;
        // SAFETY: forwarded from this function's own safety contract.
        unsafe {
            store.retry_after_gc_maybe_async((), |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
        }
    }

    /// Validate that `fields` matches the arity and field types of
    /// `allocator`'s struct type.
    ///
    /// Panics if a field `Val` belongs to a different store; returns an error
    /// on arity or type mismatch.
    fn type_check_fields(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<(), Error> {
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            // Cross-store values are a host bug, not a recoverable error.
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            // Packed (i8/i16) storage types are unpacked to their Wasm-level
            // value type before matching.
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    /// Allocate and initialize a struct without re-doing field type checks
    /// (callers must have run `type_check_fields` first).
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `StructRefPre` with the wrong store"
        );

        // Allocate an *uninitialized* struct. The outer `?` propagates
        // unrecoverable errors; the inner result distinguishes an OOM (with a
        // suggested collection amount `n`) that the `retry_after_gc*` callers
        // turn into a GC-and-retry.
        let structref = store
            .gc_store_mut()?
            .alloc_uninit_struct(allocator.type_index(), &allocator.layout())
            .context("unrecoverable error when allocating new `structref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // From this point until the struct is fully initialized, no GC may
        // run: the collector could otherwise observe uninitialized fields.
        let mut store = AutoAssertNoGc::new(store);
        match (|| {
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                structref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, structref.into())),
            Err(e) => {
                // Initialization failed partway: eagerly deallocate the
                // partially-initialized struct rather than letting the next
                // GC see it.
                store.gc_store_mut()?.dealloc_uninit_struct(structref);
                Err(e)
            }
        }
    }

    /// Whether this reference is rooted in (and thus usable with) `store`.
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// The concrete struct type of this reference.
    ///
    /// Errors if the underlying GC reference has been unrooted (see
    /// `try_gc_ref` in `_ty`/`type_index`).
    pub fn ty(&self, store: impl AsContext) -> Result<StructType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<StructType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(StructType::from_shared_type_index(store.engine(), index))
    }

    /// Whether this struct's type is a subtype of (matches) `ty`.
    pub fn matches_ty(&self, store: impl AsContext, ty: &StructType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(self._ty(store)?.matches(ty))
    }

    /// Like `_matches_ty` but returns a descriptive error instead of `false`,
    /// and reports wrong-store usage as an error rather than panicking.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Iterate over this struct's field values, in definition order.
    ///
    /// The returned iterator mutably borrows the store, so no other store
    /// operations are possible while it is alive.
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        // GC is disabled for the iterator's whole lifetime: the iterator
        // holds this `AutoAssertNoGc` and reads fields lazily.
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));

        // Determine the field count once, up front, from the object's
        // concrete type.
        let index = header.ty().expect("structrefs should have concrete types");
        let ty = StructType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        // `return` before the item definitions below: the iterator type is
        // private to this function and only exposed as `impl Iterator`.
        return Ok(Fields {
            structref: self,
            store,
            index: 0,
            len,
        });

        struct Fields<'a, 'b> {
            structref: &'a StructRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                // `unwrap` is justified: `i < self.len` was checked above and
                // the gc ref was validated when the iterator was created.
                Some(self.structref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    /// Borrow the GC object header for this struct.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.gc_store()?.header(gc_ref))
    }

    /// Borrow the underlying `VMStructRef`.
    fn structref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMStructRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        // The struct-kind check is debug-only; release builds rely on the
        // invariant that a rooted `StructRef` always points at a struct.
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::StructRef));
        Ok(gc_ref.as_structref_unchecked())
    }

    /// Look up this struct's GC layout from the engine's type registry.
    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcStructLayout> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("struct types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            // A structref's type index can never name an array or exception
            // layout.
            GcLayout::Array(_) => unreachable!(),
            GcLayout::Exception(_) => unreachable!(),
        }
    }

    /// The type of field number `field`, or an out-of-bounds error.
    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: struct only has {len} fields")
            }
        }
    }

    /// Read the value of the `index`th field.
    ///
    /// Errors if `index` is out of bounds or the reference is unrooted.
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        // Copy the raw ref out first so `store` is free for the later calls.
        let structref = self.structref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(structref.read_field(store, &layout, field_ty.element_type(), index))
    }

    /// Write `value` into the `index`th field.
    ///
    /// Errors if the field is immutable, `index` is out of bounds, or
    /// `value`'s type does not match the field's type.
    pub fn set_field(&self, mut store: impl AsContextMut, index: usize, value: Val) -> Result<()> {
        self._set_field(store.as_context_mut().0, index, value)
    }

    pub(crate) fn _set_field(
        &self,
        store: &mut StoreOpaque,
        index: usize,
        value: Val,
    ) -> Result<()> {
        assert!(self.comes_from_same_store(store));
        // No GC may happen between validating `value` and writing it, or the
        // object could move/be collected under us.
        let mut store = AutoAssertNoGc::new(store);

        let field_ty = self.field_ty(&store, index)?;
        ensure!(
            field_ty.mutability().is_var(),
            "cannot set field {index}: field is not mutable"
        );

        value
            .ensure_matches_ty(&store, &field_ty.element_type().unpack())
            .with_context(|| format!("cannot set field {index}: type mismatch"))?;

        let layout = self.layout(&store)?;
        let structref = self.structref(&store)?.unchecked_copy();

        structref.write_field(&mut store, &layout, field_ty.element_type(), index, value)
    }

    /// The shared type index recorded in this struct's GC header.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));
        Ok(header.ty().expect("structrefs should have concrete types"))
    }

    /// Root an already-cloned GC reference as a `Rooted<StructRef>`.
    ///
    /// `gc_ref` must actually point at a struct; this is only checked in
    /// debug builds.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(gc_ref.is_structref(&*store.unwrap_gc_store().gc_heap));
        Rooted::new(store, gc_ref)
    }
}
659
unsafe impl WasmTy for Rooted<StructRef> {
    #[inline]
    fn valtype() -> ValType {
        // Non-nullable `(ref struct)`: a bare `Rooted<StructRef>` cannot
        // represent null (that is `Option<Rooted<StructRef>>`).
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        // Nullability is irrelevant: `self` is non-null by construction.
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Every struct is a subtype of `struct`, `eq`, and `any`.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // Concrete struct types require a full runtime subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // All other heap types live in disjoint hierarchies (or below
            // struct types), so a structref can never match them.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    // Structrefs travel through the raw ABI in the `anyref` slot of `ValRaw`.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
711
unsafe impl WasmTy for Option<Rooted<StructRef>> {
    #[inline]
    fn valtype() -> ValType {
        // Nullable `(ref null struct)`; `None` encodes the null reference.
        ValType::STRUCTREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        // `None` (null) is valid in any store.
        self.map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            // Non-null values defer to the non-optional impl's check.
            Some(s) => Rooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty),
            // Null only matches nullable reference types.
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        // Only a non-null value holds an actual GC object.
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <Rooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <Rooted<StructRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            StructRef::from_cloned_gc_ref,
        )
    }
}
759
unsafe impl WasmTy for ManuallyRooted<StructRef> {
    #[inline]
    fn valtype() -> ValType {
        // Non-nullable `(ref struct)`, same as the `Rooted` impl.
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        // Nullability is irrelevant: `self` is non-null by construction.
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        // Mirrors `Rooted<StructRef>::dynamic_concrete_type_check`.
        match ty {
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    // Same raw `anyref` ABI representation as the `Rooted` impl.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
811
unsafe impl WasmTy for Option<ManuallyRooted<StructRef>> {
    #[inline]
    fn valtype() -> ValType {
        // Nullable `(ref null struct)`; `None` encodes the null reference.
        ValType::STRUCTREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        // `None` (null) is valid in any store. `as_ref` because
        // `ManuallyRooted` is not `Copy`.
        self.as_ref()
            .map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            // Non-null values defer to the non-optional impl's check.
            Some(s) => {
                ManuallyRooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty)
            }
            // Null only matches nullable reference types.
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        // Only a non-null value holds an actual GC object.
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <ManuallyRooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <ManuallyRooted<StructRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            StructRef::from_cloned_gc_ref,
        )
    }
}