#![cfg(feature = "gc")]
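
//! Working with GC-managed `struct` values from host code.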

use crate::runtime::vm::VMGcRef;
use crate::store::{Asyncness, StoreId};
#[cfg(feature = "async")]
use crate::vm::VMStore;
use crate::vm::{self, VMGcHeader, VMStructRef};
use crate::{AnyRef, FieldType};
use crate::{
    AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
    OwnedRooted, RefType, Rooted, StructType, Val, ValRaw, ValType, WasmTy,
    prelude::*,
    store::{AutoAssertNoGc, StoreContextMut, StoreOpaque, StoreResourceLimiter},
};
use alloc::sync::Arc;
use core::mem::{self, MaybeUninit};
use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};

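/// An allocator for a particular Wasm GC struct type.
///
/// A `StructRefPre` pairs a [`StructType`] with the store it was registered
/// in, pre-resolving the type's GC layout so that repeated
/// [`StructRef::new`] calls don't have to look it up each time.
///
/// A minimal usage sketch (assuming an engine whose config has Wasm GC
/// enabled; the one-field struct type here is hypothetical):
///
/// ```ignore
/// use wasmtime::*;
///
/// fn example() -> Result<()> {
///     let mut config = Config::new();
///     config.wasm_gc(true);
///     let engine = Engine::new(&config)?;
///     let mut store = Store::new(&engine, ());
///
///     // A struct type with a single mutable `i32` field.
///     let field = FieldType::new(Mutability::Var, StorageType::ValType(ValType::I32));
///     let struct_ty = StructType::new(&engine, [field])?;
///
///     // Create the allocator once, then reuse it for many allocations.
///     let pre = StructRefPre::new(&mut store, struct_ty);
///     let mut scope = RootScope::new(&mut store);
///     for i in 0..10 {
///         let s = StructRef::new(&mut scope, &pre, &[Val::I32(i)])?;
///         assert_eq!(s.field(&mut scope, 0)?.unwrap_i32(), i);
///     }
///     Ok(())
/// }
/// ```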
pub struct StructRefPre {
    store_id: StoreId,
    ty: StructType,
}

impl StructRefPre {
    pub fn new(mut store: impl AsContextMut, ty: StructType) -> Self {
        Self::_new(store.as_context_mut().0, ty)
    }

    pub(crate) fn _new(store: &mut StoreOpaque, ty: StructType) -> Self {
        store.insert_gc_host_alloc_type(ty.registered_type().clone());
        let store_id = store.id();

        StructRefPre { store_id, ty }
    }

    pub(crate) fn layout(&self) -> &GcStructLayout {
        self.ty
            .registered_type()
            .layout()
            .expect("struct types have a layout")
            .unwrap_struct()
    }

    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
        self.ty.registered_type().index()
    }
}

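/// A reference to a GC-managed `struct` instance, the host-side view of a
/// value of Wasm type `(ref struct)`.
///
/// Like all GC references, a `StructRef` is only usable through a rooting
/// wrapper ([`Rooted`] or [`OwnedRooted`]) and only with the store it
/// belongs to: using it with another store panics, and using it after it
/// has been unrooted returns an error.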
#[derive(Debug)]
#[repr(transparent)]
pub struct StructRef {
    pub(super) inner: GcRootIndex,
}

unsafe impl GcRefImpl for StructRef {
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // Safety: `StructRef` is a `repr(transparent)` newtype of a
        // `GcRootIndex`.
        let me: &Self = unsafe { mem::transmute(index) };

        // Assert that we really are just a newtype of a `GcRootIndex`.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}

impl Rooted<StructRef> {
    #[inline]
    pub fn to_anyref(self) -> Rooted<AnyRef> {
        self.unchecked_cast()
    }

    #[inline]
    pub fn to_eqref(self) -> Rooted<EqRef> {
        self.unchecked_cast()
    }
}

impl OwnedRooted<StructRef> {
    #[inline]
    pub fn to_anyref(self) -> OwnedRooted<AnyRef> {
        self.unchecked_cast()
    }

    #[inline]
    pub fn to_eqref(self) -> OwnedRooted<EqRef> {
        self.unchecked_cast()
    }
}

impl StructRef {
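    /// Synchronously allocate a new struct of the type described by
    /// `allocator` and initialize its fields with `fields`.
    ///
    /// The field values are type checked before allocation. If the GC heap
    /// is out of space, a collection is run and the allocation retried;
    /// failing that, a [`GcHeapOutOfMemory`] error is returned. Stores
    /// configured with an async resource limiter should use
    /// [`StructRef::new_async`] instead.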
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        let (mut limiter, store) = store
            .as_context_mut()
            .0
            .validate_sync_resource_limiter_and_store_opaque()?;
        vm::assert_ready(Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            fields,
            Asyncness::No,
        ))
    }

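    /// Asynchronously allocate a new struct of the type described by
    /// `allocator` and initialize its fields with `fields`.
    ///
    /// This is the `async` counterpart to [`StructRef::new`] for stores
    /// whose resource limiter is asynchronous; any GC needed to make room
    /// for the allocation can suspend at an `.await` point.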
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_async(store, limiter.as_mut(), allocator, fields, Asyncness::Yes).await
    }

    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &StructRefPre,
        fields: &[Val],
        asyncness: Asyncness,
    ) -> Result<Rooted<StructRef>> {
        Self::type_check_fields(store, allocator, fields)?;
        store
            .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
            .await
    }

    fn type_check_fields(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<(), Error> {
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `StructRefPre` with the wrong store"
        );

        let structref = store
            .require_gc_store_mut()?
            .alloc_uninit_struct(allocator.type_index(), allocator.layout())
            .context("unrecoverable error when allocating new `structref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        let mut store = AutoAssertNoGc::new(store);

        // Initialize the new struct's fields, deallocating the uninitialized
        // struct again if any field fails to initialize.
        match (|| {
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                structref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, structref.into())),
            Err(e) => {
                store
                    .require_gc_store_mut()?
                    .dealloc_uninit_struct(structref);
                Err(e)
            }
        }
    }

    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

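    /// Get the concrete, runtime type of this struct.
    ///
    /// Returns an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if used with a store other than the one this reference
    /// belongs to.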
    pub fn ty(&self, store: impl AsContext) -> Result<StructType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<StructType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(StructType::from_shared_type_index(store.engine(), index))
    }

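    /// Does this struct's runtime type match (i.e. is it a subtype of) the
    /// given type?
    ///
    /// Returns an error if this reference has been unrooted, and panics on
    /// a store mismatch, just like [`StructRef::ty`].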
    pub fn matches_ty(&self, store: impl AsContext, ty: &StructType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(self._ty(store)?.matches(ty))
    }

    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

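    /// Get an iterator over this struct's field values, in definition
    /// order.
    ///
    /// The iterator borrows the store mutably for its whole lifetime, so
    /// the values are typically collected or consumed on the spot. An
    /// illustrative sketch (reusing the hypothetical one-field struct type
    /// from the [`StructRefPre`] example):
    ///
    /// ```ignore
    /// let s = StructRef::new(&mut store, &pre, &[Val::I32(42)])?;
    /// let fields: Vec<Val> = s.fields(&mut store)?.collect();
    /// assert_eq!(fields.len(), 1);
    /// ```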
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));

        let index = header.ty().expect("structrefs should have concrete types");
        let ty = StructType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            structref: self,
            store,
            index: 0,
            len,
        });

        struct Fields<'a, 'b> {
            structref: &'a StructRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                Some(self.structref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.require_gc_store()?.header(gc_ref))
    }

    fn structref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMStructRef> {
        assert!(self.comes_from_same_store(store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::StructRef));
        Ok(gc_ref.as_structref_unchecked())
    }

    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<Arc<GcStructLayout>> {
        assert!(self.comes_from_same_store(store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("struct types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            GcLayout::Array(_) => unreachable!(),
        }
    }

    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: struct only has {len} fields")
            }
        }
    }

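    /// Get this struct's `index`th field, as a [`Val`].
    ///
    /// Returns an error if this reference has been unrooted or if `index`
    /// is out of bounds.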
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let structref = self.structref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(structref.read_field(store, &layout, field_ty.element_type(), index))
    }

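    /// Set this struct's `index`th field to `value`.
    ///
    /// Returns an error if this reference has been unrooted, if `index` is
    /// out of bounds, if the field is not mutable, or if `value` does not
    /// match the field's type. An illustrative sketch (same hypothetical
    /// struct type as above):
    ///
    /// ```ignore
    /// let s = StructRef::new(&mut store, &pre, &[Val::I32(0)])?;
    /// s.set_field(&mut store, 0, Val::I32(36))?;
    /// assert_eq!(s.field(&mut store, 0)?.unwrap_i32(), 36);
    /// ```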
    pub fn set_field(&self, mut store: impl AsContextMut, index: usize, value: Val) -> Result<()> {
        self._set_field(store.as_context_mut().0, index, value)
    }

    pub(crate) fn _set_field(
        &self,
        store: &mut StoreOpaque,
        index: usize,
        value: Val,
    ) -> Result<()> {
        assert!(self.comes_from_same_store(store));
        let mut store = AutoAssertNoGc::new(store);

        let field_ty = self.field_ty(&store, index)?;
        ensure!(
            field_ty.mutability().is_var(),
            "cannot set field {index}: field is not mutable"
        );

        value
            .ensure_matches_ty(&store, field_ty.element_type().unpack())
            .with_context(|| format!("cannot set field {index}: type mismatch"))?;

        let layout = self.layout(&store)?;
        let structref = self.structref(&store)?.unchecked_copy();

        structref.write_field(&mut store, &layout, field_ty.element_type(), index, value)
    }

    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));
        Ok(header.ty().expect("structrefs should have concrete types"))
    }

    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(gc_ref.is_structref(&*store.unwrap_gc_store().gc_heap));
        Rooted::new(store, gc_ref)
    }
}

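// The `WasmTy` implementations below let struct references appear directly in
// typed host-function and `TypedFunc` signatures. `Rooted<StructRef>` and
// `OwnedRooted<StructRef>` correspond to the non-nullable `(ref struct)`
// type, while wrapping them in `Option` corresponds to the nullable
// `structref`, i.e. `(ref null struct)`, with `None` as the null reference.
//
// An illustrative sketch (hypothetical host function; assumes a store with
// GC enabled):
//
//     let f = Func::wrap(&mut store, |s: Option<Rooted<StructRef>>| {
//         s.is_some() as i32
//     });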
unsafe impl WasmTy for Rooted<StructRef> {
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}

unsafe impl WasmTy for Option<Rooted<StructRef>> {
    #[inline]
    fn valtype() -> ValType {
        ValType::STRUCTREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            Some(s) => Rooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty),
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <Rooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <Rooted<StructRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            StructRef::from_cloned_gc_ref,
        )
    }
}

unsafe impl WasmTy for OwnedRooted<StructRef> {
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}

unsafe impl WasmTy for Option<OwnedRooted<StructRef>> {
    #[inline]
    fn valtype() -> ValType {
        ValType::STRUCTREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.as_ref()
            .map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            Some(s) => {
                OwnedRooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty)
            }
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <OwnedRooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <OwnedRooted<StructRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            StructRef::from_cloned_gc_ref,
        )
    }
}