use crate::runtime::vm::VMGcRef;
use crate::store::StoreId;
use crate::vm::{self, VMGcHeader, VMStore, VMStructRef};
use crate::{AnyRef, FieldType};
use crate::{
    AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
    ManuallyRooted, RefType, Rooted, StructType, Val, ValRaw, ValType, WasmTy,
    prelude::*,
    store::{AutoAssertNoGc, StoreContextMut, StoreOpaque, StoreResourceLimiter},
};
use core::mem::{self, MaybeUninit};
use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};

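/// An allocator for a particular Wasm GC `struct` type.
///
/// Every `StructRefPre` is associated with a particular
/// [`Store`][crate::Store] and a particular [`StructType`]. Creating one
/// registers the struct type with the store, so reusing a `StructRefPre`
/// across many [`StructRef::new`] calls amortizes that per-type setup cost.
///
/// The returned allocator may only be used with its associated store;
/// attempting to allocate into a different store will panic.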
pub struct StructRefPre {
    store_id: StoreId,
    ty: StructType,
}

impl StructRefPre {
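    /// Creates a new `StructRefPre` that is associated with the given store
    /// and type.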
    pub fn new(mut store: impl AsContextMut, ty: StructType) -> Self {
        Self::_new(store.as_context_mut().0, ty)
    }

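    /// Like `new`, but operates directly on a `StoreOpaque`. Registering the
    /// struct type with the store keeps its registration alive for as long as
    /// the store may still allocate instances of it.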
    pub(crate) fn _new(store: &mut StoreOpaque, ty: StructType) -> Self {
        store.insert_gc_host_alloc_type(ty.registered_type().clone());
        let store_id = store.id();

        StructRefPre { store_id, ty }
    }

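    /// Returns the GC heap layout for structs allocated by this allocator.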
    pub(crate) fn layout(&self) -> &GcStructLayout {
        self.ty
            .registered_type()
            .layout()
            .expect("struct types have a layout")
            .unwrap_struct()
    }

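    /// Returns the engine-wide shared type index of this allocator's struct
    /// type.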
    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
        self.ty.registered_type().index()
    }
}

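/// A reference to a GC-managed `struct` instance.
///
/// Like all GC references, a `StructRef` is associated with a particular
/// [`Store`][crate::Store] and may only be used with that store. It must be
/// held in either a [`Rooted<StructRef>`] or a [`ManuallyRooted<StructRef>`]
/// so that the collector does not reclaim the referenced struct while the
/// host still holds the reference.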
#[derive(Debug)]
#[repr(transparent)]
pub struct StructRef {
    pub(super) inner: GcRootIndex,
}

unsafe impl GcRefImpl for StructRef {
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // SAFETY: `StructRef` is a `repr(transparent)` newtype of a
        // `GcRootIndex`.
        let me: &Self = unsafe { mem::transmute(index) };

        // Assert that we really are just a newtype of a `GcRootIndex`.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}

impl Rooted<StructRef> {
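    /// Upcasts this `structref` into an `anyref`.
    ///
    /// This is an infallible cast: every struct reference is also an `any`
    /// reference.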
    #[inline]
    pub fn to_anyref(self) -> Rooted<AnyRef> {
        self.unchecked_cast()
    }

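    /// Upcasts this `structref` into an `eqref`.
    ///
    /// This is an infallible cast: every struct reference is also an `eq`
    /// reference.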
    #[inline]
    pub fn to_eqref(self) -> Rooted<EqRef> {
        self.unchecked_cast()
    }
}

impl ManuallyRooted<StructRef> {
    #[inline]
    pub fn to_anyref(self) -> ManuallyRooted<AnyRef> {
        self.unchecked_cast()
    }

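    /// Upcasts this `structref` into an `eqref`.
    ///
    /// This is an infallible cast: every struct reference is also an `eq`
    /// reference.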
    #[inline]
    pub fn to_eqref(self) -> ManuallyRooted<EqRef> {
        self.unchecked_cast()
    }
}

impl StructRef {
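    /// Synchronously allocates a new `struct` of the given type in the given
    /// store, initialized with the given field values.
    ///
    /// The field values are type checked against `allocator`'s struct type
    /// before allocation. If the GC heap is at capacity, a collection is
    /// triggered and the allocation is retried; if there is still not enough
    /// room, a [`GcHeapOutOfMemory`] error is returned.
    ///
    /// # Panics
    ///
    /// Panics if the store is configured for async support; use
    /// [`StructRef::new_async`] instead in that case.
    ///
    /// # Example
    ///
    /// A minimal usage sketch. This is illustrative rather than normative: it
    /// assumes the public `wasmtime` API surface (`Config::wasm_gc`,
    /// `StructType::new`, `FieldType`, `StorageType`, `Mutability`) and is
    /// not compiled as a doctest here.
    ///
    /// ```ignore
    /// use wasmtime::*;
    ///
    /// fn example() -> Result<()> {
    ///     let mut config = Config::new();
    ///     config.wasm_gc(true);
    ///     let engine = Engine::new(&config)?;
    ///     let mut store = Store::new(&engine, ());
    ///
    ///     // A struct type with one mutable `i32` field.
    ///     let ty = StructType::new(
    ///         &engine,
    ///         [FieldType::new(Mutability::Var, StorageType::ValType(ValType::I32))],
    ///     )?;
    ///
    ///     // Reuse the pre-allocator across many allocations of this type.
    ///     let pre = StructRefPre::new(&mut store, ty);
    ///     let s: Rooted<StructRef> = StructRef::new(&mut store, &pre, &[Val::I32(42)])?;
    ///
    ///     assert_eq!(s.field(&mut store, 0)?.unwrap_i32(), 42);
    ///     Ok(())
    /// }
    /// ```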
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        assert!(
            !store.async_support(),
            "use `StructRef::new_async` with async stores"
        );
        vm::assert_ready(Self::_new_async(store, limiter.as_mut(), allocator, fields))
    }

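    /// Asynchronously allocates a new `struct` of the given type in the given
    /// store, initialized with the given field values.
    ///
    /// This is the same as [`StructRef::new`] except that it may be used with
    /// stores that have async support enabled, where any GC needed to free up
    /// heap space may yield to the async executor rather than blocking.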
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_async(store, limiter.as_mut(), allocator, fields).await
    }

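    /// Shared implementation of `new` and `new_async`: type check the field
    /// values, then allocate, triggering a GC and retrying the allocation on
    /// heap exhaustion.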
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::type_check_fields(store, allocator, fields)?;
        store
            .retry_after_gc_async(limiter, (), |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
            .await
    }

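    /// Type checks `fields` against `allocator`'s struct type, ensuring the
    /// correct field count and that each value matches its field's type.
    /// Panics if a field value comes from the wrong store.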
    fn type_check_fields(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<(), Error> {
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

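    /// Allocates and initializes a struct without type checking `fields`;
    /// callers must have already type checked them via `type_check_fields`.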
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `StructRefPre` with the wrong store"
        );

        // Allocate an uninitialized struct; on heap exhaustion, report
        // `GcHeapOutOfMemory` so callers can run a GC and retry.
        let structref = store
            .require_gc_store_mut()?
            .alloc_uninit_struct(allocator.type_index(), allocator.layout())
            .context("unrecoverable error when allocating new `structref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // From this point until the struct is fully initialized, no GC may
        // occur: the collector could otherwise see the struct's uninitialized
        // fields. If initialization fails partway through, eagerly deallocate
        // the uninitialized struct.
        let mut store = AutoAssertNoGc::new(store);
        match (|| {
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                structref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, structref.into())),
            Err(e) => {
                store
                    .require_gc_store_mut()?
                    .dealloc_uninit_struct(structref);
                Err(e)
            }
        }
    }

    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

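    /// Returns the type of this `struct` reference.
    ///
    /// # Errors
    ///
    /// Returns an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if used with a store other than the one this reference belongs
    /// to.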
    pub fn ty(&self, store: impl AsContext) -> Result<StructType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<StructType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(StructType::from_shared_type_index(store.engine(), index))
    }

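    /// Does this `struct` reference match the given type?
    ///
    /// That is, is this struct's type a subtype of the given type?
    ///
    /// # Errors
    ///
    /// Returns an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if used with a store other than the one this reference belongs
    /// to.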
    pub fn matches_ty(&self, store: impl AsContext, ty: &StructType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(self._ty(store)?.matches(ty))
    }

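    /// Like `_matches_ty`, but returns a descriptive error on type mismatch,
    /// and an error rather than a panic when used with the wrong store.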
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

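    /// Returns an iterator over this struct's field values, in field
    /// definition order.
    ///
    /// The iterator borrows the store mutably for its lifetime.
    ///
    /// # Errors
    ///
    /// Returns an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if used with a store other than the one this reference belongs
    /// to.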
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));

        let index = header.ty().expect("structrefs should have concrete types");
        let ty = StructType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            structref: self,
            store,
            index: 0,
            len,
        });

        // A lazy iterator over the struct's fields. It owns the store's
        // `AutoAssertNoGc` guard, which asserts that no GC occurs while the
        // iterator is alive.
        struct Fields<'a, 'b> {
            structref: &'a StructRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                Some(self.structref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    /// Returns the GC header of the underlying struct object.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.require_gc_store()?.header(gc_ref))
    }

    /// Returns the raw VM-level struct reference, checking the GC kind in
    /// debug builds.
    fn structref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMStructRef> {
        assert!(self.comes_from_same_store(store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::StructRef));
        Ok(gc_ref.as_structref_unchecked())
    }

    /// Returns the GC heap layout of this struct's type.
    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcStructLayout> {
        assert!(self.comes_from_same_store(store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("struct types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            GcLayout::Array(_) => unreachable!(),
        }
    }

    /// Returns the type of the `field`th field, or an error if the index is
    /// out of bounds.
    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: struct only has {len} fields")
            }
        }
    }

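    /// Gets the value of the field at the given index in this struct.
    ///
    /// # Errors
    ///
    /// Returns an error if the index is out of bounds or if this reference
    /// has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if used with a store other than the one this reference belongs
    /// to.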
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let structref = self.structref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(structref.read_field(store, &layout, field_ty.element_type(), index))
    }

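    /// Sets the value of the field at the given index in this struct.
    ///
    /// # Errors
    ///
    /// Returns an error if the index is out of bounds, if the field is not
    /// mutable, if the value's type does not match the field's type, or if
    /// this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if used with a store other than the one this reference belongs
    /// to.
    ///
    /// # Example
    ///
    /// An illustrative sketch (not compiled as a doctest), continuing from a
    /// struct `s` with a mutable `i32` field as in [`StructRef::new`]'s
    /// example:
    ///
    /// ```ignore
    /// s.set_field(&mut store, 0, Val::I32(99))?;
    /// assert_eq!(s.field(&mut store, 0)?.unwrap_i32(), 99);
    /// ```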
    pub fn set_field(&self, mut store: impl AsContextMut, index: usize, value: Val) -> Result<()> {
        self._set_field(store.as_context_mut().0, index, value)
    }

    pub(crate) fn _set_field(
        &self,
        store: &mut StoreOpaque,
        index: usize,
        value: Val,
    ) -> Result<()> {
        assert!(self.comes_from_same_store(store));
        let mut store = AutoAssertNoGc::new(store);

        let field_ty = self.field_ty(&store, index)?;
        ensure!(
            field_ty.mutability().is_var(),
            "cannot set field {index}: field is not mutable"
        );

        value
            .ensure_matches_ty(&store, &field_ty.element_type().unpack())
            .with_context(|| format!("cannot set field {index}: type mismatch"))?;

        let layout = self.layout(&store)?;
        let structref = self.structref(&store)?.unchecked_copy();

        structref.write_field(&mut store, &layout, field_ty.element_type(), index, value)
    }

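    /// Returns the shared type index of this struct's concrete type.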
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));
        Ok(header.ty().expect("structrefs should have concrete types"))
    }

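    /// Creates a new rooted `StructRef` from the given GC reference.
    ///
    /// The GC reference must point to a struct in this store's GC heap, and
    /// ownership of the (cloned) reference is passed to this function.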
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(gc_ref.is_structref(&*store.unwrap_gc_store().gc_heap));
        Rooted::new(store, gc_ref)
    }
}

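// The `WasmTy` implementations below let rooted struct references (and their
// nullable `Option` counterparts) be passed to and returned from typed Wasm
// functions as `(ref struct)` / `structref` values.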
unsafe impl WasmTy for Rooted<StructRef> {
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}

unsafe impl WasmTy for Option<Rooted<StructRef>> {
    #[inline]
    fn valtype() -> ValType {
        ValType::STRUCTREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            Some(s) => Rooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty),
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <Rooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <Rooted<StructRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            StructRef::from_cloned_gc_ref,
        )
    }
}

unsafe impl WasmTy for ManuallyRooted<StructRef> {
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}

unsafe impl WasmTy for Option<ManuallyRooted<StructRef>> {
    #[inline]
    fn valtype() -> ValType {
        ValType::STRUCTREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.as_ref()
            .map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            Some(s) => {
                ManuallyRooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty)
            }
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <ManuallyRooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <ManuallyRooted<StructRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            StructRef::from_cloned_gc_ref,
        )
    }
}