1use crate::runtime::vm::VMGcRef;
4use crate::store::StoreId;
5use crate::vm::{VMGcHeader, VMStructRef};
6use crate::{
7 prelude::*,
8 store::{AutoAssertNoGc, StoreContextMut, StoreOpaque},
9 AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
10 ManuallyRooted, RefType, Rooted, StructType, Val, ValRaw, ValType, WasmTy,
11};
12use crate::{AnyRef, FieldType};
13use core::mem::{self, MaybeUninit};
14use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
15
/// An allocator for a particular Wasm GC `struct` type.
///
/// Ties together the concrete [`StructType`] to allocate and the store that
/// allocations must happen within; `StructRef::new` checks both.
pub struct StructRefPre {
    // The store this allocator is registered with; using it with any other
    // store is a panic (see `StructRef::_new`).
    store_id: StoreId,
    // The concrete struct type that this allocator creates instances of.
    ty: StructType,
}
65
66impl StructRefPre {
67 pub fn new(mut store: impl AsContextMut, ty: StructType) -> Self {
70 Self::_new(store.as_context_mut().0, ty)
71 }
72
73 pub(crate) fn _new(store: &mut StoreOpaque, ty: StructType) -> Self {
74 store.insert_gc_host_alloc_type(ty.registered_type().clone());
75 let store_id = store.id();
76
77 StructRefPre { store_id, ty }
78 }
79
80 pub(crate) fn layout(&self) -> &GcStructLayout {
81 self.ty
82 .registered_type()
83 .layout()
84 .expect("struct types have a layout")
85 .unwrap_struct()
86 }
87
88 pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
89 self.ty.registered_type().index()
90 }
91}
92
/// A reference to a GC-managed Wasm `struct` instance.
///
/// `#[repr(transparent)]` over `GcRootIndex` is load-bearing: the
/// `GcRefImpl::transmute_ref` implementation below relies on the two types
/// having identical layout.
#[derive(Debug)]
#[repr(transparent)]
pub struct StructRef {
    // Index of the root slot that keeps the underlying GC object alive.
    pub(super) inner: GcRootIndex,
}
167
unsafe impl GcRefImpl for StructRef {
    #[allow(private_interfaces)]
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // SAFETY: `StructRef` is a `#[repr(transparent)]` newtype over
        // `GcRootIndex`, so the two references have identical layout.
        let me: &Self = unsafe { mem::transmute(index) };

        // Exhaustively match on the struct's fields so that this stops
        // compiling (and the transmute above gets revisited) if `StructRef`
        // ever gains additional fields.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}
185
186impl Rooted<StructRef> {
187 #[inline]
189 pub fn to_anyref(self) -> Rooted<AnyRef> {
190 self.unchecked_cast()
191 }
192
193 #[inline]
195 pub fn to_eqref(self) -> Rooted<EqRef> {
196 self.unchecked_cast()
197 }
198}
199
200impl ManuallyRooted<StructRef> {
201 #[inline]
203 pub fn to_anyref(self) -> ManuallyRooted<AnyRef> {
204 self.unchecked_cast()
205 }
206
207 #[inline]
209 pub fn to_eqref(self) -> ManuallyRooted<EqRef> {
210 self.unchecked_cast()
211 }
212}
213
impl StructRef {
    /// Allocate a new `struct` of the type associated with `allocator`,
    /// initialize its fields with `fields`, and root the result in the
    /// store's current LIFO rooting scope.
    ///
    /// # Panics
    ///
    /// Panics if `allocator` was created for a different store, or if any of
    /// the `fields` values come from a different store.
    ///
    /// # Errors
    ///
    /// Returns an error if the number or types of `fields` do not match the
    /// struct type, or if the GC heap is out of memory.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        Self::_new(store.as_context_mut().0, allocator, fields)
    }

    pub(crate) fn _new(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `StructRefPre` with the wrong store"
        );

        // Type-check the given field values against the struct's field types
        // up front, before allocating anything.
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }

        // Allocate the struct; its fields are uninitialized at this point.
        // NOTE(review): `allocator.layout()` already returns a
        // `&GcStructLayout`, so the extra `&` here produces a `&&_` that
        // only works via deref coercion — consider dropping it.
        let structref = store
            .gc_store_mut()?
            .alloc_uninit_struct(allocator.type_index(), &allocator.layout())
            .context("unrecoverable error when allocating new `structref`")?
            .ok_or_else(|| GcHeapOutOfMemory::new(()))?;

        // Initialize each field in turn. If anything fails, eagerly
        // deallocate the partially-initialized struct so a later GC never
        // observes its uninitialized field slots; `AutoAssertNoGc` keeps any
        // collection from happening inside this window.
        let mut store = AutoAssertNoGc::new(store);
        match (|| {
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                structref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, structref.into())),
            Err(e) => {
                store.gc_store_mut()?.dealloc_uninit_struct(structref);
                Err(e)
            }
        }
    }

    /// Is this `structref` associated with the given store?
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Get this `structref`'s concrete struct type.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn ty(&self, store: impl AsContext) -> Result<StructType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<StructType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(StructType::from_shared_type_index(store.engine(), index))
    }

    /// Does this `structref`'s type match (i.e. is a subtype of) `ty`?
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn matches_ty(&self, store: impl AsContext, ty: &StructType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(self._ty(store)?.matches(ty))
    }

    /// Like `_matches_ty`, but returns descriptive errors instead of `false`
    /// on type mismatch and instead of panicking on store mismatch.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Iterate over this struct's field values, in definition order.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn fields<'a, T: 'a>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        // Hold off GC for the lifetime of the returned iterator so the
        // underlying object cannot move or be collected mid-iteration.
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));

        // Determine the field count from the object's concrete type.
        let index = header.ty().expect("structrefs should have concrete types");
        let ty = StructType::from_shared_type_index(store.engine(), index);
        let len = ty.fields().len();

        return Ok(Fields {
            structref: self,
            store,
            index: 0,
            len,
        });

        // A lazy iterator that reads one field per `next` call.
        struct Fields<'a, 'b> {
            structref: &'a StructRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }

        impl Iterator for Fields<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                // `unwrap` is OK here: `i < self.len` was checked above and
                // the store/reference were validated when the iterator was
                // constructed.
                Some(self.structref._field(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    /// Borrow this object's GC header; errors if the reference has been
    /// unrooted.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.gc_store()?.header(gc_ref))
    }

    /// View the underlying GC reference as a `VMStructRef`; the debug assert
    /// checks the header kind before the unchecked conversion.
    fn structref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMStructRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::StructRef));
        Ok(gc_ref.as_structref_unchecked())
    }

    /// Look up the GC layout for this object's concrete struct type.
    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcStructLayout> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("struct types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            // A struct type's registered layout is never an array layout.
            GcLayout::Array(_) => unreachable!(),
        }
    }

    /// Get the type of the `field`th field, or a descriptive error if the
    /// index is out of bounds.
    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: struct only has {len} fields")
            }
        }
    }

    /// Read the value of the `index`th field of this struct.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    ///
    /// # Errors
    ///
    /// Errors if `index` is out of bounds or the reference was unrooted.
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let structref = self.structref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(structref.read_field(store, &layout, field_ty.element_type(), index))
    }

    /// Write `value` into the `index`th field of this struct.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    ///
    /// # Errors
    ///
    /// Errors if `index` is out of bounds, the field is not mutable, or
    /// `value`'s type does not match the field's type.
    pub fn set_field(&self, mut store: impl AsContextMut, index: usize, value: Val) -> Result<()> {
        self._set_field(store.as_context_mut().0, index, value)
    }

    pub(crate) fn _set_field(
        &self,
        store: &mut StoreOpaque,
        index: usize,
        value: Val,
    ) -> Result<()> {
        assert!(self.comes_from_same_store(store));
        let mut store = AutoAssertNoGc::new(store);

        let field_ty = self.field_ty(&store, index)?;
        ensure!(
            field_ty.mutability().is_var(),
            "cannot set field {index}: field is not mutable"
        );

        value
            .ensure_matches_ty(&store, &field_ty.element_type().unpack())
            .with_context(|| format!("cannot set field {index}: type mismatch"))?;

        let layout = self.layout(&store)?;
        let structref = self.structref(&store)?.unchecked_copy();

        structref.write_field(&mut store, &layout, field_ty.element_type(), index, value)
    }

    /// Read the shared type index of this object's concrete type out of its
    /// GC header.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));
        Ok(header.ty().expect("structrefs should have concrete types"))
    }

    /// Root a GC reference — already cloned, per the method name — as a
    /// `Rooted<StructRef>`. Debug-asserts it really is a structref.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(gc_ref.is_structref(&*store.unwrap_gc_store().gc_heap));
        Rooted::new(store, gc_ref)
    }
}
566
// `Rooted<StructRef>` crosses the Wasm boundary through the `anyref` payload
// of a `ValRaw` (see `store`/`load` below).
unsafe impl WasmTy for Rooted<StructRef> {
    // A non-null struct reference: `(ref struct)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Every struct reference is a subtype of `any`, `eq`, and
            // `struct`.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // Concrete struct types require a full subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // No struct reference inhabits any of these heap types.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
612
613unsafe impl WasmTy for Option<Rooted<StructRef>> {
614 #[inline]
615 fn valtype() -> ValType {
616 ValType::STRUCTREF
617 }
618
619 #[inline]
620 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
621 self.map_or(true, |x| x.comes_from_same_store(store))
622 }
623
624 #[inline]
625 fn dynamic_concrete_type_check(
626 &self,
627 store: &StoreOpaque,
628 nullable: bool,
629 ty: &HeapType,
630 ) -> Result<()> {
631 match self {
632 Some(s) => Rooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty),
633 None => {
634 ensure!(
635 nullable,
636 "expected a non-null reference, but found a null reference"
637 );
638 Ok(())
639 }
640 }
641 }
642
643 #[inline]
644 fn is_vmgcref_and_points_to_object(&self) -> bool {
645 self.is_some()
646 }
647
648 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
649 <Rooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
650 }
651
652 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
653 <Rooted<StructRef>>::wasm_ty_option_load(
654 store,
655 ptr.get_anyref(),
656 StructRef::from_cloned_gc_ref,
657 )
658 }
659}
660
// Same as the `Rooted<StructRef>` impl above, but for the manually-rooted
// (scope-independent) form of the reference.
unsafe impl WasmTy for ManuallyRooted<StructRef> {
    // A non-null struct reference: `(ref struct)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Every struct reference is a subtype of `any`, `eq`, and
            // `struct`.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // Concrete struct types require a full subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),

            // No struct reference inhabits any of these heap types.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
706
707unsafe impl WasmTy for Option<ManuallyRooted<StructRef>> {
708 #[inline]
709 fn valtype() -> ValType {
710 ValType::STRUCTREF
711 }
712
713 #[inline]
714 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
715 self.as_ref()
716 .map_or(true, |x| x.comes_from_same_store(store))
717 }
718
719 #[inline]
720 fn dynamic_concrete_type_check(
721 &self,
722 store: &StoreOpaque,
723 nullable: bool,
724 ty: &HeapType,
725 ) -> Result<()> {
726 match self {
727 Some(s) => {
728 ManuallyRooted::<StructRef>::dynamic_concrete_type_check(s, store, nullable, ty)
729 }
730 None => {
731 ensure!(
732 nullable,
733 "expected a non-null reference, but found a null reference"
734 );
735 Ok(())
736 }
737 }
738 }
739
740 #[inline]
741 fn is_vmgcref_and_points_to_object(&self) -> bool {
742 self.is_some()
743 }
744
745 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
746 <ManuallyRooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
747 }
748
749 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
750 <ManuallyRooted<StructRef>>::wasm_ty_option_load(
751 store,
752 ptr.get_anyref(),
753 StructRef::from_cloned_gc_ref,
754 )
755 }
756}