wasmtime/runtime/vm/gc/enabled/
structref.rs

1use super::{truncate_i32_to_i8, truncate_i32_to_i16};
2use crate::{
3    AnyRef, ExnRef, ExternRef, Func, HeapType, RootedGcRefImpl, StorageType, Val, ValType,
4    prelude::*,
5    runtime::vm::{GcHeap, GcStore, VMGcRef},
6    store::AutoAssertNoGc,
7    vm::{FuncRefTableId, SendSyncPtr},
8};
9use core::fmt;
10use wasmtime_environ::{GcStructLayout, VMGcKind};
11
/// A `VMGcRef` that we know points to a `struct`.
///
/// Create a `VMStructRef` via `VMGcRef::into_structref` and
/// `VMGcRef::as_structref`, or their unchecked equivalents
/// `VMGcRef::into_structref_unchecked` and `VMGcRef::as_structref_unchecked`.
///
/// Note: This is not a `TypedGcRef<_>` because each collector can have a
/// different concrete representation of `structref` that they allocate inside
/// their heaps.
#[derive(Debug, PartialEq, Eq, Hash)]
// `repr(transparent)` guarantees this wrapper has the exact layout of the
// inner `VMGcRef`, which is what makes the pointer cast in
// `VMGcRef::as_structref_unchecked` sound.
#[repr(transparent)]
pub struct VMStructRef(VMGcRef);
24
25impl fmt::Pointer for VMStructRef {
26    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
27        fmt::Pointer::fmt(&self.0, f)
28    }
29}
30
31impl From<VMStructRef> for VMGcRef {
32    #[inline]
33    fn from(x: VMStructRef) -> Self {
34        x.0
35    }
36}
37
impl VMGcRef {
    /// Is this `VMGcRef` pointing to a `struct`?
    pub fn is_structref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> bool {
        // `i31` references are unboxed immediates: they have no heap object
        // and therefore no header to inspect.
        if self.is_i31() {
            return false;
        }

        let header = gc_heap.header(&self);
        header.kind().matches(VMGcKind::StructRef)
    }

    /// Create a new `VMStructRef` from the given `gc_ref`.
    ///
    /// If this is not a GC reference to a `structref`, `Err(self)` is
    /// returned, giving ownership of the reference back to the caller.
    pub fn into_structref(self, gc_heap: &impl GcHeap) -> Result<VMStructRef, VMGcRef> {
        if self.is_structref(gc_heap) {
            Ok(self.into_structref_unchecked())
        } else {
            Err(self)
        }
    }

    /// Create a new `VMStructRef` from `self` without actually checking that
    /// `self` is a `structref`.
    ///
    /// This method does not check that `self` is actually a `structref`, but
    /// it should be. Failure to uphold this invariant is memory safe but will
    /// result in general incorrectness down the line such as panics or wrong
    /// results.
    #[inline]
    pub fn into_structref_unchecked(self) -> VMStructRef {
        debug_assert!(!self.is_i31());
        VMStructRef(self)
    }

    /// Get this GC reference as a `structref` reference, if it actually is a
    /// `structref` reference.
    pub fn as_structref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> Option<&VMStructRef> {
        if self.is_structref(gc_heap) {
            Some(self.as_structref_unchecked())
        } else {
            None
        }
    }

    /// Get this GC reference as a `structref` reference without checking if it
    /// actually is a `structref` reference.
    ///
    /// Calling this method on a non-`structref` reference is memory safe, but
    /// will lead to general incorrectness like panics and wrong results.
    pub fn as_structref_unchecked(&self) -> &VMStructRef {
        debug_assert!(!self.is_i31());
        let ptr = self as *const VMGcRef;
        // SAFETY: `VMStructRef` is a `#[repr(transparent)]` wrapper around
        // `VMGcRef`, so the two types have identical layout and it is sound
        // to reinterpret a `&VMGcRef` as a `&VMStructRef`.
        let ret = unsafe { &*ptr.cast() };
        // This assertion is trivially true at runtime; its purpose is to stop
        // compiling (and force this cast to be revisited) if `VMStructRef`'s
        // definition ever changes shape.
        assert!(matches!(ret, VMStructRef(VMGcRef { .. })));
        ret
    }
}
97
98impl VMStructRef {
99    /// Get the underlying `VMGcRef`.
100    pub fn as_gc_ref(&self) -> &VMGcRef {
101        &self.0
102    }
103
104    /// Clone this `VMStructRef`, running any GC barriers as necessary.
105    pub fn clone(&self, gc_store: &mut GcStore) -> Self {
106        Self(gc_store.clone_gc_ref(&self.0))
107    }
108
109    /// Explicitly drop this `structref`, running GC drop barriers as necessary.
110    pub fn drop(self, gc_store: &mut GcStore) {
111        gc_store.drop_gc_ref(self.0);
112    }
113
114    /// Copy this `VMStructRef` without running the GC's clone barriers.
115    ///
116    /// Prefer calling `clone(&mut GcStore)` instead! This is mostly an internal
117    /// escape hatch for collector implementations.
118    ///
119    /// Failure to run GC barriers when they would otherwise be necessary can
120    /// lead to leaks, panics, and wrong results. It cannot lead to memory
121    /// unsafety, however.
122    pub fn unchecked_copy(&self) -> Self {
123        Self(self.0.unchecked_copy())
124    }
125
126    /// Read a field of the given `StorageType` into a `Val`.
127    ///
128    /// `i8` and `i16` fields are zero-extended into `Val::I32(_)`s.
129    ///
130    /// Does not check that the field is actually of type `ty`. That is the
131    /// caller's responsibility. Failure to do so is memory safe, but will lead
132    /// to general incorrectness such as panics and wrong results.
133    ///
134    /// Panics on out-of-bounds accesses.
135    pub fn read_field(
136        &self,
137        store: &mut AutoAssertNoGc,
138        layout: &GcStructLayout,
139        ty: &StorageType,
140        field: usize,
141    ) -> Val {
142        let offset = layout.fields[field].offset;
143        read_field_impl(self.as_gc_ref(), store, ty, offset)
144    }
145
146    /// Write the given value into this struct at the given offset.
147    ///
148    /// Returns an error if `val` is a GC reference that has since been
149    /// unrooted.
150    ///
151    /// Does not check that `val` matches `ty`, nor that the field is actually
152    /// of type `ty`. Checking those things is the caller's responsibility.
153    /// Failure to do so is memory safe, but will lead to general incorrectness
154    /// such as panics and wrong results.
155    ///
156    /// Panics on out-of-bounds accesses.
157    pub fn write_field(
158        &self,
159        store: &mut AutoAssertNoGc,
160        layout: &GcStructLayout,
161        ty: &StorageType,
162        field: usize,
163        val: Val,
164    ) -> Result<()> {
165        debug_assert!(val._matches_ty(&store, &ty.unpack())?);
166
167        let offset = layout.fields[field].offset;
168        let gcstore = store.require_gc_store_mut()?;
169        let data = gcstore.gc_object_data(self.as_gc_ref());
170        match val {
171            Val::I32(i) if ty.is_i8() => data.write_i8(offset, truncate_i32_to_i8(i)),
172            Val::I32(i) if ty.is_i16() => data.write_i16(offset, truncate_i32_to_i16(i)),
173            Val::I32(i) => data.write_i32(offset, i),
174            Val::I64(i) => data.write_i64(offset, i),
175            Val::F32(f) => data.write_u32(offset, f),
176            Val::F64(f) => data.write_u64(offset, f),
177            Val::V128(v) => data.write_v128(offset, v),
178
179            // For GC-managed references, we need to take care to run the
180            // appropriate barriers, even when we are writing null references
181            // into the struct.
182            //
183            // POD-read the old value into a local copy, run the GC write
184            // barrier on that local copy, and then POD-write the updated
185            // value back into the struct. This avoids transmuting the inner
186            // data, which would probably be fine, but this approach is
187            // Obviously Correct and should get us by for now. If LLVM isn't
188            // able to elide some of these unnecessary copies, and this
189            // method is ever hot enough, we can always come back and clean
190            // it up in the future.
191            Val::ExternRef(e) => {
192                let raw = data.read_u32(offset);
193                let mut gc_ref = VMGcRef::from_raw_u32(raw);
194                let e = match e {
195                    Some(e) => Some(e.try_gc_ref(store)?.unchecked_copy()),
196                    None => None,
197                };
198                let store = store.require_gc_store_mut()?;
199                store.write_gc_ref(&mut gc_ref, e.as_ref());
200                let data = store.gc_object_data(self.as_gc_ref());
201                data.write_u32(offset, gc_ref.map_or(0, |r| r.as_raw_u32()));
202            }
203            Val::AnyRef(a) => {
204                let raw = data.read_u32(offset);
205                let mut gc_ref = VMGcRef::from_raw_u32(raw);
206                let a = match a {
207                    Some(a) => Some(a.try_gc_ref(store)?.unchecked_copy()),
208                    None => None,
209                };
210                let store = store.require_gc_store_mut()?;
211                store.write_gc_ref(&mut gc_ref, a.as_ref());
212                let data = store.gc_object_data(self.as_gc_ref());
213                data.write_u32(offset, gc_ref.map_or(0, |r| r.as_raw_u32()));
214            }
215            Val::ExnRef(e) => {
216                let raw = data.read_u32(offset);
217                let mut gc_ref = VMGcRef::from_raw_u32(raw);
218                let e = match e {
219                    Some(e) => Some(e.try_gc_ref(store)?.unchecked_copy()),
220                    None => None,
221                };
222                let store = store.require_gc_store_mut()?;
223                store.write_gc_ref(&mut gc_ref, e.as_ref());
224                let data = store.gc_object_data(self.as_gc_ref());
225                data.write_u32(offset, gc_ref.map_or(0, |r| r.as_raw_u32()));
226            }
227
228            Val::FuncRef(f) => {
229                let f = f.map(|f| SendSyncPtr::new(f.vm_func_ref(store)));
230                let gcstore = store.require_gc_store_mut()?;
231                let id = unsafe { gcstore.func_ref_table.intern(f) };
232                gcstore
233                    .gc_object_data(self.as_gc_ref())
234                    .write_u32(offset, id.into_raw());
235            }
236            Val::ContRef(_) => {
237                // TODO(#10248): Implement struct continuation reference field handling
238                return Err(anyhow::anyhow!(
239                    "setting continuation references in struct fields not yet supported"
240                ));
241            }
242        }
243        Ok(())
244    }
245
246    /// Initialize a field in this structref that is currently uninitialized.
247    ///
248    /// The difference between this method and `write_field` is that GC barriers
249    /// are handled differently. When overwriting an initialized field (aka
250    /// `write_field`) we need to call the full write GC write barrier, which
251    /// logically drops the old GC reference and clones the new GC
252    /// reference. When we are initializing a field for the first time, there is
253    /// no old GC reference that is being overwritten and which we need to drop,
254    /// so we only need to clone the new GC reference.
255    ///
256    /// Calling this method on a structref that has already had the associated
257    /// field initialized will result in GC bugs. These are memory safe but will
258    /// lead to generally incorrect behavior such as panics, leaks, and
259    /// incorrect results.
260    ///
261    /// Does not check that `val` matches `ty`, nor that the field is actually
262    /// of type `ty`. Checking those things is the caller's responsibility.
263    /// Failure to do so is memory safe, but will lead to general incorrectness
264    /// such as panics and wrong results.
265    ///
266    /// Returns an error if `val` is a GC reference that has since been
267    /// unrooted.
268    ///
269    /// Panics on out-of-bounds accesses.
270    pub fn initialize_field(
271        &self,
272        store: &mut AutoAssertNoGc,
273        layout: &GcStructLayout,
274        ty: &StorageType,
275        field: usize,
276        val: Val,
277    ) -> Result<()> {
278        debug_assert!(val._matches_ty(&store, &ty.unpack())?);
279        let offset = layout.fields[field].offset;
280        initialize_field_impl(self.as_gc_ref(), store, ty, offset, val)
281    }
282}
283
/// Read a field from a GC object at a given offset.
///
/// This factored-out function allows a shared implementation for both
/// structs (this module) and exception objects.
///
/// Packed `i8`/`i16` fields are zero-extended into `Val::I32(_)`s. Does not
/// check that the field is actually of type `ty`; that is the caller's
/// responsibility.
pub(crate) fn read_field_impl(
    gc_ref: &VMGcRef,
    store: &mut AutoAssertNoGc,
    ty: &StorageType,
    offset: u32,
) -> Val {
    let data = store.unwrap_gc_store_mut().gc_object_data(gc_ref);
    match ty {
        // Packed fields: zero-extend to `i32` (`read_u8`/`read_u16`, not the
        // signed variants).
        StorageType::I8 => Val::I32(data.read_u8(offset).into()),
        StorageType::I16 => Val::I32(data.read_u16(offset).into()),
        StorageType::ValType(ValType::I32) => Val::I32(data.read_i32(offset)),
        StorageType::ValType(ValType::I64) => Val::I64(data.read_i64(offset)),
        // Floats are stored and surfaced as their raw bit patterns.
        StorageType::ValType(ValType::F32) => Val::F32(data.read_u32(offset)),
        StorageType::ValType(ValType::F64) => Val::F64(data.read_u64(offset)),
        StorageType::ValType(ValType::V128) => Val::V128(data.read_v128(offset)),
        // Reference-typed fields dispatch on the top of the field's heap-type
        // hierarchy; the raw u32 in the object is interpreted accordingly.
        StorageType::ValType(ValType::Ref(r)) => match r.heap_type().top() {
            HeapType::Extern => {
                let raw = data.read_u32(offset);
                Val::ExternRef(ExternRef::_from_raw(store, raw))
            }
            HeapType::Any => {
                let raw = data.read_u32(offset);
                Val::AnyRef(AnyRef::_from_raw(store, raw))
            }
            HeapType::Exn => {
                let raw = data.read_u32(offset);
                Val::ExnRef(ExnRef::_from_raw(store, raw))
            }
            HeapType::Func => {
                // Func fields hold an interned id into this store's func-ref
                // table (see the corresponding writes), not a raw pointer.
                let func_ref_id = data.read_u32(offset);
                let func_ref_id = FuncRefTableId::from_raw(func_ref_id);
                let func_ref = store
                    .unwrap_gc_store()
                    .func_ref_table
                    .get_untyped(func_ref_id);
                // SAFETY: the pointer, if any, was interned into this same
                // store's `func_ref_table` when the field was written —
                // NOTE(review): relies on `from_vm_func_ref`'s contract,
                // declared elsewhere; confirm.
                Val::FuncRef(unsafe {
                    func_ref.map(|p| Func::from_vm_func_ref(store.id(), p.as_non_null()))
                })
            }
            otherwise => unreachable!("not a top type: {otherwise:?}"),
        },
    }
}
331
332pub(crate) fn initialize_field_impl(
333    gc_ref: &VMGcRef,
334    store: &mut AutoAssertNoGc,
335    ty: &StorageType,
336    offset: u32,
337    val: Val,
338) -> Result<()> {
339    let gcstore = store.require_gc_store_mut()?;
340    match val {
341        Val::I32(i) if ty.is_i8() => gcstore
342            .gc_object_data(gc_ref)
343            .write_i8(offset, truncate_i32_to_i8(i)),
344        Val::I32(i) if ty.is_i16() => gcstore
345            .gc_object_data(gc_ref)
346            .write_i16(offset, truncate_i32_to_i16(i)),
347        Val::I32(i) => gcstore.gc_object_data(gc_ref).write_i32(offset, i),
348        Val::I64(i) => gcstore.gc_object_data(gc_ref).write_i64(offset, i),
349        Val::F32(f) => gcstore.gc_object_data(gc_ref).write_u32(offset, f),
350        Val::F64(f) => gcstore.gc_object_data(gc_ref).write_u64(offset, f),
351        Val::V128(v) => gcstore.gc_object_data(gc_ref).write_v128(offset, v),
352
353        // NB: We don't need to do a write barrier when initializing a
354        // field, because there is nothing being overwritten. Therefore, we
355        // just the clone barrier.
356        Val::ExternRef(x) => {
357            let x = match x {
358                None => 0,
359                Some(x) => x.try_clone_gc_ref(store)?.as_raw_u32(),
360            };
361            store
362                .require_gc_store_mut()?
363                .gc_object_data(gc_ref)
364                .write_u32(offset, x);
365        }
366        Val::AnyRef(x) => {
367            let x = match x {
368                None => 0,
369                Some(x) => x.try_clone_gc_ref(store)?.as_raw_u32(),
370            };
371            store
372                .require_gc_store_mut()?
373                .gc_object_data(gc_ref)
374                .write_u32(offset, x);
375        }
376        Val::ExnRef(x) => {
377            let x = match x {
378                None => 0,
379                Some(x) => x.try_clone_gc_ref(store)?.as_raw_u32(),
380            };
381            store
382                .require_gc_store_mut()?
383                .gc_object_data(gc_ref)
384                .write_u32(offset, x);
385        }
386
387        Val::FuncRef(f) => {
388            let f = f.map(|f| SendSyncPtr::new(f.vm_func_ref(store)));
389            let gcstore = store.require_gc_store_mut()?;
390            let id = unsafe { gcstore.func_ref_table.intern(f) };
391            gcstore
392                .gc_object_data(gc_ref)
393                .write_u32(offset, id.into_raw());
394        }
395        Val::ContRef(_) => {
396            // TODO(#10248): Implement struct continuation reference field init handling
397            return Err(anyhow::anyhow!(
398                "initializing continuation references in struct fields not yet supported"
399            ));
400        }
401    }
402    Ok(())
403}