wasmtime/runtime/vm/gc/enabled/
arrayref.rs

1use super::{truncate_i32_to_i8, truncate_i32_to_i16};
2use crate::{
3    AnyRef, ExnRef, ExternRef, Func, HeapType, RootedGcRefImpl, StorageType, Val, ValType,
4    prelude::*,
5    runtime::vm::{GcHeap, GcStore, VMGcRef},
6    store::{AutoAssertNoGc, StoreOpaque},
7    vm::{FuncRefTableId, SendSyncPtr},
8};
9use core::fmt;
10use wasmtime_environ::{GcArrayLayout, VMGcKind};
11
/// A `VMGcRef` that we know points to an `array`.
///
/// Create a `VMArrayRef` via `VMGcRef::into_arrayref` and
/// `VMGcRef::as_arrayref`, or their unchecked equivalents
/// `VMGcRef::into_arrayref_unchecked` and `VMGcRef::as_arrayref_unchecked`.
///
/// Note: This is not a `TypedGcRef<_>` because each collector can have a
/// different concrete representation of `arrayref` that they allocate inside
/// their heaps.
//
// NB: `#[repr(transparent)]` guarantees this newtype has the exact same
// layout as `VMGcRef`; `VMGcRef::as_arrayref_unchecked`'s pointer cast
// relies on that guarantee.
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct VMArrayRef(VMGcRef);
24
// Format as a raw pointer by delegating to the underlying `VMGcRef`'s
// `Pointer` implementation, preserving any formatter flags the caller set.
impl fmt::Pointer for VMArrayRef {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.0, f)
    }
}
30
// Infallible conversion back to the untyped GC reference: `VMArrayRef` is a
// transparent newtype over `VMGcRef`, so this just moves the inner value out.
// No GC barriers run here.
impl From<VMArrayRef> for VMGcRef {
    #[inline]
    fn from(x: VMArrayRef) -> Self {
        x.0
    }
}
37
38impl VMGcRef {
39    /// Is this `VMGcRef` pointing to a `array`?
40    pub fn is_arrayref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> bool {
41        if self.is_i31() {
42            return false;
43        }
44
45        let header = gc_heap.header(&self);
46        header.kind().matches(VMGcKind::ArrayRef)
47    }
48
49    /// Create a new `VMArrayRef` from the given `gc_ref`.
50    ///
51    /// If this is not a GC reference to an `arrayref`, `Err(self)` is
52    /// returned.
53    pub fn into_arrayref(self, gc_heap: &(impl GcHeap + ?Sized)) -> Result<VMArrayRef, VMGcRef> {
54        if self.is_arrayref(gc_heap) {
55            Ok(self.into_arrayref_unchecked())
56        } else {
57            Err(self)
58        }
59    }
60
61    /// Create a new `VMArrayRef` from `self` without actually checking that
62    /// `self` is an `arrayref`.
63    ///
64    /// This method does not check that `self` is actually an `arrayref`, but
65    /// it should be. Failure to uphold this invariant is memory safe but will
66    /// result in general incorrectness down the line such as panics or wrong
67    /// results.
68    #[inline]
69    pub fn into_arrayref_unchecked(self) -> VMArrayRef {
70        debug_assert!(!self.is_i31());
71        VMArrayRef(self)
72    }
73
74    /// Get this GC reference as an `arrayref` reference, if it actually is an
75    /// `arrayref` reference.
76    pub fn as_arrayref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> Option<&VMArrayRef> {
77        if self.is_arrayref(gc_heap) {
78            Some(self.as_arrayref_unchecked())
79        } else {
80            None
81        }
82    }
83
84    /// Get this GC reference as an `arrayref` reference without checking if it
85    /// actually is an `arrayref` reference.
86    ///
87    /// Calling this method on a non-`arrayref` reference is memory safe, but
88    /// will lead to general incorrectness like panics and wrong results.
89    pub fn as_arrayref_unchecked(&self) -> &VMArrayRef {
90        debug_assert!(!self.is_i31());
91        let ptr = self as *const VMGcRef;
92        let ret = unsafe { &*ptr.cast() };
93        assert!(matches!(ret, VMArrayRef(VMGcRef { .. })));
94        ret
95    }
96}
97
98impl VMArrayRef {
    /// Get the underlying `VMGcRef`.
    ///
    /// This is a cheap borrow of the inner reference; no GC barriers run.
    pub fn as_gc_ref(&self) -> &VMGcRef {
        &self.0
    }
103
    /// Clone this `VMArrayRef`, running any GC barriers as necessary.
    ///
    /// This is an inherent method rather than an implementation of the `Clone`
    /// trait because cloning must run the collector's clone barrier, which
    /// requires mutable access to the `GcStore`.
    pub fn clone(&self, gc_store: &mut GcStore) -> Self {
        Self(gc_store.clone_gc_ref(&self.0))
    }
108
    /// Explicitly drop this `arrayref`, running GC drop barriers as necessary.
    ///
    /// Like `clone` above, this is an inherent method (not the `Drop` trait)
    /// because the drop barrier needs mutable access to the `GcStore`.
    pub fn drop(self, gc_store: &mut GcStore) {
        gc_store.drop_gc_ref(self.0);
    }
113
    /// Copy this `VMArrayRef` without running the GC's clone barriers.
    ///
    /// Prefer calling `clone(&mut GcStore)` instead! This is mostly an internal
    /// escape hatch for collector implementations.
    ///
    /// Failure to run GC barriers when they would otherwise be necessary can
    /// lead to leaks, panics, and wrong results. It cannot lead to memory
    /// unsafety, however.
    pub fn unchecked_copy(&self) -> Self {
        // Delegates to the underlying `VMGcRef`'s barrier-free copy.
        Self(self.0.unchecked_copy())
    }
125
    /// Get the length of this array.
    ///
    /// NOTE(review): `unwrap_gc_store` presumably panics if `store` has no GC
    /// heap allocated — confirm against its definition.
    pub fn len(&self, store: &StoreOpaque) -> u32 {
        store.unwrap_gc_store().array_len(self)
    }
130
    /// Read an element of the given `StorageType` into a `Val`.
    ///
    /// `i8` and `i16` fields are zero-extended into `Val::I32(_)`s.
    ///
    /// Does not check that this array's elements are actually of type
    /// `ty`. That is the caller's responsibility. Failure to do so is memory
    /// safe, but will lead to general incorrectness such as panics and wrong
    /// results.
    ///
    /// Panics on out-of-bounds accesses.
    pub fn read_elem(
        &self,
        store: &mut AutoAssertNoGc,
        layout: &GcArrayLayout,
        ty: &StorageType,
        index: u32,
    ) -> Val {
        // Byte offset of element `index` within this array's object data.
        let offset = layout.elem_offset(index);
        let data = store.unwrap_gc_store_mut().gc_object_data(self.as_gc_ref());
        match ty {
            // Packed 8-/16-bit elements are zero-extended up to `i32`.
            StorageType::I8 => Val::I32(data.read_u8(offset).into()),
            StorageType::I16 => Val::I32(data.read_u16(offset).into()),
            StorageType::ValType(ValType::I32) => Val::I32(data.read_i32(offset)),
            StorageType::ValType(ValType::I64) => Val::I64(data.read_i64(offset)),
            // Floats are read as their raw bit patterns.
            StorageType::ValType(ValType::F32) => Val::F32(data.read_u32(offset)),
            StorageType::ValType(ValType::F64) => Val::F64(data.read_u64(offset)),
            StorageType::ValType(ValType::V128) => Val::V128(data.read_v128(offset)),
            // GC references are stored in object data as raw 32-bit values;
            // dispatch on the element type's *top* heap type to turn them back
            // into the appropriate `Val` variant.
            StorageType::ValType(ValType::Ref(r)) => match r.heap_type().top() {
                HeapType::Extern => {
                    let raw = data.read_u32(offset);
                    Val::ExternRef(ExternRef::_from_raw(store, raw))
                }
                HeapType::Any => {
                    let raw = data.read_u32(offset);
                    Val::AnyRef(AnyRef::_from_raw(store, raw))
                }
                HeapType::Exn => {
                    let raw = data.read_u32(offset);
                    Val::ExnRef(ExnRef::_from_raw(store, raw))
                }
                HeapType::Func => {
                    // Function references are stored as IDs into the store's
                    // func-ref table, not as GC references.
                    let func_ref_id = data.read_u32(offset);
                    let func_ref_id = FuncRefTableId::from_raw(func_ref_id);
                    let func_ref = store
                        .unwrap_gc_store()
                        .func_ref_table
                        .get_untyped(func_ref_id);
                    Val::FuncRef(unsafe {
                        // SAFETY(review): assumes pointers interned in the
                        // store's func-ref table remain valid for this store's
                        // lifetime — confirm against the table's invariants.
                        func_ref.map(|p| Func::from_vm_func_ref(store.id(), p.as_non_null()))
                    })
                }
                otherwise => unreachable!("not a top type: {otherwise:?}"),
            },
        }
    }
186
    /// Write the given value into this array at the given offset.
    ///
    /// Returns an error if `val` is a GC reference that has since been
    /// unrooted.
    ///
    /// Does not check that `val` matches `ty`, nor that the field is actually
    /// of type `ty`. Checking those things is the caller's responsibility.
    /// Failure to do so is memory safe, but will lead to general incorrectness
    /// such as panics and wrong results.
    ///
    /// Panics on out-of-bounds accesses.
    pub fn write_elem(
        &self,
        store: &mut AutoAssertNoGc,
        layout: &GcArrayLayout,
        ty: &StorageType,
        index: u32,
        val: Val,
    ) -> Result<()> {
        // NB: `debug_assert!` compiles its condition away in release builds,
        // so the `?` on `_matches_ty` only propagates errors when debug
        // assertions are enabled.
        debug_assert!(val._matches_ty(&store, &ty.unpack())?);

        let offset = layout.elem_offset(index);
        let data = store.unwrap_gc_store_mut().gc_object_data(self.as_gc_ref());
        match val {
            // Narrow `i32` values down to the array's packed storage width.
            Val::I32(i) if ty.is_i8() => data.write_i8(offset, truncate_i32_to_i8(i)),
            Val::I32(i) if ty.is_i16() => data.write_i16(offset, truncate_i32_to_i16(i)),
            Val::I32(i) => data.write_i32(offset, i),
            Val::I64(i) => data.write_i64(offset, i),
            // Floats are written as their raw bit patterns.
            Val::F32(f) => data.write_u32(offset, f),
            Val::F64(f) => data.write_u64(offset, f),
            Val::V128(v) => data.write_v128(offset, v),

            // For GC-managed references, we need to take care to run the
            // appropriate barriers, even when we are writing null references
            // into the array.
            //
            // POD-read the old value into a local copy, run the GC write
            // barrier on that local copy, and then POD-write the updated
            // value back into the array. This avoids transmuting the inner
            // data, which would probably be fine, but this approach is
            // Obviously Correct and should get us by for now. If LLVM isn't
            // able to elide some of these unnecessary copies, and this
            // method is ever hot enough, we can always come back and clean
            // it up in the future.
            Val::ExternRef(e) => {
                let raw = data.read_u32(offset);
                let mut gc_ref = VMGcRef::from_raw_u32(raw);
                let e = match e {
                    Some(e) => Some(e.try_gc_ref(store)?.unchecked_copy()),
                    None => None,
                };
                let store = store.require_gc_store_mut()?;
                store.write_gc_ref(&mut gc_ref, e.as_ref());
                let data = store.gc_object_data(self.as_gc_ref());
                data.write_u32(offset, gc_ref.map_or(0, |r| r.as_raw_u32()));
            }
            // Same read-old/barrier/write-back dance as `ExternRef` above.
            Val::AnyRef(a) => {
                let raw = data.read_u32(offset);
                let mut gc_ref = VMGcRef::from_raw_u32(raw);
                let a = match a {
                    Some(a) => Some(a.try_gc_ref(store)?.unchecked_copy()),
                    None => None,
                };
                let store = store.require_gc_store_mut()?;
                store.write_gc_ref(&mut gc_ref, a.as_ref());
                let data = store.gc_object_data(self.as_gc_ref());
                data.write_u32(offset, gc_ref.map_or(0, |r| r.as_raw_u32()));
            }
            Val::ExnRef(e) => {
                let raw = data.read_u32(offset);
                let mut gc_ref = VMGcRef::from_raw_u32(raw);
                let e = match e {
                    Some(e) => Some(e.try_gc_ref(store)?.unchecked_copy()),
                    None => None,
                };
                let store = store.require_gc_store_mut()?;
                store.write_gc_ref(&mut gc_ref, e.as_ref());
                let data = store.gc_object_data(self.as_gc_ref());
                data.write_u32(offset, gc_ref.map_or(0, |r| r.as_raw_u32()));
            }

            // Function references are stored as IDs into the store's func-ref
            // table, not as GC references, so no GC barrier is involved.
            Val::FuncRef(f) => {
                let func_ref = match f {
                    Some(f) => Some(SendSyncPtr::new(f.vm_func_ref(store))),
                    None => None,
                };
                let store = store.require_gc_store_mut()?;
                // SAFETY(review): assumes pointers handed to `intern` remain
                // valid for the table's lifetime — confirm at its definition.
                let id = unsafe { store.func_ref_table.intern(func_ref) };
                store
                    .gc_object_data(self.as_gc_ref())
                    .write_u32(offset, id.into_raw());
            }
            Val::ContRef(_) => {
                // TODO(#10248): Implement array continuation reference element handling
                return Err(anyhow::anyhow!(
                    "setting continuation references in array elements not yet supported"
                ));
            }
        }
        Ok(())
    }
288
    /// Initialize an element in this arrayref that is currently uninitialized.
    ///
    /// The difference between this method and `write_elem` is that GC barriers
    /// are handled differently. When overwriting an initialized element (aka
    /// `write_elem`) we need to call the full GC write barrier, which
    /// logically drops the old GC reference and clones the new GC
    /// reference. When we are initializing an element for the first time, there
    /// is no old GC reference that is being overwritten and which we need to
    /// drop, so we only need to clone the new GC reference.
    ///
    /// Calling this method on an arrayref that has already had the associated
    /// element initialized will result in GC bugs. These are memory safe but
    /// will lead to generally incorrect behavior such as panics, leaks, and
    /// incorrect results.
    ///
    /// Does not check that `val` matches `ty`, nor that the field is actually
    /// of type `ty`. Checking those things is the caller's responsibility.
    /// Failure to do so is memory safe, but will lead to general incorrectness
    /// such as panics and wrong results.
    ///
    /// Returns an error if `val` is a GC reference that has since been
    /// unrooted.
    ///
    /// Panics on out-of-bounds accesses.
    pub fn initialize_elem(
        &self,
        store: &mut AutoAssertNoGc,
        layout: &GcArrayLayout,
        ty: &StorageType,
        index: u32,
        val: Val,
    ) -> Result<()> {
        // NB: `debug_assert!` compiles its condition away in release builds,
        // so the `?` on `_matches_ty` only propagates errors when debug
        // assertions are enabled.
        debug_assert!(val._matches_ty(&store, &ty.unpack())?);
        let offset = layout.elem_offset(index);
        let gcstore = store.require_gc_store_mut()?;
        match val {
            // Narrow `i32` values down to the array's packed storage width.
            Val::I32(i) if ty.is_i8() => gcstore
                .gc_object_data(self.as_gc_ref())
                .write_i8(offset, truncate_i32_to_i8(i)),
            Val::I32(i) if ty.is_i16() => gcstore
                .gc_object_data(self.as_gc_ref())
                .write_i16(offset, truncate_i32_to_i16(i)),
            Val::I32(i) => gcstore
                .gc_object_data(self.as_gc_ref())
                .write_i32(offset, i),
            Val::I64(i) => gcstore
                .gc_object_data(self.as_gc_ref())
                .write_i64(offset, i),
            // Floats are written as their raw bit patterns.
            Val::F32(f) => gcstore
                .gc_object_data(self.as_gc_ref())
                .write_u32(offset, f),
            Val::F64(f) => gcstore
                .gc_object_data(self.as_gc_ref())
                .write_u64(offset, f),
            Val::V128(v) => gcstore
                .gc_object_data(self.as_gc_ref())
                .write_v128(offset, v),

            // NB: We don't need to do a write barrier when initializing a
            // field, because there is nothing being overwritten. Therefore, we
            // just run the clone barrier.
            Val::ExternRef(x) => {
                let x = match x {
                    None => 0,
                    Some(x) => x.try_clone_gc_ref(store)?.as_raw_u32(),
                };
                store
                    .require_gc_store_mut()?
                    .gc_object_data(self.as_gc_ref())
                    .write_u32(offset, x);
            }
            // Same clone-barrier-only pattern as `ExternRef` above.
            Val::AnyRef(x) => {
                let x = match x {
                    None => 0,
                    Some(x) => x.try_clone_gc_ref(store)?.as_raw_u32(),
                };
                store
                    .require_gc_store_mut()?
                    .gc_object_data(self.as_gc_ref())
                    .write_u32(offset, x);
            }
            Val::ExnRef(x) => {
                let x = match x {
                    None => 0,
                    Some(x) => x.try_clone_gc_ref(store)?.as_raw_u32(),
                };
                store
                    .require_gc_store_mut()?
                    .gc_object_data(self.as_gc_ref())
                    .write_u32(offset, x);
            }

            // Function references are stored as IDs into the store's func-ref
            // table, not as GC references, so no GC barrier is involved.
            Val::FuncRef(f) => {
                let func_ref = match f {
                    Some(f) => Some(SendSyncPtr::new(f.vm_func_ref(store))),
                    None => None,
                };
                let gcstore = store.require_gc_store_mut()?;
                // SAFETY(review): assumes pointers handed to `intern` remain
                // valid for the table's lifetime — confirm at its definition.
                let id = unsafe { gcstore.func_ref_table.intern(func_ref) };
                gcstore
                    .gc_object_data(self.as_gc_ref())
                    .write_u32(offset, id.into_raw());
            }
            Val::ContRef(_) => {
                // TODO(#10248): Implement array continuation reference init handling
                return Err(anyhow::anyhow!(
                    "initializing continuation references in array elements not yet supported"
                ));
            }
        }
        Ok(())
    }
401}