wasmtime/runtime/gc/enabled/arrayref.rs

//! Working with GC `array` objects.

use crate::runtime::vm::VMGcRef;
use crate::store::StoreId;
use crate::vm::{VMArrayRef, VMGcHeader};
use crate::{AnyRef, FieldType};
use crate::{
    ArrayType, AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
    ManuallyRooted, RefType, Rooted, Val, ValRaw, ValType, WasmTy,
    prelude::*,
    store::{AutoAssertNoGc, StoreContextMut, StoreOpaque},
};
use core::mem::{self, MaybeUninit};
use wasmtime_environ::{GcArrayLayout, GcLayout, VMGcKind, VMSharedTypeIndex};

/// An allocator for a particular Wasm GC array type.
///
/// Every `ArrayRefPre` is associated with a particular [`Store`][crate::Store]
/// and a particular [`ArrayType`][crate::ArrayType].
///
/// Reusing an allocator across many allocations amortizes some per-type runtime
/// overheads inside Wasmtime. An `ArrayRefPre` is to `ArrayRef`s as an
/// `InstancePre` is to `Instance`s.
///
/// # Example
///
/// ```
/// use wasmtime::*;
///
/// # fn foo() -> Result<()> {
/// let mut config = Config::new();
/// config.wasm_function_references(true);
/// config.wasm_gc(true);
///
/// let engine = Engine::new(&config)?;
/// let mut store = Store::new(&engine, ());
///
/// // Define an array type.
/// let array_ty = ArrayType::new(
///    store.engine(),
///    FieldType::new(Mutability::Var, ValType::I32.into()),
/// );
///
/// // Create an allocator for the array type.
/// let allocator = ArrayRefPre::new(&mut store, array_ty);
///
/// {
///     let mut scope = RootScope::new(&mut store);
///
///     // Allocate a bunch of instances of our array type using the same
///     // allocator! This is faster than creating a new allocator for each
///     // instance we want to allocate.
///     for _ in 0..10 {
///         let len = 42;
///         let elem = Val::I32(36);
///         ArrayRef::new(&mut scope, &allocator, &elem, len)?;
///     }
/// }
/// # Ok(())
/// # }
/// # let _ = foo();
/// ```
pub struct ArrayRefPre {
    store_id: StoreId,
    ty: ArrayType,
}

impl ArrayRefPre {
    /// Create a new `ArrayRefPre` that is associated with the given store
    /// and type.
    pub fn new(mut store: impl AsContextMut, ty: ArrayType) -> Self {
        Self::_new(store.as_context_mut().0, ty)
    }

    pub(crate) fn _new(store: &mut StoreOpaque, ty: ArrayType) -> Self {
        store.insert_gc_host_alloc_type(ty.registered_type().clone());
        let store_id = store.id();
        ArrayRefPre { store_id, ty }
    }

    pub(crate) fn layout(&self) -> &GcArrayLayout {
        self.ty
            .registered_type()
            .layout()
            .expect("array types have a layout")
            .unwrap_array()
    }

    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
        self.ty.registered_type().index()
    }
}

/// A reference to a GC-managed `array` instance.
///
/// A WebAssembly `array` is a sequence of elements of some homogeneous
/// type. An array's length is fixed at allocation time (two instances of the
/// same array type may have different lengths) and can never change once the
/// array is allocated. Whether an array's elements are mutable or constant is
/// determined by the array's type, and controls whether elements can be
/// assigned new values. Each element is either an unpacked
/// [`Val`][crate::Val] or a packed 8-/16-bit integer. Array elements are
/// accessed dynamically via indexing; out-of-bounds accesses result in
/// traps.
///
/// Like all WebAssembly references, these are opaque and unforgeable to Wasm:
/// they cannot be faked and Wasm cannot, for example, cast the integer
/// `0x12345678` into a reference, pretend it is a valid `arrayref`, and trick
/// the host into dereferencing it and segfaulting or worse.
///
/// Note that you can also use `Rooted<ArrayRef>` and `ManuallyRooted<ArrayRef>`
/// as a type parameter with [`Func::typed`][crate::Func::typed]- and
/// [`Func::wrap`][crate::Func::wrap]-style APIs.
///
/// # Example
///
/// ```
/// use wasmtime::*;
///
/// # fn foo() -> Result<()> {
/// let mut config = Config::new();
/// config.wasm_function_references(true);
/// config.wasm_gc(true);
///
/// let engine = Engine::new(&config)?;
/// let mut store = Store::new(&engine, ());
///
/// // Define the type for an array of `i32`s.
/// let array_ty = ArrayType::new(
///    store.engine(),
///    FieldType::new(Mutability::Var, ValType::I32.into()),
/// );
///
/// // Create an allocator for the array type.
/// let allocator = ArrayRefPre::new(&mut store, array_ty);
///
/// {
///     let mut scope = RootScope::new(&mut store);
///
///     // Allocate an instance of the array type.
///     let len = 36;
///     let elem = Val::I32(42);
///     let my_array = match ArrayRef::new(&mut scope, &allocator, &elem, len) {
///         Ok(s) => s,
///         Err(e) => match e.downcast::<GcHeapOutOfMemory<()>>() {
///             // If the heap is out of memory, then do a GC to free up some
///             // space and try again.
///             Ok(oom) => {
///                 // Do a GC! Note: in an async context, you'd want to do
///                 // `scope.as_context_mut().gc_async().await`.
///                 scope.as_context_mut().gc(Some(&oom));
///
///                 // Try again. If the GC heap is still out of memory, then we
///                 // weren't able to free up resources for this allocation, so
///                 // propagate the error.
///                 ArrayRef::new(&mut scope, &allocator, &elem, len)?
///             }
///             // Propagate any other kind of error.
///             Err(e) => return Err(e),
///         }
///     };
///
///     // That instance's elements should have the initial value.
///     for i in 0..len {
///         let val = my_array.get(&mut scope, i)?.unwrap_i32();
///         assert_eq!(val, 42);
///     }
///
///     // We can set an element to a new value because the type was defined with
///     // mutable elements (as opposed to const).
///     my_array.set(&mut scope, 3, Val::I32(1234))?;
///     let new_val = my_array.get(&mut scope, 3)?.unwrap_i32();
///     assert_eq!(new_val, 1234);
/// }
/// # Ok(())
/// # }
/// # foo().unwrap();
/// ```
#[derive(Debug)]
#[repr(transparent)]
pub struct ArrayRef {
    pub(super) inner: GcRootIndex,
}

unsafe impl GcRefImpl for ArrayRef {
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // Safety: `ArrayRef` is a newtype of a `GcRootIndex`.
        let me: &Self = unsafe { mem::transmute(index) };

        // Assert we really are just a newtype of a `GcRootIndex`.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}

impl Rooted<ArrayRef> {
    /// Upcast this `arrayref` into an `anyref`.
    #[inline]
    pub fn to_anyref(self) -> Rooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcast this `arrayref` into an `eqref`.
    #[inline]
    pub fn to_eqref(self) -> Rooted<EqRef> {
        self.unchecked_cast()
    }
}

impl ManuallyRooted<ArrayRef> {
    /// Upcast this `arrayref` into an `anyref`.
    #[inline]
    pub fn to_anyref(self) -> ManuallyRooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcast this `arrayref` into an `eqref`.
    #[inline]
    pub fn to_eqref(self) -> ManuallyRooted<EqRef> {
        self.unchecked_cast()
    }
}

/// An iterator for elements in `ArrayRef::new[_async]`.
///
/// NB: We can't use `iter::repeat(elem).take(len)` because that doesn't
/// implement `ExactSizeIterator`.
#[derive(Clone)]
struct RepeatN<'a>(&'a Val, u32);

impl<'a> Iterator for RepeatN<'a> {
    type Item = &'a Val;

    fn next(&mut self) -> Option<Self::Item> {
        if self.1 == 0 {
            None
        } else {
            self.1 -= 1;
            Some(self.0)
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.len();
        (len, Some(len))
    }
}

impl ExactSizeIterator for RepeatN<'_> {
    fn len(&self) -> usize {
        usize::try_from(self.1).unwrap()
    }
}

impl ArrayRef {
    /// Allocate a new `array` of the given length, with every element
    /// initialized to `elem`.
    ///
    /// For example, `ArrayRef::new(ctx, pre, &Val::I64(9), 3)` allocates the
    /// array `[9, 9, 9]`.
    ///
    /// This is similar to the `array.new` instruction.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new array, then this method will automatically trigger a synchronous
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `elem` value's type does not match the `allocator`'s array
    /// type's element type, an error is returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if the `store` is configured for async; use
    /// [`ArrayRef::new_async`][crate::ArrayRef::new_async] to perform
    /// asynchronous allocation instead.
    ///
    /// Panics if either the allocator or the `elem` value is not associated
    /// with the given store.
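    ///
    /// # Example
    ///
    /// A minimal sketch of allocating with this constructor, assuming a
    /// GC-enabled `Config` like the one in the type-level example:
    ///
    /// ```
    /// use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// let array_ty = ArrayType::new(
    ///     store.engine(),
    ///     FieldType::new(Mutability::Var, ValType::I64.into()),
    /// );
    /// let allocator = ArrayRefPre::new(&mut store, array_ty);
    ///
    /// let mut scope = RootScope::new(&mut store);
    /// // Allocate the array `[9, 9, 9]`.
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I64(9), 3)?;
    /// assert_eq!(array.len(&scope)?, 3);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```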
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &ArrayRefPre,
        elem: &Val,
        len: u32,
    ) -> Result<Rooted<ArrayRef>> {
        Self::_new(store.as_context_mut().0, allocator, elem, len)
    }

    pub(crate) fn _new(
        store: &mut StoreOpaque,
        allocator: &ArrayRefPre,
        elem: &Val,
        len: u32,
    ) -> Result<Rooted<ArrayRef>> {
        store.retry_after_gc((), |store, ()| {
            Self::new_from_iter(store, allocator, RepeatN(elem, len))
        })
    }

    /// Asynchronously allocate a new `array` of the given length, with every
    /// element initialized to `elem`.
    ///
    /// For example, `ArrayRef::new_async(ctx, pre, &Val::I64(9), 3)` allocates
    /// the array `[9, 9, 9]`.
    ///
    /// This is similar to the `array.new` instruction.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new array, then this method will automatically trigger an asynchronous
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `elem` value's type does not match the `allocator`'s array
    /// type's element type, an error is returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if the `store` is not configured for async; use
    /// [`ArrayRef::new`][crate::ArrayRef::new] to perform synchronous
    /// allocation instead.
    ///
    /// Panics if either the allocator or the `elem` value is not associated
    /// with the given store.
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &ArrayRefPre,
        elem: &Val,
        len: u32,
    ) -> Result<Rooted<ArrayRef>> {
        Self::_new_async(store.as_context_mut().0, allocator, elem, len).await
    }

    #[cfg(feature = "async")]
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        allocator: &ArrayRefPre,
        elem: &Val,
        len: u32,
    ) -> Result<Rooted<ArrayRef>> {
        store
            .retry_after_gc_async((), |store, ()| {
                Self::new_from_iter(store, allocator, RepeatN(elem, len))
            })
            .await
    }

    /// Like `ArrayRef::new`, but when async is configured this must only ever
    /// be called on a fiber stack.
    pub(crate) unsafe fn new_maybe_async(
        store: &mut StoreOpaque,
        allocator: &ArrayRefPre,
        elem: &Val,
        len: u32,
    ) -> Result<Rooted<ArrayRef>> {
        // Type check the initial element value against the element type.
        elem.ensure_matches_ty(store, allocator.ty.element_type().unpack())
            .context("element type mismatch")?;

        unsafe {
            store.retry_after_gc_maybe_async((), |store, ()| {
                Self::new_from_iter(store, allocator, RepeatN(elem, len))
            })
        }
    }

    /// Allocate a new array of the given elements.
    ///
    /// Does not attempt a GC on OOM; leaves that to callers.
    fn new_from_iter<'a>(
        store: &mut StoreOpaque,
        allocator: &ArrayRefPre,
        elems: impl Clone + ExactSizeIterator<Item = &'a Val>,
    ) -> Result<Rooted<ArrayRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use an `ArrayRefPre` with the wrong store"
        );

        // Type check the elements against the element type.
        for elem in elems.clone() {
            elem.ensure_matches_ty(store, allocator.ty.element_type().unpack())
                .context("element type mismatch")?;
        }

        let len = u32::try_from(elems.len()).unwrap();

        // Allocate the array and write each field value into the appropriate
        // offset.
        let arrayref = store
            .gc_store_mut()?
            .alloc_uninit_array(allocator.type_index(), len, allocator.layout())
            .context("unrecoverable error when allocating new `arrayref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // From this point on, if we get any errors, then the array is not
        // fully initialized, so we need to eagerly deallocate it before the
        // next GC where the collector might try to interpret one of the
        // uninitialized fields as a GC reference.
        let mut store = AutoAssertNoGc::new(store);
        match (|| {
            let elem_ty = allocator.ty.element_type();
            for (i, elem) in elems.enumerate() {
                let i = u32::try_from(i).unwrap();
                debug_assert!(i < len);
                arrayref.initialize_elem(&mut store, allocator.layout(), &elem_ty, i, *elem)?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, arrayref.into())),
            Err(e) => {
                store.gc_store_mut()?.dealloc_uninit_array(arrayref);
                Err(e)
            }
        }
    }

    /// Synchronously allocate a new `array` containing the given elements.
    ///
    /// For example, `ArrayRef::new_fixed(ctx, pre, &[Val::I64(4), Val::I64(5),
    /// Val::I64(6)])` allocates the array `[4, 5, 6]`.
    ///
    /// This is similar to the `array.new_fixed` instruction.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new array, then this method will automatically trigger a synchronous
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the type of any of the `elems` values does not match the
    /// `allocator`'s array type's element type, an error is returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if the `store` is configured for async; use
    /// [`ArrayRef::new_fixed_async`][crate::ArrayRef::new_fixed_async] to
    /// perform asynchronous allocation instead.
    ///
    /// Panics if the allocator or any of the `elems` values are not associated
    /// with the given store.
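    ///
    /// # Example
    ///
    /// A minimal sketch of fixed-size allocation, assuming a GC-enabled
    /// `Config` like the one in the type-level example:
    ///
    /// ```
    /// use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// let array_ty = ArrayType::new(
    ///     store.engine(),
    ///     FieldType::new(Mutability::Var, ValType::I64.into()),
    /// );
    /// let allocator = ArrayRefPre::new(&mut store, array_ty);
    ///
    /// let mut scope = RootScope::new(&mut store);
    /// // Allocate the array `[4, 5, 6]`.
    /// let elems = [Val::I64(4), Val::I64(5), Val::I64(6)];
    /// let array = ArrayRef::new_fixed(&mut scope, &allocator, &elems)?;
    /// assert_eq!(array.get(&mut scope, 1)?.unwrap_i64(), 5);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```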
    pub fn new_fixed(
        mut store: impl AsContextMut,
        allocator: &ArrayRefPre,
        elems: &[Val],
    ) -> Result<Rooted<ArrayRef>> {
        Self::_new_fixed(store.as_context_mut().0, allocator, elems)
    }

    pub(crate) fn _new_fixed(
        store: &mut StoreOpaque,
        allocator: &ArrayRefPre,
        elems: &[Val],
    ) -> Result<Rooted<ArrayRef>> {
        store.retry_after_gc((), |store, ()| {
            Self::new_from_iter(store, allocator, elems.iter())
        })
    }

    /// Asynchronously allocate a new `array` containing the given elements.
    ///
    /// For example, `ArrayRef::new_fixed_async(ctx, pre, &[Val::I64(4),
    /// Val::I64(5), Val::I64(6)])` allocates the array `[4, 5, 6]`.
    ///
    /// This is similar to the `array.new_fixed` instruction.
    ///
    /// If your engine is not configured for async, use
    /// [`ArrayRef::new_fixed`][crate::ArrayRef::new_fixed] to perform
    /// synchronous allocation.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new array, then this method will automatically trigger an asynchronous
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the type of any of the `elems` values does not match the
    /// `allocator`'s array type's element type, an error is returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if the `store` is not configured for async; use
    /// [`ArrayRef::new_fixed`][crate::ArrayRef::new_fixed] to perform
    /// synchronous allocation instead.
    ///
    /// Panics if the allocator or any of the `elems` values are not associated
    /// with the given store.
    #[cfg(feature = "async")]
    pub async fn new_fixed_async(
        mut store: impl AsContextMut,
        allocator: &ArrayRefPre,
        elems: &[Val],
    ) -> Result<Rooted<ArrayRef>> {
        Self::_new_fixed_async(store.as_context_mut().0, allocator, elems).await
    }

    #[cfg(feature = "async")]
    pub(crate) async fn _new_fixed_async(
        store: &mut StoreOpaque,
        allocator: &ArrayRefPre,
        elems: &[Val],
    ) -> Result<Rooted<ArrayRef>> {
        store
            .retry_after_gc_async((), |store, ()| {
                Self::new_from_iter(store, allocator, elems.iter())
            })
            .await
    }

    /// Like `ArrayRef::new_fixed[_async]`, but it is the caller's
    /// responsibility to ensure that when async is enabled, this is only ever
    /// called on a fiber stack.
    pub(crate) unsafe fn new_fixed_maybe_async(
        store: &mut StoreOpaque,
        allocator: &ArrayRefPre,
        elems: &[Val],
    ) -> Result<Rooted<ArrayRef>> {
        unsafe {
            store.retry_after_gc_maybe_async((), |store, ()| {
                Self::new_from_iter(store, allocator, elems.iter())
            })
        }
    }

    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Get this `arrayref`'s type.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
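    ///
    /// # Example
    ///
    /// A minimal sketch, assuming the GC-enabled setup from the type-level
    /// example, and assuming that an identical array type definition
    /// registered with the same engine matches the array's actual type:
    ///
    /// ```
    /// use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// let expected_ty = ArrayType::new(
    ///     store.engine(),
    ///     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// );
    ///
    /// let mut scope = RootScope::new(&mut store);
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(0), 2)?;
    /// assert!(array.ty(&scope)?.matches(&expected_ty));
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```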
    pub fn ty(&self, store: impl AsContext) -> Result<ArrayType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<ArrayType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(ArrayType::from_shared_type_index(store.engine(), index))
    }

    /// Does this `arrayref` match the given type?
    ///
    /// That is, is this array's type a subtype of the given type?
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store or if the
    /// type is not associated with the store's engine.
    pub fn matches_ty(&self, store: impl AsContext, ty: &ArrayType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &ArrayType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(self._ty(store)?.matches(ty))
    }

    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &ArrayType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("array used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Get the length of this array.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
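    ///
    /// # Example
    ///
    /// A minimal sketch, assuming the GC-enabled setup from the type-level
    /// example:
    ///
    /// ```
    /// use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// let mut scope = RootScope::new(&mut store);
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(0), 5)?;
    /// assert_eq!(array.len(&scope)?, 5);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```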
    pub fn len(&self, store: impl AsContext) -> Result<u32> {
        self._len(store.as_context().0)
    }

    pub(crate) fn _len(&self, store: &StoreOpaque) -> Result<u32> {
        assert!(self.comes_from_same_store(store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!({
            let header = store.gc_store()?.header(gc_ref);
            header.kind().matches(VMGcKind::ArrayRef)
        });
        let arrayref = gc_ref.as_arrayref_unchecked();
        Ok(arrayref.len(store))
    }

    /// Get the values of this array's elements.
    ///
    /// Note that `i8` and `i16` element values are zero-extended into
    /// `Val::I32(_)`s.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
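    ///
    /// # Example
    ///
    /// A minimal sketch, assuming the GC-enabled setup from the type-level
    /// example:
    ///
    /// ```
    /// use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// let vals = [Val::I32(1), Val::I32(2), Val::I32(3)];
    /// let array = ArrayRef::new_fixed(&mut store, &allocator, &vals)?;
    ///
    /// // Collect the element values back out of the array.
    /// let elems: Vec<i32> = array.elems(&mut store)?.map(|v| v.unwrap_i32()).collect();
    /// assert_eq!(elems, [1, 2, 3]);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```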
    pub fn elems<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._elems(store.into().0)
    }

    pub(crate) fn _elems<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ArrayRef));

        let len = self._len(&store)?;

        return Ok(Elems {
            arrayref: self,
            store,
            index: 0,
            len,
        });

        struct Elems<'a, 'b> {
            arrayref: &'a ArrayRef,
            store: AutoAssertNoGc<'b>,
            index: u32,
            len: u32,
        }

        impl Iterator for Elems<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                Some(self.arrayref._get(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                let len = usize::try_from(len).unwrap();
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Elems<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                let len = self.len - self.index;
                usize::try_from(len).unwrap()
            }
        }
    }

    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.gc_store()?.header(gc_ref))
    }

    fn arrayref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMArrayRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::ArrayRef));
        Ok(gc_ref.as_arrayref_unchecked())
    }

    pub(crate) fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcArrayLayout> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("array types should have GC layouts");
        match layout {
            GcLayout::Array(a) => Ok(a),
            GcLayout::Struct(_) => unreachable!(),
            GcLayout::Exception(_) => unreachable!(),
        }
    }

    fn field_ty(&self, store: &StoreOpaque) -> Result<FieldType> {
        let ty = self._ty(store)?;
        Ok(ty.field_type())
    }

    /// Get this array's `index`th element.
    ///
    /// Note that `i8` and `i16` element values are zero-extended into
    /// `Val::I32(_)`s.
    ///
    /// # Errors
    ///
    /// Returns an `Err(_)` if the index is out of bounds or this reference has
    /// been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
    pub fn get(&self, mut store: impl AsContextMut, index: u32) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._get(&mut store, index)
    }

    pub(crate) fn _get(&self, store: &mut AutoAssertNoGc<'_>, index: u32) -> Result<Val> {
        assert!(
            self.comes_from_same_store(store),
            "attempted to use an array with the wrong store",
        );
        let arrayref = self.arrayref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store)?;
        let layout = self.layout(store)?;
        let len = arrayref.len(store);
        ensure!(
            index < len,
            "index out of bounds: the length is {len} but the index is {index}"
        );
        Ok(arrayref.read_elem(store, &layout, field_ty.element_type(), index))
    }

    /// Set this array's `index`th element.
    ///
    /// # Errors
    ///
    /// Returns an error in the following scenarios:
    ///
    /// * When given a value of the wrong type, such as trying to write an `f32`
    ///   value into an array of `i64` elements.
    ///
    /// * When the array elements are not mutable.
    ///
    /// * When `index` is not within the range `0..self.len(ctx)`.
    ///
    /// * When `value` is a GC reference that has since been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if either this reference or the given `value` is associated with
    /// a different store.
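    ///
    /// # Example
    ///
    /// A minimal sketch, assuming the GC-enabled setup from the type-level
    /// example (an array of mutable `i32` elements):
    ///
    /// ```
    /// use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// let mut scope = RootScope::new(&mut store);
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(0), 3)?;
    ///
    /// // Writing a value of the element type succeeds.
    /// array.set(&mut scope, 0, Val::I32(7))?;
    /// assert_eq!(array.get(&mut scope, 0)?.unwrap_i32(), 7);
    ///
    /// // Out-of-bounds indices and wrongly typed values are rejected with errors.
    /// assert!(array.set(&mut scope, 99, Val::I32(7)).is_err());
    /// assert!(array.set(&mut scope, 0, Val::F32(1.0_f32.to_bits())).is_err());
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```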
    pub fn set(&self, mut store: impl AsContextMut, index: u32, value: Val) -> Result<()> {
        self._set(store.as_context_mut().0, index, value)
    }

    pub(crate) fn _set(&self, store: &mut StoreOpaque, index: u32, value: Val) -> Result<()> {
        assert!(
            self.comes_from_same_store(store),
            "attempted to use an array with the wrong store",
        );
        assert!(
            value.comes_from_same_store(store),
            "attempted to use a value with the wrong store",
        );

        let mut store = AutoAssertNoGc::new(store);

        let field_ty = self.field_ty(&store)?;
        ensure!(
            field_ty.mutability().is_var(),
            "cannot set element {index}: array elements are not mutable"
        );

        value
            .ensure_matches_ty(&store, &field_ty.element_type().unpack())
            .with_context(|| format!("cannot set element {index}: type mismatch"))?;

        let layout = self.layout(&store)?;
        let arrayref = self.arrayref(&store)?.unchecked_copy();

        let len = arrayref.len(&store);
        ensure!(
            index < len,
            "index out of bounds: the length is {len} but the index is {index}"
        );

        arrayref.write_elem(&mut store, &layout, field_ty.element_type(), index, value)
    }

    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ArrayRef));
        Ok(header.ty().expect("arrayrefs should have concrete types"))
    }

    /// Create a new `Rooted<ArrayRef>` from the given GC reference.
    ///
    /// `gc_ref` should point to a valid `arrayref` and should belong to the
    /// store's GC heap. Failure to uphold these invariants is memory safe but
    /// will lead to general incorrectness such as panics or wrong results.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(gc_ref.is_arrayref(&*store.unwrap_gc_store().gc_heap));
        Rooted::new(store, gc_ref)
    }
}

unsafe impl WasmTy for Rooted<ArrayRef> {
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Array))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            HeapType::Any | HeapType::Eq | HeapType::Array => Ok(()),
            HeapType::ConcreteArray(ty) => self.ensure_matches_ty(store, ty),

            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Struct
            | HeapType::ConcreteStruct(_)
            | HeapType::Cont
            | HeapType::NoCont
            | HeapType::ConcreteCont(_)
            | HeapType::Exn
            | HeapType::NoExn
            | HeapType::ConcreteExn(_)
            | HeapType::None => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ArrayRef::from_cloned_gc_ref)
    }
}

unsafe impl WasmTy for Option<Rooted<ArrayRef>> {
    #[inline]
    fn valtype() -> ValType {
        ValType::ARRAYREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            Some(s) => Rooted::<ArrayRef>::dynamic_concrete_type_check(s, store, nullable, ty),
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <Rooted<ArrayRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <Rooted<ArrayRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            ArrayRef::from_cloned_gc_ref,
        )
    }
}

unsafe impl WasmTy for ManuallyRooted<ArrayRef> {
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Array))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            HeapType::Any | HeapType::Eq | HeapType::Array => Ok(()),
            HeapType::ConcreteArray(ty) => self.ensure_matches_ty(store, ty),

            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Struct
            | HeapType::ConcreteStruct(_)
            | HeapType::Cont
            | HeapType::NoCont
            | HeapType::ConcreteCont(_)
            | HeapType::Exn
            | HeapType::NoExn
            | HeapType::ConcreteExn(_)
            | HeapType::None => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ArrayRef::from_cloned_gc_ref)
    }
}

unsafe impl WasmTy for Option<ManuallyRooted<ArrayRef>> {
    #[inline]
    fn valtype() -> ValType {
        ValType::ARRAYREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.as_ref()
            .map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            Some(s) => {
                ManuallyRooted::<ArrayRef>::dynamic_concrete_type_check(s, store, nullable, ty)
            }
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <ManuallyRooted<ArrayRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <ManuallyRooted<ArrayRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            ArrayRef::from_cloned_gc_ref,
        )
    }
}