wasmtime/runtime/gc/enabled/arrayref.rs
1//! Working with GC `array` objects.
2
3use crate::runtime::vm::VMGcRef;
4use crate::store::StoreId;
5use crate::vm::{VMArrayRef, VMGcHeader};
6use crate::{
7 prelude::*,
8 store::{AutoAssertNoGc, StoreContextMut, StoreOpaque},
9 ArrayType, AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
10 ManuallyRooted, RefType, Rooted, Val, ValRaw, ValType, WasmTy,
11};
12use crate::{AnyRef, FieldType};
13use core::mem::{self, MaybeUninit};
14use wasmtime_environ::{GcArrayLayout, GcLayout, VMGcKind, VMSharedTypeIndex};
15
16/// An allocator for a particular Wasm GC array type.
17///
18/// Every `ArrayRefPre` is associated with a particular [`Store`][crate::Store]
19/// and a particular [`ArrayType`][crate::ArrayType].
20///
21/// Reusing an allocator across many allocations amortizes some per-type runtime
22/// overheads inside Wasmtime. An `ArrayRefPre` is to `ArrayRef`s as an
23/// `InstancePre` is to `Instance`s.
24///
25/// # Example
26///
27/// ```
28/// use wasmtime::*;
29///
30/// # fn foo() -> Result<()> {
31/// let mut config = Config::new();
32/// config.wasm_function_references(true);
33/// config.wasm_gc(true);
34///
35/// let engine = Engine::new(&config)?;
36/// let mut store = Store::new(&engine, ());
37///
38/// // Define an array type.
39/// let array_ty = ArrayType::new(
40/// store.engine(),
41/// FieldType::new(Mutability::Var, ValType::I32.into()),
42/// );
43///
44/// // Create an allocator for the array type.
45/// let allocator = ArrayRefPre::new(&mut store, array_ty);
46///
47/// {
48/// let mut scope = RootScope::new(&mut store);
49///
50/// // Allocate a bunch of instances of our array type using the same
51/// // allocator! This is faster than creating a new allocator for each
52/// // instance we want to allocate.
53/// for i in 0..10 {
54/// let len = 42;
55/// let elem = Val::I32(36);
56/// ArrayRef::new(&mut scope, &allocator, &elem, len)?;
57/// }
58/// }
59/// # Ok(())
60/// # }
61/// # let _ = foo();
62/// ```
63pub struct ArrayRefPre {
64 store_id: StoreId,
65 ty: ArrayType,
66}
67
68impl ArrayRefPre {
69 /// Create a new `ArrayRefPre` that is associated with the given store
70 /// and type.
71 pub fn new(mut store: impl AsContextMut, ty: ArrayType) -> Self {
72 Self::_new(store.as_context_mut().0, ty)
73 }
74
75 pub(crate) fn _new(store: &mut StoreOpaque, ty: ArrayType) -> Self {
76 store.insert_gc_host_alloc_type(ty.registered_type().clone());
77 let store_id = store.id();
78 ArrayRefPre { store_id, ty }
79 }
80
81 pub(crate) fn layout(&self) -> &GcArrayLayout {
82 self.ty
83 .registered_type()
84 .layout()
85 .expect("array types have a layout")
86 .unwrap_array()
87 }
88
89 pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
90 self.ty.registered_type().index()
91 }
92}
93
94/// A reference to a GC-managed `array` instance.
95///
/// WebAssembly `array`s are sequences of elements of some homogeneous type.
/// An array's length is fixed at allocation time (two instances of the same
/// array type may have different lengths) and can never change afterwards.
/// Depending on the array's type, its elements are either mutable or constant,
/// which determines whether they may be assigned new values. Each element is
/// either an unpacked [`Val`][crate::Val] or a packed 8-/16-bit integer. Array
/// elements are accessed dynamically via indexing; out-of-bounds accesses
/// result in traps.
105///
106/// Like all WebAssembly references, these are opaque and unforgeable to Wasm:
107/// they cannot be faked and Wasm cannot, for example, cast the integer
108/// `0x12345678` into a reference, pretend it is a valid `arrayref`, and trick
109/// the host into dereferencing it and segfaulting or worse.
110///
111/// Note that you can also use `Rooted<ArrayRef>` and `ManuallyRooted<ArrayRef>`
112/// as a type parameter with [`Func::typed`][crate::Func::typed]- and
113/// [`Func::wrap`][crate::Func::wrap]-style APIs.
114///
115/// # Example
116///
117/// ```
118/// use wasmtime::*;
119///
120/// # fn foo() -> Result<()> {
121/// let mut config = Config::new();
122/// config.wasm_function_references(true);
123/// config.wasm_gc(true);
124///
125/// let engine = Engine::new(&config)?;
126/// let mut store = Store::new(&engine, ());
127///
128/// // Define the type for an array of `i32`s.
129/// let array_ty = ArrayType::new(
130/// store.engine(),
131/// FieldType::new(Mutability::Var, ValType::I32.into()),
132/// );
133///
134/// // Create an allocator for the array type.
135/// let allocator = ArrayRefPre::new(&mut store, array_ty);
136///
137/// {
138/// let mut scope = RootScope::new(&mut store);
139///
140/// // Allocate an instance of the array type.
141/// let len = 36;
142/// let elem = Val::I32(42);
143/// let my_array = match ArrayRef::new(&mut scope, &allocator, &elem, len) {
144/// Ok(s) => s,
145/// Err(e) => match e.downcast::<GcHeapOutOfMemory<()>>() {
146/// // If the heap is out of memory, then do a GC to free up some
147/// // space and try again.
148/// Ok(oom) => {
149/// // Do a GC! Note: in an async context, you'd want to do
150/// // `scope.as_context_mut().gc_async().await`.
151/// scope.as_context_mut().gc(Some(&oom));
152///
153/// // Try again. If the GC heap is still out of memory, then we
154/// // weren't able to free up resources for this allocation, so
155/// // propagate the error.
156/// ArrayRef::new(&mut scope, &allocator, &elem, len)?
157/// }
158/// // Propagate any other kind of error.
159/// Err(e) => return Err(e),
160/// }
161/// };
162///
163/// // That instance's elements should have the initial value.
164/// for i in 0..len {
165/// let val = my_array.get(&mut scope, i)?.unwrap_i32();
166/// assert_eq!(val, 42);
167/// }
168///
169/// // We can set an element to a new value because the type was defined with
170/// // mutable elements (as opposed to const).
171/// my_array.set(&mut scope, 3, Val::I32(1234))?;
172/// let new_val = my_array.get(&mut scope, 3)?.unwrap_i32();
173/// assert_eq!(new_val, 1234);
174/// }
175/// # Ok(())
176/// # }
177/// # foo().unwrap();
178/// ```
179#[derive(Debug)]
180#[repr(transparent)]
181pub struct ArrayRef {
182 pub(super) inner: GcRootIndex,
183}
184
185unsafe impl GcRefImpl for ArrayRef {
186 #[allow(private_interfaces)]
187 fn transmute_ref(index: &GcRootIndex) -> &Self {
188 // Safety: `ArrayRef` is a newtype of a `GcRootIndex`.
189 let me: &Self = unsafe { mem::transmute(index) };
190
191 // Assert we really are just a newtype of a `GcRootIndex`.
192 assert!(matches!(
193 me,
194 Self {
195 inner: GcRootIndex { .. },
196 }
197 ));
198
199 me
200 }
201}
202
203impl Rooted<ArrayRef> {
204 /// Upcast this `arrayref` into an `anyref`.
205 #[inline]
206 pub fn to_anyref(self) -> Rooted<AnyRef> {
207 self.unchecked_cast()
208 }
209
210 /// Upcast this `arrayref` into an `eqref`.
211 #[inline]
212 pub fn to_eqref(self) -> Rooted<EqRef> {
213 self.unchecked_cast()
214 }
215}
216
217impl ManuallyRooted<ArrayRef> {
218 /// Upcast this `arrayref` into an `anyref`.
219 #[inline]
220 pub fn to_anyref(self) -> ManuallyRooted<AnyRef> {
221 self.unchecked_cast()
222 }
223
224 /// Upcast this `arrayref` into an `eqref`.
225 #[inline]
226 pub fn to_eqref(self) -> ManuallyRooted<EqRef> {
227 self.unchecked_cast()
228 }
229}
230
/// An iterator for elements in `ArrayRef::new[_async]`.
232///
233/// NB: We can't use `iter::repeat(elem).take(len)` because that doesn't
234/// implement `ExactSizeIterator`.
235#[derive(Clone)]
236struct RepeatN<'a>(&'a Val, u32);
237
238impl<'a> Iterator for RepeatN<'a> {
239 type Item = &'a Val;
240
241 fn next(&mut self) -> Option<Self::Item> {
242 if self.1 == 0 {
243 None
244 } else {
245 self.1 -= 1;
246 Some(self.0)
247 }
248 }
249
250 fn size_hint(&self) -> (usize, Option<usize>) {
251 let len = self.len();
252 (len, Some(len))
253 }
254}
255
256impl ExactSizeIterator for RepeatN<'_> {
257 fn len(&self) -> usize {
258 usize::try_from(self.1).unwrap()
259 }
260}
261
262impl ArrayRef {
263 /// Allocate a new `array` of the given length, with every element
264 /// initialized to `elem`.
265 ///
266 /// For example, `ArrayRef::new(ctx, pre, &Val::I64(9), 3)` allocates the
267 /// array `[9, 9, 9]`.
268 ///
269 /// This is similar to the `array.new` instruction.
270 ///
271 /// # Automatic Garbage Collection
272 ///
273 /// If the GC heap is at capacity, and there isn't room for allocating this
274 /// new array, then this method will automatically trigger a synchronous
275 /// collection in an attempt to free up space in the GC heap.
276 ///
277 /// # Errors
278 ///
279 /// If the given `elem` value's type does not match the `allocator`'s array
280 /// type's element type, an error is returned.
281 ///
282 /// If the allocation cannot be satisfied because the GC heap is currently
283 /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
284 /// error is returned. The allocation might succeed on a second attempt if
285 /// you drop some rooted GC references and try again.
286 ///
287 /// # Panics
288 ///
289 /// Panics if the `store` is configured for async; use
290 /// [`ArrayRef::new_async`][crate::ArrayRef::new_async] to perform
291 /// asynchronous allocation instead.
292 ///
293 /// Panics if either the allocator or the `elem` value is not associated
294 /// with the given store.
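    ///
    /// # Example
    ///
    /// A minimal sketch of host-side allocation with `new`. It mirrors the
    /// type-level example above and assumes a store whose config enables
    /// `wasm_gc` and `wasm_function_references`:
    ///
    /// ```
    /// use wasmtime::*;
    ///
    /// # fn foo() -> Result<()> {
    /// let mut config = Config::new();
    /// config.wasm_function_references(true);
    /// config.wasm_gc(true);
    /// let engine = Engine::new(&config)?;
    /// let mut store = Store::new(&engine, ());
    ///
    /// // An array type with mutable `i64` elements.
    /// let array_ty = ArrayType::new(
    ///     store.engine(),
    ///     FieldType::new(Mutability::Var, ValType::I64.into()),
    /// );
    /// let allocator = ArrayRefPre::new(&mut store, array_ty);
    ///
    /// let mut scope = RootScope::new(&mut store);
    ///
    /// // Allocate the array `[9, 9, 9]`.
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I64(9), 3)?;
    /// assert_eq!(array.len(&scope)?, 3);
    /// assert_eq!(array.get(&mut scope, 0)?.unwrap_i64(), 9);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```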
295 pub fn new(
296 mut store: impl AsContextMut,
297 allocator: &ArrayRefPre,
298 elem: &Val,
299 len: u32,
300 ) -> Result<Rooted<ArrayRef>> {
301 Self::_new(store.as_context_mut().0, allocator, elem, len)
302 }
303
304 pub(crate) fn _new(
305 store: &mut StoreOpaque,
306 allocator: &ArrayRefPre,
307 elem: &Val,
308 len: u32,
309 ) -> Result<Rooted<ArrayRef>> {
310 store.retry_after_gc((), |store, ()| {
311 Self::new_from_iter(store, allocator, RepeatN(elem, len))
312 })
313 }
314
315 /// Asynchronously allocate a new `array` of the given length, with every
316 /// element initialized to `elem`.
317 ///
    /// For example, `ArrayRef::new_async(ctx, pre, &Val::I64(9), 3)` allocates
    /// the array `[9, 9, 9]`.
320 ///
321 /// This is similar to the `array.new` instruction.
322 ///
323 /// # Automatic Garbage Collection
324 ///
325 /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new array, then this method will automatically trigger an asynchronous
327 /// collection in an attempt to free up space in the GC heap.
328 ///
329 /// # Errors
330 ///
331 /// If the given `elem` value's type does not match the `allocator`'s array
332 /// type's element type, an error is returned.
333 ///
334 /// If the allocation cannot be satisfied because the GC heap is currently
335 /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
336 /// error is returned. The allocation might succeed on a second attempt if
337 /// you drop some rooted GC references and try again.
338 ///
339 /// # Panics
340 ///
    /// Panics if your engine is not configured for async; use
    /// [`ArrayRef::new`][crate::ArrayRef::new] to perform synchronous
    /// allocation instead.
344 ///
345 /// Panics if either the allocator or the `elem` value is not associated
346 /// with the given store.
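    ///
    /// # Example
    ///
    /// A small sketch of asynchronous allocation. It assumes an engine built
    /// with [`Config::async_support`][crate::Config::async_support] enabled
    /// and is written as an `async fn` for the embedder's executor to drive:
    ///
    /// ```
    /// # async fn example() -> wasmtime::Result<()> {
    /// use wasmtime::*;
    ///
    /// let mut config = Config::new();
    /// config.async_support(true);
    /// config.wasm_function_references(true);
    /// config.wasm_gc(true);
    /// let engine = Engine::new(&config)?;
    /// let mut store = Store::new(&engine, ());
    ///
    /// let array_ty = ArrayType::new(
    ///     store.engine(),
    ///     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// );
    /// let allocator = ArrayRefPre::new(&mut store, array_ty);
    ///
    /// let mut scope = RootScope::new(&mut store);
    ///
    /// // Allocate the array `[7, 7, 7, 7]`, collecting asynchronously if the
    /// // GC heap is at capacity.
    /// let array = ArrayRef::new_async(&mut scope, &allocator, &Val::I32(7), 4).await?;
    /// assert_eq!(array.len(&scope)?, 4);
    /// # Ok(())
    /// # }
    /// ```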
347 #[cfg(feature = "async")]
348 pub async fn new_async(
349 mut store: impl AsContextMut,
350 allocator: &ArrayRefPre,
351 elem: &Val,
352 len: u32,
353 ) -> Result<Rooted<ArrayRef>> {
354 Self::_new_async(store.as_context_mut().0, allocator, elem, len).await
355 }
356
357 #[cfg(feature = "async")]
358 pub(crate) async fn _new_async(
359 store: &mut StoreOpaque,
360 allocator: &ArrayRefPre,
361 elem: &Val,
362 len: u32,
363 ) -> Result<Rooted<ArrayRef>> {
364 store
365 .retry_after_gc_async((), |store, ()| {
366 Self::new_from_iter(store, allocator, RepeatN(elem, len))
367 })
368 .await
369 }
370
    /// Like `ArrayRef::new`, but when async is configured this must only ever
    /// be called on a fiber stack.
373 pub(crate) unsafe fn new_maybe_async(
374 store: &mut StoreOpaque,
375 allocator: &ArrayRefPre,
376 elem: &Val,
377 len: u32,
378 ) -> Result<Rooted<ArrayRef>> {
379 // Type check the initial element value against the element type.
380 elem.ensure_matches_ty(store, allocator.ty.element_type().unpack())
381 .context("element type mismatch")?;
382
383 unsafe {
384 store.retry_after_gc_maybe_async((), |store, ()| {
385 Self::new_from_iter(store, allocator, RepeatN(elem, len))
386 })
387 }
388 }
389
390 /// Allocate a new array of the given elements.
391 ///
392 /// Does not attempt a GC on OOM; leaves that to callers.
393 fn new_from_iter<'a>(
394 store: &mut StoreOpaque,
395 allocator: &ArrayRefPre,
396 elems: impl Clone + ExactSizeIterator<Item = &'a Val>,
397 ) -> Result<Rooted<ArrayRef>> {
398 assert_eq!(
399 store.id(),
400 allocator.store_id,
401 "attempted to use a `ArrayRefPre` with the wrong store"
402 );
403
404 // Type check the elements against the element type.
405 for elem in elems.clone() {
406 elem.ensure_matches_ty(store, allocator.ty.element_type().unpack())
407 .context("element type mismatch")?;
408 }
409
410 let len = u32::try_from(elems.len()).unwrap();
411
        // Allocate the array and write each element value into the
        // appropriate offset.
414 let arrayref = store
415 .gc_store_mut()?
416 .alloc_uninit_array(allocator.type_index(), len, allocator.layout())
417 .context("unrecoverable error when allocating new `arrayref`")?
418 .map_err(|n| GcHeapOutOfMemory::new((), n))?;
419
420 // From this point on, if we get any errors, then the array is not
421 // fully initialized, so we need to eagerly deallocate it before the
422 // next GC where the collector might try to interpret one of the
        // uninitialized elements as a GC reference.
424 let mut store = AutoAssertNoGc::new(store);
425 match (|| {
426 let elem_ty = allocator.ty.element_type();
427 for (i, elem) in elems.enumerate() {
428 let i = u32::try_from(i).unwrap();
429 debug_assert!(i < len);
430 arrayref.initialize_elem(&mut store, allocator.layout(), &elem_ty, i, *elem)?;
431 }
432 Ok(())
433 })() {
434 Ok(()) => Ok(Rooted::new(&mut store, arrayref.into())),
435 Err(e) => {
436 store.gc_store_mut()?.dealloc_uninit_array(arrayref);
437 Err(e)
438 }
439 }
440 }
441
442 /// Synchronously allocate a new `array` containing the given elements.
443 ///
444 /// For example, `ArrayRef::new_fixed(ctx, pre, &[Val::I64(4), Val::I64(5),
445 /// Val::I64(6)])` allocates the array `[4, 5, 6]`.
446 ///
447 /// This is similar to the `array.new_fixed` instruction.
448 ///
449 /// # Automatic Garbage Collection
450 ///
451 /// If the GC heap is at capacity, and there isn't room for allocating this
452 /// new array, then this method will automatically trigger a synchronous
453 /// collection in an attempt to free up space in the GC heap.
454 ///
455 /// # Errors
456 ///
457 /// If any of the `elems` values' type does not match the `allocator`'s
458 /// array type's element type, an error is returned.
459 ///
460 /// If the allocation cannot be satisfied because the GC heap is currently
461 /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
462 /// error is returned. The allocation might succeed on a second attempt if
463 /// you drop some rooted GC references and try again.
464 ///
465 /// # Panics
466 ///
467 /// Panics if the `store` is configured for async; use
468 /// [`ArrayRef::new_fixed_async`][crate::ArrayRef::new_fixed_async] to
469 /// perform asynchronous allocation instead.
470 ///
471 /// Panics if the allocator or any of the `elems` values are not associated
472 /// with the given store.
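    ///
    /// # Example
    ///
    /// A minimal sketch of allocating an array from a fixed slice of element
    /// values, using the same store setup as the examples above:
    ///
    /// ```
    /// use wasmtime::*;
    ///
    /// # fn foo() -> Result<()> {
    /// let mut config = Config::new();
    /// config.wasm_function_references(true);
    /// config.wasm_gc(true);
    /// let engine = Engine::new(&config)?;
    /// let mut store = Store::new(&engine, ());
    ///
    /// let array_ty = ArrayType::new(
    ///     store.engine(),
    ///     FieldType::new(Mutability::Var, ValType::I64.into()),
    /// );
    /// let allocator = ArrayRefPre::new(&mut store, array_ty);
    ///
    /// let mut scope = RootScope::new(&mut store);
    ///
    /// // Allocate the array `[4, 5, 6]`.
    /// let elems = [Val::I64(4), Val::I64(5), Val::I64(6)];
    /// let array = ArrayRef::new_fixed(&mut scope, &allocator, &elems)?;
    /// assert_eq!(array.len(&scope)?, 3);
    /// assert_eq!(array.get(&mut scope, 1)?.unwrap_i64(), 5);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```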
473 pub fn new_fixed(
474 mut store: impl AsContextMut,
475 allocator: &ArrayRefPre,
476 elems: &[Val],
477 ) -> Result<Rooted<ArrayRef>> {
478 Self::_new_fixed(store.as_context_mut().0, allocator, elems)
479 }
480
481 pub(crate) fn _new_fixed(
482 store: &mut StoreOpaque,
483 allocator: &ArrayRefPre,
484 elems: &[Val],
485 ) -> Result<Rooted<ArrayRef>> {
486 store.retry_after_gc((), |store, ()| {
487 Self::new_from_iter(store, allocator, elems.iter())
488 })
489 }
490
491 /// Asynchronously allocate a new `array` containing the given elements.
492 ///
493 /// For example, `ArrayRef::new_fixed_async(ctx, pre, &[Val::I64(4),
494 /// Val::I64(5), Val::I64(6)])` allocates the array `[4, 5, 6]`.
495 ///
496 /// This is similar to the `array.new_fixed` instruction.
497 ///
498 /// If your engine is not configured for async, use
499 /// [`ArrayRef::new_fixed`][crate::ArrayRef::new_fixed] to perform
500 /// synchronous allocation.
501 ///
502 /// # Automatic Garbage Collection
503 ///
504 /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new array, then this method will automatically trigger an asynchronous
506 /// collection in an attempt to free up space in the GC heap.
507 ///
508 /// # Errors
509 ///
510 /// If any of the `elems` values' type does not match the `allocator`'s
511 /// array type's element type, an error is returned.
512 ///
513 /// If the allocation cannot be satisfied because the GC heap is currently
514 /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
515 /// error is returned. The allocation might succeed on a second attempt if
516 /// you drop some rooted GC references and try again.
517 ///
518 /// # Panics
519 ///
520 /// Panics if the `store` is not configured for async; use
521 /// [`ArrayRef::new_fixed`][crate::ArrayRef::new_fixed] to perform
522 /// synchronous allocation instead.
523 ///
524 /// Panics if the allocator or any of the `elems` values are not associated
525 /// with the given store.
526 #[cfg(feature = "async")]
527 pub async fn new_fixed_async(
528 mut store: impl AsContextMut,
529 allocator: &ArrayRefPre,
530 elems: &[Val],
531 ) -> Result<Rooted<ArrayRef>> {
532 Self::_new_fixed_async(store.as_context_mut().0, allocator, elems).await
533 }
534
535 #[cfg(feature = "async")]
536 pub(crate) async fn _new_fixed_async(
537 store: &mut StoreOpaque,
538 allocator: &ArrayRefPre,
539 elems: &[Val],
540 ) -> Result<Rooted<ArrayRef>> {
541 store
542 .retry_after_gc_async((), |store, ()| {
543 Self::new_from_iter(store, allocator, elems.iter())
544 })
545 .await
546 }
547
    /// Like `ArrayRef::new_fixed[_async]`, but it is the caller's
    /// responsibility to ensure that, when async is enabled, this is only
    /// called on a fiber stack.
551 pub(crate) unsafe fn new_fixed_maybe_async(
552 store: &mut StoreOpaque,
553 allocator: &ArrayRefPre,
554 elems: &[Val],
555 ) -> Result<Rooted<ArrayRef>> {
556 unsafe {
557 store.retry_after_gc_maybe_async((), |store, ()| {
558 Self::new_from_iter(store, allocator, elems.iter())
559 })
560 }
561 }
562
563 #[inline]
564 pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
565 self.inner.comes_from_same_store(store)
566 }
567
568 /// Get this `arrayref`'s type.
569 ///
570 /// # Errors
571 ///
572 /// Return an error if this reference has been unrooted.
573 ///
574 /// # Panics
575 ///
576 /// Panics if this reference is associated with a different store.
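    ///
    /// # Example
    ///
    /// A short sketch of inspecting an allocated array's type; the setup
    /// boilerplate (hidden below) matches the other examples in this module:
    ///
    /// ```
    /// use wasmtime::*;
    ///
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// let array_ty = ArrayType::new(
    ///     store.engine(),
    ///     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// );
    /// let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// let mut scope = RootScope::new(&mut store);
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(0), 8)?;
    ///
    /// // The reported type has the mutable element field we declared above.
    /// let ty = array.ty(&scope)?;
    /// assert!(matches!(ty.field_type().mutability(), Mutability::Var));
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```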
577 pub fn ty(&self, store: impl AsContext) -> Result<ArrayType> {
578 self._ty(store.as_context().0)
579 }
580
581 pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<ArrayType> {
582 assert!(self.comes_from_same_store(store));
583 let index = self.type_index(store)?;
584 Ok(ArrayType::from_shared_type_index(store.engine(), index))
585 }
586
587 /// Does this `arrayref` match the given type?
588 ///
589 /// That is, is this array's type a subtype of the given type?
590 ///
591 /// # Errors
592 ///
593 /// Return an error if this reference has been unrooted.
594 ///
595 /// # Panics
596 ///
597 /// Panics if this reference is associated with a different store or if the
598 /// type is not associated with the store's engine.
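    ///
    /// # Example
    ///
    /// A brief sketch of a host-side subtype check; the hidden setup is the
    /// same GC-enabled store used in the other examples:
    ///
    /// ```
    /// use wasmtime::*;
    ///
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// let i32_array_ty = ArrayType::new(
    ///     store.engine(),
    ///     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// );
    /// let f64_array_ty = ArrayType::new(
    ///     store.engine(),
    ///     FieldType::new(Mutability::Var, ValType::F64.into()),
    /// );
    /// let allocator = ArrayRefPre::new(&mut store, i32_array_ty.clone());
    /// let mut scope = RootScope::new(&mut store);
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(1), 2)?;
    ///
    /// // The array matches its own type, but not an unrelated array type.
    /// assert!(array.matches_ty(&scope, &i32_array_ty)?);
    /// assert!(!array.matches_ty(&scope, &f64_array_ty)?);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```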
599 pub fn matches_ty(&self, store: impl AsContext, ty: &ArrayType) -> Result<bool> {
600 self._matches_ty(store.as_context().0, ty)
601 }
602
603 pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &ArrayType) -> Result<bool> {
604 assert!(self.comes_from_same_store(store));
605 Ok(self._ty(store)?.matches(ty))
606 }
607
608 pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &ArrayType) -> Result<()> {
609 if !self.comes_from_same_store(store) {
610 bail!("function used with wrong store");
611 }
612 if self._matches_ty(store, ty)? {
613 Ok(())
614 } else {
615 let actual_ty = self._ty(store)?;
616 bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
617 }
618 }
619
620 /// Get the length of this array.
621 ///
622 /// # Errors
623 ///
624 /// Return an error if this reference has been unrooted.
625 ///
626 /// # Panics
627 ///
628 /// Panics if this reference is associated with a different store.
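    ///
    /// # Example
    ///
    /// A minimal sketch, reusing the hidden GC-enabled store setup from the
    /// other examples:
    ///
    /// ```
    /// use wasmtime::*;
    ///
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// let mut scope = RootScope::new(&mut store);
    ///
    /// // The length is fixed when the array is allocated.
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(0), 10)?;
    /// assert_eq!(array.len(&scope)?, 10);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```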
629 pub fn len(&self, store: impl AsContext) -> Result<u32> {
630 self._len(store.as_context().0)
631 }
632
633 pub(crate) fn _len(&self, store: &StoreOpaque) -> Result<u32> {
634 assert!(self.comes_from_same_store(store));
635 let gc_ref = self.inner.try_gc_ref(store)?;
636 debug_assert!({
637 let header = store.gc_store()?.header(gc_ref);
638 header.kind().matches(VMGcKind::ArrayRef)
639 });
640 let arrayref = gc_ref.as_arrayref_unchecked();
641 Ok(arrayref.len(store))
642 }
643
644 /// Get the values of this array's elements.
645 ///
646 /// Note that `i8` and `i16` element values are zero-extended into
647 /// `Val::I32(_)`s.
648 ///
649 /// # Errors
650 ///
651 /// Return an error if this reference has been unrooted.
652 ///
653 /// # Panics
654 ///
655 /// Panics if this reference is associated with a different store.
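    ///
    /// # Example
    ///
    /// A small sketch of iterating over every element; the hidden setup is the
    /// usual GC-enabled store and `i32` array type from the other examples:
    ///
    /// ```
    /// use wasmtime::*;
    ///
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// let mut scope = RootScope::new(&mut store);
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(5), 4)?;
    ///
    /// // Every element was initialized to `5`.
    /// for elem in array.elems(&mut scope)? {
    ///     assert_eq!(elem.unwrap_i32(), 5);
    /// }
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```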
656 pub fn elems<'a, T: 'a>(
657 &'a self,
658 store: impl Into<StoreContextMut<'a, T>>,
659 ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
660 self._elems(store.into().0)
661 }
662
663 pub(crate) fn _elems<'a>(
664 &'a self,
665 store: &'a mut StoreOpaque,
666 ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
667 assert!(self.comes_from_same_store(store));
668 let store = AutoAssertNoGc::new(store);
669
670 let gc_ref = self.inner.try_gc_ref(&store)?;
671 let header = store.gc_store()?.header(gc_ref);
672 debug_assert!(header.kind().matches(VMGcKind::ArrayRef));
673
674 let len = self._len(&store)?;
675
676 return Ok(Elems {
677 arrayref: self,
678 store,
679 index: 0,
680 len,
681 });
682
683 struct Elems<'a, 'b> {
684 arrayref: &'a ArrayRef,
685 store: AutoAssertNoGc<'b>,
686 index: u32,
687 len: u32,
688 }
689
690 impl Iterator for Elems<'_, '_> {
691 type Item = Val;
692
693 #[inline]
694 fn next(&mut self) -> Option<Self::Item> {
695 let i = self.index;
696 debug_assert!(i <= self.len);
697 if i >= self.len {
698 return None;
699 }
700 self.index += 1;
701 Some(self.arrayref._get(&mut self.store, i).unwrap())
702 }
703
704 #[inline]
705 fn size_hint(&self) -> (usize, Option<usize>) {
706 let len = self.len - self.index;
707 let len = usize::try_from(len).unwrap();
708 (len, Some(len))
709 }
710 }
711
712 impl ExactSizeIterator for Elems<'_, '_> {
713 #[inline]
714 fn len(&self) -> usize {
715 let len = self.len - self.index;
716 usize::try_from(len).unwrap()
717 }
718 }
719 }
720
721 fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
722 assert!(self.comes_from_same_store(&store));
723 let gc_ref = self.inner.try_gc_ref(store)?;
724 Ok(store.gc_store()?.header(gc_ref))
725 }
726
727 fn arrayref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMArrayRef> {
728 assert!(self.comes_from_same_store(&store));
729 let gc_ref = self.inner.try_gc_ref(store)?;
730 debug_assert!(self.header(store)?.kind().matches(VMGcKind::ArrayRef));
731 Ok(gc_ref.as_arrayref_unchecked())
732 }
733
734 pub(crate) fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcArrayLayout> {
735 assert!(self.comes_from_same_store(&store));
736 let type_index = self.type_index(store)?;
737 let layout = store
738 .engine()
739 .signatures()
740 .layout(type_index)
741 .expect("array types should have GC layouts");
742 match layout {
743 GcLayout::Array(a) => Ok(a),
744 GcLayout::Struct(_) => unreachable!(),
745 }
746 }
747
748 fn field_ty(&self, store: &StoreOpaque) -> Result<FieldType> {
749 let ty = self._ty(store)?;
750 Ok(ty.field_type())
751 }
752
753 /// Get this array's `index`th element.
754 ///
    /// Note that `i8` and `i16` element values are zero-extended into
756 /// `Val::I32(_)`s.
757 ///
758 /// # Errors
759 ///
760 /// Returns an `Err(_)` if the index is out of bounds or this reference has
761 /// been unrooted.
762 ///
763 /// # Panics
764 ///
765 /// Panics if this reference is associated with a different store.
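    ///
    /// # Example
    ///
    /// A short sketch of reading packed elements. It uses the packed
    /// `StorageType::I8` storage type, which does not appear elsewhere in this
    /// module, so treat the details as illustrative; the hidden setup is the
    /// usual GC-enabled store:
    ///
    /// ```
    /// use wasmtime::*;
    ///
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// // An array of mutable, packed `i8` elements.
    /// let array_ty = ArrayType::new(
    ///     store.engine(),
    ///     FieldType::new(Mutability::Var, StorageType::I8),
    /// );
    /// let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// let mut scope = RootScope::new(&mut store);
    ///
    /// // `-1` is truncated to the byte `0xff` when stored...
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(-1), 2)?;
    /// // ...and zero-extended back to `255` when read.
    /// assert_eq!(array.get(&mut scope, 0)?.unwrap_i32(), 255);
    ///
    /// // Out-of-bounds indices are reported as errors.
    /// assert!(array.get(&mut scope, 2).is_err());
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```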
766 pub fn get(&self, mut store: impl AsContextMut, index: u32) -> Result<Val> {
767 let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
768 self._get(&mut store, index)
769 }
770
771 pub(crate) fn _get(&self, store: &mut AutoAssertNoGc<'_>, index: u32) -> Result<Val> {
772 assert!(
773 self.comes_from_same_store(store),
774 "attempted to use an array with the wrong store",
775 );
776 let arrayref = self.arrayref(store)?.unchecked_copy();
777 let field_ty = self.field_ty(store)?;
778 let layout = self.layout(store)?;
779 let len = arrayref.len(store);
780 ensure!(
781 index < len,
782 "index out of bounds: the length is {len} but the index is {index}"
783 );
784 Ok(arrayref.read_elem(store, &layout, field_ty.element_type(), index))
785 }
786
787 /// Set this array's `index`th element.
788 ///
789 /// # Errors
790 ///
791 /// Returns an error in the following scenarios:
792 ///
793 /// * When given a value of the wrong type, such as trying to write an `f32`
794 /// value into an array of `i64` elements.
795 ///
796 /// * When the array elements are not mutable.
797 ///
798 /// * When `index` is not within the range `0..self.len(ctx)`.
799 ///
800 /// * When `value` is a GC reference that has since been unrooted.
801 ///
802 /// # Panics
803 ///
804 /// Panics if either this reference or the given `value` is associated with
805 /// a different store.
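    ///
    /// # Example
    ///
    /// A brief sketch of the mutability check: writing to an array whose
    /// elements were declared `Mutability::Const` fails with an error. The
    /// hidden setup is the usual GC-enabled store:
    ///
    /// ```
    /// use wasmtime::*;
    ///
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// // An array type with *constant* (immutable) elements.
    /// let array_ty = ArrayType::new(
    ///     store.engine(),
    ///     FieldType::new(Mutability::Const, ValType::I32.into()),
    /// );
    /// let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// let mut scope = RootScope::new(&mut store);
    ///
    /// // Allocation still initializes the elements...
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(7), 3)?;
    /// assert_eq!(array.get(&mut scope, 0)?.unwrap_i32(), 7);
    ///
    /// // ...but they cannot be overwritten afterwards.
    /// assert!(array.set(&mut scope, 0, Val::I32(8)).is_err());
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```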
806 pub fn set(&self, mut store: impl AsContextMut, index: u32, value: Val) -> Result<()> {
807 self._set(store.as_context_mut().0, index, value)
808 }
809
810 pub(crate) fn _set(&self, store: &mut StoreOpaque, index: u32, value: Val) -> Result<()> {
811 assert!(
812 self.comes_from_same_store(store),
813 "attempted to use an array with the wrong store",
814 );
815 assert!(
816 value.comes_from_same_store(store),
817 "attempted to use a value with the wrong store",
818 );
819
820 let mut store = AutoAssertNoGc::new(store);
821
822 let field_ty = self.field_ty(&store)?;
823 ensure!(
824 field_ty.mutability().is_var(),
825 "cannot set element {index}: array elements are not mutable"
826 );
827
828 value
829 .ensure_matches_ty(&store, &field_ty.element_type().unpack())
830 .with_context(|| format!("cannot set element {index}: type mismatch"))?;
831
832 let layout = self.layout(&store)?;
833 let arrayref = self.arrayref(&store)?.unchecked_copy();
834
835 let len = arrayref.len(&store);
836 ensure!(
837 index < len,
838 "index out of bounds: the length is {len} but the index is {index}"
839 );
840
841 arrayref.write_elem(&mut store, &layout, field_ty.element_type(), index, value)
842 }
843
844 pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
845 let gc_ref = self.inner.try_gc_ref(store)?;
846 let header = store.gc_store()?.header(gc_ref);
847 debug_assert!(header.kind().matches(VMGcKind::ArrayRef));
848 Ok(header.ty().expect("arrayrefs should have concrete types"))
849 }
850
851 /// Create a new `Rooted<ArrayRef>` from the given GC reference.
852 ///
853 /// `gc_ref` should point to a valid `arrayref` and should belong to the
854 /// store's GC heap. Failure to uphold these invariants is memory safe but
855 /// will lead to general incorrectness such as panics or wrong results.
856 pub(crate) fn from_cloned_gc_ref(
857 store: &mut AutoAssertNoGc<'_>,
858 gc_ref: VMGcRef,
859 ) -> Rooted<Self> {
860 debug_assert!(gc_ref.is_arrayref(&*store.unwrap_gc_store().gc_heap));
861 Rooted::new(store, gc_ref)
862 }
863}
864
865unsafe impl WasmTy for Rooted<ArrayRef> {
866 #[inline]
867 fn valtype() -> ValType {
868 ValType::Ref(RefType::new(false, HeapType::Array))
869 }
870
871 #[inline]
872 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
873 self.comes_from_same_store(store)
874 }
875
876 #[inline]
877 fn dynamic_concrete_type_check(
878 &self,
879 store: &StoreOpaque,
880 _nullable: bool,
881 ty: &HeapType,
882 ) -> Result<()> {
883 match ty {
884 HeapType::Any | HeapType::Eq | HeapType::Array => Ok(()),
885 HeapType::ConcreteArray(ty) => self.ensure_matches_ty(store, ty),
886
887 HeapType::Extern
888 | HeapType::NoExtern
889 | HeapType::Func
890 | HeapType::ConcreteFunc(_)
891 | HeapType::NoFunc
892 | HeapType::I31
893 | HeapType::Struct
894 | HeapType::ConcreteStruct(_)
895 | HeapType::None => bail!(
896 "type mismatch: expected `(ref {ty})`, got `(ref {})`",
897 self._ty(store)?,
898 ),
899 }
900 }
901
902 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
903 self.wasm_ty_store(store, ptr, ValRaw::anyref)
904 }
905
906 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
907 Self::wasm_ty_load(store, ptr.get_anyref(), ArrayRef::from_cloned_gc_ref)
908 }
909}
910
911unsafe impl WasmTy for Option<Rooted<ArrayRef>> {
912 #[inline]
913 fn valtype() -> ValType {
914 ValType::ARRAYREF
915 }
916
917 #[inline]
918 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
919 self.map_or(true, |x| x.comes_from_same_store(store))
920 }
921
922 #[inline]
923 fn dynamic_concrete_type_check(
924 &self,
925 store: &StoreOpaque,
926 nullable: bool,
927 ty: &HeapType,
928 ) -> Result<()> {
929 match self {
930 Some(s) => Rooted::<ArrayRef>::dynamic_concrete_type_check(s, store, nullable, ty),
931 None => {
932 ensure!(
933 nullable,
934 "expected a non-null reference, but found a null reference"
935 );
936 Ok(())
937 }
938 }
939 }
940
941 #[inline]
942 fn is_vmgcref_and_points_to_object(&self) -> bool {
943 self.is_some()
944 }
945
946 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
947 <Rooted<ArrayRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
948 }
949
950 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
951 <Rooted<ArrayRef>>::wasm_ty_option_load(
952 store,
953 ptr.get_anyref(),
954 ArrayRef::from_cloned_gc_ref,
955 )
956 }
957}
958
959unsafe impl WasmTy for ManuallyRooted<ArrayRef> {
960 #[inline]
961 fn valtype() -> ValType {
962 ValType::Ref(RefType::new(false, HeapType::Array))
963 }
964
965 #[inline]
966 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
967 self.comes_from_same_store(store)
968 }
969
970 #[inline]
971 fn dynamic_concrete_type_check(
972 &self,
973 store: &StoreOpaque,
974 _: bool,
975 ty: &HeapType,
976 ) -> Result<()> {
977 match ty {
978 HeapType::Any | HeapType::Eq | HeapType::Array => Ok(()),
979 HeapType::ConcreteArray(ty) => self.ensure_matches_ty(store, ty),
980
981 HeapType::Extern
982 | HeapType::NoExtern
983 | HeapType::Func
984 | HeapType::ConcreteFunc(_)
985 | HeapType::NoFunc
986 | HeapType::I31
987 | HeapType::Struct
988 | HeapType::ConcreteStruct(_)
989 | HeapType::None => bail!(
990 "type mismatch: expected `(ref {ty})`, got `(ref {})`",
991 self._ty(store)?,
992 ),
993 }
994 }
995
996 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
997 self.wasm_ty_store(store, ptr, ValRaw::anyref)
998 }
999
1000 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
1001 Self::wasm_ty_load(store, ptr.get_anyref(), ArrayRef::from_cloned_gc_ref)
1002 }
1003}
1004
1005unsafe impl WasmTy for Option<ManuallyRooted<ArrayRef>> {
1006 #[inline]
1007 fn valtype() -> ValType {
1008 ValType::ARRAYREF
1009 }
1010
1011 #[inline]
1012 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
1013 self.as_ref()
1014 .map_or(true, |x| x.comes_from_same_store(store))
1015 }
1016
1017 #[inline]
1018 fn dynamic_concrete_type_check(
1019 &self,
1020 store: &StoreOpaque,
1021 nullable: bool,
1022 ty: &HeapType,
1023 ) -> Result<()> {
1024 match self {
1025 Some(s) => {
1026 ManuallyRooted::<ArrayRef>::dynamic_concrete_type_check(s, store, nullable, ty)
1027 }
1028 None => {
1029 ensure!(
1030 nullable,
1031 "expected a non-null reference, but found a null reference"
1032 );
1033 Ok(())
1034 }
1035 }
1036 }
1037
1038 #[inline]
1039 fn is_vmgcref_and_points_to_object(&self) -> bool {
1040 self.is_some()
1041 }
1042
1043 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
1044 <ManuallyRooted<ArrayRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
1045 }
1046
1047 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
1048 <ManuallyRooted<ArrayRef>>::wasm_ty_option_load(
1049 store,
1050 ptr.get_anyref(),
1051 ArrayRef::from_cloned_gc_ref,
1052 )
1053 }
1054}