// wasmtime/runtime/gc/enabled/arrayref.rs
1//! Working with GC `array` objects.
2
3use crate::runtime::vm::VMGcRef;
4use crate::store::{Asyncness, StoreId, StoreResourceLimiter};
5#[cfg(feature = "async")]
6use crate::vm::VMStore;
7use crate::vm::{self, VMArrayRef, VMGcHeader};
8use crate::{AnyRef, FieldType};
9use crate::{
10 ArrayType, AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
11 OwnedRooted, RefType, Rooted, Val, ValRaw, ValType, WasmTy,
12 prelude::*,
13 store::{AutoAssertNoGc, StoreContextMut, StoreOpaque},
14};
15use core::mem::{self, MaybeUninit};
16use wasmtime_environ::{GcArrayLayout, GcLayout, VMGcKind, VMSharedTypeIndex};
17
/// An allocator for a particular Wasm GC array type.
///
/// Every `ArrayRefPre` is associated with a particular [`Store`][crate::Store]
/// and a particular [`ArrayType`][crate::ArrayType].
///
/// Reusing an allocator across many allocations amortizes some per-type runtime
/// overheads inside Wasmtime. An `ArrayRefPre` is to `ArrayRef`s as an
/// `InstancePre` is to `Instance`s.
///
/// # Example
///
/// ```
/// use wasmtime::*;
///
/// # fn foo() -> Result<()> {
/// let mut config = Config::new();
/// config.wasm_function_references(true);
/// config.wasm_gc(true);
///
/// let engine = Engine::new(&config)?;
/// let mut store = Store::new(&engine, ());
///
/// // Define an array type.
/// let array_ty = ArrayType::new(
///     store.engine(),
///     FieldType::new(Mutability::Var, ValType::I32.into()),
/// );
///
/// // Create an allocator for the array type.
/// let allocator = ArrayRefPre::new(&mut store, array_ty);
///
/// {
///     let mut scope = RootScope::new(&mut store);
///
///     // Allocate a bunch of instances of our array type using the same
///     // allocator! This is faster than creating a new allocator for each
///     // instance we want to allocate.
///     for i in 0..10 {
///         let len = 42;
///         let elem = Val::I32(36);
///         ArrayRef::new(&mut scope, &allocator, &elem, len)?;
///     }
/// }
/// # Ok(())
/// # }
/// # let _ = foo();
/// ```
pub struct ArrayRefPre {
    /// The store this allocator was created for. Allocation methods assert
    /// that the store used at allocation time has this same ID.
    store_id: StoreId,
    /// The array type that this allocator creates instances of.
    ty: ArrayType,
}
69
70impl ArrayRefPre {
71 /// Create a new `ArrayRefPre` that is associated with the given store
72 /// and type.
73 pub fn new(mut store: impl AsContextMut, ty: ArrayType) -> Self {
74 Self::_new(store.as_context_mut().0, ty)
75 }
76
77 pub(crate) fn _new(store: &mut StoreOpaque, ty: ArrayType) -> Self {
78 store.insert_gc_host_alloc_type(ty.registered_type().clone());
79 let store_id = store.id();
80 ArrayRefPre { store_id, ty }
81 }
82
83 pub(crate) fn layout(&self) -> &GcArrayLayout {
84 self.ty
85 .registered_type()
86 .layout()
87 .expect("array types have a layout")
88 .unwrap_array()
89 }
90
91 pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
92 self.ty.registered_type().index()
93 }
94}
95
/// A reference to a GC-managed `array` instance.
///
/// WebAssembly `array`s are a sequence of elements of some homogeneous
/// type. The elements length is determined at allocation time — two instances
/// of the same array type may have different lengths — but, once allocated, an
/// array's length can never be resized. An array's elements are mutable or
/// constant, depending on the array's type. This determines whether any array
/// element can be assigned a new value or not. Each element is either an
/// unpacked [`Val`][crate::Val] or a packed 8-/16-bit integer. Array elements
/// are dynamically accessed via indexing; out-of-bounds accesses result in
/// traps.
///
/// Like all WebAssembly references, these are opaque and unforgeable to Wasm:
/// they cannot be faked and Wasm cannot, for example, cast the integer
/// `0x12345678` into a reference, pretend it is a valid `arrayref`, and trick
/// the host into dereferencing it and segfaulting or worse.
///
/// Note that you can also use `Rooted<ArrayRef>` and `OwnedRooted<ArrayRef>`
/// as a type parameter with [`Func::typed`][crate::Func::typed]- and
/// [`Func::wrap`][crate::Func::wrap]-style APIs.
///
/// # Example
///
/// ```
/// use wasmtime::*;
///
/// # fn foo() -> Result<()> {
/// let mut config = Config::new();
/// config.wasm_function_references(true);
/// config.wasm_gc(true);
///
/// let engine = Engine::new(&config)?;
/// let mut store = Store::new(&engine, ());
///
/// // Define the type for an array of `i32`s.
/// let array_ty = ArrayType::new(
///     store.engine(),
///     FieldType::new(Mutability::Var, ValType::I32.into()),
/// );
///
/// // Create an allocator for the array type.
/// let allocator = ArrayRefPre::new(&mut store, array_ty);
///
/// {
///     let mut scope = RootScope::new(&mut store);
///
///     // Allocate an instance of the array type.
///     let len = 36;
///     let elem = Val::I32(42);
///     let my_array = match ArrayRef::new(&mut scope, &allocator, &elem, len) {
///         Ok(s) => s,
///         Err(e) => match e.downcast::<GcHeapOutOfMemory<()>>() {
///             // If the heap is out of memory, then do a GC to free up some
///             // space and try again.
///             Ok(oom) => {
///                 // Do a GC! Note: in an async context, you'd want to do
///                 // `scope.as_context_mut().gc_async().await`.
///                 scope.as_context_mut().gc(Some(&oom))?;
///
///                 // Try again. If the GC heap is still out of memory, then we
///                 // weren't able to free up resources for this allocation, so
///                 // propagate the error.
///                 ArrayRef::new(&mut scope, &allocator, &elem, len)?
///             }
///             // Propagate any other kind of error.
///             Err(e) => return Err(e),
///         },
///     };
///
///     // That instance's elements should have the initial value.
///     for i in 0..len {
///         let val = my_array.get(&mut scope, i)?.unwrap_i32();
///         assert_eq!(val, 42);
///     }
///
///     // We can set an element to a new value because the type was defined with
///     // mutable elements (as opposed to const).
///     my_array.set(&mut scope, 3, Val::I32(1234))?;
///     let new_val = my_array.get(&mut scope, 3)?.unwrap_i32();
///     assert_eq!(new_val, 1234);
/// }
/// # Ok(())
/// # }
/// # foo().unwrap();
/// ```
#[derive(Debug)]
#[repr(transparent)]
pub struct ArrayRef {
    /// The underlying rooted GC reference.
    ///
    /// NB: `ArrayRef` must remain a `#[repr(transparent)]` wrapper around
    /// exactly this one field — `GcRefImpl::transmute_ref` relies on that
    /// layout guarantee.
    pub(super) inner: GcRootIndex,
}
186
187unsafe impl GcRefImpl for ArrayRef {
188 fn transmute_ref(index: &GcRootIndex) -> &Self {
189 // Safety: `ArrayRef` is a newtype of a `GcRootIndex`.
190 let me: &Self = unsafe { mem::transmute(index) };
191
192 // Assert we really are just a newtype of a `GcRootIndex`.
193 assert!(matches!(
194 me,
195 Self {
196 inner: GcRootIndex { .. },
197 }
198 ));
199
200 me
201 }
202}
203
impl Rooted<ArrayRef> {
    /// Upcast this `arrayref` into an `anyref`.
    ///
    /// This is an infallible upcast: no dynamic check is performed.
    #[inline]
    pub fn to_anyref(self) -> Rooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcast this `arrayref` into an `eqref`.
    ///
    /// This is an infallible upcast: no dynamic check is performed.
    #[inline]
    pub fn to_eqref(self) -> Rooted<EqRef> {
        self.unchecked_cast()
    }
}
217
impl OwnedRooted<ArrayRef> {
    /// Upcast this `arrayref` into an `anyref`.
    ///
    /// This is an infallible upcast: no dynamic check is performed.
    #[inline]
    pub fn to_anyref(self) -> OwnedRooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcast this `arrayref` into an `eqref`.
    ///
    /// This is an infallible upcast: no dynamic check is performed.
    #[inline]
    pub fn to_eqref(self) -> OwnedRooted<EqRef> {
        self.unchecked_cast()
    }
}
231
/// An iterator for elements in `ArrayRef::new[_async]`: yields the same
/// borrowed element exactly `n` times.
///
/// NB: We can't use `iter::repeat(elem).take(len)` because that doesn't
/// implement `ExactSizeIterator`.
///
/// Generic over the element type (rather than hard-coding `&Val`) so the
/// helper is independently usable and testable; the call sites infer `T`.
struct RepeatN<'a, T>(&'a T, u32);

// Manual impl so we don't require `T: Clone` — we only clone the borrow.
impl<T> Clone for RepeatN<'_, T> {
    fn clone(&self) -> Self {
        RepeatN(self.0, self.1)
    }
}

impl<'a, T> Iterator for RepeatN<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<Self::Item> {
        if self.1 == 0 {
            None
        } else {
            self.1 -= 1;
            Some(self.0)
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: we know precisely how many elements remain.
        let len = self.len();
        (len, Some(len))
    }
}

impl<T> ExactSizeIterator for RepeatN<'_, T> {
    fn len(&self) -> usize {
        usize::try_from(self.1).unwrap()
    }
}
262
263impl ArrayRef {
264 /// Allocate a new `array` of the given length, with every element
265 /// initialized to `elem`.
266 ///
267 /// For example, `ArrayRef::new(ctx, pre, &Val::I64(9), 3)` allocates the
268 /// array `[9, 9, 9]`.
269 ///
270 /// This is similar to the `array.new` instruction.
271 ///
272 /// # Automatic Garbage Collection
273 ///
274 /// If the GC heap is at capacity, and there isn't room for allocating this
275 /// new array, then this method will automatically trigger a synchronous
276 /// collection in an attempt to free up space in the GC heap.
277 ///
278 /// # Errors
279 ///
280 /// If the given `elem` value's type does not match the `allocator`'s array
281 /// type's element type, an error is returned.
282 ///
283 /// If the allocation cannot be satisfied because the GC heap is currently
284 /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
285 /// error is returned. The allocation might succeed on a second attempt if
286 /// you drop some rooted GC references and try again.
287 ///
288 /// If `store` is configured with a
289 /// [`ResourceLimiterAsync`](crate::ResourceLimiterAsync) then an error will
290 /// be returned because [`ArrayRef::new_async`] should be used instead.
291 ///
292 /// # Panics
293 ///
294 /// Panics if either the allocator or the `elem` value is not associated
295 /// with the given store.
296 pub fn new(
297 mut store: impl AsContextMut,
298 allocator: &ArrayRefPre,
299 elem: &Val,
300 len: u32,
301 ) -> Result<Rooted<ArrayRef>> {
302 let (mut limiter, store) = store
303 .as_context_mut()
304 .0
305 .validate_sync_resource_limiter_and_store_opaque()?;
306 vm::assert_ready(Self::_new_async(
307 store,
308 limiter.as_mut(),
309 allocator,
310 elem,
311 len,
312 Asyncness::No,
313 ))
314 }
315
316 /// Asynchronously allocate a new `array` of the given length, with every
317 /// element initialized to `elem`.
318 ///
319 /// For example, `ArrayRef::new(ctx, pre, &Val::I64(9), 3)` allocates the
320 /// array `[9, 9, 9]`.
321 ///
322 /// This is similar to the `array.new` instruction.
323 ///
324 /// # Automatic Garbage Collection
325 ///
326 /// If the GC heap is at capacity, and there isn't room for allocating this
327 /// new array, then this method will automatically trigger a asynchronous
328 /// collection in an attempt to free up space in the GC heap.
329 ///
330 /// # Errors
331 ///
332 /// If the given `elem` value's type does not match the `allocator`'s array
333 /// type's element type, an error is returned.
334 ///
335 /// If the allocation cannot be satisfied because the GC heap is currently
336 /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
337 /// error is returned. The allocation might succeed on a second attempt if
338 /// you drop some rooted GC references and try again.
339 ///
340 /// # Panics
341 ///
342 /// Panics if your engine is not configured for async; use
343 /// [`ArrayRef::new_async`][crate::ArrayRef::new_async] to perform
344 /// synchronous allocation instead.
345 ///
346 /// Panics if either the allocator or the `elem` value is not associated
347 /// with the given store.
348 #[cfg(feature = "async")]
349 pub async fn new_async(
350 mut store: impl AsContextMut,
351 allocator: &ArrayRefPre,
352 elem: &Val,
353 len: u32,
354 ) -> Result<Rooted<ArrayRef>> {
355 let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
356 Self::_new_async(
357 store,
358 limiter.as_mut(),
359 allocator,
360 elem,
361 len,
362 Asyncness::Yes,
363 )
364 .await
365 }
366
367 pub(crate) async fn _new_async(
368 store: &mut StoreOpaque,
369 limiter: Option<&mut StoreResourceLimiter<'_>>,
370 allocator: &ArrayRefPre,
371 elem: &Val,
372 len: u32,
373 asyncness: Asyncness,
374 ) -> Result<Rooted<ArrayRef>> {
375 store
376 .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
377 Self::new_from_iter(store, allocator, RepeatN(elem, len))
378 })
379 .await
380 }
381
382 /// Allocate a new array of the given elements.
383 ///
384 /// Does not attempt a GC on OOM; leaves that to callers.
385 fn new_from_iter<'a>(
386 store: &mut StoreOpaque,
387 allocator: &ArrayRefPre,
388 elems: impl Clone + ExactSizeIterator<Item = &'a Val>,
389 ) -> Result<Rooted<ArrayRef>> {
390 assert_eq!(
391 store.id(),
392 allocator.store_id,
393 "attempted to use a `ArrayRefPre` with the wrong store"
394 );
395
396 let len = u32::try_from(elems.len()).unwrap();
397
398 // Allocate the array.
399 let arrayref = store
400 .require_gc_store_mut()?
401 .alloc_uninit_array(allocator.type_index(), len, allocator.layout())
402 .context("unrecoverable error when allocating new `arrayref`")?
403 .map_err(|n| GcHeapOutOfMemory::new((), n))?;
404
405 // Type check the elements against the element type.
406 for elem in elems.clone() {
407 elem.ensure_matches_ty(store, allocator.ty.element_type().unpack())
408 .context("element type mismatch")?;
409 }
410
411 // From this point on, if we get any errors, then the array is not
412 // fully initialized, so we need to eagerly deallocate it before the
413 // next GC where the collector might try to interpret one of the
414 // uninitialized fields as a GC reference.
415 let mut store = AutoAssertNoGc::new(store);
416 match (|| {
417 let elem_ty = allocator.ty.element_type();
418 for (i, elem) in elems.enumerate() {
419 let i = u32::try_from(i).unwrap();
420 debug_assert!(i < len);
421 arrayref.initialize_elem(&mut store, allocator.layout(), &elem_ty, i, *elem)?;
422 }
423 Ok(())
424 })() {
425 Ok(()) => Ok(Rooted::new(&mut store, arrayref.into())),
426 Err(e) => {
427 store.require_gc_store_mut()?.dealloc_uninit_array(arrayref);
428 Err(e)
429 }
430 }
431 }
432
433 /// Synchronously allocate a new `array` containing the given elements.
434 ///
435 /// For example, `ArrayRef::new_fixed(ctx, pre, &[Val::I64(4), Val::I64(5),
436 /// Val::I64(6)])` allocates the array `[4, 5, 6]`.
437 ///
438 /// This is similar to the `array.new_fixed` instruction.
439 ///
440 /// # Automatic Garbage Collection
441 ///
442 /// If the GC heap is at capacity, and there isn't room for allocating this
443 /// new array, then this method will automatically trigger a synchronous
444 /// collection in an attempt to free up space in the GC heap.
445 ///
446 /// # Errors
447 ///
448 /// If any of the `elems` values' type does not match the `allocator`'s
449 /// array type's element type, an error is returned.
450 ///
451 /// If the allocation cannot be satisfied because the GC heap is currently
452 /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
453 /// error is returned. The allocation might succeed on a second attempt if
454 /// you drop some rooted GC references and try again.
455 ///
456 /// If `store` is configured with a
457 /// [`ResourceLimiterAsync`](crate::ResourceLimiterAsync) then an error
458 /// will be returned because [`ArrayRef::new_fixed_async`] should be used
459 /// instead.
460 ///
461 /// # Panics
462 ///
463 /// Panics if the allocator or any of the `elems` values are not associated
464 /// with the given store.
465 pub fn new_fixed(
466 mut store: impl AsContextMut,
467 allocator: &ArrayRefPre,
468 elems: &[Val],
469 ) -> Result<Rooted<ArrayRef>> {
470 let (mut limiter, store) = store
471 .as_context_mut()
472 .0
473 .validate_sync_resource_limiter_and_store_opaque()?;
474 vm::assert_ready(Self::_new_fixed_async(
475 store,
476 limiter.as_mut(),
477 allocator,
478 elems,
479 Asyncness::No,
480 ))
481 }
482
483 /// Asynchronously allocate a new `array` containing the given elements.
484 ///
485 /// For example, `ArrayRef::new_fixed_async(ctx, pre, &[Val::I64(4),
486 /// Val::I64(5), Val::I64(6)])` allocates the array `[4, 5, 6]`.
487 ///
488 /// This is similar to the `array.new_fixed` instruction.
489 ///
490 /// If your engine is not configured for async, use
491 /// [`ArrayRef::new_fixed`][crate::ArrayRef::new_fixed] to perform
492 /// synchronous allocation.
493 ///
494 /// # Automatic Garbage Collection
495 ///
496 /// If the GC heap is at capacity, and there isn't room for allocating this
497 /// new array, then this method will automatically trigger a synchronous
498 /// collection in an attempt to free up space in the GC heap.
499 ///
500 /// # Errors
501 ///
502 /// If any of the `elems` values' type does not match the `allocator`'s
503 /// array type's element type, an error is returned.
504 ///
505 /// If the allocation cannot be satisfied because the GC heap is currently
506 /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
507 /// error is returned. The allocation might succeed on a second attempt if
508 /// you drop some rooted GC references and try again.
509 ///
510 /// # Panics
511 ///
512 /// Panics if the `store` is not configured for async; use
513 /// [`ArrayRef::new_fixed`][crate::ArrayRef::new_fixed] to perform
514 /// synchronous allocation instead.
515 ///
516 /// Panics if the allocator or any of the `elems` values are not associated
517 /// with the given store.
518 #[cfg(feature = "async")]
519 pub async fn new_fixed_async(
520 mut store: impl AsContextMut,
521 allocator: &ArrayRefPre,
522 elems: &[Val],
523 ) -> Result<Rooted<ArrayRef>> {
524 let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
525 Self::_new_fixed_async(store, limiter.as_mut(), allocator, elems, Asyncness::Yes).await
526 }
527
528 pub(crate) async fn _new_fixed_async(
529 store: &mut StoreOpaque,
530 limiter: Option<&mut StoreResourceLimiter<'_>>,
531 allocator: &ArrayRefPre,
532 elems: &[Val],
533 asyncness: Asyncness,
534 ) -> Result<Rooted<ArrayRef>> {
535 store
536 .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
537 Self::new_from_iter(store, allocator, elems.iter())
538 })
539 .await
540 }
541
542 #[inline]
543 pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
544 self.inner.comes_from_same_store(store)
545 }
546
547 /// Get this `arrayref`'s type.
548 ///
549 /// # Errors
550 ///
551 /// Return an error if this reference has been unrooted.
552 ///
553 /// # Panics
554 ///
555 /// Panics if this reference is associated with a different store.
556 pub fn ty(&self, store: impl AsContext) -> Result<ArrayType> {
557 self._ty(store.as_context().0)
558 }
559
560 pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<ArrayType> {
561 assert!(self.comes_from_same_store(store));
562 let index = self.type_index(store)?;
563 Ok(ArrayType::from_shared_type_index(store.engine(), index))
564 }
565
566 /// Does this `arrayref` match the given type?
567 ///
568 /// That is, is this array's type a subtype of the given type?
569 ///
570 /// # Errors
571 ///
572 /// Return an error if this reference has been unrooted.
573 ///
574 /// # Panics
575 ///
576 /// Panics if this reference is associated with a different store or if the
577 /// type is not associated with the store's engine.
578 pub fn matches_ty(&self, store: impl AsContext, ty: &ArrayType) -> Result<bool> {
579 self._matches_ty(store.as_context().0, ty)
580 }
581
582 pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &ArrayType) -> Result<bool> {
583 assert!(self.comes_from_same_store(store));
584 Ok(self._ty(store)?.matches(ty))
585 }
586
587 pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &ArrayType) -> Result<()> {
588 if !self.comes_from_same_store(store) {
589 bail!("function used with wrong store");
590 }
591 if self._matches_ty(store, ty)? {
592 Ok(())
593 } else {
594 let actual_ty = self._ty(store)?;
595 bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
596 }
597 }
598
599 /// Get the length of this array.
600 ///
601 /// # Errors
602 ///
603 /// Return an error if this reference has been unrooted.
604 ///
605 /// # Panics
606 ///
607 /// Panics if this reference is associated with a different store.
608 pub fn len(&self, store: impl AsContext) -> Result<u32> {
609 self._len(store.as_context().0)
610 }
611
612 pub(crate) fn _len(&self, store: &StoreOpaque) -> Result<u32> {
613 assert!(self.comes_from_same_store(store));
614 let gc_ref = self.inner.try_gc_ref(store)?;
615 debug_assert!({
616 let header = store.require_gc_store()?.header(gc_ref);
617 header.kind().matches(VMGcKind::ArrayRef)
618 });
619 let arrayref = gc_ref.as_arrayref_unchecked();
620 Ok(arrayref.len(store))
621 }
622
623 /// Get the values of this array's elements.
624 ///
625 /// Note that `i8` and `i16` element values are zero-extended into
626 /// `Val::I32(_)`s.
627 ///
628 /// # Errors
629 ///
630 /// Return an error if this reference has been unrooted.
631 ///
632 /// # Panics
633 ///
634 /// Panics if this reference is associated with a different store.
635 pub fn elems<'a, T: 'static>(
636 &'a self,
637 store: impl Into<StoreContextMut<'a, T>>,
638 ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
639 self._elems(store.into().0)
640 }
641
642 pub(crate) fn _elems<'a>(
643 &'a self,
644 store: &'a mut StoreOpaque,
645 ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
646 assert!(self.comes_from_same_store(store));
647 let store = AutoAssertNoGc::new(store);
648
649 let gc_ref = self.inner.try_gc_ref(&store)?;
650 let header = store.require_gc_store()?.header(gc_ref);
651 debug_assert!(header.kind().matches(VMGcKind::ArrayRef));
652
653 let len = self._len(&store)?;
654
655 return Ok(Elems {
656 arrayref: self,
657 store,
658 index: 0,
659 len,
660 });
661
662 struct Elems<'a, 'b> {
663 arrayref: &'a ArrayRef,
664 store: AutoAssertNoGc<'b>,
665 index: u32,
666 len: u32,
667 }
668
669 impl Iterator for Elems<'_, '_> {
670 type Item = Val;
671
672 #[inline]
673 fn next(&mut self) -> Option<Self::Item> {
674 let i = self.index;
675 debug_assert!(i <= self.len);
676 if i >= self.len {
677 return None;
678 }
679 self.index += 1;
680 Some(self.arrayref._get(&mut self.store, i).unwrap())
681 }
682
683 #[inline]
684 fn size_hint(&self) -> (usize, Option<usize>) {
685 let len = self.len - self.index;
686 let len = usize::try_from(len).unwrap();
687 (len, Some(len))
688 }
689 }
690
691 impl ExactSizeIterator for Elems<'_, '_> {
692 #[inline]
693 fn len(&self) -> usize {
694 let len = self.len - self.index;
695 usize::try_from(len).unwrap()
696 }
697 }
698 }
699
700 fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
701 assert!(self.comes_from_same_store(&store));
702 let gc_ref = self.inner.try_gc_ref(store)?;
703 Ok(store.require_gc_store()?.header(gc_ref))
704 }
705
706 fn arrayref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMArrayRef> {
707 assert!(self.comes_from_same_store(&store));
708 let gc_ref = self.inner.try_gc_ref(store)?;
709 debug_assert!(self.header(store)?.kind().matches(VMGcKind::ArrayRef));
710 Ok(gc_ref.as_arrayref_unchecked())
711 }
712
713 pub(crate) fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcArrayLayout> {
714 assert!(self.comes_from_same_store(&store));
715 let type_index = self.type_index(store)?;
716 let layout = store
717 .engine()
718 .signatures()
719 .layout(type_index)
720 .expect("array types should have GC layouts");
721 match layout {
722 GcLayout::Array(a) => Ok(a),
723 GcLayout::Struct(_) => unreachable!(),
724 }
725 }
726
727 fn field_ty(&self, store: &StoreOpaque) -> Result<FieldType> {
728 let ty = self._ty(store)?;
729 Ok(ty.field_type())
730 }
731
732 /// Get this array's `index`th element.
733 ///
734 /// Note that `i8` and `i16` field values are zero-extended into
735 /// `Val::I32(_)`s.
736 ///
737 /// # Errors
738 ///
739 /// Returns an `Err(_)` if the index is out of bounds or this reference has
740 /// been unrooted.
741 ///
742 /// # Panics
743 ///
744 /// Panics if this reference is associated with a different store.
745 pub fn get(&self, mut store: impl AsContextMut, index: u32) -> Result<Val> {
746 let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
747 self._get(&mut store, index)
748 }
749
750 pub(crate) fn _get(&self, store: &mut AutoAssertNoGc<'_>, index: u32) -> Result<Val> {
751 assert!(
752 self.comes_from_same_store(store),
753 "attempted to use an array with the wrong store",
754 );
755 let arrayref = self.arrayref(store)?.unchecked_copy();
756 let field_ty = self.field_ty(store)?;
757 let layout = self.layout(store)?;
758 let len = arrayref.len(store);
759 ensure!(
760 index < len,
761 "index out of bounds: the length is {len} but the index is {index}"
762 );
763 Ok(arrayref.read_elem(store, &layout, field_ty.element_type(), index))
764 }
765
766 /// Set this array's `index`th element.
767 ///
768 /// # Errors
769 ///
770 /// Returns an error in the following scenarios:
771 ///
772 /// * When given a value of the wrong type, such as trying to write an `f32`
773 /// value into an array of `i64` elements.
774 ///
775 /// * When the array elements are not mutable.
776 ///
777 /// * When `index` is not within the range `0..self.len(ctx)`.
778 ///
779 /// * When `value` is a GC reference that has since been unrooted.
780 ///
781 /// # Panics
782 ///
783 /// Panics if either this reference or the given `value` is associated with
784 /// a different store.
785 pub fn set(&self, mut store: impl AsContextMut, index: u32, value: Val) -> Result<()> {
786 self._set(store.as_context_mut().0, index, value)
787 }
788
789 pub(crate) fn _set(&self, store: &mut StoreOpaque, index: u32, value: Val) -> Result<()> {
790 assert!(
791 self.comes_from_same_store(store),
792 "attempted to use an array with the wrong store",
793 );
794 assert!(
795 value.comes_from_same_store(store),
796 "attempted to use a value with the wrong store",
797 );
798
799 let mut store = AutoAssertNoGc::new(store);
800
801 let field_ty = self.field_ty(&store)?;
802 ensure!(
803 field_ty.mutability().is_var(),
804 "cannot set element {index}: array elements are not mutable"
805 );
806
807 value
808 .ensure_matches_ty(&store, &field_ty.element_type().unpack())
809 .with_context(|| format!("cannot set element {index}: type mismatch"))?;
810
811 let layout = self.layout(&store)?;
812 let arrayref = self.arrayref(&store)?.unchecked_copy();
813
814 let len = arrayref.len(&store);
815 ensure!(
816 index < len,
817 "index out of bounds: the length is {len} but the index is {index}"
818 );
819
820 arrayref.write_elem(&mut store, &layout, field_ty.element_type(), index, value)
821 }
822
823 pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
824 let gc_ref = self.inner.try_gc_ref(store)?;
825 let header = store.require_gc_store()?.header(gc_ref);
826 debug_assert!(header.kind().matches(VMGcKind::ArrayRef));
827 Ok(header.ty().expect("arrayrefs should have concrete types"))
828 }
829
830 /// Create a new `Rooted<ArrayRef>` from the given GC reference.
831 ///
832 /// `gc_ref` should point to a valid `arrayref` and should belong to the
833 /// store's GC heap. Failure to uphold these invariants is memory safe but
834 /// will lead to general incorrectness such as panics or wrong results.
835 pub(crate) fn from_cloned_gc_ref(
836 store: &mut AutoAssertNoGc<'_>,
837 gc_ref: VMGcRef,
838 ) -> Rooted<Self> {
839 debug_assert!(gc_ref.is_arrayref(&*store.unwrap_gc_store().gc_heap));
840 Rooted::new(store, gc_ref)
841 }
842}
843
unsafe impl WasmTy for Rooted<ArrayRef> {
    #[inline]
    fn valtype() -> ValType {
        // A `Rooted<ArrayRef>` is always non-null, so this is the non-nullable
        // `(ref array)` type.
        ValType::Ref(RefType::new(false, HeapType::Array))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // These heap types always accept an array value.
            HeapType::Any | HeapType::Eq | HeapType::Array => Ok(()),
            // A concrete array type requires a dynamic subtype check.
            HeapType::ConcreteArray(ty) => self.ensure_matches_ty(store, ty),

            // Every remaining heap type is disjoint from `array`; listed
            // exhaustively (no `_` arm) so that adding a new heap type forces
            // this check to be revisited.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Struct
            | HeapType::ConcreteStruct(_)
            | HeapType::Cont
            | HeapType::NoCont
            | HeapType::ConcreteCont(_)
            | HeapType::Exn
            | HeapType::NoExn
            | HeapType::ConcreteExn(_)
            | HeapType::None => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        // Arrays are passed to/from Wasm as raw `anyref` values.
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ArrayRef::from_cloned_gc_ref)
    }
}
895
unsafe impl WasmTy for Option<Rooted<ArrayRef>> {
    #[inline]
    fn valtype() -> ValType {
        // `Option` maps to nullability: this is the nullable `arrayref` type.
        ValType::ARRAYREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        // A null reference (`None`) is compatible with every store.
        self.map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            // Non-null values defer to the non-optional implementation.
            Some(s) => Rooted::<ArrayRef>::dynamic_concrete_type_check(s, store, nullable, ty),
            // Null only matches when the expected reference type is nullable.
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        // `None` represents null, which does not point at a GC object.
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <Rooted<ArrayRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <Rooted<ArrayRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            ArrayRef::from_cloned_gc_ref,
        )
    }
}
943
unsafe impl WasmTy for OwnedRooted<ArrayRef> {
    #[inline]
    fn valtype() -> ValType {
        // An `OwnedRooted<ArrayRef>` is always non-null, so this is the
        // non-nullable `(ref array)` type.
        ValType::Ref(RefType::new(false, HeapType::Array))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // These heap types always accept an array value.
            HeapType::Any | HeapType::Eq | HeapType::Array => Ok(()),
            // A concrete array type requires a dynamic subtype check.
            HeapType::ConcreteArray(ty) => self.ensure_matches_ty(store, ty),

            // Every remaining heap type is disjoint from `array`; listed
            // exhaustively (no `_` arm) so that adding a new heap type forces
            // this check to be revisited.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Struct
            | HeapType::ConcreteStruct(_)
            | HeapType::Cont
            | HeapType::NoCont
            | HeapType::ConcreteCont(_)
            | HeapType::Exn
            | HeapType::NoExn
            | HeapType::ConcreteExn(_)
            | HeapType::None => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        // Arrays are passed to/from Wasm as raw `anyref` values.
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ArrayRef::from_cloned_gc_ref)
    }
}
995
unsafe impl WasmTy for Option<OwnedRooted<ArrayRef>> {
    #[inline]
    fn valtype() -> ValType {
        // `Option` maps to nullability: this is the nullable `arrayref` type.
        ValType::ARRAYREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        // A null reference (`None`) is compatible with every store. Note:
        // unlike `Rooted`, `OwnedRooted` is not `Copy`, hence the `as_ref`.
        self.as_ref()
            .map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            // Non-null values defer to the non-optional implementation.
            Some(s) => OwnedRooted::<ArrayRef>::dynamic_concrete_type_check(s, store, nullable, ty),
            // Null only matches when the expected reference type is nullable.
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        // `None` represents null, which does not point at a GC object.
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <OwnedRooted<ArrayRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <OwnedRooted<ArrayRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            ArrayRef::from_cloned_gc_ref,
        )
    }
}