wasmtime/runtime/gc/enabled/arrayref.rs
1//! Working with GC `array` objects.
2
3use crate::runtime::vm::VMGcRef;
4use crate::store::{Asyncness, StoreId, StoreResourceLimiter};
5#[cfg(feature = "async")]
6use crate::vm::VMStore;
7use crate::vm::{self, VMArrayRef, VMGcHeader};
8use crate::{AnyRef, FieldType};
9use crate::{
10 ArrayType, AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
11 OwnedRooted, RefType, Rooted, Val, ValRaw, ValType, WasmTy,
12 prelude::*,
13 store::{AutoAssertNoGc, StoreContextMut, StoreOpaque},
14};
15use core::mem::{self, MaybeUninit};
16use wasmtime_environ::{GcArrayLayout, GcLayout, VMGcKind, VMSharedTypeIndex};
17
18/// An allocator for a particular Wasm GC array type.
19///
20/// Every `ArrayRefPre` is associated with a particular [`Store`][crate::Store]
21/// and a particular [`ArrayType`][crate::ArrayType].
22///
23/// Reusing an allocator across many allocations amortizes some per-type runtime
24/// overheads inside Wasmtime. An `ArrayRefPre` is to `ArrayRef`s as an
25/// `InstancePre` is to `Instance`s.
26///
27/// # Example
28///
29/// ```
30/// use wasmtime::*;
31///
32/// # fn foo() -> Result<()> {
33/// let mut config = Config::new();
34/// config.wasm_function_references(true);
35/// config.wasm_gc(true);
36///
37/// let engine = Engine::new(&config)?;
38/// let mut store = Store::new(&engine, ());
39///
40/// // Define an array type.
41/// let array_ty = ArrayType::new(
42/// store.engine(),
43/// FieldType::new(Mutability::Var, ValType::I32.into()),
44/// );
45///
46/// // Create an allocator for the array type.
47/// let allocator = ArrayRefPre::new(&mut store, array_ty);
48///
49/// {
50/// let mut scope = RootScope::new(&mut store);
51///
52/// // Allocate a bunch of instances of our array type using the same
53/// // allocator! This is faster than creating a new allocator for each
54/// // instance we want to allocate.
55/// for i in 0..10 {
56/// let len = 42;
57/// let elem = Val::I32(36);
58/// ArrayRef::new(&mut scope, &allocator, &elem, len)?;
59/// }
60/// }
61/// # Ok(())
62/// # }
63/// # let _ = foo();
64/// ```
65pub struct ArrayRefPre {
66 store_id: StoreId,
67 ty: ArrayType,
68}
69
70impl ArrayRefPre {
71 /// Create a new `ArrayRefPre` that is associated with the given store
72 /// and type.
73 pub fn new(mut store: impl AsContextMut, ty: ArrayType) -> Self {
74 Self::_new(store.as_context_mut().0, ty)
75 }
76
77 pub(crate) fn _new(store: &mut StoreOpaque, ty: ArrayType) -> Self {
78 store.insert_gc_host_alloc_type(ty.registered_type().clone());
79 let store_id = store.id();
80 ArrayRefPre { store_id, ty }
81 }
82
83 pub(crate) fn layout(&self) -> &GcArrayLayout {
84 self.ty
85 .registered_type()
86 .layout()
87 .expect("array types have a layout")
88 .unwrap_array()
89 }
90
91 pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
92 self.ty.registered_type().index()
93 }
94}
95
96/// A reference to a GC-managed `array` instance.
97///
/// WebAssembly `array`s are a sequence of elements of some homogeneous
/// type. An array's length is fixed at allocation time: two instances of the
/// same array type may have different lengths, but once allocated, an array
/// can never be resized. Whether elements can be assigned new values is
/// determined by the array's type, which declares its elements as either
/// mutable or constant. Each element is either an unpacked
/// [`Val`][crate::Val] or a packed 8-/16-bit integer. Array elements are
/// accessed dynamically by index; out-of-bounds accesses result in traps.
107///
108/// Like all WebAssembly references, these are opaque and unforgeable to Wasm:
109/// they cannot be faked and Wasm cannot, for example, cast the integer
110/// `0x12345678` into a reference, pretend it is a valid `arrayref`, and trick
111/// the host into dereferencing it and segfaulting or worse.
112///
113/// Note that you can also use `Rooted<ArrayRef>` and `OwnedRooted<ArrayRef>`
114/// as a type parameter with [`Func::typed`][crate::Func::typed]- and
115/// [`Func::wrap`][crate::Func::wrap]-style APIs.
116///
117/// # Example
118///
119/// ```
120/// use wasmtime::*;
121///
122/// # fn foo() -> Result<()> {
123/// let mut config = Config::new();
124/// config.wasm_function_references(true);
125/// config.wasm_gc(true);
126///
127/// let engine = Engine::new(&config)?;
128/// let mut store = Store::new(&engine, ());
129///
130/// // Define the type for an array of `i32`s.
131/// let array_ty = ArrayType::new(
132/// store.engine(),
133/// FieldType::new(Mutability::Var, ValType::I32.into()),
134/// );
135///
136/// // Create an allocator for the array type.
137/// let allocator = ArrayRefPre::new(&mut store, array_ty);
138///
139/// {
140/// let mut scope = RootScope::new(&mut store);
141///
142/// // Allocate an instance of the array type.
143/// let len = 36;
144/// let elem = Val::I32(42);
145/// let my_array = match ArrayRef::new(&mut scope, &allocator, &elem, len) {
146/// Ok(s) => s,
147/// Err(e) => match e.downcast::<GcHeapOutOfMemory<()>>() {
148/// // If the heap is out of memory, then do a GC to free up some
149/// // space and try again.
150/// Ok(oom) => {
151/// // Do a GC! Note: in an async context, you'd want to do
152/// // `scope.as_context_mut().gc_async().await`.
153/// scope.as_context_mut().gc(Some(&oom))?;
154///
155/// // Try again. If the GC heap is still out of memory, then we
156/// // weren't able to free up resources for this allocation, so
157/// // propagate the error.
158/// ArrayRef::new(&mut scope, &allocator, &elem, len)?
159/// }
160/// // Propagate any other kind of error.
161/// Err(e) => return Err(e),
162/// }
163/// };
164///
165/// // That instance's elements should have the initial value.
166/// for i in 0..len {
167/// let val = my_array.get(&mut scope, i)?.unwrap_i32();
168/// assert_eq!(val, 42);
169/// }
170///
171/// // We can set an element to a new value because the type was defined with
172/// // mutable elements (as opposed to const).
173/// my_array.set(&mut scope, 3, Val::I32(1234))?;
174/// let new_val = my_array.get(&mut scope, 3)?.unwrap_i32();
175/// assert_eq!(new_val, 1234);
176/// }
177/// # Ok(())
178/// # }
179/// # foo().unwrap();
180/// ```
181#[derive(Debug)]
182#[repr(transparent)]
183pub struct ArrayRef {
184 pub(super) inner: GcRootIndex,
185}
186
187unsafe impl GcRefImpl for ArrayRef {
188 fn transmute_ref(index: &GcRootIndex) -> &Self {
189 // Safety: `ArrayRef` is a newtype of a `GcRootIndex`.
190 let me: &Self = unsafe { mem::transmute(index) };
191
192 // Assert we really are just a newtype of a `GcRootIndex`.
193 assert!(matches!(
194 me,
195 Self {
196 inner: GcRootIndex { .. },
197 }
198 ));
199
200 me
201 }
202}
203
204impl Rooted<ArrayRef> {
205 /// Upcast this `arrayref` into an `anyref`.
206 #[inline]
207 pub fn to_anyref(self) -> Rooted<AnyRef> {
208 self.unchecked_cast()
209 }
210
211 /// Upcast this `arrayref` into an `eqref`.
212 #[inline]
213 pub fn to_eqref(self) -> Rooted<EqRef> {
214 self.unchecked_cast()
215 }
216}
217
218impl OwnedRooted<ArrayRef> {
219 /// Upcast this `arrayref` into an `anyref`.
220 #[inline]
221 pub fn to_anyref(self) -> OwnedRooted<AnyRef> {
222 self.unchecked_cast()
223 }
224
225 /// Upcast this `arrayref` into an `eqref`.
226 #[inline]
227 pub fn to_eqref(self) -> OwnedRooted<EqRef> {
228 self.unchecked_cast()
229 }
230}
231
/// An iterator for elements in `ArrayRef::new[_async]`.
233///
234/// NB: We can't use `iter::repeat(elem).take(len)` because that doesn't
235/// implement `ExactSizeIterator`.
236#[derive(Clone)]
237struct RepeatN<'a>(&'a Val, u32);
238
239impl<'a> Iterator for RepeatN<'a> {
240 type Item = &'a Val;
241
242 fn next(&mut self) -> Option<Self::Item> {
243 if self.1 == 0 {
244 None
245 } else {
246 self.1 -= 1;
247 Some(self.0)
248 }
249 }
250
251 fn size_hint(&self) -> (usize, Option<usize>) {
252 let len = self.len();
253 (len, Some(len))
254 }
255}
256
257impl ExactSizeIterator for RepeatN<'_> {
258 fn len(&self) -> usize {
259 usize::try_from(self.1).unwrap()
260 }
261}
262
263impl ArrayRef {
264 /// Allocate a new `array` of the given length, with every element
265 /// initialized to `elem`.
266 ///
267 /// For example, `ArrayRef::new(ctx, pre, &Val::I64(9), 3)` allocates the
268 /// array `[9, 9, 9]`.
269 ///
270 /// This is similar to the `array.new` instruction.
271 ///
272 /// # Automatic Garbage Collection
273 ///
274 /// If the GC heap is at capacity, and there isn't room for allocating this
275 /// new array, then this method will automatically trigger a synchronous
276 /// collection in an attempt to free up space in the GC heap.
277 ///
278 /// # Errors
279 ///
280 /// If the given `elem` value's type does not match the `allocator`'s array
281 /// type's element type, an error is returned.
282 ///
283 /// If the allocation cannot be satisfied because the GC heap is currently
284 /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
285 /// error is returned. The allocation might succeed on a second attempt if
286 /// you drop some rooted GC references and try again.
287 ///
288 /// If `store` is configured with a
289 /// [`ResourceLimiterAsync`](crate::ResourceLimiterAsync) then an error will
290 /// be returned because [`ArrayRef::new_async`] should be used instead.
291 ///
292 /// # Panics
293 ///
294 /// Panics if either the allocator or the `elem` value is not associated
295 /// with the given store.
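    ///
    /// # Example
    ///
    /// A minimal sketch of allocating a small array of `i32`s. The engine,
    /// store, allocator, and root-scope setup (hidden below) mirrors the
    /// examples above, and the out-of-memory retry shown on [`ArrayRef`] is
    /// elided; the concrete values are arbitrary.
    ///
    /// ```
    /// # use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// # let mut scope = RootScope::new(&mut store);
    /// // Allocate a 4-element array with every element initialized to `7`.
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(7), 4)?;
    /// assert_eq!(array.len(&scope)?, 4);
    /// assert_eq!(array.get(&mut scope, 0)?.unwrap_i32(), 7);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```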
296 pub fn new(
297 mut store: impl AsContextMut,
298 allocator: &ArrayRefPre,
299 elem: &Val,
300 len: u32,
301 ) -> Result<Rooted<ArrayRef>> {
302 let (mut limiter, store) = store
303 .as_context_mut()
304 .0
305 .validate_sync_resource_limiter_and_store_opaque()?;
306 vm::assert_ready(Self::_new_async(
307 store,
308 limiter.as_mut(),
309 allocator,
310 elem,
311 len,
312 Asyncness::No,
313 ))
314 }
315
316 /// Asynchronously allocate a new `array` of the given length, with every
317 /// element initialized to `elem`.
318 ///
    /// For example, `ArrayRef::new_async(ctx, pre, &Val::I64(9), 3).await`
    /// allocates the array `[9, 9, 9]`.
321 ///
322 /// This is similar to the `array.new` instruction.
323 ///
324 /// # Automatic Garbage Collection
325 ///
326 /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new array, then this method will automatically trigger an asynchronous
328 /// collection in an attempt to free up space in the GC heap.
329 ///
330 /// # Errors
331 ///
332 /// If the given `elem` value's type does not match the `allocator`'s array
333 /// type's element type, an error is returned.
334 ///
335 /// If the allocation cannot be satisfied because the GC heap is currently
336 /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
337 /// error is returned. The allocation might succeed on a second attempt if
338 /// you drop some rooted GC references and try again.
339 ///
340 /// # Panics
341 ///
    /// Panics if your engine is not configured for async; use
    /// [`ArrayRef::new`][crate::ArrayRef::new] to perform synchronous
    /// allocation instead.
345 ///
346 /// Panics if either the allocator or the `elem` value is not associated
347 /// with the given store.
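    ///
    /// # Example
    ///
    /// A compile-only sketch: it assumes the engine enables async support
    /// (via `Config::async_support(true)`) and that an executor drives the
    /// returned future; out-of-memory handling is elided and the setup is
    /// hidden.
    ///
    /// ```
    /// # use wasmtime::*;
    /// # async fn _foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.async_support(true);
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// # let mut scope = RootScope::new(&mut store);
    /// let array = ArrayRef::new_async(&mut scope, &allocator, &Val::I32(7), 4).await?;
    /// assert_eq!(array.len(&scope)?, 4);
    /// # Ok(())
    /// # }
    /// ```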
348 #[cfg(feature = "async")]
349 pub async fn new_async(
350 mut store: impl AsContextMut,
351 allocator: &ArrayRefPre,
352 elem: &Val,
353 len: u32,
354 ) -> Result<Rooted<ArrayRef>> {
355 let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
356 Self::_new_async(
357 store,
358 limiter.as_mut(),
359 allocator,
360 elem,
361 len,
362 Asyncness::Yes,
363 )
364 .await
365 }
366
367 pub(crate) async fn _new_async(
368 store: &mut StoreOpaque,
369 limiter: Option<&mut StoreResourceLimiter<'_>>,
370 allocator: &ArrayRefPre,
371 elem: &Val,
372 len: u32,
373 asyncness: Asyncness,
374 ) -> Result<Rooted<ArrayRef>> {
375 store
376 .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
377 Self::new_from_iter(store, allocator, RepeatN(elem, len))
378 })
379 .await
380 }
381
382 /// Allocate a new array of the given elements.
383 ///
384 /// Does not attempt a GC on OOM; leaves that to callers.
385 fn new_from_iter<'a>(
386 store: &mut StoreOpaque,
387 allocator: &ArrayRefPre,
388 elems: impl Clone + ExactSizeIterator<Item = &'a Val>,
389 ) -> Result<Rooted<ArrayRef>> {
390 assert_eq!(
391 store.id(),
392 allocator.store_id,
            "attempted to use an `ArrayRefPre` with the wrong store"
394 );
395
396 // Type check the elements against the element type.
397 for elem in elems.clone() {
398 elem.ensure_matches_ty(store, allocator.ty.element_type().unpack())
399 .context("element type mismatch")?;
400 }
401
402 let len = u32::try_from(elems.len()).unwrap();
403
        // Allocate the array and write each element value into the
        // appropriate offset.
406 let arrayref = store
407 .require_gc_store_mut()?
408 .alloc_uninit_array(allocator.type_index(), len, allocator.layout())
409 .context("unrecoverable error when allocating new `arrayref`")?
410 .map_err(|n| GcHeapOutOfMemory::new((), n))?;
411
412 // From this point on, if we get any errors, then the array is not
413 // fully initialized, so we need to eagerly deallocate it before the
414 // next GC where the collector might try to interpret one of the
415 // uninitialized fields as a GC reference.
416 let mut store = AutoAssertNoGc::new(store);
417 match (|| {
418 let elem_ty = allocator.ty.element_type();
419 for (i, elem) in elems.enumerate() {
420 let i = u32::try_from(i).unwrap();
421 debug_assert!(i < len);
422 arrayref.initialize_elem(&mut store, allocator.layout(), &elem_ty, i, *elem)?;
423 }
424 Ok(())
425 })() {
426 Ok(()) => Ok(Rooted::new(&mut store, arrayref.into())),
427 Err(e) => {
428 store.require_gc_store_mut()?.dealloc_uninit_array(arrayref);
429 Err(e)
430 }
431 }
432 }
433
434 /// Synchronously allocate a new `array` containing the given elements.
435 ///
436 /// For example, `ArrayRef::new_fixed(ctx, pre, &[Val::I64(4), Val::I64(5),
437 /// Val::I64(6)])` allocates the array `[4, 5, 6]`.
438 ///
439 /// This is similar to the `array.new_fixed` instruction.
440 ///
441 /// # Automatic Garbage Collection
442 ///
443 /// If the GC heap is at capacity, and there isn't room for allocating this
444 /// new array, then this method will automatically trigger a synchronous
445 /// collection in an attempt to free up space in the GC heap.
446 ///
447 /// # Errors
448 ///
    /// If any of the `elems` values' types do not match the `allocator`'s
    /// array type's element type, an error is returned.
451 ///
452 /// If the allocation cannot be satisfied because the GC heap is currently
453 /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
454 /// error is returned. The allocation might succeed on a second attempt if
455 /// you drop some rooted GC references and try again.
456 ///
457 /// If `store` is configured with a
458 /// [`ResourceLimiterAsync`](crate::ResourceLimiterAsync) then an error
459 /// will be returned because [`ArrayRef::new_fixed_async`] should be used
460 /// instead.
461 ///
462 /// # Panics
463 ///
464 /// Panics if the allocator or any of the `elems` values are not associated
465 /// with the given store.
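    ///
    /// # Example
    ///
    /// A minimal sketch of allocating an array from a fixed slice of values
    /// (engine, store, allocator, and root-scope setup hidden; the values are
    /// arbitrary):
    ///
    /// ```
    /// # use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// # let mut scope = RootScope::new(&mut store);
    /// let elems = [Val::I32(1), Val::I32(2), Val::I32(3)];
    /// let array = ArrayRef::new_fixed(&mut scope, &allocator, &elems)?;
    /// assert_eq!(array.len(&scope)?, 3);
    /// assert_eq!(array.get(&mut scope, 2)?.unwrap_i32(), 3);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```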
466 pub fn new_fixed(
467 mut store: impl AsContextMut,
468 allocator: &ArrayRefPre,
469 elems: &[Val],
470 ) -> Result<Rooted<ArrayRef>> {
471 let (mut limiter, store) = store
472 .as_context_mut()
473 .0
474 .validate_sync_resource_limiter_and_store_opaque()?;
475 vm::assert_ready(Self::_new_fixed_async(
476 store,
477 limiter.as_mut(),
478 allocator,
479 elems,
480 Asyncness::No,
481 ))
482 }
483
484 /// Asynchronously allocate a new `array` containing the given elements.
485 ///
    /// For example, `ArrayRef::new_fixed_async(ctx, pre, &[Val::I64(4),
    /// Val::I64(5), Val::I64(6)]).await` allocates the array `[4, 5, 6]`.
488 ///
489 /// This is similar to the `array.new_fixed` instruction.
490 ///
491 /// If your engine is not configured for async, use
492 /// [`ArrayRef::new_fixed`][crate::ArrayRef::new_fixed] to perform
493 /// synchronous allocation.
494 ///
495 /// # Automatic Garbage Collection
496 ///
497 /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new array, then this method will automatically trigger an asynchronous
499 /// collection in an attempt to free up space in the GC heap.
500 ///
501 /// # Errors
502 ///
    /// If any of the `elems` values' types do not match the `allocator`'s
    /// array type's element type, an error is returned.
505 ///
506 /// If the allocation cannot be satisfied because the GC heap is currently
507 /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
508 /// error is returned. The allocation might succeed on a second attempt if
509 /// you drop some rooted GC references and try again.
510 ///
511 /// # Panics
512 ///
513 /// Panics if the `store` is not configured for async; use
514 /// [`ArrayRef::new_fixed`][crate::ArrayRef::new_fixed] to perform
515 /// synchronous allocation instead.
516 ///
517 /// Panics if the allocator or any of the `elems` values are not associated
518 /// with the given store.
519 #[cfg(feature = "async")]
520 pub async fn new_fixed_async(
521 mut store: impl AsContextMut,
522 allocator: &ArrayRefPre,
523 elems: &[Val],
524 ) -> Result<Rooted<ArrayRef>> {
525 let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
526 Self::_new_fixed_async(store, limiter.as_mut(), allocator, elems, Asyncness::Yes).await
527 }
528
529 pub(crate) async fn _new_fixed_async(
530 store: &mut StoreOpaque,
531 limiter: Option<&mut StoreResourceLimiter<'_>>,
532 allocator: &ArrayRefPre,
533 elems: &[Val],
534 asyncness: Asyncness,
535 ) -> Result<Rooted<ArrayRef>> {
536 store
537 .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
538 Self::new_from_iter(store, allocator, elems.iter())
539 })
540 .await
541 }
542
543 #[inline]
544 pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
545 self.inner.comes_from_same_store(store)
546 }
547
548 /// Get this `arrayref`'s type.
549 ///
550 /// # Errors
551 ///
552 /// Return an error if this reference has been unrooted.
553 ///
554 /// # Panics
555 ///
556 /// Panics if this reference is associated with a different store.
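    ///
    /// # Example
    ///
    /// A small sketch inspecting the type of a freshly allocated array
    /// (setup hidden):
    ///
    /// ```
    /// # use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// # let mut scope = RootScope::new(&mut store);
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(0), 2)?;
    /// let ty = array.ty(&scope)?;
    /// assert!(ty.field_type().mutability().is_var());
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```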
557 pub fn ty(&self, store: impl AsContext) -> Result<ArrayType> {
558 self._ty(store.as_context().0)
559 }
560
561 pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<ArrayType> {
562 assert!(self.comes_from_same_store(store));
563 let index = self.type_index(store)?;
564 Ok(ArrayType::from_shared_type_index(store.engine(), index))
565 }
566
567 /// Does this `arrayref` match the given type?
568 ///
569 /// That is, is this array's type a subtype of the given type?
570 ///
571 /// # Errors
572 ///
573 /// Return an error if this reference has been unrooted.
574 ///
575 /// # Panics
576 ///
577 /// Panics if this reference is associated with a different store or if the
578 /// type is not associated with the store's engine.
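    ///
    /// # Example
    ///
    /// A small sketch; trivially, an array always matches its own type
    /// (setup hidden):
    ///
    /// ```
    /// # use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// # let mut scope = RootScope::new(&mut store);
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(0), 2)?;
    /// let ty = array.ty(&scope)?;
    /// assert!(array.matches_ty(&scope, &ty)?);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```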
579 pub fn matches_ty(&self, store: impl AsContext, ty: &ArrayType) -> Result<bool> {
580 self._matches_ty(store.as_context().0, ty)
581 }
582
583 pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &ArrayType) -> Result<bool> {
584 assert!(self.comes_from_same_store(store));
585 Ok(self._ty(store)?.matches(ty))
586 }
587
588 pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &ArrayType) -> Result<()> {
589 if !self.comes_from_same_store(store) {
            bail!("array used with wrong store");
591 }
592 if self._matches_ty(store, ty)? {
593 Ok(())
594 } else {
595 let actual_ty = self._ty(store)?;
596 bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
597 }
598 }
599
600 /// Get the length of this array.
601 ///
602 /// # Errors
603 ///
604 /// Return an error if this reference has been unrooted.
605 ///
606 /// # Panics
607 ///
608 /// Panics if this reference is associated with a different store.
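    ///
    /// # Example
    ///
    /// A minimal sketch (setup hidden; the length is arbitrary):
    ///
    /// ```
    /// # use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// # let mut scope = RootScope::new(&mut store);
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(0), 5)?;
    /// assert_eq!(array.len(&scope)?, 5);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```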
609 pub fn len(&self, store: impl AsContext) -> Result<u32> {
610 self._len(store.as_context().0)
611 }
612
613 pub(crate) fn _len(&self, store: &StoreOpaque) -> Result<u32> {
614 assert!(self.comes_from_same_store(store));
615 let gc_ref = self.inner.try_gc_ref(store)?;
616 debug_assert!({
617 let header = store.require_gc_store()?.header(gc_ref);
618 header.kind().matches(VMGcKind::ArrayRef)
619 });
620 let arrayref = gc_ref.as_arrayref_unchecked();
621 Ok(arrayref.len(store))
622 }
623
624 /// Get the values of this array's elements.
625 ///
626 /// Note that `i8` and `i16` element values are zero-extended into
627 /// `Val::I32(_)`s.
628 ///
629 /// # Errors
630 ///
631 /// Return an error if this reference has been unrooted.
632 ///
633 /// # Panics
634 ///
635 /// Panics if this reference is associated with a different store.
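    ///
    /// # Example
    ///
    /// A minimal sketch summing the elements of a small array (setup hidden;
    /// the values are arbitrary):
    ///
    /// ```
    /// # use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// # let mut scope = RootScope::new(&mut store);
    /// let elems = [Val::I32(1), Val::I32(2), Val::I32(3)];
    /// let array = ArrayRef::new_fixed(&mut scope, &allocator, &elems)?;
    /// let sum: i32 = array.elems(&mut scope)?.map(|v| v.unwrap_i32()).sum();
    /// assert_eq!(sum, 6);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```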
636 pub fn elems<'a, T: 'static>(
637 &'a self,
638 store: impl Into<StoreContextMut<'a, T>>,
639 ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
640 self._elems(store.into().0)
641 }
642
643 pub(crate) fn _elems<'a>(
644 &'a self,
645 store: &'a mut StoreOpaque,
646 ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
647 assert!(self.comes_from_same_store(store));
648 let store = AutoAssertNoGc::new(store);
649
650 let gc_ref = self.inner.try_gc_ref(&store)?;
651 let header = store.require_gc_store()?.header(gc_ref);
652 debug_assert!(header.kind().matches(VMGcKind::ArrayRef));
653
654 let len = self._len(&store)?;
655
656 return Ok(Elems {
657 arrayref: self,
658 store,
659 index: 0,
660 len,
661 });
662
663 struct Elems<'a, 'b> {
664 arrayref: &'a ArrayRef,
665 store: AutoAssertNoGc<'b>,
666 index: u32,
667 len: u32,
668 }
669
670 impl Iterator for Elems<'_, '_> {
671 type Item = Val;
672
673 #[inline]
674 fn next(&mut self) -> Option<Self::Item> {
675 let i = self.index;
676 debug_assert!(i <= self.len);
677 if i >= self.len {
678 return None;
679 }
680 self.index += 1;
681 Some(self.arrayref._get(&mut self.store, i).unwrap())
682 }
683
684 #[inline]
685 fn size_hint(&self) -> (usize, Option<usize>) {
686 let len = self.len - self.index;
687 let len = usize::try_from(len).unwrap();
688 (len, Some(len))
689 }
690 }
691
692 impl ExactSizeIterator for Elems<'_, '_> {
693 #[inline]
694 fn len(&self) -> usize {
695 let len = self.len - self.index;
696 usize::try_from(len).unwrap()
697 }
698 }
699 }
700
701 fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
702 assert!(self.comes_from_same_store(&store));
703 let gc_ref = self.inner.try_gc_ref(store)?;
704 Ok(store.require_gc_store()?.header(gc_ref))
705 }
706
707 fn arrayref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMArrayRef> {
708 assert!(self.comes_from_same_store(&store));
709 let gc_ref = self.inner.try_gc_ref(store)?;
710 debug_assert!(self.header(store)?.kind().matches(VMGcKind::ArrayRef));
711 Ok(gc_ref.as_arrayref_unchecked())
712 }
713
714 pub(crate) fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcArrayLayout> {
715 assert!(self.comes_from_same_store(&store));
716 let type_index = self.type_index(store)?;
717 let layout = store
718 .engine()
719 .signatures()
720 .layout(type_index)
721 .expect("array types should have GC layouts");
722 match layout {
723 GcLayout::Array(a) => Ok(a),
724 GcLayout::Struct(_) => unreachable!(),
725 }
726 }
727
728 fn field_ty(&self, store: &StoreOpaque) -> Result<FieldType> {
729 let ty = self._ty(store)?;
730 Ok(ty.field_type())
731 }
732
733 /// Get this array's `index`th element.
734 ///
    /// Note that `i8` and `i16` element values are zero-extended into
736 /// `Val::I32(_)`s.
737 ///
738 /// # Errors
739 ///
740 /// Returns an `Err(_)` if the index is out of bounds or this reference has
741 /// been unrooted.
742 ///
743 /// # Panics
744 ///
745 /// Panics if this reference is associated with a different store.
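    ///
    /// # Example
    ///
    /// A minimal sketch of in-bounds and out-of-bounds access (setup hidden):
    ///
    /// ```
    /// # use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// # let mut scope = RootScope::new(&mut store);
    /// let elems = [Val::I32(10), Val::I32(20)];
    /// let array = ArrayRef::new_fixed(&mut scope, &allocator, &elems)?;
    /// assert_eq!(array.get(&mut scope, 1)?.unwrap_i32(), 20);
    ///
    /// // Out-of-bounds indices are reported as errors, not panics.
    /// assert!(array.get(&mut scope, 2).is_err());
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```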
746 pub fn get(&self, mut store: impl AsContextMut, index: u32) -> Result<Val> {
747 let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
748 self._get(&mut store, index)
749 }
750
751 pub(crate) fn _get(&self, store: &mut AutoAssertNoGc<'_>, index: u32) -> Result<Val> {
752 assert!(
753 self.comes_from_same_store(store),
754 "attempted to use an array with the wrong store",
755 );
756 let arrayref = self.arrayref(store)?.unchecked_copy();
757 let field_ty = self.field_ty(store)?;
758 let layout = self.layout(store)?;
759 let len = arrayref.len(store);
760 ensure!(
761 index < len,
762 "index out of bounds: the length is {len} but the index is {index}"
763 );
764 Ok(arrayref.read_elem(store, &layout, field_ty.element_type(), index))
765 }
766
767 /// Set this array's `index`th element.
768 ///
769 /// # Errors
770 ///
771 /// Returns an error in the following scenarios:
772 ///
773 /// * When given a value of the wrong type, such as trying to write an `f32`
774 /// value into an array of `i64` elements.
775 ///
776 /// * When the array elements are not mutable.
777 ///
778 /// * When `index` is not within the range `0..self.len(ctx)`.
779 ///
780 /// * When `value` is a GC reference that has since been unrooted.
781 ///
782 /// # Panics
783 ///
784 /// Panics if either this reference or the given `value` is associated with
785 /// a different store.
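    ///
    /// # Example
    ///
    /// A minimal sketch of updating one element of a mutable array
    /// (setup hidden):
    ///
    /// ```
    /// # use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// # let mut scope = RootScope::new(&mut store);
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(0), 3)?;
    /// array.set(&mut scope, 1, Val::I32(99))?;
    /// assert_eq!(array.get(&mut scope, 1)?.unwrap_i32(), 99);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```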
786 pub fn set(&self, mut store: impl AsContextMut, index: u32, value: Val) -> Result<()> {
787 self._set(store.as_context_mut().0, index, value)
788 }
789
790 pub(crate) fn _set(&self, store: &mut StoreOpaque, index: u32, value: Val) -> Result<()> {
791 assert!(
792 self.comes_from_same_store(store),
793 "attempted to use an array with the wrong store",
794 );
795 assert!(
796 value.comes_from_same_store(store),
797 "attempted to use a value with the wrong store",
798 );
799
800 let mut store = AutoAssertNoGc::new(store);
801
802 let field_ty = self.field_ty(&store)?;
803 ensure!(
804 field_ty.mutability().is_var(),
805 "cannot set element {index}: array elements are not mutable"
806 );
807
808 value
809 .ensure_matches_ty(&store, &field_ty.element_type().unpack())
810 .with_context(|| format!("cannot set element {index}: type mismatch"))?;
811
812 let layout = self.layout(&store)?;
813 let arrayref = self.arrayref(&store)?.unchecked_copy();
814
815 let len = arrayref.len(&store);
816 ensure!(
817 index < len,
818 "index out of bounds: the length is {len} but the index is {index}"
819 );
820
821 arrayref.write_elem(&mut store, &layout, field_ty.element_type(), index, value)
822 }
823
824 pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
825 let gc_ref = self.inner.try_gc_ref(store)?;
826 let header = store.require_gc_store()?.header(gc_ref);
827 debug_assert!(header.kind().matches(VMGcKind::ArrayRef));
828 Ok(header.ty().expect("arrayrefs should have concrete types"))
829 }
830
831 /// Create a new `Rooted<ArrayRef>` from the given GC reference.
832 ///
833 /// `gc_ref` should point to a valid `arrayref` and should belong to the
834 /// store's GC heap. Failure to uphold these invariants is memory safe but
835 /// will lead to general incorrectness such as panics or wrong results.
836 pub(crate) fn from_cloned_gc_ref(
837 store: &mut AutoAssertNoGc<'_>,
838 gc_ref: VMGcRef,
839 ) -> Rooted<Self> {
840 debug_assert!(gc_ref.is_arrayref(&*store.unwrap_gc_store().gc_heap));
841 Rooted::new(store, gc_ref)
842 }
843}
844
845unsafe impl WasmTy for Rooted<ArrayRef> {
846 #[inline]
847 fn valtype() -> ValType {
848 ValType::Ref(RefType::new(false, HeapType::Array))
849 }
850
851 #[inline]
852 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
853 self.comes_from_same_store(store)
854 }
855
856 #[inline]
857 fn dynamic_concrete_type_check(
858 &self,
859 store: &StoreOpaque,
860 _nullable: bool,
861 ty: &HeapType,
862 ) -> Result<()> {
863 match ty {
864 HeapType::Any | HeapType::Eq | HeapType::Array => Ok(()),
865 HeapType::ConcreteArray(ty) => self.ensure_matches_ty(store, ty),
866
867 HeapType::Extern
868 | HeapType::NoExtern
869 | HeapType::Func
870 | HeapType::ConcreteFunc(_)
871 | HeapType::NoFunc
872 | HeapType::I31
873 | HeapType::Struct
874 | HeapType::ConcreteStruct(_)
875 | HeapType::Cont
876 | HeapType::NoCont
877 | HeapType::ConcreteCont(_)
878 | HeapType::Exn
879 | HeapType::NoExn
880 | HeapType::ConcreteExn(_)
881 | HeapType::None => bail!(
882 "type mismatch: expected `(ref {ty})`, got `(ref {})`",
883 self._ty(store)?,
884 ),
885 }
886 }
887
888 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
889 self.wasm_ty_store(store, ptr, ValRaw::anyref)
890 }
891
892 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
893 Self::wasm_ty_load(store, ptr.get_anyref(), ArrayRef::from_cloned_gc_ref)
894 }
895}
896
897unsafe impl WasmTy for Option<Rooted<ArrayRef>> {
898 #[inline]
899 fn valtype() -> ValType {
900 ValType::ARRAYREF
901 }
902
903 #[inline]
904 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
905 self.map_or(true, |x| x.comes_from_same_store(store))
906 }
907
908 #[inline]
909 fn dynamic_concrete_type_check(
910 &self,
911 store: &StoreOpaque,
912 nullable: bool,
913 ty: &HeapType,
914 ) -> Result<()> {
915 match self {
916 Some(s) => Rooted::<ArrayRef>::dynamic_concrete_type_check(s, store, nullable, ty),
917 None => {
918 ensure!(
919 nullable,
920 "expected a non-null reference, but found a null reference"
921 );
922 Ok(())
923 }
924 }
925 }
926
927 #[inline]
928 fn is_vmgcref_and_points_to_object(&self) -> bool {
929 self.is_some()
930 }
931
932 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
933 <Rooted<ArrayRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
934 }
935
936 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
937 <Rooted<ArrayRef>>::wasm_ty_option_load(
938 store,
939 ptr.get_anyref(),
940 ArrayRef::from_cloned_gc_ref,
941 )
942 }
943}
944
945unsafe impl WasmTy for OwnedRooted<ArrayRef> {
946 #[inline]
947 fn valtype() -> ValType {
948 ValType::Ref(RefType::new(false, HeapType::Array))
949 }
950
951 #[inline]
952 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
953 self.comes_from_same_store(store)
954 }
955
956 #[inline]
957 fn dynamic_concrete_type_check(
958 &self,
959 store: &StoreOpaque,
960 _: bool,
961 ty: &HeapType,
962 ) -> Result<()> {
963 match ty {
964 HeapType::Any | HeapType::Eq | HeapType::Array => Ok(()),
965 HeapType::ConcreteArray(ty) => self.ensure_matches_ty(store, ty),
966
967 HeapType::Extern
968 | HeapType::NoExtern
969 | HeapType::Func
970 | HeapType::ConcreteFunc(_)
971 | HeapType::NoFunc
972 | HeapType::I31
973 | HeapType::Struct
974 | HeapType::ConcreteStruct(_)
975 | HeapType::Cont
976 | HeapType::NoCont
977 | HeapType::ConcreteCont(_)
978 | HeapType::Exn
979 | HeapType::NoExn
980 | HeapType::ConcreteExn(_)
981 | HeapType::None => bail!(
982 "type mismatch: expected `(ref {ty})`, got `(ref {})`",
983 self._ty(store)?,
984 ),
985 }
986 }
987
988 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
989 self.wasm_ty_store(store, ptr, ValRaw::anyref)
990 }
991
992 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
993 Self::wasm_ty_load(store, ptr.get_anyref(), ArrayRef::from_cloned_gc_ref)
994 }
995}
996
997unsafe impl WasmTy for Option<OwnedRooted<ArrayRef>> {
998 #[inline]
999 fn valtype() -> ValType {
1000 ValType::ARRAYREF
1001 }
1002
1003 #[inline]
1004 fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
1005 self.as_ref()
1006 .map_or(true, |x| x.comes_from_same_store(store))
1007 }
1008
1009 #[inline]
1010 fn dynamic_concrete_type_check(
1011 &self,
1012 store: &StoreOpaque,
1013 nullable: bool,
1014 ty: &HeapType,
1015 ) -> Result<()> {
1016 match self {
1017 Some(s) => OwnedRooted::<ArrayRef>::dynamic_concrete_type_check(s, store, nullable, ty),
1018 None => {
1019 ensure!(
1020 nullable,
1021 "expected a non-null reference, but found a null reference"
1022 );
1023 Ok(())
1024 }
1025 }
1026 }
1027
1028 #[inline]
1029 fn is_vmgcref_and_points_to_object(&self) -> bool {
1030 self.is_some()
1031 }
1032
1033 fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
1034 <OwnedRooted<ArrayRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
1035 }
1036
1037 unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
1038 <OwnedRooted<ArrayRef>>::wasm_ty_option_load(
1039 store,
1040 ptr.get_anyref(),
1041 ArrayRef::from_cloned_gc_ref,
1042 )
1043 }
1044}