wasmtime/runtime/gc/enabled/arrayref.rs
//! Working with GC `array` objects.

use crate::runtime::vm::{VMGcRef, VMStore};
use crate::store::{StoreId, StoreResourceLimiter};
use crate::vm::{self, VMArrayRef, VMGcHeader};
use crate::{AnyRef, FieldType};
use crate::{
    ArrayType, AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
    ManuallyRooted, RefType, Rooted, Val, ValRaw, ValType, WasmTy,
    prelude::*,
    store::{AutoAssertNoGc, StoreContextMut, StoreOpaque},
};
use core::mem::{self, MaybeUninit};
use wasmtime_environ::{GcArrayLayout, GcLayout, VMGcKind, VMSharedTypeIndex};

/// An allocator for a particular Wasm GC array type.
///
/// Every `ArrayRefPre` is associated with a particular [`Store`][crate::Store]
/// and a particular [`ArrayType`][crate::ArrayType].
///
/// Reusing an allocator across many allocations amortizes some per-type runtime
/// overheads inside Wasmtime. An `ArrayRefPre` is to `ArrayRef`s as an
/// `InstancePre` is to `Instance`s.
///
/// # Example
///
/// ```
/// use wasmtime::*;
///
/// # fn foo() -> Result<()> {
/// let mut config = Config::new();
/// config.wasm_function_references(true);
/// config.wasm_gc(true);
///
/// let engine = Engine::new(&config)?;
/// let mut store = Store::new(&engine, ());
///
/// // Define an array type.
/// let array_ty = ArrayType::new(
///     store.engine(),
///     FieldType::new(Mutability::Var, ValType::I32.into()),
/// );
///
/// // Create an allocator for the array type.
/// let allocator = ArrayRefPre::new(&mut store, array_ty);
///
/// {
///     let mut scope = RootScope::new(&mut store);
///
///     // Allocate a bunch of instances of our array type using the same
///     // allocator! This is faster than creating a new allocator for each
///     // instance we want to allocate.
///     for _ in 0..10 {
///         let len = 42;
///         let elem = Val::I32(36);
///         ArrayRef::new(&mut scope, &allocator, &elem, len)?;
///     }
/// }
/// # Ok(())
/// # }
/// # let _ = foo();
/// ```
pub struct ArrayRefPre {
    store_id: StoreId,
    ty: ArrayType,
}

impl ArrayRefPre {
    /// Create a new `ArrayRefPre` that is associated with the given store
    /// and type.
    pub fn new(mut store: impl AsContextMut, ty: ArrayType) -> Self {
        Self::_new(store.as_context_mut().0, ty)
    }

    pub(crate) fn _new(store: &mut StoreOpaque, ty: ArrayType) -> Self {
        store.insert_gc_host_alloc_type(ty.registered_type().clone());
        let store_id = store.id();
        ArrayRefPre { store_id, ty }
    }

    pub(crate) fn layout(&self) -> &GcArrayLayout {
        self.ty
            .registered_type()
            .layout()
            .expect("array types have a layout")
            .unwrap_array()
    }

    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
        self.ty.registered_type().index()
    }
}

/// A reference to a GC-managed `array` instance.
///
/// WebAssembly `array`s are a sequence of elements of some homogeneous
/// type. An array's length is determined at allocation time (two instances of
/// the same array type may have different lengths) but, once allocated, an
/// array can never be resized. An array's elements are either mutable or
/// constant, depending on the array's type; this determines whether elements
/// may be assigned new values after allocation. Each element is either an
/// unpacked [`Val`][crate::Val] or a packed 8-/16-bit integer. Array elements
/// are accessed dynamically via indexing; out-of-bounds accesses result in
/// traps.
///
/// Like all WebAssembly references, these are opaque and unforgeable to Wasm:
/// they cannot be faked and Wasm cannot, for example, cast the integer
/// `0x12345678` into a reference, pretend it is a valid `arrayref`, and trick
/// the host into dereferencing it and segfaulting or worse.
///
/// Note that you can also use `Rooted<ArrayRef>` and `ManuallyRooted<ArrayRef>`
/// as a type parameter with [`Func::typed`][crate::Func::typed]- and
/// [`Func::wrap`][crate::Func::wrap]-style APIs.
///
/// # Example
///
/// ```
/// use wasmtime::*;
///
/// # fn foo() -> Result<()> {
/// let mut config = Config::new();
/// config.wasm_function_references(true);
/// config.wasm_gc(true);
///
/// let engine = Engine::new(&config)?;
/// let mut store = Store::new(&engine, ());
///
/// // Define the type for an array of `i32`s.
/// let array_ty = ArrayType::new(
///     store.engine(),
///     FieldType::new(Mutability::Var, ValType::I32.into()),
/// );
///
/// // Create an allocator for the array type.
/// let allocator = ArrayRefPre::new(&mut store, array_ty);
///
/// {
///     let mut scope = RootScope::new(&mut store);
///
///     // Allocate an instance of the array type.
///     let len = 36;
///     let elem = Val::I32(42);
///     let my_array = match ArrayRef::new(&mut scope, &allocator, &elem, len) {
///         Ok(s) => s,
///         Err(e) => match e.downcast::<GcHeapOutOfMemory<()>>() {
///             // If the heap is out of memory, then do a GC to free up some
///             // space and try again.
///             Ok(oom) => {
///                 // Do a GC! Note: in an async context, you'd want to do
///                 // `scope.as_context_mut().gc_async().await`.
///                 scope.as_context_mut().gc(Some(&oom));
///
///                 // Try again. If the GC heap is still out of memory, then we
///                 // weren't able to free up resources for this allocation, so
///                 // propagate the error.
///                 ArrayRef::new(&mut scope, &allocator, &elem, len)?
///             }
///             // Propagate any other kind of error.
///             Err(e) => return Err(e),
///         }
///     };
///
///     // That instance's elements should have the initial value.
///     for i in 0..len {
///         let val = my_array.get(&mut scope, i)?.unwrap_i32();
///         assert_eq!(val, 42);
///     }
///
///     // We can set an element to a new value because the type was defined with
///     // mutable elements (as opposed to const).
///     my_array.set(&mut scope, 3, Val::I32(1234))?;
///     let new_val = my_array.get(&mut scope, 3)?.unwrap_i32();
///     assert_eq!(new_val, 1234);
/// }
/// # Ok(())
/// # }
/// # foo().unwrap();
/// ```
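///
/// As noted above, `Rooted<ArrayRef>` also works as a parameter or result type
/// with [`Func::wrap`][crate::Func::wrap]- and
/// [`Func::typed`][crate::Func::typed]-style APIs. The following is a minimal
/// sketch of a host function that receives an `arrayref` and returns its
/// length, reusing the same configuration and array type as the example above:
///
/// ```
/// use wasmtime::*;
///
/// # fn foo() -> Result<()> {
/// # let mut config = Config::new();
/// # config.wasm_function_references(true);
/// # config.wasm_gc(true);
/// # let engine = Engine::new(&config)?;
/// # let mut store = Store::new(&engine, ());
/// # let array_ty = ArrayType::new(
/// #     store.engine(),
/// #     FieldType::new(Mutability::Var, ValType::I32.into()),
/// # );
/// # let allocator = ArrayRefPre::new(&mut store, array_ty);
/// // A host function that takes an `arrayref` argument and returns its length.
/// let array_len = Func::wrap(&mut store, |caller: Caller<'_, ()>, array: Rooted<ArrayRef>| {
///     array.len(&caller)
/// });
///
/// {
///     let mut scope = RootScope::new(&mut store);
///
///     // Allocate an array and pass it to the host function.
///     let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(0), 8)?;
///     let len = array_len
///         .typed::<Rooted<ArrayRef>, u32>(&scope)?
///         .call(&mut scope, array)?;
///     assert_eq!(len, 8);
/// }
/// # Ok(())
/// # }
/// # foo().unwrap();
/// ```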
#[derive(Debug)]
#[repr(transparent)]
pub struct ArrayRef {
    pub(super) inner: GcRootIndex,
}

unsafe impl GcRefImpl for ArrayRef {
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // Safety: `ArrayRef` is a newtype of a `GcRootIndex`.
        let me: &Self = unsafe { mem::transmute(index) };

        // Assert we really are just a newtype of a `GcRootIndex`.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}

impl Rooted<ArrayRef> {
    /// Upcast this `arrayref` into an `anyref`.
    #[inline]
    pub fn to_anyref(self) -> Rooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcast this `arrayref` into an `eqref`.
    #[inline]
    pub fn to_eqref(self) -> Rooted<EqRef> {
        self.unchecked_cast()
    }
}

impl ManuallyRooted<ArrayRef> {
    /// Upcast this `arrayref` into an `anyref`.
    #[inline]
    pub fn to_anyref(self) -> ManuallyRooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcast this `arrayref` into an `eqref`.
    #[inline]
    pub fn to_eqref(self) -> ManuallyRooted<EqRef> {
        self.unchecked_cast()
    }
}

/// An iterator for elements in `ArrayRef::new[_async]`.
///
/// NB: We can't use `iter::repeat(elem).take(len)` because that doesn't
/// implement `ExactSizeIterator`.
#[derive(Clone)]
struct RepeatN<'a>(&'a Val, u32);

impl<'a> Iterator for RepeatN<'a> {
    type Item = &'a Val;

    fn next(&mut self) -> Option<Self::Item> {
        if self.1 == 0 {
            None
        } else {
            self.1 -= 1;
            Some(self.0)
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.len();
        (len, Some(len))
    }
}

impl ExactSizeIterator for RepeatN<'_> {
    fn len(&self) -> usize {
        usize::try_from(self.1).unwrap()
    }
}

impl ArrayRef {
    /// Allocate a new `array` of the given length, with every element
    /// initialized to `elem`.
    ///
    /// For example, `ArrayRef::new(ctx, pre, &Val::I64(9), 3)` allocates the
    /// array `[9, 9, 9]`.
    ///
    /// This is similar to the `array.new` instruction.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new array, then this method will automatically trigger a synchronous
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `elem` value's type does not match the `allocator`'s array
    /// type's element type, an error is returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if the `store` is configured for async; use
    /// [`ArrayRef::new_async`][crate::ArrayRef::new_async] to perform
    /// asynchronous allocation instead.
    ///
    /// Panics if either the allocator or the `elem` value is not associated
    /// with the given store.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &ArrayRefPre,
        elem: &Val,
        len: u32,
    ) -> Result<Rooted<ArrayRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        assert!(!store.async_support());
        vm::assert_ready(Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            elem,
            len,
        ))
    }

    /// Asynchronously allocate a new `array` of the given length, with every
    /// element initialized to `elem`.
    ///
    /// For example, `ArrayRef::new_async(ctx, pre, &Val::I64(9), 3)` allocates
    /// the array `[9, 9, 9]`.
    ///
    /// This is similar to the `array.new` instruction.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new array, then this method will automatically trigger an asynchronous
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If the given `elem` value's type does not match the `allocator`'s array
    /// type's element type, an error is returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if the `store` is not configured for async; use
    /// [`ArrayRef::new`][crate::ArrayRef::new] to perform synchronous
    /// allocation instead.
    ///
    /// Panics if either the allocator or the `elem` value is not associated
    /// with the given store.
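    ///
    /// # Example
    ///
    /// A minimal sketch of asynchronous allocation. It assumes a store created
    /// from an engine with [`Config::async_support`][crate::Config::async_support]
    /// enabled and is driven by whatever async runtime you already use:
    ///
    /// ```
    /// # use wasmtime::*;
    /// # async fn example() -> Result<()> {
    /// let mut config = Config::new();
    /// config.async_support(true);
    /// config.wasm_function_references(true);
    /// config.wasm_gc(true);
    ///
    /// let engine = Engine::new(&config)?;
    /// let mut store = Store::new(&engine, ());
    ///
    /// let array_ty = ArrayType::new(
    ///     store.engine(),
    ///     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// );
    /// let allocator = ArrayRefPre::new(&mut store, array_ty);
    ///
    /// let mut scope = RootScope::new(&mut store);
    /// let array = ArrayRef::new_async(&mut scope, &allocator, &Val::I32(7), 4).await?;
    /// assert_eq!(array.len(&scope)?, 4);
    /// # Ok(())
    /// # }
    /// ```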
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &ArrayRefPre,
        elem: &Val,
        len: u32,
    ) -> Result<Rooted<ArrayRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_async(store, limiter.as_mut(), allocator, elem, len).await
    }

    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &ArrayRefPre,
        elem: &Val,
        len: u32,
    ) -> Result<Rooted<ArrayRef>> {
        store
            .retry_after_gc_async(limiter, (), |store, ()| {
                Self::new_from_iter(store, allocator, RepeatN(elem, len))
            })
            .await
    }

    /// Allocate a new array of the given elements.
    ///
    /// Does not attempt a GC on OOM; leaves that to callers.
    fn new_from_iter<'a>(
        store: &mut StoreOpaque,
        allocator: &ArrayRefPre,
        elems: impl Clone + ExactSizeIterator<Item = &'a Val>,
    ) -> Result<Rooted<ArrayRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use an `ArrayRefPre` with the wrong store"
        );

        // Type check the elements against the element type.
        for elem in elems.clone() {
            elem.ensure_matches_ty(store, allocator.ty.element_type().unpack())
                .context("element type mismatch")?;
        }

        let len = u32::try_from(elems.len()).unwrap();

        // Allocate the array and write each element value into the appropriate
        // offset.
        let arrayref = store
            .require_gc_store_mut()?
            .alloc_uninit_array(allocator.type_index(), len, allocator.layout())
            .context("unrecoverable error when allocating new `arrayref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // From this point on, if we get any errors, then the array is not
        // fully initialized, so we need to eagerly deallocate it before the
        // next GC where the collector might try to interpret one of the
        // uninitialized elements as a GC reference.
        let mut store = AutoAssertNoGc::new(store);
        match (|| {
            let elem_ty = allocator.ty.element_type();
            for (i, elem) in elems.enumerate() {
                let i = u32::try_from(i).unwrap();
                debug_assert!(i < len);
                arrayref.initialize_elem(&mut store, allocator.layout(), &elem_ty, i, *elem)?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, arrayref.into())),
            Err(e) => {
                store.require_gc_store_mut()?.dealloc_uninit_array(arrayref);
                Err(e)
            }
        }
    }

    /// Synchronously allocate a new `array` containing the given elements.
    ///
    /// For example, `ArrayRef::new_fixed(ctx, pre, &[Val::I64(4), Val::I64(5),
    /// Val::I64(6)])` allocates the array `[4, 5, 6]`.
    ///
    /// This is similar to the `array.new_fixed` instruction.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new array, then this method will automatically trigger a synchronous
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If any of the `elems` values' types do not match the `allocator`'s
    /// array type's element type, an error is returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if the `store` is configured for async; use
    /// [`ArrayRef::new_fixed_async`][crate::ArrayRef::new_fixed_async] to
    /// perform asynchronous allocation instead.
    ///
    /// Panics if the allocator or any of the `elems` values are not associated
    /// with the given store.
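    ///
    /// # Example
    ///
    /// A minimal sketch, reusing the same GC-enabled configuration and `i32`
    /// array type as the examples above:
    ///
    /// ```
    /// # use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// let mut scope = RootScope::new(&mut store);
    ///
    /// // Allocate the array `[1, 2, 3]`.
    /// let array = ArrayRef::new_fixed(
    ///     &mut scope,
    ///     &allocator,
    ///     &[Val::I32(1), Val::I32(2), Val::I32(3)],
    /// )?;
    ///
    /// assert_eq!(array.len(&scope)?, 3);
    /// assert_eq!(array.get(&mut scope, 1)?.unwrap_i32(), 2);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```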
    pub fn new_fixed(
        mut store: impl AsContextMut,
        allocator: &ArrayRefPre,
        elems: &[Val],
    ) -> Result<Rooted<ArrayRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        assert!(!store.async_support());
        vm::assert_ready(Self::_new_fixed_async(
            store,
            limiter.as_mut(),
            allocator,
            elems,
        ))
    }

    /// Asynchronously allocate a new `array` containing the given elements.
    ///
    /// For example, `ArrayRef::new_fixed_async(ctx, pre, &[Val::I64(4),
    /// Val::I64(5), Val::I64(6)])` allocates the array `[4, 5, 6]`.
    ///
    /// This is similar to the `array.new_fixed` instruction.
    ///
    /// If your engine is not configured for async, use
    /// [`ArrayRef::new_fixed`][crate::ArrayRef::new_fixed] to perform
    /// synchronous allocation.
    ///
    /// # Automatic Garbage Collection
    ///
    /// If the GC heap is at capacity, and there isn't room for allocating this
    /// new array, then this method will automatically trigger an asynchronous
    /// collection in an attempt to free up space in the GC heap.
    ///
    /// # Errors
    ///
    /// If any of the `elems` values' types do not match the `allocator`'s
    /// array type's element type, an error is returned.
    ///
    /// If the allocation cannot be satisfied because the GC heap is currently
    /// out of memory, then a [`GcHeapOutOfMemory<()>`][crate::GcHeapOutOfMemory]
    /// error is returned. The allocation might succeed on a second attempt if
    /// you drop some rooted GC references and try again.
    ///
    /// # Panics
    ///
    /// Panics if the `store` is not configured for async; use
    /// [`ArrayRef::new_fixed`][crate::ArrayRef::new_fixed] to perform
    /// synchronous allocation instead.
    ///
    /// Panics if the allocator or any of the `elems` values are not associated
    /// with the given store.
    #[cfg(feature = "async")]
    pub async fn new_fixed_async(
        mut store: impl AsContextMut,
        allocator: &ArrayRefPre,
        elems: &[Val],
    ) -> Result<Rooted<ArrayRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_fixed_async(store, limiter.as_mut(), allocator, elems).await
    }

    pub(crate) async fn _new_fixed_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &ArrayRefPre,
        elems: &[Val],
    ) -> Result<Rooted<ArrayRef>> {
        store
            .retry_after_gc_async(limiter, (), |store, ()| {
                Self::new_from_iter(store, allocator, elems.iter())
            })
            .await
    }

    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Get this `arrayref`'s type.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
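    ///
    /// # Example
    ///
    /// A small sketch, assuming an array allocated from an `ArrayRefPre` with
    /// a mutable `i32` element type, as in the examples above:
    ///
    /// ```
    /// # use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty.clone());
    /// # let mut scope = RootScope::new(&mut store);
    /// # let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(0), 4)?;
    /// // The array's type is the concrete type it was allocated with, and it
    /// // matches both itself and the type the allocator was created from.
    /// let ty = array.ty(&scope)?;
    /// assert!(array.matches_ty(&scope, &ty)?);
    /// assert!(array.matches_ty(&scope, &array_ty)?);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```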
    pub fn ty(&self, store: impl AsContext) -> Result<ArrayType> {
        self._ty(store.as_context().0)
    }

    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<ArrayType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(ArrayType::from_shared_type_index(store.engine(), index))
    }

    /// Does this `arrayref` match the given type?
    ///
    /// That is, is this array's type a subtype of the given type?
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store or if the
    /// type is not associated with the store's engine.
    pub fn matches_ty(&self, store: impl AsContext, ty: &ArrayType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &ArrayType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(self._ty(store)?.matches(ty))
    }

    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &ArrayType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("array used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Get the length of this array.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
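    ///
    /// # Example
    ///
    /// A short sketch, assuming the same `i32` array type as the examples
    /// above and an array allocated with length 4:
    ///
    /// ```
    /// # use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// let mut scope = RootScope::new(&mut store);
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(0), 4)?;
    ///
    /// // `len` only needs shared access to the store.
    /// assert_eq!(array.len(&scope)?, 4);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```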
    pub fn len(&self, store: impl AsContext) -> Result<u32> {
        self._len(store.as_context().0)
    }

    pub(crate) fn _len(&self, store: &StoreOpaque) -> Result<u32> {
        assert!(self.comes_from_same_store(store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!({
            let header = store.require_gc_store()?.header(gc_ref);
            header.kind().matches(VMGcKind::ArrayRef)
        });
        let arrayref = gc_ref.as_arrayref_unchecked();
        Ok(arrayref.len(store))
    }

    /// Get the values of this array's elements.
    ///
    /// Note that `i8` and `i16` element values are zero-extended into
    /// `Val::I32(_)`s.
    ///
    /// # Errors
    ///
    /// Return an error if this reference has been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
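    ///
    /// # Example
    ///
    /// A short sketch that sums the elements of an `i32` array, assuming the
    /// same array type and allocator as the examples above:
    ///
    /// ```
    /// # use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// let mut scope = RootScope::new(&mut store);
    /// let array = ArrayRef::new_fixed(
    ///     &mut scope,
    ///     &allocator,
    ///     &[Val::I32(1), Val::I32(2), Val::I32(3)],
    /// )?;
    ///
    /// let mut sum = 0;
    /// for elem in array.elems(&mut scope)? {
    ///     sum += elem.unwrap_i32();
    /// }
    /// assert_eq!(sum, 6);
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```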
    pub fn elems<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._elems(store.into().0)
    }

    pub(crate) fn _elems<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        let store = AutoAssertNoGc::new(store);

        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ArrayRef));

        let len = self._len(&store)?;

        return Ok(Elems {
            arrayref: self,
            store,
            index: 0,
            len,
        });

        struct Elems<'a, 'b> {
            arrayref: &'a ArrayRef,
            store: AutoAssertNoGc<'b>,
            index: u32,
            len: u32,
        }

        impl Iterator for Elems<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                Some(self.arrayref._get(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                let len = usize::try_from(len).unwrap();
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Elems<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                let len = self.len - self.index;
                usize::try_from(len).unwrap()
            }
        }
    }

    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.require_gc_store()?.header(gc_ref))
    }

    fn arrayref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMArrayRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::ArrayRef));
        Ok(gc_ref.as_arrayref_unchecked())
    }

    pub(crate) fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcArrayLayout> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("array types should have GC layouts");
        match layout {
            GcLayout::Array(a) => Ok(a),
            GcLayout::Struct(_) => unreachable!(),
        }
    }

    fn field_ty(&self, store: &StoreOpaque) -> Result<FieldType> {
        let ty = self._ty(store)?;
        Ok(ty.field_type())
    }

    /// Get this array's `index`th element.
    ///
    /// Note that `i8` and `i16` element values are zero-extended into
    /// `Val::I32(_)`s.
    ///
    /// # Errors
    ///
    /// Returns an `Err(_)` if the index is out of bounds or this reference has
    /// been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if this reference is associated with a different store.
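    ///
    /// # Example
    ///
    /// A short sketch, including the out-of-bounds error case, assuming the
    /// same `i32` array type as the examples above:
    ///
    /// ```
    /// # use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// let mut scope = RootScope::new(&mut store);
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(7), 2)?;
    ///
    /// // In-bounds accesses return the element's value...
    /// assert_eq!(array.get(&mut scope, 0)?.unwrap_i32(), 7);
    /// assert_eq!(array.get(&mut scope, 1)?.unwrap_i32(), 7);
    ///
    /// // ...while out-of-bounds accesses return an error.
    /// assert!(array.get(&mut scope, 2).is_err());
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```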
    pub fn get(&self, mut store: impl AsContextMut, index: u32) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._get(&mut store, index)
    }

    pub(crate) fn _get(&self, store: &mut AutoAssertNoGc<'_>, index: u32) -> Result<Val> {
        assert!(
            self.comes_from_same_store(store),
            "attempted to use an array with the wrong store",
        );
        let arrayref = self.arrayref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store)?;
        let layout = self.layout(store)?;
        let len = arrayref.len(store);
        ensure!(
            index < len,
            "index out of bounds: the length is {len} but the index is {index}"
        );
        Ok(arrayref.read_elem(store, &layout, field_ty.element_type(), index))
    }

    /// Set this array's `index`th element.
    ///
    /// # Errors
    ///
    /// Returns an error in the following scenarios:
    ///
    /// * When given a value of the wrong type, such as trying to write an `f32`
    ///   value into an array of `i64` elements.
    ///
    /// * When the array elements are not mutable.
    ///
    /// * When `index` is not within the range `0..self.len(ctx)`.
    ///
    /// * When `value` is a GC reference that has since been unrooted.
    ///
    /// # Panics
    ///
    /// Panics if either this reference or the given `value` is associated with
    /// a different store.
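    ///
    /// # Example
    ///
    /// A short sketch of the success and error cases, assuming the same
    /// mutable `i32` array type as the examples above:
    ///
    /// ```
    /// # use wasmtime::*;
    /// # fn foo() -> Result<()> {
    /// # let mut config = Config::new();
    /// # config.wasm_function_references(true);
    /// # config.wasm_gc(true);
    /// # let engine = Engine::new(&config)?;
    /// # let mut store = Store::new(&engine, ());
    /// # let array_ty = ArrayType::new(
    /// #     store.engine(),
    /// #     FieldType::new(Mutability::Var, ValType::I32.into()),
    /// # );
    /// # let allocator = ArrayRefPre::new(&mut store, array_ty);
    /// let mut scope = RootScope::new(&mut store);
    /// let array = ArrayRef::new(&mut scope, &allocator, &Val::I32(0), 4)?;
    ///
    /// // Writing a value of the element's type succeeds...
    /// array.set(&mut scope, 0, Val::I32(99))?;
    /// assert_eq!(array.get(&mut scope, 0)?.unwrap_i32(), 99);
    ///
    /// // ...but writing a value of the wrong type, or writing out of bounds,
    /// // returns an error.
    /// assert!(array.set(&mut scope, 0, Val::F32(0)).is_err());
    /// assert!(array.set(&mut scope, 4, Val::I32(0)).is_err());
    /// # Ok(())
    /// # }
    /// # foo().unwrap();
    /// ```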
    pub fn set(&self, mut store: impl AsContextMut, index: u32, value: Val) -> Result<()> {
        self._set(store.as_context_mut().0, index, value)
    }

    pub(crate) fn _set(&self, store: &mut StoreOpaque, index: u32, value: Val) -> Result<()> {
        assert!(
            self.comes_from_same_store(store),
            "attempted to use an array with the wrong store",
        );
        assert!(
            value.comes_from_same_store(store),
            "attempted to use a value with the wrong store",
        );

        let mut store = AutoAssertNoGc::new(store);

        let field_ty = self.field_ty(&store)?;
        ensure!(
            field_ty.mutability().is_var(),
            "cannot set element {index}: array elements are not mutable"
        );

        value
            .ensure_matches_ty(&store, &field_ty.element_type().unpack())
            .with_context(|| format!("cannot set element {index}: type mismatch"))?;

        let layout = self.layout(&store)?;
        let arrayref = self.arrayref(&store)?.unchecked_copy();

        let len = arrayref.len(&store);
        ensure!(
            index < len,
            "index out of bounds: the length is {len} but the index is {index}"
        );

        arrayref.write_elem(&mut store, &layout, field_ty.element_type(), index, value)
    }

    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ArrayRef));
        Ok(header.ty().expect("arrayrefs should have concrete types"))
    }

    /// Create a new `Rooted<ArrayRef>` from the given GC reference.
    ///
    /// `gc_ref` should point to a valid `arrayref` and should belong to the
    /// store's GC heap. Failure to uphold these invariants is memory safe but
    /// will lead to general incorrectness such as panics or wrong results.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(gc_ref.is_arrayref(&*store.unwrap_gc_store().gc_heap));
        Rooted::new(store, gc_ref)
    }
}

unsafe impl WasmTy for Rooted<ArrayRef> {
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Array))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            HeapType::Any | HeapType::Eq | HeapType::Array => Ok(()),
            HeapType::ConcreteArray(ty) => self.ensure_matches_ty(store, ty),

            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Struct
            | HeapType::ConcreteStruct(_)
            | HeapType::Cont
            | HeapType::NoCont
            | HeapType::ConcreteCont(_)
            | HeapType::Exn
            | HeapType::NoExn
            | HeapType::ConcreteExn(_)
            | HeapType::None => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ArrayRef::from_cloned_gc_ref)
    }
}

unsafe impl WasmTy for Option<Rooted<ArrayRef>> {
    #[inline]
    fn valtype() -> ValType {
        ValType::ARRAYREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            Some(s) => Rooted::<ArrayRef>::dynamic_concrete_type_check(s, store, nullable, ty),
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <Rooted<ArrayRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <Rooted<ArrayRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            ArrayRef::from_cloned_gc_ref,
        )
    }
}

unsafe impl WasmTy for ManuallyRooted<ArrayRef> {
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Array))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            HeapType::Any | HeapType::Eq | HeapType::Array => Ok(()),
            HeapType::ConcreteArray(ty) => self.ensure_matches_ty(store, ty),

            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Struct
            | HeapType::ConcreteStruct(_)
            | HeapType::Cont
            | HeapType::NoCont
            | HeapType::ConcreteCont(_)
            | HeapType::Exn
            | HeapType::NoExn
            | HeapType::ConcreteExn(_)
            | HeapType::None => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ArrayRef::from_cloned_gc_ref)
    }
}

unsafe impl WasmTy for Option<ManuallyRooted<ArrayRef>> {
    #[inline]
    fn valtype() -> ValType {
        ValType::ARRAYREF
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.as_ref()
            .map_or(true, |x| x.comes_from_same_store(store))
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match self {
            Some(s) => {
                ManuallyRooted::<ArrayRef>::dynamic_concrete_type_check(s, store, nullable, ty)
            }
            None => {
                ensure!(
                    nullable,
                    "expected a non-null reference, but found a null reference"
                );
                Ok(())
            }
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <ManuallyRooted<ArrayRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <ManuallyRooted<ArrayRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            ArrayRef::from_cloned_gc_ref,
        )
    }
}