wasmtime/runtime/vm/gc/enabled/arrayref.rs

use super::{truncate_i32_to_i8, truncate_i32_to_i16};
use crate::{
    AnyRef, ExnRef, ExternRef, Func, HeapType, RootedGcRefImpl, StorageType, Val, ValType,
    prelude::*,
    runtime::vm::{GcHeap, GcStore, VMGcRef},
    store::{AutoAssertNoGc, StoreOpaque},
    vm::{FuncRefTableId, SendSyncPtr},
};
use core::fmt;
use wasmtime_environ::{GcArrayLayout, VMGcKind};
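/// A `VMGcRef` that is known to point to a GC-managed array object.
///
/// Obtain one via `VMGcRef::into_arrayref`/`VMGcRef::as_arrayref` or their
/// unchecked variants below.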
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct VMArrayRef(VMGcRef);

impl fmt::Pointer for VMArrayRef {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.0, f)
    }
}

impl From<VMArrayRef> for VMGcRef {
    #[inline]
    fn from(x: VMArrayRef) -> Self {
        x.0
    }
}

impl VMGcRef {
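    /// Does this GC reference point to an array object?
    ///
    /// Always `false` for `i31` references, which are not heap-allocated and
    /// therefore have no object header to inspect.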
    pub fn is_arrayref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> bool {
        if self.is_i31() {
            return false;
        }

        let header = gc_heap.header(&self);
        header.kind().matches(VMGcKind::ArrayRef)
    }
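    /// Convert this GC reference into a `VMArrayRef`, checking the object
    /// header first. On failure, the original reference is handed back as the
    /// `Err` value.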
    pub fn into_arrayref(self, gc_heap: &(impl GcHeap + ?Sized)) -> Result<VMArrayRef, VMGcRef> {
        if self.is_arrayref(gc_heap) {
            Ok(self.into_arrayref_unchecked())
        } else {
            Err(self)
        }
    }
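    /// Convert this GC reference into a `VMArrayRef` without checking the
    /// object header; the caller must ensure it really refers to an array.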
    #[inline]
    pub fn into_arrayref_unchecked(self) -> VMArrayRef {
        debug_assert!(!self.is_i31());
        VMArrayRef(self)
    }
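    /// View this GC reference as a `VMArrayRef`, returning `None` if it does
    /// not point to an array object.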
    pub fn as_arrayref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> Option<&VMArrayRef> {
        if self.is_arrayref(gc_heap) {
            Some(self.as_arrayref_unchecked())
        } else {
            None
        }
    }
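    /// View this GC reference as a `VMArrayRef` without checking the object
    /// header; the caller must ensure it really refers to an array.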
    pub fn as_arrayref_unchecked(&self) -> &VMArrayRef {
        debug_assert!(!self.is_i31());
        let ptr = self as *const VMGcRef;
        // `VMArrayRef` is a `#[repr(transparent)]` wrapper around `VMGcRef`,
        // so casting `&VMGcRef` to `&VMArrayRef` is layout-compatible.
        let ret = unsafe { &*ptr.cast() };
        assert!(matches!(ret, VMArrayRef(VMGcRef { .. })));
        ret
    }
}

impl VMArrayRef {
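    /// Get a shared borrow of the underlying `VMGcRef`.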
    pub fn as_gc_ref(&self) -> &VMGcRef {
        &self.0
    }
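    /// Clone this array reference through the GC store, so any collector
    /// bookkeeping (e.g. reference counts) is updated.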
    pub fn clone(&self, gc_store: &mut GcStore) -> Self {
        Self(gc_store.clone_gc_ref(&self.0))
    }
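    /// Explicitly drop this array reference through the GC store.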
    pub fn drop(self, gc_store: &mut GcStore) {
        gc_store.drop_gc_ref(self.0);
    }
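    /// Copy this array reference without going through the GC store, i.e.
    /// without running any clone barrier.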
    pub fn unchecked_copy(&self) -> Self {
        Self(self.0.unchecked_copy())
    }
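    /// Get the length of this array (its number of elements).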
    pub fn len(&self, store: &StoreOpaque) -> u32 {
        store.unwrap_gc_store().array_len(self)
    }
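    /// Read the element at `index`, interpreting the array's data according
    /// to the given `layout` and element storage type `ty`.
    ///
    /// Packed `i8`/`i16` elements are zero-extended into the returned `i32`.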
    pub fn read_elem(
        &self,
        store: &mut AutoAssertNoGc,
        layout: &GcArrayLayout,
        ty: &StorageType,
        index: u32,
    ) -> Val {
        let offset = layout.elem_offset(index);
        let data = store.unwrap_gc_store_mut().gc_object_data(self.as_gc_ref());
        match ty {
            StorageType::I8 => Val::I32(data.read_u8(offset).into()),
            StorageType::I16 => Val::I32(data.read_u16(offset).into()),
            StorageType::ValType(ValType::I32) => Val::I32(data.read_i32(offset)),
            StorageType::ValType(ValType::I64) => Val::I64(data.read_i64(offset)),
            StorageType::ValType(ValType::F32) => Val::F32(data.read_u32(offset)),
            StorageType::ValType(ValType::F64) => Val::F64(data.read_u64(offset)),
            StorageType::ValType(ValType::V128) => Val::V128(data.read_v128(offset)),
            StorageType::ValType(ValType::Ref(r)) => match r.heap_type().top() {
                HeapType::Extern => {
                    let raw = data.read_u32(offset);
                    Val::ExternRef(ExternRef::_from_raw(store, raw))
                }
                HeapType::Any => {
                    let raw = data.read_u32(offset);
                    Val::AnyRef(AnyRef::_from_raw(store, raw))
                }
                HeapType::Exn => {
                    let raw = data.read_u32(offset);
                    Val::ExnRef(ExnRef::_from_raw(store, raw))
                }
                HeapType::Func => {
                    let func_ref_id = data.read_u32(offset);
                    let func_ref_id = FuncRefTableId::from_raw(func_ref_id);
                    let func_ref = store
                        .unwrap_gc_store()
                        .func_ref_table
                        .get_untyped(func_ref_id);
                    Val::FuncRef(unsafe {
                        func_ref.map(|p| Func::from_vm_func_ref(store.id(), p.as_non_null()))
                    })
                }
                otherwise => unreachable!("not a top type: {otherwise:?}"),
            },
        }
    }
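    /// Write `val` into the element at `index`.
    ///
    /// `val` must match the element storage type `ty`. Reference-typed
    /// elements are updated via `GcStore::write_gc_ref` so that the GC
    /// store's write barrier runs.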
    pub fn write_elem(
        &self,
        store: &mut AutoAssertNoGc,
        layout: &GcArrayLayout,
        ty: &StorageType,
        index: u32,
        val: Val,
    ) -> Result<()> {
        debug_assert!(val._matches_ty(&store, &ty.unpack())?);

        let offset = layout.elem_offset(index);
        let data = store.unwrap_gc_store_mut().gc_object_data(self.as_gc_ref());
        match val {
            Val::I32(i) if ty.is_i8() => data.write_i8(offset, truncate_i32_to_i8(i)),
            Val::I32(i) if ty.is_i16() => data.write_i16(offset, truncate_i32_to_i16(i)),
            Val::I32(i) => data.write_i32(offset, i),
            Val::I64(i) => data.write_i64(offset, i),
            Val::F32(f) => data.write_u32(offset, f),
            Val::F64(f) => data.write_u64(offset, f),
            Val::V128(v) => data.write_v128(offset, v),

            Val::ExternRef(e) => {
                // Read the element's current GC reference, then let
                // `write_gc_ref` install the (cloned) new reference so the GC
                // store's write barrier takes care of the old value, and
                // finally store the updated raw value back into the element.
                let raw = data.read_u32(offset);
                let mut gc_ref = VMGcRef::from_raw_u32(raw);
                let e = match e {
                    Some(e) => Some(e.try_gc_ref(store)?.unchecked_copy()),
                    None => None,
                };
                let store = store.require_gc_store_mut()?;
                store.write_gc_ref(&mut gc_ref, e.as_ref());
                let data = store.gc_object_data(self.as_gc_ref());
                data.write_u32(offset, gc_ref.map_or(0, |r| r.as_raw_u32()));
            }
            Val::AnyRef(a) => {
                // Same dance as the `ExternRef` arm above.
                let raw = data.read_u32(offset);
                let mut gc_ref = VMGcRef::from_raw_u32(raw);
                let a = match a {
                    Some(a) => Some(a.try_gc_ref(store)?.unchecked_copy()),
                    None => None,
                };
                let store = store.require_gc_store_mut()?;
                store.write_gc_ref(&mut gc_ref, a.as_ref());
                let data = store.gc_object_data(self.as_gc_ref());
                data.write_u32(offset, gc_ref.map_or(0, |r| r.as_raw_u32()));
            }
            Val::ExnRef(e) => {
                // Same dance as the `ExternRef` arm above.
                let raw = data.read_u32(offset);
                let mut gc_ref = VMGcRef::from_raw_u32(raw);
                let e = match e {
                    Some(e) => Some(e.try_gc_ref(store)?.unchecked_copy()),
                    None => None,
                };
                let store = store.require_gc_store_mut()?;
                store.write_gc_ref(&mut gc_ref, e.as_ref());
                let data = store.gc_object_data(self.as_gc_ref());
                data.write_u32(offset, gc_ref.map_or(0, |r| r.as_raw_u32()));
            }

            Val::FuncRef(f) => {
                let func_ref = match f {
                    Some(f) => Some(SendSyncPtr::new(f.vm_func_ref(store))),
                    None => None,
                };
                let store = store.require_gc_store_mut()?;
                let id = unsafe { store.func_ref_table.intern(func_ref) };
                store
                    .gc_object_data(self.as_gc_ref())
                    .write_u32(offset, id.into_raw());
            }
            Val::ContRef(_) => {
                return Err(anyhow::anyhow!(
                    "setting continuation references in array elements not yet supported"
                ));
            }
        }
        Ok(())
    }
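    /// Initialize the element at `index` with `val`.
    ///
    /// Unlike `write_elem`, reference values are written directly rather than
    /// via `GcStore::write_gc_ref`, since a freshly allocated element has no
    /// previous value to swap out.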
    pub fn initialize_elem(
        &self,
        store: &mut AutoAssertNoGc,
        layout: &GcArrayLayout,
        ty: &StorageType,
        index: u32,
        val: Val,
    ) -> Result<()> {
        debug_assert!(val._matches_ty(&store, &ty.unpack())?);
        let offset = layout.elem_offset(index);
        let gcstore = store.require_gc_store_mut()?;
        match val {
            Val::I32(i) if ty.is_i8() => gcstore
                .gc_object_data(self.as_gc_ref())
                .write_i8(offset, truncate_i32_to_i8(i)),
            Val::I32(i) if ty.is_i16() => gcstore
                .gc_object_data(self.as_gc_ref())
                .write_i16(offset, truncate_i32_to_i16(i)),
            Val::I32(i) => gcstore
                .gc_object_data(self.as_gc_ref())
                .write_i32(offset, i),
            Val::I64(i) => gcstore
                .gc_object_data(self.as_gc_ref())
                .write_i64(offset, i),
            Val::F32(f) => gcstore
                .gc_object_data(self.as_gc_ref())
                .write_u32(offset, f),
            Val::F64(f) => gcstore
                .gc_object_data(self.as_gc_ref())
                .write_u64(offset, f),
            Val::V128(v) => gcstore
                .gc_object_data(self.as_gc_ref())
                .write_v128(offset, v),

            Val::ExternRef(x) => {
                // Clone the reference and write its raw value directly:
                // initialization does not go through `write_gc_ref`, as there
                // is no previously written element value to swap out.
                let x = match x {
                    None => 0,
                    Some(x) => x.try_clone_gc_ref(store)?.as_raw_u32(),
                };
                store
                    .require_gc_store_mut()?
                    .gc_object_data(self.as_gc_ref())
                    .write_u32(offset, x);
            }
            Val::AnyRef(x) => {
                // Same as the `ExternRef` arm above.
                let x = match x {
                    None => 0,
                    Some(x) => x.try_clone_gc_ref(store)?.as_raw_u32(),
                };
                store
                    .require_gc_store_mut()?
                    .gc_object_data(self.as_gc_ref())
                    .write_u32(offset, x);
            }
            Val::ExnRef(x) => {
                // Same as the `ExternRef` arm above.
                let x = match x {
                    None => 0,
                    Some(x) => x.try_clone_gc_ref(store)?.as_raw_u32(),
                };
                store
                    .require_gc_store_mut()?
                    .gc_object_data(self.as_gc_ref())
                    .write_u32(offset, x);
            }

            Val::FuncRef(f) => {
                let func_ref = match f {
                    Some(f) => Some(SendSyncPtr::new(f.vm_func_ref(store))),
                    None => None,
                };
                let gcstore = store.require_gc_store_mut()?;
                let id = unsafe { gcstore.func_ref_table.intern(func_ref) };
                gcstore
                    .gc_object_data(self.as_gc_ref())
                    .write_u32(offset, id.into_raw());
            }
            Val::ContRef(_) => {
                return Err(anyhow::anyhow!(
                    "initializing continuation references in array elements not yet supported"
                ));
            }
        }
        Ok(())
    }
}