// wasmtime/runtime/vm/gc/enabled/structref.rs

use super::{truncate_i32_to_i8, truncate_i32_to_i16};
2use crate::{
3 AnyRef, ExnRef, ExternRef, Func, HeapType, RootedGcRefImpl, StorageType, Val, ValType,
4 prelude::*,
5 runtime::vm::{GcHeap, GcStore, VMGcRef},
6 store::AutoAssertNoGc,
7 vm::{FuncRefTableId, SendSyncPtr},
8};
9use core::fmt;
10use wasmtime_environ::{GcStructLayout, VMGcKind};
11
/// A typed reference to a GC-managed `struct` object.
///
/// Newtype over `VMGcRef`. The intended invariant (enforced by the checked
/// constructors on `VMGcRef` below, and debug-asserted by the unchecked
/// ones) is that the underlying reference is never an `i31` and points to an
/// object whose header kind matches `VMGcKind::StructRef`.
///
/// `#[repr(transparent)]` guarantees identical layout with `VMGcRef`, which
/// `VMGcRef::as_structref_unchecked` relies on for its pointer cast.
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct VMStructRef(VMGcRef);
24
25impl fmt::Pointer for VMStructRef {
26 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
27 fmt::Pointer::fmt(&self.0, f)
28 }
29}
30
31impl From<VMStructRef> for VMGcRef {
32 #[inline]
33 fn from(x: VMStructRef) -> Self {
34 x.0
35 }
36}
37
38impl VMGcRef {
39 pub fn is_structref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> bool {
41 if self.is_i31() {
42 return false;
43 }
44
45 let header = gc_heap.header(&self);
46 header.kind().matches(VMGcKind::StructRef)
47 }
48
49 pub fn into_structref(self, gc_heap: &impl GcHeap) -> Result<VMStructRef, VMGcRef> {
54 if self.is_structref(gc_heap) {
55 Ok(self.into_structref_unchecked())
56 } else {
57 Err(self)
58 }
59 }
60
61 #[inline]
69 pub fn into_structref_unchecked(self) -> VMStructRef {
70 debug_assert!(!self.is_i31());
71 VMStructRef(self)
72 }
73
74 pub fn as_structref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> Option<&VMStructRef> {
77 if self.is_structref(gc_heap) {
78 Some(self.as_structref_unchecked())
79 } else {
80 None
81 }
82 }
83
84 pub fn as_structref_unchecked(&self) -> &VMStructRef {
90 debug_assert!(!self.is_i31());
91 let ptr = self as *const VMGcRef;
92 let ret = unsafe { &*ptr.cast() };
93 assert!(matches!(ret, VMStructRef(VMGcRef { .. })));
94 ret
95 }
96}
97
impl VMStructRef {
    /// Get this struct reference as an untyped, borrowed `VMGcRef`.
    pub fn as_gc_ref(&self) -> &VMGcRef {
        &self.0
    }

    /// Clone this reference through the GC store, which performs whatever
    /// bookkeeping the collector requires for a new copy of the reference.
    pub fn clone(&self, gc_store: &mut GcStore) -> Self {
        Self(gc_store.clone_gc_ref(&self.0))
    }

    /// Explicitly drop this reference, letting the GC store release
    /// whatever it holds for it.
    pub fn drop(self, gc_store: &mut GcStore) {
        gc_store.drop_gc_ref(self.0);
    }

    /// Copy this reference without involving the GC store.
    ///
    /// This skips the bookkeeping that `clone` performs, so the caller is
    /// responsible for upholding the collector's invariants for the copy.
    pub fn unchecked_copy(&self) -> Self {
        Self(self.0.unchecked_copy())
    }

    /// Read the `field`th field of this struct as a `Val` of storage type
    /// `ty`.
    ///
    /// `layout` supplies each field's byte offset and must describe this
    /// object's actual type, otherwise the raw bytes are misinterpreted.
    /// Panics if `field` is out of bounds for `layout.fields`.
    pub fn read_field(
        &self,
        store: &mut AutoAssertNoGc,
        layout: &GcStructLayout,
        ty: &StorageType,
        field: usize,
    ) -> Val {
        let offset = layout.fields[field].offset;
        read_field_impl(self.as_gc_ref(), store, ty, offset)
    }

    /// Write `val` into the `field`th field of this struct.
    ///
    /// In debug builds, asserts that `val` matches the field's unpacked
    /// storage type. `layout` must describe this object's actual type, and
    /// `field` must be in bounds for `layout.fields` (panics otherwise).
    ///
    /// # Errors
    ///
    /// Fails if the GC store is unavailable, if a GC reference inside
    /// `val` cannot be cloned into this store, or if `val` is a
    /// continuation reference (not yet supported).
    pub fn write_field(
        &self,
        store: &mut AutoAssertNoGc,
        layout: &GcStructLayout,
        ty: &StorageType,
        field: usize,
        val: Val,
    ) -> Result<()> {
        debug_assert!(val._matches_ty(&store, &ty.unpack())?);

        let offset = layout.fields[field].offset;
        let gcstore = store.require_gc_store_mut()?;
        let data = gcstore.gc_object_data(self.as_gc_ref());
        match val {
            // Packed integer fields: truncate the i32 to the field's
            // storage width before writing.
            Val::I32(i) if ty.is_i8() => data.write_i8(offset, truncate_i32_to_i8(i)),
            Val::I32(i) if ty.is_i16() => data.write_i16(offset, truncate_i32_to_i16(i)),
            Val::I32(i) => data.write_i32(offset, i),
            Val::I64(i) => data.write_i64(offset, i),
            // Floats and v128 are stored as their raw bit patterns.
            Val::F32(f) => data.write_u32(offset, f),
            Val::F64(f) => data.write_u64(offset, f),
            Val::V128(v) => data.write_v128(offset, v),

            Val::ExternRef(e) => {
                // Load the field's current raw bits (0 encodes null, per the
                // `map_or(0, ..)` below) so `write_gc_ref` can see — and
                // presumably release — the old reference when overwriting.
                let raw = data.read_u32(offset);
                let mut gc_ref = VMGcRef::from_raw_u32(raw);
                // Clone the incoming reference out of `val` *before*
                // re-borrowing the GC store mutably below.
                let e = match e {
                    Some(e) => Some(e.try_gc_ref(store)?.unchecked_copy()),
                    None => None,
                };
                let store = store.require_gc_store_mut()?;
                store.write_gc_ref(&mut gc_ref, e.as_ref());
                // The earlier `data` borrow ended at the store re-borrow;
                // fetch the object data again to write the new raw bits.
                let data = store.gc_object_data(self.as_gc_ref());
                data.write_u32(offset, gc_ref.map_or(0, |r| r.as_raw_u32()));
            }
            // Same dance as the `ExternRef` arm above, for `anyref` fields.
            Val::AnyRef(a) => {
                let raw = data.read_u32(offset);
                let mut gc_ref = VMGcRef::from_raw_u32(raw);
                let a = match a {
                    Some(a) => Some(a.try_gc_ref(store)?.unchecked_copy()),
                    None => None,
                };
                let store = store.require_gc_store_mut()?;
                store.write_gc_ref(&mut gc_ref, a.as_ref());
                let data = store.gc_object_data(self.as_gc_ref());
                data.write_u32(offset, gc_ref.map_or(0, |r| r.as_raw_u32()));
            }
            // Same dance again, for `exnref` fields.
            Val::ExnRef(e) => {
                let raw = data.read_u32(offset);
                let mut gc_ref = VMGcRef::from_raw_u32(raw);
                let e = match e {
                    Some(e) => Some(e.try_gc_ref(store)?.unchecked_copy()),
                    None => None,
                };
                let store = store.require_gc_store_mut()?;
                store.write_gc_ref(&mut gc_ref, e.as_ref());
                let data = store.gc_object_data(self.as_gc_ref());
                data.write_u32(offset, gc_ref.map_or(0, |r| r.as_raw_u32()));
            }

            Val::FuncRef(f) => {
                // Function references are not GC objects: they are interned
                // into the store's func-ref side table and the table id's
                // raw u32 is what gets stored in the field.
                let f = f.map(|f| SendSyncPtr::new(f.vm_func_ref(store)));
                let gcstore = store.require_gc_store_mut()?;
                // NOTE(review): `intern` is `unsafe`; presumably it requires
                // `f` to remain a valid `VMFuncRef` pointer for this store's
                // lifetime — confirm against the func-ref table's contract.
                let id = unsafe { gcstore.func_ref_table.intern(f) };
                gcstore
                    .gc_object_data(self.as_gc_ref())
                    .write_u32(offset, id.into_raw());
            }
            Val::ContRef(_) => {
                return Err(anyhow::anyhow!(
                    "setting continuation references in struct fields not yet supported"
                ));
            }
        }
        Ok(())
    }

    /// Initialize the `field`th field of this struct with `val`.
    ///
    /// Unlike `write_field`, the destination is treated as uninitialized:
    /// no previous GC reference is read out of the field or released before
    /// the new value is written (see `initialize_field_impl`).
    pub fn initialize_field(
        &self,
        store: &mut AutoAssertNoGc,
        layout: &GcStructLayout,
        ty: &StorageType,
        field: usize,
        val: Val,
    ) -> Result<()> {
        debug_assert!(val._matches_ty(&store, &ty.unpack())?);
        let offset = layout.fields[field].offset;
        initialize_field_impl(self.as_gc_ref(), store, ty, offset, val)
    }
}
283
/// Read a value of storage type `ty` from byte `offset` within the GC
/// object referenced by `gc_ref`.
///
/// Shared implementation backing `VMStructRef::read_field`. The caller is
/// responsible for `offset` actually corresponding to a field of type `ty`
/// in the object's layout.
pub(crate) fn read_field_impl(
    gc_ref: &VMGcRef,
    store: &mut AutoAssertNoGc,
    ty: &StorageType,
    offset: u32,
) -> Val {
    let data = store.unwrap_gc_store_mut().gc_object_data(gc_ref);
    match ty {
        // Packed fields are read unsigned and zero-extended to i32.
        // NOTE(review): sign extension (`struct.get_s`) is presumably
        // handled by callers or generated code — confirm.
        StorageType::I8 => Val::I32(data.read_u8(offset).into()),
        StorageType::I16 => Val::I32(data.read_u16(offset).into()),
        StorageType::ValType(ValType::I32) => Val::I32(data.read_i32(offset)),
        StorageType::ValType(ValType::I64) => Val::I64(data.read_i64(offset)),
        // Floats and v128 are stored as raw bit patterns.
        StorageType::ValType(ValType::F32) => Val::F32(data.read_u32(offset)),
        StorageType::ValType(ValType::F64) => Val::F64(data.read_u64(offset)),
        StorageType::ValType(ValType::V128) => Val::V128(data.read_v128(offset)),
        // Reference fields: dispatch on the top of the type hierarchy.
        StorageType::ValType(ValType::Ref(r)) => match r.heap_type().top() {
            HeapType::Extern => {
                let raw = data.read_u32(offset);
                Val::ExternRef(ExternRef::_from_raw(store, raw))
            }
            HeapType::Any => {
                let raw = data.read_u32(offset);
                Val::AnyRef(AnyRef::_from_raw(store, raw))
            }
            HeapType::Exn => {
                let raw = data.read_u32(offset);
                Val::ExnRef(ExnRef::_from_raw(store, raw))
            }
            HeapType::Func => {
                // Function references are stored as interned ids into the
                // store's func-ref table (see `VMStructRef::write_field`),
                // not as GC references.
                let func_ref_id = data.read_u32(offset);
                let func_ref_id = FuncRefTableId::from_raw(func_ref_id);
                let func_ref = store
                    .unwrap_gc_store()
                    .func_ref_table
                    .get_untyped(func_ref_id);
                // NOTE(review): the `unsafe` relies on the interned pointer
                // still being a valid `VMFuncRef` for this store — the
                // interning side upholds this; confirm its contract.
                Val::FuncRef(unsafe {
                    func_ref.map(|p| Func::from_vm_func_ref(store.id(), p.as_non_null()))
                })
            }
            otherwise => unreachable!("not a top type: {otherwise:?}"),
        },
    }
}
331
/// Initialize a field of storage type `ty` at byte `offset` within the GC
/// object referenced by `gc_ref`.
///
/// Unlike `VMStructRef::write_field`, this treats the destination as
/// uninitialized: no prior GC reference is read out of the field or
/// released before the new raw value is written.
///
/// # Errors
///
/// Fails if the GC store is unavailable, if a GC reference inside `val`
/// cannot be cloned into this store, or if `val` is a continuation
/// reference (not yet supported).
pub(crate) fn initialize_field_impl(
    gc_ref: &VMGcRef,
    store: &mut AutoAssertNoGc,
    ty: &StorageType,
    offset: u32,
    val: Val,
) -> Result<()> {
    let gcstore = store.require_gc_store_mut()?;
    match val {
        // Packed integer fields: truncate the i32 to the storage width.
        Val::I32(i) if ty.is_i8() => gcstore
            .gc_object_data(gc_ref)
            .write_i8(offset, truncate_i32_to_i8(i)),
        Val::I32(i) if ty.is_i16() => gcstore
            .gc_object_data(gc_ref)
            .write_i16(offset, truncate_i32_to_i16(i)),
        Val::I32(i) => gcstore.gc_object_data(gc_ref).write_i32(offset, i),
        Val::I64(i) => gcstore.gc_object_data(gc_ref).write_i64(offset, i),
        // Floats and v128 are stored as their raw bit patterns.
        Val::F32(f) => gcstore.gc_object_data(gc_ref).write_u32(offset, f),
        Val::F64(f) => gcstore.gc_object_data(gc_ref).write_u64(offset, f),
        Val::V128(v) => gcstore.gc_object_data(gc_ref).write_v128(offset, v),

        // Reference fields: clone the reference into this store and write
        // its raw u32 representation directly (0 encodes null). The clone
        // ends the `gcstore` borrow above, so the store is re-borrowed.
        Val::ExternRef(x) => {
            let x = match x {
                None => 0,
                Some(x) => x.try_clone_gc_ref(store)?.as_raw_u32(),
            };
            store
                .require_gc_store_mut()?
                .gc_object_data(gc_ref)
                .write_u32(offset, x);
        }
        Val::AnyRef(x) => {
            let x = match x {
                None => 0,
                Some(x) => x.try_clone_gc_ref(store)?.as_raw_u32(),
            };
            store
                .require_gc_store_mut()?
                .gc_object_data(gc_ref)
                .write_u32(offset, x);
        }
        Val::ExnRef(x) => {
            let x = match x {
                None => 0,
                Some(x) => x.try_clone_gc_ref(store)?.as_raw_u32(),
            };
            store
                .require_gc_store_mut()?
                .gc_object_data(gc_ref)
                .write_u32(offset, x);
        }

        Val::FuncRef(f) => {
            // Function references are interned into the store's func-ref
            // side table; the field stores the table id's raw u32.
            let f = f.map(|f| SendSyncPtr::new(f.vm_func_ref(store)));
            let gcstore = store.require_gc_store_mut()?;
            // NOTE(review): `intern` is `unsafe`; presumably it requires
            // `f` to remain valid for this store — confirm its contract.
            let id = unsafe { gcstore.func_ref_table.intern(f) };
            gcstore
                .gc_object_data(gc_ref)
                .write_u32(offset, id.into_raw());
        }
        Val::ContRef(_) => {
            return Err(anyhow::anyhow!(
                "initializing continuation references in struct fields not yet supported"
            ));
        }
    }
    Ok(())
}