wasmtime/runtime/vm/gc/enabled/arrayref.rs
use super::{truncate_i32_to_i16, truncate_i32_to_i8};
use crate::{
    prelude::*,
    runtime::vm::{GcHeap, GcStore, VMGcRef},
    store::{AutoAssertNoGc, StoreOpaque},
    vm::{FuncRefTableId, SendSyncPtr},
    AnyRef, ExternRef, Func, HeapType, RootedGcRefImpl, StorageType, Val, ValType,
};
use core::fmt;
use wasmtime_environ::{GcArrayLayout, VMGcKind};

/// A `VMGcRef` that we know points to an `array`.
///
/// Create a `VMArrayRef` via `VMGcRef::into_arrayref` and
/// `VMGcRef::as_arrayref`, or their unchecked equivalents
/// `VMGcRef::into_arrayref_unchecked` and `VMGcRef::as_arrayref_unchecked`.
///
/// Note: This is not a `TypedGcRef<_>` because each collector can have a
/// different concrete representation of `arrayref` that it allocates inside
/// its heap.
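///
/// A sketch of the checked conversion flow, assuming a `gc_ref: VMGcRef` and
/// a `heap: &impl GcHeap` are in scope (illustrative only, not a runnable
/// doctest):
///
/// ```ignore
/// match gc_ref.into_arrayref(heap) {
///     // The reference really was an `arrayref`.
///     Ok(arrayref) => { /* use the `VMArrayRef` */ }
///     // It was not; ownership of the original reference is handed back.
///     Err(gc_ref) => { /* fall back or error */ }
/// }
/// ```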
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct VMArrayRef(VMGcRef);

impl fmt::Pointer for VMArrayRef {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.0, f)
    }
}

impl From<VMArrayRef> for VMGcRef {
    #[inline]
    fn from(x: VMArrayRef) -> Self {
        x.0
    }
}

impl VMGcRef {
    /// Is this `VMGcRef` pointing to an `array`?
    pub fn is_arrayref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> bool {
        if self.is_i31() {
            return false;
        }

        let header = gc_heap.header(&self);
        header.kind().matches(VMGcKind::ArrayRef)
    }

    /// Create a new `VMArrayRef` from this `VMGcRef`.
    ///
    /// If this is not a GC reference to an `arrayref`, `Err(self)` is
    /// returned.
    pub fn into_arrayref(self, gc_heap: &(impl GcHeap + ?Sized)) -> Result<VMArrayRef, VMGcRef> {
        if self.is_arrayref(gc_heap) {
            Ok(self.into_arrayref_unchecked())
        } else {
            Err(self)
        }
    }

    /// Create a new `VMArrayRef` from `self` without checking that `self` is
    /// actually an `arrayref`.
    ///
    /// Callers must ensure that `self` really is an `arrayref`. Failure to
    /// uphold this invariant is memory safe but will result in general
    /// incorrectness down the line such as panics or wrong results.
    #[inline]
    pub fn into_arrayref_unchecked(self) -> VMArrayRef {
        debug_assert!(!self.is_i31());
        VMArrayRef(self)
    }

    /// Get this GC reference as an `arrayref` reference, if it actually is an
    /// `arrayref` reference.
    pub fn as_arrayref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> Option<&VMArrayRef> {
        if self.is_arrayref(gc_heap) {
            Some(self.as_arrayref_unchecked())
        } else {
            None
        }
    }

    /// Get this GC reference as an `arrayref` reference without checking if it
    /// actually is an `arrayref` reference.
    ///
    /// Calling this method on a non-`arrayref` reference is memory safe, but
    /// will lead to general incorrectness like panics and wrong results.
    pub fn as_arrayref_unchecked(&self) -> &VMArrayRef {
        debug_assert!(!self.is_i31());
        let ptr = self as *const VMGcRef;
        let ret = unsafe { &*ptr.cast() };
        assert!(matches!(ret, VMArrayRef(VMGcRef { .. })));
        ret
    }
}

impl VMArrayRef {
    /// Get the underlying `VMGcRef`.
    pub fn as_gc_ref(&self) -> &VMGcRef {
        &self.0
    }

    /// Clone this `VMArrayRef`, running any GC barriers as necessary.
    pub fn clone(&self, gc_store: &mut GcStore) -> Self {
        Self(gc_store.clone_gc_ref(&self.0))
    }

    /// Explicitly drop this `arrayref`, running GC drop barriers as necessary.
    pub fn drop(self, gc_store: &mut GcStore) {
        gc_store.drop_gc_ref(self.0);
    }

    /// Copy this `VMArrayRef` without running the GC's clone barriers.
    ///
    /// Prefer calling `clone(&mut GcStore)` instead! This is mostly an internal
    /// escape hatch for collector implementations.
    ///
    /// Failure to run GC barriers when they would otherwise be necessary can
    /// lead to leaks, panics, and wrong results. It cannot lead to memory
    /// unsafety, however.
    pub fn unchecked_copy(&self) -> Self {
        Self(self.0.unchecked_copy())
    }

    /// Get the length of this array.
    pub fn len(&self, store: &StoreOpaque) -> u32 {
        store.unwrap_gc_store().array_len(self)
    }

    /// Read an element of the given `StorageType` into a `Val`.
    ///
    /// `i8` and `i16` elements are zero-extended into `Val::I32(_)`s.
    ///
    /// Does not check that this array's elements are actually of type
    /// `ty`. That is the caller's responsibility. Failure to do so is memory
    /// safe, but will lead to general incorrectness such as panics and wrong
    /// results.
    ///
    /// Panics on out-of-bounds accesses.
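    ///
    /// A hypothetical usage sketch (not a runnable doctest), assuming the
    /// caller already has this array's `GcArrayLayout` and element
    /// `StorageType` in hand:
    ///
    /// ```ignore
    /// let elem = arrayref.read_elem(&mut store, &layout, &StorageType::I8, 0);
    /// // `i8` elements come back zero-extended into an `i32`.
    /// assert!(matches!(elem, Val::I32(_)));
    /// ```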
    pub fn read_elem(
        &self,
        store: &mut AutoAssertNoGc,
        layout: &GcArrayLayout,
        ty: &StorageType,
        index: u32,
    ) -> Val {
        let offset = layout.elem_offset(index);
        let data = store.unwrap_gc_store_mut().gc_object_data(self.as_gc_ref());
        match ty {
            StorageType::I8 => Val::I32(data.read_u8(offset).into()),
            StorageType::I16 => Val::I32(data.read_u16(offset).into()),
            StorageType::ValType(ValType::I32) => Val::I32(data.read_i32(offset)),
            StorageType::ValType(ValType::I64) => Val::I64(data.read_i64(offset)),
            StorageType::ValType(ValType::F32) => Val::F32(data.read_u32(offset)),
            StorageType::ValType(ValType::F64) => Val::F64(data.read_u64(offset)),
            StorageType::ValType(ValType::V128) => Val::V128(data.read_v128(offset)),
            StorageType::ValType(ValType::Ref(r)) => match r.heap_type().top() {
                HeapType::Extern => {
                    let raw = data.read_u32(offset);
                    Val::ExternRef(ExternRef::_from_raw(store, raw))
                }
                HeapType::Any => {
                    let raw = data.read_u32(offset);
                    Val::AnyRef(AnyRef::_from_raw(store, raw))
                }
                HeapType::Func => {
                    let func_ref_id = data.read_u32(offset);
                    let func_ref_id = FuncRefTableId::from_raw(func_ref_id);
                    let func_ref = store
                        .unwrap_gc_store()
                        .func_ref_table
                        .get_untyped(func_ref_id);
                    Val::FuncRef(unsafe {
                        func_ref.map(|p| Func::from_vm_func_ref(store, p.as_non_null()))
                    })
                }
                otherwise => unreachable!("not a top type: {otherwise:?}"),
            },
        }
    }

    /// Write the given value into this array at the given index.
    ///
    /// Returns an error if `val` is a GC reference that has since been
    /// unrooted.
    ///
    /// Does not check that `val` matches `ty`, nor that the element is
    /// actually of type `ty`. Checking those things is the caller's
    /// responsibility. Failure to do so is memory safe, but will lead to
    /// general incorrectness such as panics and wrong results.
    ///
    /// Panics on out-of-bounds accesses.
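    ///
    /// A hypothetical usage sketch (not a runnable doctest); `store`,
    /// `layout`, and `elem_ty` are assumed to be in scope:
    ///
    /// ```ignore
    /// // Overwrite element 3, running the full GC write barrier if an old
    /// // GC reference is being replaced.
    /// arrayref.write_elem(&mut store, &layout, &elem_ty, 3, Val::I32(42))?;
    /// ```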
    pub fn write_elem(
        &self,
        store: &mut AutoAssertNoGc,
        layout: &GcArrayLayout,
        ty: &StorageType,
        index: u32,
        val: Val,
    ) -> Result<()> {
        debug_assert!(val._matches_ty(&store, &ty.unpack())?);

        let offset = layout.elem_offset(index);
        let mut data = store.unwrap_gc_store_mut().gc_object_data(self.as_gc_ref());
        match val {
            Val::I32(i) if ty.is_i8() => data.write_i8(offset, truncate_i32_to_i8(i)),
            Val::I32(i) if ty.is_i16() => data.write_i16(offset, truncate_i32_to_i16(i)),
            Val::I32(i) => data.write_i32(offset, i),
            Val::I64(i) => data.write_i64(offset, i),
            Val::F32(f) => data.write_u32(offset, f),
            Val::F64(f) => data.write_u64(offset, f),
            Val::V128(v) => data.write_v128(offset, v),

            // For GC-managed references, we need to take care to run the
            // appropriate barriers, even when we are writing null references
            // into the array.
            //
            // POD-read the old value into a local copy, run the GC write
            // barrier on that local copy, and then POD-write the updated
            // value back into the array. This avoids transmuting the inner
            // data, which would probably be fine, but this approach is
            // Obviously Correct and should get us by for now. If LLVM isn't
            // able to elide some of these unnecessary copies, and this
            // method is ever hot enough, we can always come back and clean
            // it up in the future.
            Val::ExternRef(e) => {
                let raw = data.read_u32(offset);
                let mut gc_ref = VMGcRef::from_raw_u32(raw);
                let e = match e {
                    Some(e) => Some(e.try_gc_ref(store)?.unchecked_copy()),
                    None => None,
                };
                store.gc_store_mut()?.write_gc_ref(&mut gc_ref, e.as_ref());
                let mut data = store.gc_store_mut()?.gc_object_data(self.as_gc_ref());
                data.write_u32(offset, gc_ref.map_or(0, |r| r.as_raw_u32()));
            }
            Val::AnyRef(a) => {
                let raw = data.read_u32(offset);
                let mut gc_ref = VMGcRef::from_raw_u32(raw);
                let a = match a {
                    Some(a) => Some(a.try_gc_ref(store)?.unchecked_copy()),
                    None => None,
                };
                store.gc_store_mut()?.write_gc_ref(&mut gc_ref, a.as_ref());
                let mut data = store.gc_store_mut()?.gc_object_data(self.as_gc_ref());
                data.write_u32(offset, gc_ref.map_or(0, |r| r.as_raw_u32()));
            }

            Val::FuncRef(f) => {
                let func_ref = match f {
                    Some(f) => Some(SendSyncPtr::new(f.vm_func_ref(store))),
                    None => None,
                };
                let id = unsafe { store.gc_store_mut()?.func_ref_table.intern(func_ref) };
                store
                    .gc_store_mut()?
                    .gc_object_data(self.as_gc_ref())
                    .write_u32(offset, id.into_raw());
            }
        }
        Ok(())
    }

    /// Initialize an element in this arrayref that is currently uninitialized.
    ///
    /// The difference between this method and `write_elem` is that GC barriers
    /// are handled differently. When overwriting an initialized element (aka
    /// `write_elem`) we need to call the full GC write barrier, which
    /// logically drops the old GC reference and clones the new GC
    /// reference. When we are initializing an element for the first time,
    /// there is no old GC reference being overwritten that needs to be
    /// dropped, so we only need to clone the new GC reference.
    ///
    /// Calling this method on an arrayref whose associated element has
    /// already been initialized will result in GC bugs. These are memory safe
    /// but will lead to generally incorrect behavior such as panics, leaks,
    /// and incorrect results.
    ///
    /// Does not check that `val` matches `ty`, nor that the element is
    /// actually of type `ty`. Checking those things is the caller's
    /// responsibility. Failure to do so is memory safe, but will lead to
    /// general incorrectness such as panics and wrong results.
    ///
    /// Returns an error if `val` is a GC reference that has since been
    /// unrooted.
    ///
    /// Panics on out-of-bounds accesses.
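    ///
    /// A hypothetical sketch of filling a freshly-allocated array (not a
    /// runnable doctest); `store`, `layout`, `elem_ty`, and `init_vals` are
    /// assumed to be in scope:
    ///
    /// ```ignore
    /// for (i, v) in init_vals.iter().enumerate() {
    ///     // Each slot is written exactly once, so only the clone barrier
    ///     // runs; there is no old element to drop.
    ///     arrayref.initialize_elem(&mut store, &layout, &elem_ty, i as u32, v.clone())?;
    /// }
    /// ```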
    pub fn initialize_elem(
        &self,
        store: &mut AutoAssertNoGc,
        layout: &GcArrayLayout,
        ty: &StorageType,
        index: u32,
        val: Val,
    ) -> Result<()> {
        debug_assert!(val._matches_ty(&store, &ty.unpack())?);
        let offset = layout.elem_offset(index);
        match val {
            Val::I32(i) if ty.is_i8() => store
                .gc_store_mut()?
                .gc_object_data(self.as_gc_ref())
                .write_i8(offset, truncate_i32_to_i8(i)),
            Val::I32(i) if ty.is_i16() => store
                .gc_store_mut()?
                .gc_object_data(self.as_gc_ref())
                .write_i16(offset, truncate_i32_to_i16(i)),
            Val::I32(i) => store
                .gc_store_mut()?
                .gc_object_data(self.as_gc_ref())
                .write_i32(offset, i),
            Val::I64(i) => store
                .gc_store_mut()?
                .gc_object_data(self.as_gc_ref())
                .write_i64(offset, i),
            Val::F32(f) => store
                .gc_store_mut()?
                .gc_object_data(self.as_gc_ref())
                .write_u32(offset, f),
            Val::F64(f) => store
                .gc_store_mut()?
                .gc_object_data(self.as_gc_ref())
                .write_u64(offset, f),
            Val::V128(v) => store
                .gc_store_mut()?
                .gc_object_data(self.as_gc_ref())
                .write_v128(offset, v),

            // NB: We don't need to run the full write barrier when
            // initializing an element, because there is nothing being
            // overwritten. Therefore, we just run the clone barrier.
            Val::ExternRef(x) => {
                let x = match x {
                    None => 0,
                    Some(x) => x.try_clone_gc_ref(store)?.as_raw_u32(),
                };
                store
                    .gc_store_mut()?
                    .gc_object_data(self.as_gc_ref())
                    .write_u32(offset, x);
            }
            Val::AnyRef(x) => {
                let x = match x {
                    None => 0,
                    Some(x) => x.try_clone_gc_ref(store)?.as_raw_u32(),
                };
                store
                    .gc_store_mut()?
                    .gc_object_data(self.as_gc_ref())
                    .write_u32(offset, x);
            }

            Val::FuncRef(f) => {
                let func_ref = match f {
                    Some(f) => Some(SendSyncPtr::new(f.vm_func_ref(store))),
                    None => None,
                };
                let id = unsafe { store.gc_store_mut()?.func_ref_table.intern(func_ref) };
                store
                    .gc_store_mut()?
                    .gc_object_data(self.as_gc_ref())
                    .write_u32(offset, id.into_raw());
            }
        }
        Ok(())
    }
}