wasmtime/runtime/vm/gc.rs

#[cfg(feature = "gc")]
mod enabled;
#[cfg(feature = "gc")]
pub use enabled::*;

#[cfg(not(feature = "gc"))]
mod disabled;
#[cfg(not(feature = "gc"))]
pub use disabled::*;

mod func_ref;
mod gc_ref;
mod gc_runtime;
mod host_data;
mod i31;

pub use func_ref::*;
pub use gc_ref::*;
pub use gc_runtime::*;
pub use host_data::*;
pub use i31::*;

use crate::prelude::*;
use crate::runtime::vm::{GcHeapAllocationIndex, VMMemoryDefinition};
use core::any::Any;
use core::mem::MaybeUninit;
use core::{alloc::Layout, num::NonZeroU32};
use wasmtime_environ::{GcArrayLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
/// GC-related data that is one-to-one with a `wasmtime::Store`.
///
/// Contains everything we need to do collections, invoke barriers, etc...
///
/// In general, exposes a very similar interface to `GcHeap`, but fills in some
/// of the context arguments for callers (such as the `ExternRefHostDataTable`)
/// since they are all stored together inside `GcStore`.
pub struct GcStore {
    /// This GC heap's allocation index (primarily used for integrating with the
    /// pooling allocator).
    pub allocation_index: GcHeapAllocationIndex,

    /// The actual GC heap.
    pub gc_heap: Box<dyn GcHeap>,

    /// The `externref` host data table for this GC heap.
    pub host_data_table: ExternRefHostDataTable,

    /// The function-references table for this GC heap.
    pub func_ref_table: FuncRefTable,
}

impl GcStore {
    /// Create a new `GcStore`.
    pub fn new(allocation_index: GcHeapAllocationIndex, gc_heap: Box<dyn GcHeap>) -> Self {
        let host_data_table = ExternRefHostDataTable::default();
        let func_ref_table = FuncRefTable::default();
        Self {
            allocation_index,
            gc_heap,
            host_data_table,
            func_ref_table,
        }
    }

    /// Get the `VMMemoryDefinition` for this GC heap.
    pub fn vmmemory_definition(&self) -> VMMemoryDefinition {
        self.gc_heap.vmmemory()
    }

    /// Asynchronously perform garbage collection within this heap.
    pub async fn gc(&mut self, async_yield: bool, roots: GcRootsIter<'_>) {
        let collection = self.gc_heap.gc(roots, &mut self.host_data_table);
        collect_async(collection, async_yield).await;
    }

    /// Get the kind of the given GC reference.
    pub fn kind(&self, gc_ref: &VMGcRef) -> VMGcKind {
        debug_assert!(!gc_ref.is_i31());
        self.header(gc_ref).kind()
    }

    /// Get the header of the given GC reference.
    pub fn header(&self, gc_ref: &VMGcRef) -> &VMGcHeader {
        debug_assert!(!gc_ref.is_i31());
        self.gc_heap.header(gc_ref)
    }

    /// Clone a GC reference, calling GC write barriers as necessary.
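    ///
    /// # Example
    ///
    /// A minimal sketch of the intended usage (not a runnable doctest; a
    /// `store: &mut GcStore` and a live `gc_ref: VMGcRef` are assumed):
    ///
    /// ```ignore
    /// // Cloning an `i31ref` is a plain bit-copy; cloning a heap reference
    /// // goes through the GC heap so any clone barriers can run.
    /// let copy = store.clone_gc_ref(&gc_ref);
    ///
    /// // ... use `copy` ...
    ///
    /// // References obtained this way should eventually be released via
    /// // `drop_gc_ref` so the collector can reclaim the object.
    /// store.drop_gc_ref(copy);
    /// ```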
    pub fn clone_gc_ref(&mut self, gc_ref: &VMGcRef) -> VMGcRef {
        if gc_ref.is_i31() {
            gc_ref.copy_i31()
        } else {
            self.gc_heap.clone_gc_ref(gc_ref)
        }
    }

    /// Write the `source` GC reference into the uninitialized `destination`
    /// slot, performing write barriers as necessary.
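    ///
    /// # Example
    ///
    /// A minimal sketch (not a runnable doctest; a `store: &mut GcStore` and
    /// a `source: Option<&VMGcRef>` are assumed):
    ///
    /// ```ignore
    /// // A fresh slot whose contents must not be read until initialized.
    /// let mut slot = MaybeUninit::<Option<VMGcRef>>::uninit();
    ///
    /// // Writes `None` into the slot first, so the regular write barrier
    /// // sees a valid (empty) destination, then stores `source`.
    /// store.init_gc_ref(&mut slot, source);
    ///
    /// // The slot is now fully initialized.
    /// let slot: Option<VMGcRef> = unsafe { slot.assume_init() };
    /// ```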
    pub fn init_gc_ref(
        &mut self,
        destination: &mut MaybeUninit<Option<VMGcRef>>,
        source: Option<&VMGcRef>,
    ) {
        // Initialize the destination to `None`, at which point the regular GC
        // write barrier is safe to reuse.
        let destination = destination.write(None);
        self.write_gc_ref(destination, source);
    }

    /// Dynamically tests whether an `init_gc_ref` is needed to write `gc_ref`
    /// into an uninitialized destination.
    pub(crate) fn needs_init_barrier(gc_ref: Option<&VMGcRef>) -> bool {
        assert!(cfg!(feature = "gc") || gc_ref.is_none());
        gc_ref.is_some_and(|r| !r.is_i31())
    }

    /// Dynamically tests whether a `write_gc_ref` is needed to write `gc_ref`
    /// into `dest`.
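    ///
    /// For example, writing null over null, or one `i31ref` over another,
    /// needs no barrier, since neither value points into the GC heap (an
    /// illustrative sketch, not a doctest; `VMGcRef::from_i31` is the
    /// conversion assumed here):
    ///
    /// ```ignore
    /// // Null over null: no heap object involved, no barrier needed.
    /// assert!(!GcStore::needs_write_barrier(&mut None, None));
    ///
    /// // `i31ref`s carry their payload inline and never need barriers.
    /// let i31 = VMGcRef::from_i31(I31::wrapping_i32(42));
    /// assert!(!GcStore::needs_write_barrier(&mut None, Some(&i31)));
    /// ```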
    pub(crate) fn needs_write_barrier(
        dest: &mut Option<VMGcRef>,
        gc_ref: Option<&VMGcRef>,
    ) -> bool {
        assert!(cfg!(feature = "gc") || gc_ref.is_none());
        assert!(cfg!(feature = "gc") || dest.is_none());
        dest.as_ref().is_some_and(|r| !r.is_i31()) || gc_ref.is_some_and(|r| !r.is_i31())
    }

    /// Same as [`Self::write_gc_ref`] but doesn't require a `store` when
    /// possible.
    ///
    /// # Panics
    ///
    /// Panics if `store` is `None` and one of `dest` or `gc_ref` requires a
    /// write barrier.
    pub(crate) fn write_gc_ref_optional_store(
        store: Option<&mut Self>,
        dest: &mut Option<VMGcRef>,
        gc_ref: Option<&VMGcRef>,
    ) {
        if Self::needs_write_barrier(dest, gc_ref) {
            store.unwrap().write_gc_ref(dest, gc_ref)
        } else {
            *dest = gc_ref.map(|r| r.copy_i31());
        }
    }

    /// Write the `source` GC reference into the `destination` slot, performing
    /// write barriers as necessary.
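    ///
    /// # Example
    ///
    /// A minimal sketch (not a runnable doctest; a `store: &mut GcStore`, an
    /// already-initialized `slot: Option<VMGcRef>`, and a `new_ref: VMGcRef`
    /// are assumed):
    ///
    /// ```ignore
    /// // Overwrite whatever the slot previously held. The GC heap's write
    /// // barrier runs if either the old or the new value is a heap object;
    /// // null and `i31ref` writes are a plain copy.
    /// store.write_gc_ref(&mut slot, Some(&new_ref));
    ///
    /// // Clearing the slot also goes through the barrier so that the old
    /// // value is released correctly.
    /// store.write_gc_ref(&mut slot, None);
    /// ```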
    pub fn write_gc_ref(&mut self, destination: &mut Option<VMGcRef>, source: Option<&VMGcRef>) {
        // If neither the source nor destination actually point to a GC object
        // (that is, they are both either null or `i31ref`s) then we can skip
        // the GC barrier.
        if Self::needs_write_barrier(destination, source) {
            self.gc_heap
                .write_gc_ref(&mut self.host_data_table, destination, source);
        } else {
            *destination = source.map(|s| s.copy_i31());
        }
    }

    /// Drop the given GC reference, performing drop barriers as necessary.
    pub fn drop_gc_ref(&mut self, gc_ref: VMGcRef) {
        if !gc_ref.is_i31() {
            self.gc_heap.drop_gc_ref(&mut self.host_data_table, gc_ref);
        }
    }

    /// Hook to call whenever a GC reference is about to be exposed to Wasm.
    ///
    /// Returns the raw representation of this GC ref, ready to be passed to
    /// Wasm.
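    ///
    /// # Example
    ///
    /// A minimal sketch (not a runnable doctest; a `store: &mut GcStore` and
    /// a `gc_ref: VMGcRef` are assumed):
    ///
    /// ```ignore
    /// // Ownership of `gc_ref` moves into the heap's set of references that
    /// // are exposed to Wasm; the raw `u32` is what crosses the boundary.
    /// let raw: NonZeroU32 = store.expose_gc_ref_to_wasm(gc_ref);
    /// // ... hand `raw.get()` off to Wasm ...
    /// ```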
    #[must_use]
    pub fn expose_gc_ref_to_wasm(&mut self, gc_ref: VMGcRef) -> NonZeroU32 {
        let raw = gc_ref.as_raw_non_zero_u32();
        if !gc_ref.is_i31() {
            log::trace!("exposing GC ref to Wasm: {gc_ref:p}");
            self.gc_heap.expose_gc_ref_to_wasm(gc_ref);
        }
        raw
    }

    /// Allocate a new `externref`.
    ///
    /// Returns:
    ///
    /// * `Ok(Ok(_))`: Successfully allocated the `externref`.
    ///
    /// * `Ok(Err((value, n)))`: Failed to allocate the `externref`, but doing a GC
    ///   and then trying again may succeed. Returns the given `value` as the
    ///   error payload, along with the size of the failed allocation.
    ///
    /// * `Err(_)`: Unrecoverable allocation failure.
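    ///
    /// # Example
    ///
    /// Callers typically respond to the middle case by collecting garbage and
    /// retrying (an illustrative sketch, not a doctest; a `store: &mut
    /// GcStore` and a root set `roots` are assumed):
    ///
    /// ```ignore
    /// let value: Box<dyn Any + Send + Sync> = Box::new("hello".to_string());
    /// let externref = match store.alloc_externref(value)? {
    ///     Ok(x) => x,
    ///     Err((value, _bytes_needed)) => {
    ///         // The heap was full; `value` is handed back so it isn't lost.
    ///         // Collect garbage and try once more.
    ///         store.gc(false, roots).await;
    ///         match store.alloc_externref(value)? {
    ///             Ok(x) => x,
    ///             Err(_) => bail!("out of memory, even after GC"),
    ///         }
    ///     }
    /// };
    /// ```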
    pub fn alloc_externref(
        &mut self,
        value: Box<dyn Any + Send + Sync>,
    ) -> Result<Result<VMExternRef, (Box<dyn Any + Send + Sync>, u64)>> {
        let host_data_id = self.host_data_table.alloc(value);
        match self.gc_heap.alloc_externref(host_data_id)? {
            Ok(x) => Ok(Ok(x)),
            Err(n) => Ok(Err((self.host_data_table.dealloc(host_data_id), n))),
        }
    }

    /// Get a shared borrow of the given `externref`'s host data.
    ///
    /// Passing invalid `VMExternRef`s (e.g. garbage values or `externref`s
    /// associated with a different heap) is memory safe but will lead to
    /// general incorrectness such as panics and wrong results.
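    ///
    /// # Example
    ///
    /// The returned `dyn Any` can be downcast back to whatever concrete type
    /// the host originally stored (an illustrative sketch, not a doctest; a
    /// valid `externref` is assumed):
    ///
    /// ```ignore
    /// let data = store.externref_host_data(&externref);
    /// if let Some(s) = data.downcast_ref::<String>() {
    ///     log::trace!("host data: {s}");
    /// }
    /// ```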
    pub fn externref_host_data(&self, externref: &VMExternRef) -> &(dyn Any + Send + Sync) {
        let host_data_id = self.gc_heap.externref_host_data(externref);
        self.host_data_table.get(host_data_id)
    }

    /// Get a mutable borrow of the given `externref`'s host data.
    ///
    /// Passing invalid `VMExternRef`s (e.g. garbage values or `externref`s
    /// associated with a different heap) is memory safe but will lead to
    /// general incorrectness such as panics and wrong results.
    pub fn externref_host_data_mut(
        &mut self,
        externref: &VMExternRef,
    ) -> &mut (dyn Any + Send + Sync) {
        let host_data_id = self.gc_heap.externref_host_data(externref);
        self.host_data_table.get_mut(host_data_id)
    }

    /// Allocate a raw object with the given header and layout.
    pub fn alloc_raw(
        &mut self,
        header: VMGcHeader,
        layout: Layout,
    ) -> Result<Result<VMGcRef, u64>> {
        self.gc_heap.alloc_raw(header, layout)
    }

    /// Allocate an uninitialized struct with the given type index and layout.
    ///
    /// This does NOT check that the index is currently allocated in the types
    /// registry or that the layout matches the index's type. Failure to uphold
    /// those invariants is memory safe, but will lead to general incorrectness
    /// such as panics and wrong results.
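    ///
    /// # Example
    ///
    /// Allocation is two-phase: allocate uninitialized, then either fully
    /// initialize the object or hand it back. Arrays and exception objects
    /// follow the same pattern (an illustrative sketch, not a doctest; the
    /// `store`, `ty`, `layout`, and `init_fields` helper are assumed):
    ///
    /// ```ignore
    /// match store.alloc_uninit_struct(ty, &layout)? {
    ///     Ok(structref) => {
    ///         // Initialize every field before the reference escapes.
    ///         if let Err(e) = init_fields(store, &structref) {
    ///             // On failure, return the half-built object to the heap.
    ///             store.dealloc_uninit_struct(structref);
    ///             return Err(e);
    ///         }
    ///     }
    ///     Err(bytes_needed) => {
    ///         // Not enough space; a GC might free up `bytes_needed` bytes.
    ///     }
    /// }
    /// ```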
    pub fn alloc_uninit_struct(
        &mut self,
        ty: VMSharedTypeIndex,
        layout: &GcStructLayout,
    ) -> Result<Result<VMStructRef, u64>> {
        self.gc_heap
            .alloc_uninit_struct_or_exn(ty, layout)
            .map(|r| r.map(|r| r.into_structref_unchecked()))
    }

    /// Deallocate an uninitialized struct.
    pub fn dealloc_uninit_struct(&mut self, structref: VMStructRef) {
        self.gc_heap.dealloc_uninit_struct_or_exn(structref.into())
    }

    /// Get the data for the given object reference.
    ///
    /// Panics if the `gc_ref` and its referenced data are out of the GC
    /// heap's bounds.
    pub fn gc_object_data(&mut self, gc_ref: &VMGcRef) -> &mut VMGcObjectData {
        self.gc_heap.gc_object_data_mut(gc_ref)
    }

    /// Get the object data for each of the given pair of object references.
    ///
    /// Panics if `a` and `b` are the same reference or either is out of bounds.
    pub fn gc_object_data_pair(
        &mut self,
        a: &VMGcRef,
        b: &VMGcRef,
    ) -> (&mut VMGcObjectData, &mut VMGcObjectData) {
        assert_ne!(a, b);
        self.gc_heap.gc_object_data_pair(a, b)
    }

    /// Allocate an uninitialized array with the given type index.
    ///
    /// This does NOT check that the index is currently allocated in the types
    /// registry or that the layout matches the index's type. Failure to uphold
    /// those invariants is memory safe, but will lead to general incorrectness
    /// such as panics and wrong results.
    pub fn alloc_uninit_array(
        &mut self,
        ty: VMSharedTypeIndex,
        len: u32,
        layout: &GcArrayLayout,
    ) -> Result<Result<VMArrayRef, u64>> {
        self.gc_heap.alloc_uninit_array(ty, len, layout)
    }

    /// Deallocate an uninitialized array.
    pub fn dealloc_uninit_array(&mut self, arrayref: VMArrayRef) {
        self.gc_heap.dealloc_uninit_array(arrayref);
    }

    /// Get the length of the given array.
    pub fn array_len(&self, arrayref: &VMArrayRef) -> u32 {
        self.gc_heap.array_len(arrayref)
    }

    /// Allocate an uninitialized exception object with the given type
    /// index.
    ///
    /// This does NOT check that the index is currently allocated in the types
    /// registry or that the layout matches the index's type. Failure to uphold
    /// those invariants is memory safe, but will lead to general incorrectness
    /// such as panics and wrong results.
    pub fn alloc_uninit_exn(
        &mut self,
        ty: VMSharedTypeIndex,
        layout: &GcStructLayout,
    ) -> Result<Result<VMExnRef, u64>> {
        self.gc_heap
            .alloc_uninit_struct_or_exn(ty, layout)
            .map(|r| r.map(|r| r.into_exnref_unchecked()))
    }

    /// Deallocate an uninitialized exception object.
    pub fn dealloc_uninit_exn(&mut self, exnref: VMExnRef) {
        self.gc_heap.dealloc_uninit_struct_or_exn(exnref.into());
    }
}