// wasmtime/runtime/vm/gc.rs

#[cfg(feature = "gc")]
mod enabled;
#[cfg(feature = "gc")]
pub use enabled::*;

#[cfg(not(feature = "gc"))]
mod disabled;
#[cfg(not(feature = "gc"))]
pub use disabled::*;

mod func_ref;
mod gc_ref;
mod gc_runtime;
mod host_data;
mod i31;

pub use func_ref::*;
pub use gc_ref::*;
pub use gc_runtime::*;
pub use host_data::*;
pub use i31::*;

use crate::prelude::*;
use crate::runtime::vm::{GcHeapAllocationIndex, VMMemoryDefinition};
use crate::store::Asyncness;
use core::any::Any;
use core::mem::MaybeUninit;
use core::{alloc::Layout, num::NonZeroU32};
use wasmtime_environ::{GcArrayLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};

/// GC-related data that is one-to-one with a `wasmtime::Store`.
///
/// Contains everything we need to do collections, invoke barriers, etc...
///
/// In general, exposes a very similar interface to `GcHeap`, but fills in some
/// of the context arguments for callers (such as the `ExternRefHostDataTable`)
/// since they are all stored together inside `GcStore`.
pub struct GcStore {
    /// This GC heap's allocation index (primarily used for integrating with the
    /// pooling allocator).
    pub allocation_index: GcHeapAllocationIndex,

    /// The actual GC heap.
    pub gc_heap: Box<dyn GcHeap>,

    /// The `externref` host data table for this GC heap.
    pub host_data_table: ExternRefHostDataTable,

    /// The function-references table for this GC heap.
    pub func_ref_table: FuncRefTable,
}

impl GcStore {
    /// Create a new `GcStore`.
    pub fn new(allocation_index: GcHeapAllocationIndex, gc_heap: Box<dyn GcHeap>) -> Self {
        let host_data_table = ExternRefHostDataTable::default();
        let func_ref_table = FuncRefTable::default();
        Self {
            allocation_index,
            gc_heap,
            host_data_table,
            func_ref_table,
        }
    }

    /// Get the `VMMemoryDefinition` for this GC heap.
    pub fn vmmemory_definition(&self) -> VMMemoryDefinition {
        self.gc_heap.vmmemory()
    }

    /// Asynchronously perform garbage collection within this heap.
    pub async fn gc(&mut self, asyncness: Asyncness, roots: GcRootsIter<'_>) {
        let collection = self.gc_heap.gc(roots, &mut self.host_data_table);
        collect_async(collection, asyncness).await;
    }

    /// Get the kind of the given GC reference.
    pub fn kind(&self, gc_ref: &VMGcRef) -> VMGcKind {
        debug_assert!(!gc_ref.is_i31());
        self.header(gc_ref).kind()
    }

    /// Get the header of the given GC reference.
    pub fn header(&self, gc_ref: &VMGcRef) -> &VMGcHeader {
        debug_assert!(!gc_ref.is_i31());
        self.gc_heap.header(gc_ref)
    }

    /// Clone a GC reference, calling GC write barriers as necessary.
    pub fn clone_gc_ref(&mut self, gc_ref: &VMGcRef) -> VMGcRef {
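        // `i31` references are immediate values rather than heap allocations,
        // so they can be copied without involving the GC heap or its barriers.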
        if gc_ref.is_i31() {
            gc_ref.copy_i31()
        } else {
            self.gc_heap.clone_gc_ref(gc_ref)
        }
    }

    /// Write the `source` GC reference into the uninitialized `destination`
    /// slot, performing write barriers as necessary.
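    ///
    /// # Example
    ///
    /// A minimal sketch (not a doctest) of initializing a fresh slot; the
    /// `gc_store` and `source` bindings are assumed to exist in the caller:
    ///
    /// ```ignore
    /// let mut slot = MaybeUninit::<Option<VMGcRef>>::uninit();
    /// gc_store.init_gc_ref(&mut slot, source);
    /// // `slot` is now initialized and may be read as an `Option<VMGcRef>`.
    /// ```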
    pub fn init_gc_ref(
        &mut self,
        destination: &mut MaybeUninit<Option<VMGcRef>>,
        source: Option<&VMGcRef>,
    ) {
        // Initialize the destination to `None`, at which point the regular GC
        // write barrier is safe to reuse.
        let destination = destination.write(None);
        self.write_gc_ref(destination, source);
    }

    /// Dynamically tests whether an `init_gc_ref` is needed to write `gc_ref`
    /// into an uninitialized destination.
    pub(crate) fn needs_init_barrier(gc_ref: Option<&VMGcRef>) -> bool {
        assert!(cfg!(feature = "gc") || gc_ref.is_none());
        gc_ref.is_some_and(|r| !r.is_i31())
    }

    /// Dynamically tests whether a `write_gc_ref` is needed to write `gc_ref`
    /// into `dest`.
    pub(crate) fn needs_write_barrier(
        dest: &mut Option<VMGcRef>,
        gc_ref: Option<&VMGcRef>,
    ) -> bool {
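        // Without the `gc` feature, non-null GC references cannot be created,
        // so both the destination and the source must be `None` here.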
        assert!(cfg!(feature = "gc") || gc_ref.is_none());
        assert!(cfg!(feature = "gc") || dest.is_none());
        dest.as_ref().is_some_and(|r| !r.is_i31()) || gc_ref.is_some_and(|r| !r.is_i31())
    }

    /// Same as [`Self::write_gc_ref`] but doesn't require a `store` when
    /// possible.
    ///
    /// # Panics
    ///
    /// Panics if `store` is `None` and one of `dest` or `gc_ref` requires a
    /// write barrier.
    pub(crate) fn write_gc_ref_optional_store(
        store: Option<&mut Self>,
        dest: &mut Option<VMGcRef>,
        gc_ref: Option<&VMGcRef>,
    ) {
        if Self::needs_write_barrier(dest, gc_ref) {
            store.unwrap().write_gc_ref(dest, gc_ref)
        } else {
            *dest = gc_ref.map(|r| r.copy_i31());
        }
    }

    /// Write the `source` GC reference into the `destination` slot, performing
    /// write barriers as necessary.
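    ///
    /// # Example
    ///
    /// A minimal sketch (not a doctest); `gc_store` and `source` are assumed
    /// to exist in the caller's scope:
    ///
    /// ```ignore
    /// let mut slot: Option<VMGcRef> = None;
    /// gc_store.write_gc_ref(&mut slot, source);
    /// ```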
    pub fn write_gc_ref(&mut self, destination: &mut Option<VMGcRef>, source: Option<&VMGcRef>) {
        // If neither the source nor destination actually point to a GC object
        // (that is, they are both either null or `i31ref`s) then we can skip
        // the GC barrier.
        if Self::needs_write_barrier(destination, source) {
            self.gc_heap
                .write_gc_ref(&mut self.host_data_table, destination, source);
        } else {
            *destination = source.map(|s| s.copy_i31());
        }
    }

    /// Drop the given GC reference, performing drop barriers as necessary.
    pub fn drop_gc_ref(&mut self, gc_ref: VMGcRef) {
        if !gc_ref.is_i31() {
            self.gc_heap.drop_gc_ref(&mut self.host_data_table, gc_ref);
        }
    }

    /// Hook to call whenever a GC reference is about to be exposed to Wasm.
    ///
    /// Returns the raw representation of this GC ref, ready to be passed to
    /// Wasm.
    #[must_use]
    pub fn expose_gc_ref_to_wasm(&mut self, gc_ref: VMGcRef) -> NonZeroU32 {
        let raw = gc_ref.as_raw_non_zero_u32();
        if !gc_ref.is_i31() {
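            // Hand ownership of `gc_ref` to the heap so the reference remains
            // valid while Wasm holds its raw representation.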
            log::trace!("exposing GC ref to Wasm: {gc_ref:p}");
            self.gc_heap.expose_gc_ref_to_wasm(gc_ref);
        }
        raw
    }

    /// Allocate a new `externref`.
    ///
    /// Returns:
    ///
    /// * `Ok(Ok(_))`: Successfully allocated the `externref`.
    ///
    /// * `Ok(Err((value, n)))`: Failed to allocate the `externref`, but doing a GC
    ///   and then trying again may succeed. Returns the given `value` as the
    ///   error payload, along with the size of the failed allocation.
    ///
    /// * `Err(_)`: Unrecoverable allocation failure.
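    ///
    /// # Example
    ///
    /// A hedged sketch (not a doctest) of the retry-oriented calling pattern
    /// this return type is designed for; `gc_store` is assumed to exist:
    ///
    /// ```ignore
    /// match gc_store.alloc_externref(Box::new(42_u32))? {
    ///     Ok(externref) => {
    ///         // Allocation succeeded; hand `externref` off to Wasm.
    ///     }
    ///     Err((value, bytes_needed)) => {
    ///         // The heap is full: perform a GC (see `GcStore::gc`) and then
    ///         // retry the allocation with the recovered `value`.
    ///     }
    /// }
    /// ```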
    pub fn alloc_externref(
        &mut self,
        value: Box<dyn Any + Send + Sync>,
    ) -> Result<Result<VMExternRef, (Box<dyn Any + Send + Sync>, u64)>> {
        let host_data_id = self.host_data_table.alloc(value);
        match self.gc_heap.alloc_externref(host_data_id)? {
            Ok(x) => Ok(Ok(x)),
            Err(n) => Ok(Err((self.host_data_table.dealloc(host_data_id), n))),
        }
    }

    /// Get a shared borrow of the given `externref`'s host data.
    ///
    /// Passing invalid `VMExternRef`s (e.g. garbage values or `externref`s
    /// associated with a different heap) is memory safe but will lead to general
    /// incorrectness such as panics and wrong results.
    pub fn externref_host_data(&self, externref: &VMExternRef) -> &(dyn Any + Send + Sync) {
        let host_data_id = self.gc_heap.externref_host_data(externref);
        self.host_data_table.get(host_data_id)
    }

    /// Get a mutable borrow of the given `externref`'s host data.
    ///
    /// Passing invalid `VMExternRef`s (e.g. garbage values or `externref`s
    /// associated with a different heap) is memory safe but will lead to general
    /// incorrectness such as panics and wrong results.
    pub fn externref_host_data_mut(
        &mut self,
        externref: &VMExternRef,
    ) -> &mut (dyn Any + Send + Sync) {
        let host_data_id = self.gc_heap.externref_host_data(externref);
        self.host_data_table.get_mut(host_data_id)
    }

    /// Allocate a raw object with the given header and layout.
    pub fn alloc_raw(
        &mut self,
        header: VMGcHeader,
        layout: Layout,
    ) -> Result<Result<VMGcRef, u64>> {
        self.gc_heap.alloc_raw(header, layout)
    }

    /// Allocate an uninitialized struct with the given type index and layout.
    ///
    /// This does NOT check that the index is currently allocated in the types
    /// registry or that the layout matches the index's type. Failure to uphold
    /// those invariants is memory safe, but will lead to general incorrectness
    /// such as panics and wrong results.
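    ///
    /// A hedged sketch (not a doctest) of the intended allocate-then-initialize
    /// flow; `gc_store`, `ty`, and `layout` are assumed to exist:
    ///
    /// ```ignore
    /// match gc_store.alloc_uninit_struct(ty, &layout)? {
    ///     Ok(structref) => {
    ///         // Initialize every field; on failure, return the object with
    ///         // `gc_store.dealloc_uninit_struct(structref)`.
    ///     }
    ///     Err(bytes_needed) => {
    ///         // The heap is full: perform a GC and retry the allocation.
    ///     }
    /// }
    /// ```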
    pub fn alloc_uninit_struct(
        &mut self,
        ty: VMSharedTypeIndex,
        layout: &GcStructLayout,
    ) -> Result<Result<VMStructRef, u64>> {
        self.gc_heap
            .alloc_uninit_struct_or_exn(ty, layout)
            .map(|r| r.map(|r| r.into_structref_unchecked()))
    }

    /// Deallocate an uninitialized struct.
    pub fn dealloc_uninit_struct(&mut self, structref: VMStructRef) {
        self.gc_heap.dealloc_uninit_struct_or_exn(structref.into())
    }

    /// Get the data for the given object reference.
    ///
    /// Panics if the given object's data is out of the GC heap's bounds.
    pub fn gc_object_data(&mut self, gc_ref: &VMGcRef) -> &mut VMGcObjectData {
        self.gc_heap.gc_object_data_mut(gc_ref)
    }

    /// Get the object data for the given pair of object references.
    ///
    /// Panics if `a` and `b` are the same reference or either is out of bounds.
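    ///
    /// A minimal sketch (not a doctest); `gc_store`, `a`, and `b` are assumed
    /// to be distinct references in the caller's scope:
    ///
    /// ```ignore
    /// let (a_data, b_data) = gc_store.gc_object_data_pair(&a, &b);
    /// // Two disjoint mutable borrows, e.g. for copying fields between the
    /// // two objects.
    /// ```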
    pub fn gc_object_data_pair(
        &mut self,
        a: &VMGcRef,
        b: &VMGcRef,
    ) -> (&mut VMGcObjectData, &mut VMGcObjectData) {
        assert_ne!(a, b);
        self.gc_heap.gc_object_data_pair(a, b)
    }

    /// Allocate an uninitialized array with the given type index, length, and
    /// layout.
    ///
    /// This does NOT check that the index is currently allocated in the types
    /// registry or that the layout matches the index's type. Failure to uphold
    /// those invariants is memory safe, but will lead to general incorrectness
    /// such as panics and wrong results.
    pub fn alloc_uninit_array(
        &mut self,
        ty: VMSharedTypeIndex,
        len: u32,
        layout: &GcArrayLayout,
    ) -> Result<Result<VMArrayRef, u64>> {
        self.gc_heap.alloc_uninit_array(ty, len, layout)
    }

    /// Deallocate an uninitialized array.
    pub fn dealloc_uninit_array(&mut self, arrayref: VMArrayRef) {
        self.gc_heap.dealloc_uninit_array(arrayref);
    }

    /// Get the length of the given array.
    pub fn array_len(&self, arrayref: &VMArrayRef) -> u32 {
        self.gc_heap.array_len(arrayref)
    }

    /// Allocate an uninitialized exception object with the given type index
    /// and layout.
    ///
    /// This does NOT check that the index is currently allocated in the types
    /// registry or that the layout matches the index's type. Failure to uphold
    /// those invariants is memory safe, but will lead to general incorrectness
    /// such as panics and wrong results.
    pub fn alloc_uninit_exn(
        &mut self,
        ty: VMSharedTypeIndex,
        layout: &GcStructLayout,
    ) -> Result<Result<VMExnRef, u64>> {
        self.gc_heap
            .alloc_uninit_struct_or_exn(ty, layout)
            .map(|r| r.map(|r| r.into_exnref_unchecked()))
    }

    /// Deallocate an uninitialized exception object.
    pub fn dealloc_uninit_exn(&mut self, exnref: VMExnRef) {
        self.gc_heap.dealloc_uninit_struct_or_exn(exnref.into());
    }
}