wasmtime/runtime/vm/gc.rs

#[cfg(feature = "gc")]
mod enabled;
#[cfg(feature = "gc")]
pub use enabled::*;

#[cfg(not(feature = "gc"))]
mod disabled;
#[cfg(not(feature = "gc"))]
pub use disabled::*;

mod func_ref;
mod gc_ref;
mod gc_runtime;
mod host_data;
mod i31;

pub use func_ref::*;
pub use gc_ref::*;
pub use gc_runtime::*;
pub use host_data::*;
pub use i31::*;

use crate::prelude::*;
use crate::runtime::vm::{GcHeapAllocationIndex, VMMemoryDefinition};
use core::any::Any;
use core::mem::MaybeUninit;
use core::{alloc::Layout, num::NonZeroU32};
use wasmtime_environ::{GcArrayLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};

/// GC-related data that is one-to-one with a `wasmtime::Store`.
///
/// Contains everything we need to do collections, invoke barriers, etc...
///
/// In general, exposes a very similar interface to `GcHeap`, but fills in some
/// of the context arguments for callers (such as the `ExternRefHostDataTable`)
/// since they are all stored together inside `GcStore`.
pub struct GcStore {
    /// This GC heap's allocation index (primarily used for integrating with the
    /// pooling allocator).
    pub allocation_index: GcHeapAllocationIndex,

    /// The actual GC heap.
    pub gc_heap: Box<dyn GcHeap>,

    /// The `externref` host data table for this GC heap.
    pub host_data_table: ExternRefHostDataTable,

    /// The function-references table for this GC heap.
    pub func_ref_table: FuncRefTable,
}

impl GcStore {
    /// Create a new `GcStore`.
    pub fn new(allocation_index: GcHeapAllocationIndex, gc_heap: Box<dyn GcHeap>) -> Self {
        let host_data_table = ExternRefHostDataTable::default();
        let func_ref_table = FuncRefTable::default();
        Self {
            allocation_index,
            gc_heap,
            host_data_table,
            func_ref_table,
        }
    }

    /// Get the `VMMemoryDefinition` for this GC heap.
    pub fn vmmemory_definition(&self) -> VMMemoryDefinition {
        self.gc_heap.vmmemory()
    }

    /// Perform garbage collection within this heap.
    pub fn gc(&mut self, roots: GcRootsIter<'_>) {
        let mut collection = self.gc_heap.gc(roots, &mut self.host_data_table);
        collection.collect();
    }

    /// Asynchronously perform garbage collection within this heap.
    #[cfg(feature = "async")]
    pub async fn gc_async(&mut self, roots: GcRootsIter<'_>) {
        let collection = self.gc_heap.gc(roots, &mut self.host_data_table);
        collect_async(collection).await;
    }

    /// Get the kind of the given GC reference.
    pub fn kind(&self, gc_ref: &VMGcRef) -> VMGcKind {
        debug_assert!(!gc_ref.is_i31());
        self.header(gc_ref).kind()
    }

    /// Get the header of the given GC reference.
    pub fn header(&self, gc_ref: &VMGcRef) -> &VMGcHeader {
        debug_assert!(!gc_ref.is_i31());
        self.gc_heap.header(gc_ref)
    }

    /// Clone a GC reference, calling GC write barriers as necessary.
    pub fn clone_gc_ref(&mut self, gc_ref: &VMGcRef) -> VMGcRef {
        if gc_ref.is_i31() {
            gc_ref.unchecked_copy()
        } else {
            self.gc_heap.clone_gc_ref(gc_ref)
        }
    }

    /// Write the `source` GC reference into the uninitialized `destination`
    /// slot, performing write barriers as necessary.
    pub fn init_gc_ref(
        &mut self,
        destination: &mut MaybeUninit<Option<VMGcRef>>,
        source: Option<&VMGcRef>,
    ) {
        // Initialize the destination to `None`, at which point the regular GC
        // write barrier is safe to reuse.
        let destination = destination.write(None);
        self.write_gc_ref(destination, source);
    }

    /// Write the `source` GC reference into the `destination` slot, performing
    /// write barriers as necessary.
    pub fn write_gc_ref(&mut self, destination: &mut Option<VMGcRef>, source: Option<&VMGcRef>) {
        // If neither the source nor destination actually point to a GC object
        // (that is, they are both either null or `i31ref`s) then we can skip
        // the GC barrier.
        if destination.as_ref().map_or(true, |d| d.is_i31())
            && source.as_ref().map_or(true, |s| s.is_i31())
        {
            *destination = source.map(|s| s.unchecked_copy());
            return;
        }

        self.gc_heap
            .write_gc_ref(&mut self.host_data_table, destination, source);
    }

    /// Drop the given GC reference, performing drop barriers as necessary.
    pub fn drop_gc_ref(&mut self, gc_ref: VMGcRef) {
        if !gc_ref.is_i31() {
            self.gc_heap.drop_gc_ref(&mut self.host_data_table, gc_ref);
        }
    }
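
    // A rough usage sketch (not part of this module) of the barrier discipline the
    // clone/write/drop methods above imply, assuming a hypothetical
    // `store: &mut GcStore`, an existing `src: VMGcRef`, and a
    // `slot: Option<VMGcRef>`:
    //
    //     // Every copy of a reference goes through the clone barrier.
    //     let copy = store.clone_gc_ref(&src);
    //
    //     // Overwriting a (possibly occupied) slot goes through the write
    //     // barrier, which takes care of the old and new values as needed.
    //     store.write_gc_ref(&mut slot, Some(&copy));
    //
    //     // A standalone reference that is no longer needed goes through the
    //     // drop barrier.
    //     store.drop_gc_ref(copy);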

    /// Hook to call whenever a GC reference is about to be exposed to Wasm.
    ///
    /// Returns the raw representation of this GC ref, ready to be passed to
    /// Wasm.
    #[must_use]
    pub fn expose_gc_ref_to_wasm(&mut self, gc_ref: VMGcRef) -> NonZeroU32 {
        let raw = gc_ref.as_raw_non_zero_u32();
        if !gc_ref.is_i31() {
            log::trace!("exposing GC ref to Wasm: {gc_ref:p}");
            self.gc_heap.expose_gc_ref_to_wasm(gc_ref);
        }
        raw
    }

    /// Allocate a new `externref`.
    ///
    /// Returns:
    ///
    /// * `Ok(Ok(_))`: Successfully allocated the `externref`.
    ///
    /// * `Ok(Err((value, n)))`: Failed to allocate the `externref`, but doing a GC
    ///   and then trying again may succeed. Returns the given `value` as the
    ///   error payload, along with the size of the failed allocation.
    ///
    /// * `Err(_)`: Unrecoverable allocation failure.
    pub fn alloc_externref(
        &mut self,
        value: Box<dyn Any + Send + Sync>,
    ) -> Result<Result<VMExternRef, (Box<dyn Any + Send + Sync>, u64)>> {
        let host_data_id = self.host_data_table.alloc(value);
        match self.gc_heap.alloc_externref(host_data_id)? {
            Ok(x) => Ok(Ok(x)),
            Err(n) => Ok(Err((self.host_data_table.dealloc(host_data_id), n))),
        }
    }
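
    // A rough usage sketch (not part of this module) of the nested-`Result`
    // convention documented above, assuming a hypothetical `store: &mut GcStore`
    // and a `value: Box<dyn Any + Send + Sync>`:
    //
    //     match store.alloc_externref(value)? {
    //         // Allocation succeeded.
    //         Ok(externref) => { /* use `externref` */ }
    //         // Out of GC heap space: the caller gets `value` back and would
    //         // typically collect garbage and then retry with it.
    //         Err((value, bytes_needed)) => { /* GC, then retry with `value` */ }
    //     }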

    /// Get a shared borrow of the given `externref`'s host data.
    ///
    /// Passing an invalid `VMExternRef` (e.g. a garbage value or an `externref`
    /// associated with a different heap) is memory safe but will lead to general
    /// incorrectness such as panics and wrong results.
    pub fn externref_host_data(&self, externref: &VMExternRef) -> &(dyn Any + Send + Sync) {
        let host_data_id = self.gc_heap.externref_host_data(externref);
        self.host_data_table.get(host_data_id)
    }
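
    // A small sketch (not part of this module): because host data is stored as
    // `dyn Any + Send + Sync`, callers typically downcast it back to the concrete
    // type they stored. Assuming a hypothetical `store: &GcStore` and an
    // `externref: VMExternRef` whose host data was a `String`:
    //
    //     let data = store.externref_host_data(&externref);
    //     if let Some(s) = data.downcast_ref::<String>() {
    //         /* use the borrowed `s: &String` */
    //     }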

    /// Get a mutable borrow of the given `externref`'s host data.
    ///
    /// Passing an invalid `VMExternRef` (e.g. a garbage value or an `externref`
    /// associated with a different heap) is memory safe but will lead to general
    /// incorrectness such as panics and wrong results.
    pub fn externref_host_data_mut(
        &mut self,
        externref: &VMExternRef,
    ) -> &mut (dyn Any + Send + Sync) {
        let host_data_id = self.gc_heap.externref_host_data(externref);
        self.host_data_table.get_mut(host_data_id)
    }

    /// Allocate a raw object with the given header and layout.
    pub fn alloc_raw(
        &mut self,
        header: VMGcHeader,
        layout: Layout,
    ) -> Result<Result<VMGcRef, u64>> {
        self.gc_heap.alloc_raw(header, layout)
    }

    /// Allocate an uninitialized struct with the given type index and layout.
    ///
    /// This does NOT check that the index is currently allocated in the types
    /// registry or that the layout matches the index's type. Failure to uphold
    /// those invariants is memory safe, but will lead to general incorrectness
    /// such as panics and wrong results.
    pub fn alloc_uninit_struct(
        &mut self,
        ty: VMSharedTypeIndex,
        layout: &GcStructLayout,
    ) -> Result<Result<VMStructRef, u64>> {
        self.gc_heap.alloc_uninit_struct(ty, layout)
    }

    /// Deallocate an uninitialized struct.
    pub fn dealloc_uninit_struct(&mut self, structref: VMStructRef) {
        self.gc_heap.dealloc_uninit_struct(structref);
    }
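
    // A rough sketch (not part of this module) of how the uninitialized-struct
    // methods above pair up, assuming a hypothetical `store: &mut GcStore`,
    // `ty: VMSharedTypeIndex`, and `layout: &GcStructLayout`:
    //
    //     match store.alloc_uninit_struct(ty, layout)? {
    //         Ok(structref) => {
    //             // Initialize the struct's fields here; if initialization fails
    //             // partway through, hand the object back rather than leaving an
    //             // uninitialized allocation live:
    //             // store.dealloc_uninit_struct(structref);
    //         }
    //         // Out of GC heap space: collect garbage and retry.
    //         Err(bytes_needed) => { /* ... */ }
    //     }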

    /// Get the data for the given object reference.
    ///
    /// Panics if the object's data is out of the GC heap's bounds.
    pub fn gc_object_data(&mut self, gc_ref: &VMGcRef) -> &mut VMGcObjectData {
        self.gc_heap.gc_object_data_mut(gc_ref)
    }

    /// Get the object data for each of the given pair of object references.
    ///
    /// Panics if `a` and `b` are the same reference or either is out of bounds.
    pub fn gc_object_data_pair(
        &mut self,
        a: &VMGcRef,
        b: &VMGcRef,
    ) -> (&mut VMGcObjectData, &mut VMGcObjectData) {
        assert_ne!(a, b);
        self.gc_heap.gc_object_data_pair(a, b)
    }

    /// Allocate an uninitialized array with the given type index.
    ///
    /// This does NOT check that the index is currently allocated in the types
    /// registry or that the layout matches the index's type. Failure to uphold
    /// those invariants is memory safe, but will lead to general incorrectness
    /// such as panics and wrong results.
    pub fn alloc_uninit_array(
        &mut self,
        ty: VMSharedTypeIndex,
        len: u32,
        layout: &GcArrayLayout,
    ) -> Result<Result<VMArrayRef, u64>> {
        self.gc_heap.alloc_uninit_array(ty, len, layout)
    }

    /// Deallocate an uninitialized array.
    pub fn dealloc_uninit_array(&mut self, arrayref: VMArrayRef) {
        self.gc_heap.dealloc_uninit_array(arrayref);
    }

    /// Get the length of the given array.
    pub fn array_len(&self, arrayref: &VMArrayRef) -> u32 {
        self.gc_heap.array_len(arrayref)
    }
}