// wasmtime/runtime/vm/gc.rs

#[cfg(feature = "gc")]
mod enabled;
#[cfg(feature = "gc")]
pub use enabled::*;

#[cfg(not(feature = "gc"))]
mod disabled;
#[cfg(not(feature = "gc"))]
pub use disabled::*;

mod func_ref;
mod gc_ref;
mod gc_runtime;
mod host_data;
mod i31;

pub use func_ref::*;
pub use gc_ref::*;
pub use gc_runtime::*;
pub use host_data::*;
pub use i31::*;

use crate::prelude::*;
use crate::runtime::vm::{GcHeapAllocationIndex, VMMemoryDefinition};
use core::any::Any;
use core::mem::MaybeUninit;
use core::{alloc::Layout, num::NonZeroU32};
use wasmtime_environ::{GcArrayLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};

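/// GC-related state tied to a single store: the GC heap itself plus the side
/// tables (`externref` host data and function references) that the heap's
/// objects refer into.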
pub struct GcStore {
    /// The index of this heap's allocation within the engine's GC heap
    /// allocator.
    pub allocation_index: GcHeapAllocationIndex,

    /// The actual GC heap implementation.
    pub gc_heap: Box<dyn GcHeap>,

    /// The table mapping `externref`s to their associated host data.
    pub host_data_table: ExternRefHostDataTable,

    /// The table of function references used from within this heap.
    pub func_ref_table: FuncRefTable,
}

impl GcStore {
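    /// Construct a new `GcStore` from a heap and its allocation index.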
    pub fn new(allocation_index: GcHeapAllocationIndex, gc_heap: Box<dyn GcHeap>) -> Self {
        let host_data_table = ExternRefHostDataTable::default();
        let func_ref_table = FuncRefTable::default();
        Self {
            allocation_index,
            gc_heap,
            host_data_table,
            func_ref_table,
        }
    }

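    /// Get the `VMMemoryDefinition` describing this GC heap's underlying
    /// memory.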
    pub fn vmmemory_definition(&self) -> VMMemoryDefinition {
        self.gc_heap.vmmemory()
    }

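    /// Perform a garbage collection, tracing from the given roots. The
    /// collection is driven through `collect_async`, which can yield back to
    /// the caller mid-collection when `async_yield` is set.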
    pub async fn gc(&mut self, async_yield: bool, roots: GcRootsIter<'_>) {
        let collection = self.gc_heap.gc(roots, &mut self.host_data_table);
        collect_async(collection, async_yield).await;
    }

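    /// Get the kind of the given GC reference. Must not be called on `i31`
    /// references, which are unboxed and have no heap header.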
    pub fn kind(&self, gc_ref: &VMGcRef) -> VMGcKind {
        debug_assert!(!gc_ref.is_i31());
        self.header(gc_ref).kind()
    }

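    /// Get the header of the given GC reference. Must not be called on `i31`
    /// references.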
    pub fn header(&self, gc_ref: &VMGcRef) -> &VMGcHeader {
        debug_assert!(!gc_ref.is_i31());
        self.gc_heap.header(gc_ref)
    }

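    /// Clone a GC reference. Inline `i31` references are plain copies; heap
    /// references are routed through the GC heap so it can update whatever
    /// bookkeeping (e.g. reference counts) it maintains.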
    pub fn clone_gc_ref(&mut self, gc_ref: &VMGcRef) -> VMGcRef {
        if gc_ref.is_i31() {
            gc_ref.copy_i31()
        } else {
            self.gc_heap.clone_gc_ref(gc_ref)
        }
    }

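    /// Initialize an uninitialized slot with the given GC reference, running
    /// any necessary barriers. The slot is first written as `None` so that
    /// the ordinary write barrier never observes uninitialized memory.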
    pub fn init_gc_ref(
        &mut self,
        destination: &mut MaybeUninit<Option<VMGcRef>>,
        source: Option<&VMGcRef>,
    ) {
        let destination = destination.write(None);
        self.write_gc_ref(destination, source);
    }

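    /// Whether initializing a slot with `gc_ref` requires the GC heap's
    /// barriers: only when it is an actual heap reference, never for `None`
    /// or inline `i31`s.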
    pub(crate) fn needs_init_barrier(gc_ref: Option<&VMGcRef>) -> bool {
        assert!(cfg!(feature = "gc") || gc_ref.is_none());
        gc_ref.is_some_and(|r| !r.is_i31())
    }

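    /// Whether overwriting `dest` with `gc_ref` requires the GC heap's write
    /// barrier: yes whenever either the old or the new value is a non-`i31`
    /// heap reference.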
    pub(crate) fn needs_write_barrier(
        dest: &mut Option<VMGcRef>,
        gc_ref: Option<&VMGcRef>,
    ) -> bool {
        assert!(cfg!(feature = "gc") || gc_ref.is_none());
        assert!(cfg!(feature = "gc") || dest.is_none());
        dest.as_ref().is_some_and(|r| !r.is_i31()) || gc_ref.is_some_and(|r| !r.is_i31())
    }

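    /// Write `gc_ref` into `dest` when a `GcStore` may not be available.
    /// Callers may pass `None` for `store` only when no barrier is needed
    /// (for example, when both values are `None` or inline `i31`s);
    /// otherwise this unwraps the store and runs the full write barrier.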
    pub(crate) fn write_gc_ref_optional_store(
        store: Option<&mut Self>,
        dest: &mut Option<VMGcRef>,
        gc_ref: Option<&VMGcRef>,
    ) {
        if Self::needs_write_barrier(dest, gc_ref) {
            store.unwrap().write_gc_ref(dest, gc_ref);
        } else {
            *dest = gc_ref.map(|r| r.copy_i31());
        }
    }

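    /// Write the `source` GC reference into the `destination` slot, running
    /// GC write barriers as necessary. `None` and inline `i31` values are
    /// copied directly; anything else goes through the GC heap.
    ///
    /// A hypothetical caller (`gc_store`, `slot`, and `new_ref` are assumed
    /// names, not part of this module):
    ///
    /// ```ignore
    /// // Overwrite an existing slot, letting the barrier see the old value:
    /// gc_store.write_gc_ref(&mut slot, Some(&new_ref));
    /// // Clear the slot, dropping whatever it used to hold:
    /// gc_store.write_gc_ref(&mut slot, None);
    /// ```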
    pub fn write_gc_ref(&mut self, destination: &mut Option<VMGcRef>, source: Option<&VMGcRef>) {
        if Self::needs_write_barrier(destination, source) {
            self.gc_heap
                .write_gc_ref(&mut self.host_data_table, destination, source);
        } else {
            *destination = source.map(|s| s.copy_i31());
        }
    }

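    /// Drop the given GC reference, letting the GC heap update its
    /// bookkeeping. Inline `i31` references are not heap-allocated and are
    /// simply discarded.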
    pub fn drop_gc_ref(&mut self, gc_ref: VMGcRef) {
        if !gc_ref.is_i31() {
            self.gc_heap.drop_gc_ref(&mut self.host_data_table, gc_ref);
        }
    }

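    /// Hand a GC reference out to Wasm as a raw, non-zero `u32`, first
    /// notifying the GC heap (for non-`i31` references); the heap is expected
    /// to keep the object live while Wasm holds the raw value.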
    #[must_use]
    pub fn expose_gc_ref_to_wasm(&mut self, gc_ref: VMGcRef) -> NonZeroU32 {
        let raw = gc_ref.as_raw_non_zero_u32();
        if !gc_ref.is_i31() {
            log::trace!("exposing GC ref to Wasm: {gc_ref:p}");
            self.gc_heap.expose_gc_ref_to_wasm(gc_ref);
        }
        raw
    }

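    /// Allocate a new `externref` wrapping the given host data.
    ///
    /// The outer `Result` is for unrecoverable errors. The inner `Err` means
    /// the heap is out of room: it hands the host data back to the caller,
    /// along with the number of bytes the heap reports as needed for the
    /// allocation to succeed, so the caller can collect garbage (or grow the
    /// heap) and retry.
    ///
    /// A hypothetical retry (`gc_store`, `roots`, and `host_value` are
    /// assumed names):
    ///
    /// ```ignore
    /// let externref = match gc_store.alloc_externref(host_value)? {
    ///     Ok(x) => x,
    ///     Err((host_value, _bytes_needed)) => {
    ///         // The heap was full: collect garbage, then try once more.
    ///         gc_store.gc(false, roots).await;
    ///         gc_store
    ///             .alloc_externref(host_value)?
    ///             .map_err(|_| anyhow::anyhow!("GC heap exhausted"))?
    ///     }
    /// };
    /// ```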
    pub fn alloc_externref(
        &mut self,
        value: Box<dyn Any + Send + Sync>,
    ) -> Result<Result<VMExternRef, (Box<dyn Any + Send + Sync>, u64)>> {
        let host_data_id = self.host_data_table.alloc(value);
        match self.gc_heap.alloc_externref(host_data_id)? {
            Ok(x) => Ok(Ok(x)),
            Err(n) => Ok(Err((self.host_data_table.dealloc(host_data_id), n))),
        }
    }

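    /// Get a shared borrow of the host data associated with the given
    /// `externref`.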
    pub fn externref_host_data(&self, externref: &VMExternRef) -> &(dyn Any + Send + Sync) {
        let host_data_id = self.gc_heap.externref_host_data(externref);
        self.host_data_table.get(host_data_id)
    }

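    /// Get an exclusive borrow of the host data associated with the given
    /// `externref`.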
    pub fn externref_host_data_mut(
        &mut self,
        externref: &VMExternRef,
    ) -> &mut (dyn Any + Send + Sync) {
        let host_data_id = self.gc_heap.externref_host_data(externref);
        self.host_data_table.get_mut(host_data_id)
    }

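    /// Allocate a raw GC object with the given header and layout. As with
    /// the other allocation methods, the inner `Err` reports the number of
    /// bytes needed for the allocation to succeed.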
    pub fn alloc_raw(
        &mut self,
        header: VMGcHeader,
        layout: Layout,
    ) -> Result<Result<VMGcRef, u64>> {
        self.gc_heap.alloc_raw(header, layout)
    }

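    /// Allocate an uninitialized struct of the given type. The caller must
    /// either initialize the struct's fields or return it via
    /// `dealloc_uninit_struct` if initialization fails partway.
    ///
    /// A hypothetical allocation path (`gc_store`, `ty`, and `layout` are
    /// assumed names):
    ///
    /// ```ignore
    /// let structref = gc_store
    ///     .alloc_uninit_struct(ty, &layout)?
    ///     .map_err(|n| anyhow::anyhow!("out of memory; {n} more bytes needed"))?;
    /// // ... initialize every field through `gc_object_data` ...
    /// // On failure before the struct is fully initialized, give it back:
    /// // gc_store.dealloc_uninit_struct(structref);
    /// ```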
    pub fn alloc_uninit_struct(
        &mut self,
        ty: VMSharedTypeIndex,
        layout: &GcStructLayout,
    ) -> Result<Result<VMStructRef, u64>> {
        self.gc_heap
            .alloc_uninit_struct_or_exn(ty, layout)
            .map(|r| r.map(|r| r.into_structref_unchecked()))
    }

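    /// Deallocate a struct that was allocated with `alloc_uninit_struct` but
    /// never fully initialized.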
    pub fn dealloc_uninit_struct(&mut self, structref: VMStructRef) {
        self.gc_heap.dealloc_uninit_struct_or_exn(structref.into());
    }

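    /// Get an exclusive borrow of the given object's raw data.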
    pub fn gc_object_data(&mut self, gc_ref: &VMGcRef) -> &mut VMGcObjectData {
        self.gc_heap.gc_object_data_mut(gc_ref)
    }

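    /// Get exclusive borrows of two distinct objects' data at the same time.
    /// Panics if `a` and `b` refer to the same object.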
    pub fn gc_object_data_pair(
        &mut self,
        a: &VMGcRef,
        b: &VMGcRef,
    ) -> (&mut VMGcObjectData, &mut VMGcObjectData) {
        assert_ne!(a, b);
        self.gc_heap.gc_object_data_pair(a, b)
    }

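    /// Allocate an uninitialized array with space for `len` elements of the
    /// given type. The same initialize-or-deallocate discipline as for
    /// structs applies.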
    pub fn alloc_uninit_array(
        &mut self,
        ty: VMSharedTypeIndex,
        len: u32,
        layout: &GcArrayLayout,
    ) -> Result<Result<VMArrayRef, u64>> {
        self.gc_heap.alloc_uninit_array(ty, len, layout)
    }

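    /// Deallocate an array that was allocated with `alloc_uninit_array` but
    /// never fully initialized.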
    pub fn dealloc_uninit_array(&mut self, arrayref: VMArrayRef) {
        self.gc_heap.dealloc_uninit_array(arrayref);
    }

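    /// Get the length (in elements) of the given array.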
    pub fn array_len(&self, arrayref: &VMArrayRef) -> u32 {
        self.gc_heap.array_len(arrayref)
    }

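    /// Allocate an uninitialized exception object. Exceptions share their
    /// allocation path with structs, hence the common
    /// `alloc_uninit_struct_or_exn` call underneath.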
    pub fn alloc_uninit_exn(
        &mut self,
        ty: VMSharedTypeIndex,
        layout: &GcStructLayout,
    ) -> Result<Result<VMExnRef, u64>> {
        self.gc_heap
            .alloc_uninit_struct_or_exn(ty, layout)
            .map(|r| r.map(|r| r.into_exnref_unchecked()))
    }

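    /// Deallocate an exception object that was allocated with
    /// `alloc_uninit_exn` but never fully initialized.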
    pub fn dealloc_uninit_exn(&mut self, exnref: VMExnRef) {
        self.gc_heap.dealloc_uninit_struct_or_exn(exnref.into());
    }
}