wasmtime/runtime/vm/gc.rs

#[cfg(feature = "gc")]
mod enabled;
#[cfg(feature = "gc")]
pub use enabled::*;

#[cfg(not(feature = "gc"))]
mod disabled;
#[cfg(not(feature = "gc"))]
pub use disabled::*;

mod func_ref;
mod gc_ref;
mod gc_runtime;
mod host_data;
mod i31;

pub use func_ref::*;
pub use gc_ref::*;
pub use gc_runtime::*;
pub use host_data::*;
pub use i31::*;

use crate::prelude::*;
use crate::runtime::vm::{GcHeapAllocationIndex, VMMemoryDefinition};
use crate::store::Asyncness;
use core::any::Any;
use core::mem::MaybeUninit;
use core::{alloc::Layout, num::NonZeroU32};
use wasmtime_environ::{GcArrayLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};

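/// Used by a `Store` to manage GC data: the GC heap itself, plus the
/// `externref` host data table and the function-references table that go
/// with it.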
pub struct GcStore {
    /// This GC heap's allocation index.
    pub allocation_index: GcHeapAllocationIndex,

    /// This store's GC heap.
    pub gc_heap: Box<dyn GcHeap>,

    /// The `externref` host data table for this GC heap.
    pub host_data_table: ExternRefHostDataTable,

    /// The function-references table for this GC heap.
    pub func_ref_table: FuncRefTable,
}

impl GcStore {
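    /// Create a new `GcStore` from the given GC heap and its allocation index.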
    pub fn new(allocation_index: GcHeapAllocationIndex, gc_heap: Box<dyn GcHeap>) -> Self {
        let host_data_table = ExternRefHostDataTable::default();
        let func_ref_table = FuncRefTable::default();
        Self {
            allocation_index,
            gc_heap,
            host_data_table,
            func_ref_table,
        }
    }

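    /// Get the `VMMemoryDefinition` describing this GC heap's underlying memory.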
    pub fn vmmemory_definition(&self) -> VMMemoryDefinition {
        self.gc_heap.vmmemory()
    }

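    /// Perform a garbage collection, tracing the given roots and driving the
    /// resulting collection to completion (asynchronously when requested).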
    pub async fn gc(&mut self, asyncness: Asyncness, roots: GcRootsIter<'_>) {
        let collection = self.gc_heap.gc(roots, &mut self.host_data_table);
        collect_async(collection, asyncness).await;
    }

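    /// Get the `VMGcKind` of the given GC reference.
    ///
    /// Must not be called with an `i31ref`.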
    pub fn kind(&self, gc_ref: &VMGcRef) -> VMGcKind {
        debug_assert!(!gc_ref.is_i31());
        self.header(gc_ref).kind()
    }

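    /// Get a shared borrow of the given GC reference's header.
    ///
    /// Must not be called with an `i31ref`.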
    pub fn header(&self, gc_ref: &VMGcRef) -> &VMGcHeader {
        debug_assert!(!gc_ref.is_i31());
        self.gc_heap.header(gc_ref)
    }

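    /// Clone the given GC reference, running any GC barriers the heap
    /// requires. Cloning an `i31` is a simple copy.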
    pub fn clone_gc_ref(&mut self, gc_ref: &VMGcRef) -> VMGcRef {
        if gc_ref.is_i31() {
            gc_ref.copy_i31()
        } else {
            self.gc_heap.clone_gc_ref(gc_ref)
        }
    }

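    /// Initialize the uninitialized `destination` slot with a copy of
    /// `source`, running GC write barriers as necessary.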
    pub fn init_gc_ref(
        &mut self,
        destination: &mut MaybeUninit<Option<VMGcRef>>,
        source: Option<&VMGcRef>,
    ) {
        let destination = destination.write(None);
        self.write_gc_ref(destination, source);
    }

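    /// Does initializing a slot with `gc_ref` require calling into the GC
    /// heap's barriers? Only non-`i31` references do.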
    pub(crate) fn needs_init_barrier(gc_ref: Option<&VMGcRef>) -> bool {
        assert!(cfg!(feature = "gc") || gc_ref.is_none());
        gc_ref.is_some_and(|r| !r.is_i31())
    }

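    /// Does overwriting `dest` with `gc_ref` require calling into the GC
    /// heap's write barrier? Only when either side is a non-`i31` reference.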
    pub(crate) fn needs_write_barrier(
        dest: &mut Option<VMGcRef>,
        gc_ref: Option<&VMGcRef>,
    ) -> bool {
        assert!(cfg!(feature = "gc") || gc_ref.is_none());
        assert!(cfg!(feature = "gc") || dest.is_none());
        dest.as_ref().is_some_and(|r| !r.is_i31()) || gc_ref.is_some_and(|r| !r.is_i31())
    }

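    /// Write `gc_ref` into `dest`, using `store` for the write barrier only
    /// when one is actually needed.
    ///
    /// Panics if a barrier is needed but `store` is `None`.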
    pub(crate) fn write_gc_ref_optional_store(
        store: Option<&mut Self>,
        dest: &mut Option<VMGcRef>,
        gc_ref: Option<&VMGcRef>,
    ) {
        if Self::needs_write_barrier(dest, gc_ref) {
            store.unwrap().write_gc_ref(dest, gc_ref)
        } else {
            *dest = gc_ref.map(|r| r.copy_i31());
        }
    }

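    /// Write a copy of `source` into `destination`, running GC write barriers
    /// as necessary.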
    pub fn write_gc_ref(&mut self, destination: &mut Option<VMGcRef>, source: Option<&VMGcRef>) {
        if Self::needs_write_barrier(destination, source) {
            self.gc_heap
                .write_gc_ref(&mut self.host_data_table, destination, source);
        } else {
            *destination = source.map(|s| s.copy_i31());
        }
    }

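    /// Drop the given GC reference, running any GC barriers the heap
    /// requires. Dropping an `i31` is a no-op.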
    pub fn drop_gc_ref(&mut self, gc_ref: VMGcRef) {
        if !gc_ref.is_i31() {
            self.gc_heap.drop_gc_ref(&mut self.host_data_table, gc_ref);
        }
    }

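    /// Hand the given GC reference over to Wasm, returning its raw, non-zero
    /// `u32` representation.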
    #[must_use]
    pub fn expose_gc_ref_to_wasm(&mut self, gc_ref: VMGcRef) -> NonZeroU32 {
        let raw = gc_ref.as_raw_non_zero_u32();
        if !gc_ref.is_i31() {
            log::trace!("exposing GC ref to Wasm: {gc_ref:p}");
            self.gc_heap.expose_gc_ref_to_wasm(gc_ref);
        }
        raw
    }

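    /// Allocate a new `externref` wrapping the given host data.
    ///
    /// On allocation failure, the host data is handed back along with the
    /// number of bytes the heap reported it needs.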
    pub fn alloc_externref(
        &mut self,
        value: Box<dyn Any + Send + Sync>,
    ) -> Result<Result<VMExternRef, (Box<dyn Any + Send + Sync>, u64)>> {
        let host_data_id = self.host_data_table.alloc(value);
        match self.gc_heap.alloc_externref(host_data_id)? {
            Ok(x) => Ok(Ok(x)),
            Err(n) => Ok(Err((self.host_data_table.dealloc(host_data_id), n))),
        }
    }

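    /// Get a shared borrow of the host data associated with the given
    /// `externref`.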
    pub fn externref_host_data(&self, externref: &VMExternRef) -> &(dyn Any + Send + Sync) {
        let host_data_id = self.gc_heap.externref_host_data(externref);
        self.host_data_table.get(host_data_id)
    }

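    /// Get an exclusive borrow of the host data associated with the given
    /// `externref`.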
    pub fn externref_host_data_mut(
        &mut self,
        externref: &VMExternRef,
    ) -> &mut (dyn Any + Send + Sync) {
        let host_data_id = self.gc_heap.externref_host_data(externref);
        self.host_data_table.get_mut(host_data_id)
    }

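    /// Allocate a raw GC object with the given header and layout.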
    pub fn alloc_raw(
        &mut self,
        header: VMGcHeader,
        layout: Layout,
    ) -> Result<Result<VMGcRef, u64>> {
        self.gc_heap.alloc_raw(header, layout)
    }

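    /// Allocate an uninitialized struct with the given type index and layout.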
    pub fn alloc_uninit_struct(
        &mut self,
        ty: VMSharedTypeIndex,
        layout: &GcStructLayout,
    ) -> Result<Result<VMStructRef, u64>> {
        self.gc_heap
            .alloc_uninit_struct_or_exn(ty, layout)
            .map(|r| r.map(|r| r.into_structref_unchecked()))
    }

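    /// Deallocate a struct that was allocated but never initialized.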
    pub fn dealloc_uninit_struct(&mut self, structref: VMStructRef) {
        self.gc_heap.dealloc_uninit_struct_or_exn(structref.into())
    }

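    /// Get exclusive access to the given GC object's data.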
    pub fn gc_object_data(&mut self, gc_ref: &VMGcRef) -> &mut VMGcObjectData {
        self.gc_heap.gc_object_data_mut(gc_ref)
    }

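    /// Get exclusive access to two distinct GC objects' data at the same time.
    ///
    /// Panics if `a` and `b` are the same reference.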
    pub fn gc_object_data_pair(
        &mut self,
        a: &VMGcRef,
        b: &VMGcRef,
    ) -> (&mut VMGcObjectData, &mut VMGcObjectData) {
        assert_ne!(a, b);
        self.gc_heap.gc_object_data_pair(a, b)
    }

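    /// Allocate an uninitialized array with the given type index, length, and
    /// layout.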
    pub fn alloc_uninit_array(
        &mut self,
        ty: VMSharedTypeIndex,
        len: u32,
        layout: &GcArrayLayout,
    ) -> Result<Result<VMArrayRef, u64>> {
        self.gc_heap.alloc_uninit_array(ty, len, layout)
    }

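    /// Deallocate an array that was allocated but never initialized.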
    pub fn dealloc_uninit_array(&mut self, arrayref: VMArrayRef) {
        self.gc_heap.dealloc_uninit_array(arrayref);
    }

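    /// Get the length of the given array.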
    pub fn array_len(&self, arrayref: &VMArrayRef) -> u32 {
        self.gc_heap.array_len(arrayref)
    }

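    /// Allocate an uninitialized exception object with the given type index
    /// and layout.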
    pub fn alloc_uninit_exn(
        &mut self,
        ty: VMSharedTypeIndex,
        layout: &GcStructLayout,
    ) -> Result<Result<VMExnRef, u64>> {
        self.gc_heap
            .alloc_uninit_struct_or_exn(ty, layout)
            .map(|r| r.map(|r| r.into_exnref_unchecked()))
    }

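    /// Deallocate an exception object that was allocated but never
    /// initialized.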
    pub fn dealloc_uninit_exn(&mut self, exnref: VMExnRef) {
        self.gc_heap.dealloc_uninit_struct_or_exn(exnref.into());
    }
}