wasmtime/runtime/vm/gc.rs

// The real GC support is compiled in only when the `gc` cargo feature is
// enabled; otherwise a stub implementation with the same interface is used.
#[cfg(feature = "gc")]
mod enabled;
#[cfg(feature = "gc")]
pub use enabled::*;

#[cfg(not(feature = "gc"))]
mod disabled;
#[cfg(not(feature = "gc"))]
pub use disabled::*;

mod func_ref;
mod gc_ref;
mod gc_runtime;
mod host_data;
mod i31;

pub use func_ref::*;
pub use gc_ref::*;
pub use gc_runtime::*;
pub use host_data::*;
pub use i31::*;

use crate::prelude::*;
use crate::runtime::vm::GcHeapAllocationIndex;
use core::alloc::Layout;
use core::any::Any;
use core::mem::MaybeUninit;
use wasmtime_environ::{GcArrayLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};

/// The GC-related state for a store: a GC heap along with its associated
/// host-data and function-reference tables.
pub struct GcStore {
    /// The allocation index of this GC heap.
    pub allocation_index: GcHeapAllocationIndex,

    /// The GC heap itself.
    pub gc_heap: Box<dyn GcHeap>,

    /// The `externref` host data table for this GC heap.
    pub host_data_table: ExternRefHostDataTable,

    /// The function-reference table for this GC heap.
    pub func_ref_table: FuncRefTable,
}

impl GcStore {
    /// Create a new `GcStore` from the given allocation index and GC heap,
    /// with empty host-data and function-reference tables.
    pub fn new(allocation_index: GcHeapAllocationIndex, gc_heap: Box<dyn GcHeap>) -> Self {
        let host_data_table = ExternRefHostDataTable::default();
        let func_ref_table = FuncRefTable::default();
        Self {
            allocation_index,
            gc_heap,
            host_data_table,
            func_ref_table,
        }
    }

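    /// Synchronously run a garbage collection, using `roots` as the root
    /// set.
    ///
    /// A hedged sketch of a call site; how `roots` is enumerated is up to
    /// the caller and elided here:
    ///
    /// ```ignore
    /// // `roots` is a `GcRootsIter` over every root the caller knows about
    /// // (stack slots, tables, globals, host-held handles, ...).
    /// gc_store.gc(roots);
    /// // Objects unreachable from those roots have now been reclaimed.
    /// ```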
    pub fn gc(&mut self, roots: GcRootsIter<'_>) {
        let mut collection = self.gc_heap.gc(roots, &mut self.host_data_table);
        collection.collect();
    }

    /// Asynchronously run a garbage collection, using `roots` as the root
    /// set and driving the collection incrementally via `collect_async` so
    /// that other async tasks can make progress in between increments.
    #[cfg(feature = "async")]
    pub async fn gc_async(&mut self, roots: GcRootsIter<'_>) {
        let collection = self.gc_heap.gc(roots, &mut self.host_data_table);
        collect_async(collection).await;
    }

    /// Get the kind of the given GC reference.
    ///
    /// The reference must not be an `i31`: `i31`s are unboxed values, not
    /// heap objects, and therefore have no header to read a kind from.
    pub fn kind(&self, gc_ref: &VMGcRef) -> VMGcKind {
        debug_assert!(!gc_ref.is_i31());
        self.header(gc_ref).kind()
    }

    /// Get the header of the given GC reference.
    ///
    /// The reference must not be an `i31`, for the same reason as in
    /// `kind`.
    pub fn header(&self, gc_ref: &VMGcRef) -> &VMGcHeader {
        debug_assert!(!gc_ref.is_i31());
        self.gc_heap.header(gc_ref)
    }

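    /// Clone a GC reference, running any GC barriers as necessary.
    ///
    /// `i31`s are plain values and are simply copied; everything else is
    /// cloned through the GC heap so that, for example, a reference-counting
    /// collector can keep its counts accurate.
    ///
    /// A hedged usage sketch (`gc_store` and `gc_ref` are illustrative
    /// names, not from this crate):
    ///
    /// ```ignore
    /// // Take an extra reference to the same object...
    /// let extra = gc_store.clone_gc_ref(&gc_ref);
    /// // ...and balance it later with a drop barrier rather than letting
    /// // it fall on the floor.
    /// gc_store.drop_gc_ref(extra);
    /// ```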
    pub fn clone_gc_ref(&mut self, gc_ref: &VMGcRef) -> VMGcRef {
        if gc_ref.is_i31() {
            gc_ref.unchecked_copy()
        } else {
            self.gc_heap.clone_gc_ref(gc_ref)
        }
    }

    /// Write the `source` GC reference into the uninitialized `destination`
    /// slot, performing write barriers as necessary.
    pub fn init_gc_ref(
        &mut self,
        destination: &mut MaybeUninit<Option<VMGcRef>>,
        source: Option<&VMGcRef>,
    ) {
        // Initialize the destination to `None` first, at which point the
        // regular, initialized-slot write path can take over.
        let destination = destination.write(None);
        self.write_gc_ref(destination, source);
    }

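    /// Write the `source` GC reference into the `destination` slot,
    /// performing write barriers as necessary.
    ///
    /// When neither the slot's old value nor the new value is a heap object,
    /// no barriers are needed and the write is a plain copy; otherwise the
    /// write is routed through the GC heap's barrier implementation.
    ///
    /// A hedged sketch of a call site (names are illustrative, not from
    /// this crate):
    ///
    /// ```ignore
    /// // Overwrite whatever `slot` currently holds with a copy of
    /// // `new_ref`, letting the store run the appropriate barriers.
    /// gc_store.write_gc_ref(&mut slot, Some(&new_ref));
    ///
    /// // Clearing a slot is the same operation with `None` as the source.
    /// gc_store.write_gc_ref(&mut slot, None);
    /// ```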
    pub fn write_gc_ref(&mut self, destination: &mut Option<VMGcRef>, source: Option<&VMGcRef>) {
        // If neither the old value nor the new value is a heap object (each
        // is `None` or an `i31`), then no GC barriers are required and we
        // can do a plain copy.
        if destination.as_ref().map_or(true, |d| d.is_i31())
            && source.as_ref().map_or(true, |s| s.is_i31())
        {
            *destination = source.map(|s| s.unchecked_copy());
            return;
        }

        // Otherwise, defer to the GC heap's barrier implementation.
        self.gc_heap
            .write_gc_ref(&mut self.host_data_table, destination, source);
    }

    /// Drop the given GC reference, performing drop barriers as necessary.
    ///
    /// `i31`s are plain values that require no barriers and are simply
    /// discarded.
    pub fn drop_gc_ref(&mut self, gc_ref: VMGcRef) {
        if !gc_ref.is_i31() {
            self.gc_heap.drop_gc_ref(&mut self.host_data_table, gc_ref);
        }
    }

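    /// Hook to call just before a GC reference is passed to Wasm, returning
    /// the raw `u32` representation that Wasm code actually sees.
    ///
    /// This gives the GC heap a chance to, for example, keep the reference
    /// rooted while Wasm can observe it, depending on the collector's
    /// design.
    ///
    /// A hedged sketch of a call site (hypothetical names):
    ///
    /// ```ignore
    /// // Hand `gc_ref` to Wasm as a raw `u32`; ownership of the reference
    /// // moves into the heap's expose-to-Wasm machinery.
    /// let raw: u32 = gc_store.expose_gc_ref_to_wasm(gc_ref);
    /// ```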
    #[must_use]
    pub fn expose_gc_ref_to_wasm(&mut self, gc_ref: VMGcRef) -> u32 {
        let raw = gc_ref.as_raw_u32();
        debug_assert_ne!(raw, 0);
        if !gc_ref.is_i31() {
            log::trace!("exposing GC ref to Wasm: {gc_ref:p}");
            self.gc_heap.expose_gc_ref_to_wasm(gc_ref);
        }
        raw
    }

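    /// Allocate a new `externref` wrapping the given host value.
    ///
    /// Returns:
    ///
    /// * `Ok(Ok(_))`: the allocation succeeded.
    ///
    /// * `Ok(Err(value))`: the heap could not satisfy the allocation right
    ///   now; the host value is handed back so the caller can run a GC and
    ///   retry.
    ///
    /// * `Err(_)`: an unrecoverable allocation error.
    ///
    /// A hedged retry sketch (hypothetical caller that already has its
    /// `roots` in hand):
    ///
    /// ```ignore
    /// let externref = match gc_store.alloc_externref(value)? {
    ///     Ok(x) => x,
    ///     Err(value) => {
    ///         // The heap is full: collect garbage, then retry once.
    ///         gc_store.gc(roots);
    ///         match gc_store.alloc_externref(value)? {
    ///             Ok(x) => x,
    ///             Err(_) => anyhow::bail!("out of memory"),
    ///         }
    ///     }
    /// };
    /// ```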
    pub fn alloc_externref(
        &mut self,
        value: Box<dyn Any + Send + Sync>,
    ) -> Result<Result<VMExternRef, Box<dyn Any + Send + Sync>>> {
        let host_data_id = self.host_data_table.alloc(value);
        match self.gc_heap.alloc_externref(host_data_id)? {
            #[cfg_attr(not(feature = "gc"), allow(unreachable_patterns))]
            Some(x) => Ok(Ok(x)),
            None => Ok(Err(self.host_data_table.dealloc(host_data_id))),
        }
    }

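    /// Get a shared borrow of the given `externref`'s host data.
    ///
    /// A hedged sketch of recovering the concrete host type (hypothetical
    /// names; assumes the value was originally a `String`):
    ///
    /// ```ignore
    /// let data = gc_store.externref_host_data(&externref);
    /// if let Some(s) = data.downcast_ref::<String>() {
    ///     println!("externref wraps the string {s:?}");
    /// }
    /// ```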
    pub fn externref_host_data(&self, externref: &VMExternRef) -> &(dyn Any + Send + Sync) {
        let host_data_id = self.gc_heap.externref_host_data(externref);
        self.host_data_table.get(host_data_id)
    }

    /// Get an exclusive borrow of the given `externref`'s host data.
    pub fn externref_host_data_mut(
        &mut self,
        externref: &VMExternRef,
    ) -> &mut (dyn Any + Send + Sync) {
        let host_data_id = self.gc_heap.externref_host_data(externref);
        self.host_data_table.get_mut(host_data_id)
    }

    /// Allocate a raw GC object with the given header and layout.
    ///
    /// Returns `Ok(None)` when the heap cannot satisfy the allocation until
    /// a GC has been performed.
    pub fn alloc_raw(&mut self, header: VMGcHeader, layout: Layout) -> Result<Option<VMGcRef>> {
        self.gc_heap.alloc_raw(header, layout)
    }

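    /// Allocate an uninitialized struct of the given type and layout.
    ///
    /// The struct's fields are *not* initialized; the caller must either
    /// initialize them or give the object back via `dealloc_uninit_struct`.
    ///
    /// Returns `Ok(None)` when the heap needs a GC before the allocation
    /// can succeed.
    ///
    /// A hedged sketch of the allocate/initialize/undo dance (hypothetical
    /// names and `init_fields` helper):
    ///
    /// ```ignore
    /// let structref = match gc_store.alloc_uninit_struct(ty, &layout)? {
    ///     Some(s) => s,
    ///     None => {
    ///         // Free up space and retry once.
    ///         gc_store.gc(roots);
    ///         gc_store
    ///             .alloc_uninit_struct(ty, &layout)?
    ///             .ok_or_else(|| anyhow::anyhow!("out of memory"))?
    ///     }
    /// };
    /// if let Err(e) = init_fields(gc_store, &structref) {
    ///     // Initialization failed partway: hand the object back.
    ///     gc_store.dealloc_uninit_struct(structref);
    ///     return Err(e);
    /// }
    /// ```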
    pub fn alloc_uninit_struct(
        &mut self,
        ty: VMSharedTypeIndex,
        layout: &GcStructLayout,
    ) -> Result<Option<VMStructRef>> {
        self.gc_heap.alloc_uninit_struct(ty, layout)
    }

    /// Deallocate a struct that was allocated but never fully initialized,
    /// for example because initializing one of its fields failed.
    pub fn dealloc_uninit_struct(&mut self, structref: VMStructRef) {
        self.gc_heap.dealloc_uninit_struct(structref);
    }

    /// Get the data for the given object reference.
    pub fn gc_object_data(&mut self, gc_ref: &VMGcRef) -> VMGcObjectDataMut<'_> {
        self.gc_heap.gc_object_data(gc_ref)
    }

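    /// Get the data for a pair of distinct object references at the same
    /// time, e.g. to copy fields from one object into another.
    ///
    /// Panics if `a` and `b` are the same reference, since that would alias
    /// two exclusive borrows of the same object data.
    ///
    /// A hedged sketch; the `read_u32`/`write_u32` accessors on the object
    /// data are assumed here purely for illustration:
    ///
    /// ```ignore
    /// let (mut src, mut dst) = gc_store.gc_object_data_pair(&a, &b);
    /// // Both borrows are live at once, which two separate
    /// // `gc_object_data` calls could not express.
    /// let word = src.read_u32(field_offset);
    /// dst.write_u32(field_offset, word);
    /// ```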
    pub fn gc_object_data_pair(
        &mut self,
        a: &VMGcRef,
        b: &VMGcRef,
    ) -> (VMGcObjectDataMut<'_>, VMGcObjectDataMut<'_>) {
        assert_ne!(a, b);
        self.gc_heap.gc_object_data_pair(a, b)
    }

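    /// Allocate an uninitialized array of the given type, length, and
    /// layout.
    ///
    /// The array's elements are *not* initialized; the caller must either
    /// initialize them or give the object back via `dealloc_uninit_array`.
    ///
    /// Returns `Ok(None)` when the heap needs a GC before the allocation
    /// can succeed.
    ///
    /// A hedged sketch mirroring the struct case above (hypothetical
    /// `init_elems` helper):
    ///
    /// ```ignore
    /// let arrayref = gc_store
    ///     .alloc_uninit_array(ty, len, &layout)?
    ///     .ok_or_else(|| anyhow::anyhow!("out of memory; GC and retry"))?;
    /// if let Err(e) = init_elems(gc_store, &arrayref, len) {
    ///     gc_store.dealloc_uninit_array(arrayref);
    ///     return Err(e);
    /// }
    /// ```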
    pub fn alloc_uninit_array(
        &mut self,
        ty: VMSharedTypeIndex,
        len: u32,
        layout: &GcArrayLayout,
    ) -> Result<Option<VMArrayRef>> {
        self.gc_heap.alloc_uninit_array(ty, len, layout)
    }

    /// Deallocate an array that was allocated but never fully initialized.
    pub fn dealloc_uninit_array(&mut self, arrayref: VMArrayRef) {
        self.gc_heap.dealloc_uninit_array(arrayref);
    }

    /// Get the length, in elements, of the given array.
    pub fn array_len(&self, arrayref: &VMArrayRef) -> u32 {
        self.gc_heap.array_len(arrayref)
    }
}