wasmtime/runtime/vm/gc.rs
#[cfg(feature = "gc")]
mod enabled;
#[cfg(feature = "gc")]
pub use enabled::*;

#[cfg(not(feature = "gc"))]
mod disabled;
#[cfg(not(feature = "gc"))]
pub use disabled::*;

mod func_ref;
mod gc_ref;
mod gc_runtime;
mod host_data;
mod i31;

pub use func_ref::*;
pub use gc_ref::*;
pub use gc_runtime::*;
pub use host_data::*;
pub use i31::*;

use crate::prelude::*;
use crate::runtime::vm::{GcHeapAllocationIndex, VMMemoryDefinition};
use core::any::Any;
use core::mem::MaybeUninit;
use core::{alloc::Layout, num::NonZeroU32};
use wasmtime_environ::{GcArrayLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};

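/// GC-related state that is one-to-one with a `wasmtime::Store`.
///
/// A rough usage sketch; `heap` and `index` stand in for a `GcHeap`
/// implementation and its allocation index, which this module does not
/// provide (hence `ignore`):
///
/// ```ignore
/// let mut gc_store = GcStore::new(index, heap);
///
/// // Allocate an `externref` wrapping arbitrary host data.
/// match gc_store.alloc_externref(Box::new("hello"))? {
///     Ok(externref) => { /* expose `externref` to Wasm, etc. */ }
///     Err((host_data, bytes_needed)) => {
///         // Out of space: collect garbage and retry the allocation.
///     }
/// }
/// ```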
pub struct GcStore {
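    /// This GC heap's allocation index.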
    pub allocation_index: GcHeapAllocationIndex,

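    /// The actual GC heap itself.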
    pub gc_heap: Box<dyn GcHeap>,

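    /// The `externref` host data table for this GC heap.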
    pub host_data_table: ExternRefHostDataTable,

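    /// The function-references table for this GC heap.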
    pub func_ref_table: FuncRefTable,
}

impl GcStore {
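    /// Create a new `GcStore` wrapping the given heap.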
    pub fn new(allocation_index: GcHeapAllocationIndex, gc_heap: Box<dyn GcHeap>) -> Self {
        let host_data_table = ExternRefHostDataTable::default();
        let func_ref_table = FuncRefTable::default();
        Self {
            allocation_index,
            gc_heap,
            host_data_table,
            func_ref_table,
        }
    }

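    /// Get the `VMMemoryDefinition` that describes this GC heap's memory.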
    pub fn vmmemory_definition(&self) -> VMMemoryDefinition {
        self.gc_heap.vmmemory()
    }

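    /// Synchronously perform garbage collection within this heap, tracing
    /// from the given roots.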
    pub fn gc(&mut self, roots: GcRootsIter<'_>) {
        let mut collection = self.gc_heap.gc(roots, &mut self.host_data_table);
        collection.collect();
    }

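    /// Asynchronously perform garbage collection within this heap.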
    #[cfg(feature = "async")]
    pub async fn gc_async(&mut self, roots: GcRootsIter<'_>) {
        let collection = self.gc_heap.gc(roots, &mut self.host_data_table);
        collect_async(collection).await;
    }

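    /// Get the kind of the given GC reference.
    ///
    /// Must not be called with an `i31` reference, which has no heap header.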
    pub fn kind(&self, gc_ref: &VMGcRef) -> VMGcKind {
        debug_assert!(!gc_ref.is_i31());
        self.header(gc_ref).kind()
    }

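    /// Get the header of the given GC reference.
    ///
    /// Must not be called with an `i31` reference, which has no heap header.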
    pub fn header(&self, gc_ref: &VMGcRef) -> &VMGcHeader {
        debug_assert!(!gc_ref.is_i31());
        self.gc_heap.header(gc_ref)
    }

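    /// Clone the given GC reference, running GC barriers as necessary.
    ///
    /// `i31` references are not heap-allocated, so they are simply copied
    /// without involving the GC heap.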
    pub fn clone_gc_ref(&mut self, gc_ref: &VMGcRef) -> VMGcRef {
        if gc_ref.is_i31() {
            gc_ref.unchecked_copy()
        } else {
            self.gc_heap.clone_gc_ref(gc_ref)
        }
    }

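    /// Write the `source` GC reference into the uninitialized `destination`
    /// slot, running GC write barriers as necessary.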
    pub fn init_gc_ref(
        &mut self,
        destination: &mut MaybeUninit<Option<VMGcRef>>,
        source: Option<&VMGcRef>,
    ) {
        // Initialize the destination slot to `None`; after that it is safe to
        // treat this as a regular write to an initialized slot.
        let destination = destination.write(None);
        self.write_gc_ref(destination, source);
    }

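    /// Write the `source` GC reference into the `destination` slot, running
    /// GC write barriers as necessary.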
    pub fn write_gc_ref(&mut self, destination: &mut Option<VMGcRef>, source: Option<&VMGcRef>) {
        // If neither the source nor the destination actually points into the
        // GC heap (i.e. each is either null or an `i31`) then we can skip the
        // heap's write barrier.
        if destination.as_ref().map_or(true, |d| d.is_i31())
            && source.map_or(true, |s| s.is_i31())
        {
            *destination = source.map(|s| s.unchecked_copy());
            return;
        }

        self.gc_heap
            .write_gc_ref(&mut self.host_data_table, destination, source);
    }

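    /// Drop the given GC reference, running GC drop barriers as necessary.
    ///
    /// `i31` references are not heap-allocated and need no barrier.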
    pub fn drop_gc_ref(&mut self, gc_ref: VMGcRef) {
        if !gc_ref.is_i31() {
            self.gc_heap.drop_gc_ref(&mut self.host_data_table, gc_ref);
        }
    }

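    /// Hook to call whenever a GC reference is about to be exposed to Wasm.
    ///
    /// Returns the raw `u32` representation of the reference, ready to be
    /// passed to Wasm.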
    #[must_use]
    pub fn expose_gc_ref_to_wasm(&mut self, gc_ref: VMGcRef) -> NonZeroU32 {
        let raw = gc_ref.as_raw_non_zero_u32();
        if !gc_ref.is_i31() {
            log::trace!("exposing GC ref to Wasm: {gc_ref:p}");
            self.gc_heap.expose_gc_ref_to_wasm(gc_ref);
        }
        raw
    }

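    /// Allocate a new `externref` wrapping the given host data.
    ///
    /// Returns:
    ///
    /// * `Ok(Ok(_))`: the allocation succeeded.
    ///
    /// * `Ok(Err((value, n)))`: the heap is currently out of space, but
    ///   collecting garbage and retrying may succeed. The host data is handed
    ///   back to the caller along with `n`, the number of bytes the failed
    ///   allocation needed.
    ///
    /// * `Err(_)`: an unrecoverable allocation failure.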
    pub fn alloc_externref(
        &mut self,
        value: Box<dyn Any + Send + Sync>,
    ) -> Result<Result<VMExternRef, (Box<dyn Any + Send + Sync>, u64)>> {
        let host_data_id = self.host_data_table.alloc(value);
        match self.gc_heap.alloc_externref(host_data_id)? {
            Ok(x) => Ok(Ok(x)),
            Err(n) => Ok(Err((self.host_data_table.dealloc(host_data_id), n))),
        }
    }

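    /// Get a shared borrow of the given `externref`'s host data.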
    pub fn externref_host_data(&self, externref: &VMExternRef) -> &(dyn Any + Send + Sync) {
        let host_data_id = self.gc_heap.externref_host_data(externref);
        self.host_data_table.get(host_data_id)
    }

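    /// Get an exclusive borrow of the given `externref`'s host data.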
    pub fn externref_host_data_mut(
        &mut self,
        externref: &VMExternRef,
    ) -> &mut (dyn Any + Send + Sync) {
        let host_data_id = self.gc_heap.externref_host_data(externref);
        self.host_data_table.get_mut(host_data_id)
    }

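    /// Allocate a raw, uninitialized GC object with the given header and
    /// layout.
    ///
    /// The nested `Err(n)` means the heap is out of space, but collecting
    /// garbage and retrying may succeed; `n` is the number of bytes needed.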
    pub fn alloc_raw(
        &mut self,
        header: VMGcHeader,
        layout: Layout,
    ) -> Result<Result<VMGcRef, u64>> {
        self.gc_heap.alloc_raw(header, layout)
    }

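    /// Allocate an uninitialized struct of the given type and layout.
    ///
    /// The caller is responsible for initializing the struct's fields;
    /// `dealloc_uninit_struct` can be used to back out an allocation whose
    /// initialization fails partway through. The nested `Err(n)` has the same
    /// GC-and-retry meaning as in `alloc_raw`.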
    pub fn alloc_uninit_struct(
        &mut self,
        ty: VMSharedTypeIndex,
        layout: &GcStructLayout,
    ) -> Result<Result<VMStructRef, u64>> {
        self.gc_heap.alloc_uninit_struct(ty, layout)
    }

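    /// Deallocate a struct that was allocated with `alloc_uninit_struct` but
    /// never fully initialized.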
    pub fn dealloc_uninit_struct(&mut self, structref: VMStructRef) {
        self.gc_heap.dealloc_uninit_struct(structref);
    }

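    /// Get exclusive access to the given object's data.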
    pub fn gc_object_data(&mut self, gc_ref: &VMGcRef) -> &mut VMGcObjectData {
        self.gc_heap.gc_object_data_mut(gc_ref)
    }

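    /// Get exclusive access to two distinct objects' data at the same time.
    ///
    /// Panics if `a` and `b` are the same reference.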
    pub fn gc_object_data_pair(
        &mut self,
        a: &VMGcRef,
        b: &VMGcRef,
    ) -> (&mut VMGcObjectData, &mut VMGcObjectData) {
        assert_ne!(a, b);
        self.gc_heap.gc_object_data_pair(a, b)
    }

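    /// Allocate an uninitialized array of the given type, length, and layout.
    ///
    /// As with `alloc_uninit_struct`, the caller is responsible for
    /// initializing the array's elements, and the nested `Err(n)` means that
    /// a GC and retry may succeed.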
    pub fn alloc_uninit_array(
        &mut self,
        ty: VMSharedTypeIndex,
        len: u32,
        layout: &GcArrayLayout,
    ) -> Result<Result<VMArrayRef, u64>> {
        self.gc_heap.alloc_uninit_array(ty, len, layout)
    }

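    /// Deallocate an array that was allocated with `alloc_uninit_array` but
    /// never fully initialized.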
    pub fn dealloc_uninit_array(&mut self, arrayref: VMArrayRef) {
        self.gc_heap.dealloc_uninit_array(arrayref);
    }

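    /// Get the length of the given array.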
    pub fn array_len(&self, arrayref: &VMArrayRef) -> u32 {
        self.gc_heap.array_len(arrayref)
    }
}