wasmtime/runtime/vm/gc/enabled/null.rs

//! The "null" garbage collector: objects are bump-allocated out of the GC
//! heap and never reclaimed, and a collection is a no-op. When the bump
//! cursor reaches the end of the heap, allocation fails until the heap's
//! backing memory is grown.

use super::*;
use crate::{
    Engine,
    prelude::*,
    vm::{
        ExternRefHostDataId, ExternRefHostDataTable, GarbageCollection, GcHeap, GcHeapObject,
        GcProgress, GcRootsIter, GcRuntime, SendSyncUnsafeCell, TypedGcRef, VMGcHeader, VMGcRef,
        VMMemoryDefinition,
    },
};
use core::ptr::NonNull;
use core::{alloc::Layout, any::Any, num::NonZeroU32};
use wasmtime_environ::{
    GcArrayLayout, GcStructLayout, GcTypeLayouts, VMGcKind, VMSharedTypeIndex,
    null::NullTypeLayouts,
};

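/// The null collector: a `GcRuntime` whose heaps only bump-allocate and never
/// actually collect anything.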
#[derive(Default)]
pub struct NullCollector {
    layouts: NullTypeLayouts,
}

unsafe impl GcRuntime for NullCollector {
    fn layouts(&self) -> &dyn GcTypeLayouts {
        &self.layouts
    }

    fn new_gc_heap(&self, _: &Engine) -> Result<Box<dyn GcHeap>> {
        let heap = NullHeap::new()?;
        Ok(Box::new(heap) as _)
    }
}

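/// A GC "heap" for the null collector: a bump-allocation cursor plus the
/// linear memory that backs the heap.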
#[repr(C)]
struct NullHeap {
    /// The offset at which the next allocation will be placed. It is only
    /// ever bumped upwards; nothing is ever freed. Kept in an `UnsafeCell`
    /// because it is also exposed to compiled code via `vmctx_gc_heap_data`.
    next: SendSyncUnsafeCell<NonZeroU32>,

    /// The number of no-GC scopes currently in effect.
    no_gc_count: usize,

    /// The linear memory backing this heap, present only while attached.
    memory: Option<crate::vm::Memory>,
}

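/// The in-heap representation of an array object for the null collector: the
/// common GC header followed by the array's length.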
#[repr(C)]
struct VMNullArrayHeader {
    header: VMGcHeader,
    length: u32,
}

unsafe impl GcHeapObject for VMNullArrayHeader {
    #[inline]
    fn is(header: &VMGcHeader) -> bool {
        header.kind() == VMGcKind::ArrayRef
    }
}

impl VMNullArrayHeader {
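    /// View the given array reference as a typed reference to this header,
    /// asserting (in debug builds) that it really is a null-collector array.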
    fn typed_ref<'a>(
        gc_heap: &NullHeap,
        array: &'a VMArrayRef,
    ) -> &'a TypedGcRef<VMNullArrayHeader> {
        let gc_ref = array.as_gc_ref();
        debug_assert!(gc_ref.is_typed::<VMNullArrayHeader>(gc_heap));
        gc_ref.as_typed_unchecked()
    }
}

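/// The in-heap representation of an `externref` for the null collector: the
/// common GC header plus the ID of the associated entry in the
/// `ExternRefHostDataTable`.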
#[repr(C)]
struct VMNullExternRef {
    header: VMGcHeader,
    host_data: ExternRefHostDataId,
}

unsafe impl GcHeapObject for VMNullExternRef {
    #[inline]
    fn is(header: &VMGcHeader) -> bool {
        header.kind() == VMGcKind::ExternRef
    }
}

impl VMNullExternRef {
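    /// View the given `externref` as a typed reference to this header,
    /// asserting (in debug builds) that it really is a null-collector
    /// `externref`.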
    fn typed_ref<'a>(
        gc_heap: &NullHeap,
        externref: &'a VMExternRef,
    ) -> &'a TypedGcRef<VMNullExternRef> {
        let gc_ref = externref.as_gc_ref();
        debug_assert!(gc_ref.is_typed::<VMNullExternRef>(gc_heap));
        gc_ref.as_typed_unchecked()
    }
}

impl NullHeap {
    /// Construct a new, unattached heap.
    fn new() -> Result<Self> {
        Ok(Self {
            no_gc_count: 0,
            // `u32::MAX` is a placeholder used while the heap has no backing
            // memory; `attach` resets the cursor before any allocation.
            next: SendSyncUnsafeCell::new(NonZeroU32::new(u32::MAX).unwrap()),
            memory: None,
        })
    }

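    /// Bump-allocate space for an object with the given header and layout.
    ///
    /// Returns:
    ///
    /// * `Err(_)` if the request can never be satisfied (e.g. the object is
    ///   too large for the heap's 32-bit address space).
    /// * `Ok(Err(n))` if the heap is currently out of space, where `n` is the
    ///   size in bytes of the attempted allocation.
    /// * `Ok(Ok(gc_ref))` on success.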
    fn alloc(&mut self, mut header: VMGcHeader, layout: Layout) -> Result<Result<VMGcRef, u64>> {
        debug_assert!(layout.size() >= core::mem::size_of::<VMGcHeader>());
        debug_assert!(layout.align() >= core::mem::align_of::<VMGcHeader>());

        // The object's size must fit in the header's unused bits, where it is
        // stashed for `object_size` to read back out.
        let size = match u32::try_from(layout.size()).ok().and_then(|size| {
            if VMGcKind::value_fits_in_unused_bits(size) {
                Some(size)
            } else {
                None
            }
        }) {
            Some(size) => size,
            None => return Err(crate::Trap::AllocationTooLarge.into()),
        };

        let next = *self.next.get_mut();

        // Round the bump cursor up to this object's alignment.
        let aligned = match u32::try_from(layout.align())
            .ok()
            .and_then(|align| next.get().checked_next_multiple_of(align))
        {
            Some(aligned) => aligned,
            None => return Err(crate::Trap::AllocationTooLarge.into()),
        };

        // Check that the object fits within the heap's current size; if not,
        // report the size of the failed allocation.
        let end_of_object = match aligned.checked_add(size) {
            Some(end) => end,
            None => return Err(crate::Trap::AllocationTooLarge.into()),
        };
        let len = self.memory.as_ref().unwrap().byte_size();
        let len = u32::try_from(len).unwrap_or(u32::MAX);
        if end_of_object > len {
            return Ok(Err(u64::try_from(layout.size()).unwrap()));
        }

        // Commit the bump: the next allocation starts where this object ends.
        *self.next.get_mut() = NonZeroU32::new(end_of_object).unwrap();

        let aligned = NonZeroU32::new(aligned).unwrap();
        let gc_ref = VMGcRef::from_heap_index(aligned).unwrap();

        // Stash the object's size in the header's unused bits and write the
        // header into the heap.
        debug_assert_eq!(header.reserved_u27(), 0);
        header.set_reserved_u27(size);
        *self.header_mut(&gc_ref) = header;

        Ok(Ok(gc_ref))
    }
}

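// The `GcHeap` implementation proper: allocation is the bump allocator above,
// GC reference writes and clones need no barriers, and `gc` itself is a no-op.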
unsafe impl GcHeap for NullHeap {
    fn is_attached(&self) -> bool {
        self.memory.is_some()
    }

    fn attach(&mut self, memory: crate::vm::Memory) {
        assert!(!self.is_attached());
        self.memory = Some(memory);
        self.next = SendSyncUnsafeCell::new(NonZeroU32::new(1).unwrap());
    }

    fn detach(&mut self) -> crate::vm::Memory {
        assert!(self.is_attached());

        let NullHeap {
            next,
            no_gc_count,
            memory,
        } = self;

        // Reset the heap back to its unattached state.
        *next.get_mut() = NonZeroU32::new(u32::MAX).unwrap();
        *no_gc_count = 0;

        memory.take().unwrap()
    }

    fn as_any(&self) -> &dyn Any {
        self as _
    }

    fn as_any_mut(&mut self) -> &mut dyn Any {
        self as _
    }

    fn enter_no_gc_scope(&mut self) {
        self.no_gc_count += 1;
    }

    fn exit_no_gc_scope(&mut self) {
        self.no_gc_count -= 1;
    }

    unsafe fn take_memory(&mut self) -> crate::vm::Memory {
        debug_assert!(self.is_attached());
        self.memory.take().unwrap()
    }

    unsafe fn replace_memory(&mut self, memory: crate::vm::Memory, _delta_bytes_grown: u64) {
        debug_assert!(self.memory.is_none());
        self.memory = Some(memory);
    }

    fn vmmemory(&self) -> VMMemoryDefinition {
        debug_assert!(self.is_attached());
        self.memory.as_ref().unwrap().vmmemory()
    }

    fn clone_gc_ref(&mut self, gc_ref: &VMGcRef) -> VMGcRef {
        gc_ref.unchecked_copy()
    }

    fn write_gc_ref(
        &mut self,
        _host_data_table: &mut ExternRefHostDataTable,
        destination: &mut Option<VMGcRef>,
        source: Option<&VMGcRef>,
    ) {
        *destination = source.map(|s| s.unchecked_copy());
    }

    fn expose_gc_ref_to_wasm(&mut self, _gc_ref: VMGcRef) {
        // Nothing to do here: this collector never collects, so there is no
        // bookkeeping to perform when a GC reference escapes to Wasm.
    }

    fn alloc_externref(
        &mut self,
        host_data: ExternRefHostDataId,
    ) -> Result<Result<VMExternRef, u64>> {
        let gc_ref = match self.alloc(VMGcHeader::externref(), Layout::new::<VMNullExternRef>())? {
            Ok(r) => r,
            Err(bytes_needed) => return Ok(Err(bytes_needed)),
        };
        self.index_mut::<VMNullExternRef>(gc_ref.as_typed_unchecked())
            .host_data = host_data;
        Ok(Ok(gc_ref.into_externref_unchecked()))
    }

    fn externref_host_data(&self, externref: &VMExternRef) -> ExternRefHostDataId {
        let typed_ref = VMNullExternRef::typed_ref(self, externref);
        self.index(typed_ref).host_data
    }

    fn object_size(&self, gc_ref: &VMGcRef) -> usize {
        let size = self.header(gc_ref).reserved_u27();
        usize::try_from(size).unwrap()
    }

    fn header(&self, gc_ref: &VMGcRef) -> &VMGcHeader {
        self.index(gc_ref.as_typed_unchecked())
    }

    fn header_mut(&mut self, gc_ref: &VMGcRef) -> &mut VMGcHeader {
        self.index_mut(gc_ref.as_typed_unchecked())
    }

    fn alloc_raw(&mut self, header: VMGcHeader, layout: Layout) -> Result<Result<VMGcRef, u64>> {
        self.alloc(header, layout)
    }

    fn alloc_uninit_struct(
        &mut self,
        ty: VMSharedTypeIndex,
        layout: &GcStructLayout,
    ) -> Result<Result<VMStructRef, u64>> {
        self.alloc(
            VMGcHeader::from_kind_and_index(VMGcKind::StructRef, ty),
            layout.layout(),
        )
        .map(|r| r.map(|r| r.into_structref_unchecked()))
    }

    fn dealloc_uninit_struct(&mut self, _struct_ref: VMStructRef) {}

    fn alloc_uninit_array(
        &mut self,
        ty: VMSharedTypeIndex,
        length: u32,
        layout: &GcArrayLayout,
    ) -> Result<Result<VMArrayRef, u64>> {
        self.alloc(
            VMGcHeader::from_kind_and_index(VMGcKind::ArrayRef, ty),
            layout.layout(length),
        )
        .map(|r| {
            r.map(|r| {
                self.index_mut::<VMNullArrayHeader>(r.as_typed_unchecked())
                    .length = length;
                r.into_arrayref_unchecked()
            })
        })
    }

    fn dealloc_uninit_array(&mut self, _array_ref: VMArrayRef) {}

    fn array_len(&self, arrayref: &VMArrayRef) -> u32 {
        let arrayref = VMNullArrayHeader::typed_ref(self, arrayref);
        self.index(arrayref).length
    }

    fn gc<'a>(
        &'a mut self,
        _roots: GcRootsIter<'a>,
        _host_data_table: &'a mut ExternRefHostDataTable,
    ) -> Box<dyn GarbageCollection<'a> + 'a> {
        assert_eq!(self.no_gc_count, 0, "Cannot GC inside a no-GC scope!");
        Box::new(NullCollection {})
    }

    unsafe fn vmctx_gc_heap_data(&self) -> NonNull<u8> {
        // The only per-heap data exposed to compiled code is the bump cursor
        // itself.
        let ptr_to_next: *mut NonZeroU32 = self.next.get();
        NonNull::new(ptr_to_next).unwrap().cast()
    }
}

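/// The degenerate "collection" returned by `NullHeap::gc`: it is complete
/// before it even starts.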
struct NullCollection {}

impl<'a> GarbageCollection<'a> for NullCollection {
    fn collect_increment(&mut self) -> GcProgress {
        GcProgress::Complete
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn vm_gc_null_header_size_align() {
        assert_eq!(
            (wasmtime_environ::null::HEADER_SIZE as usize),
            core::mem::size_of::<VMGcHeader>()
        );
        assert_eq!(
            (wasmtime_environ::null::HEADER_ALIGN as usize),
            core::mem::align_of::<VMGcHeader>()
        );
    }

    #[test]
    fn vm_null_array_header_length_offset() {
        assert_eq!(
            wasmtime_environ::null::ARRAY_LENGTH_OFFSET,
            u32::try_from(core::mem::offset_of!(VMNullArrayHeader, length)).unwrap(),
        );
    }

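    #[test]
    fn vm_null_externref_header_kind() {
        // A small added sanity check, not part of the original test suite:
        // headers built via `VMGcHeader::externref()` must be recognized by
        // `VMNullExternRef::is`, which `typed_ref` relies on in debug builds.
        assert!(VMNullExternRef::is(&VMGcHeader::externref()));
    }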
}