wasmtime/runtime/vm/gc/enabled/null.rs

//! The null collector.
//!
//! The null collector bump allocates objects until it runs out of space, at
//! which point it returns an out-of-memory error. It never collects garbage.
//! It does not require any GC barriers.
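//!
//! A minimal sketch of the bump-allocation scheme (simplified; the real
//! `NullHeap::alloc` below additionally checks that the object's size fits
//! in the GC header's unused bits):
//!
//! ```ignore
//! fn bump(next: &mut u32, align: u32, size: u32, heap_len: u32) -> Option<u32> {
//!     let start = next.checked_next_multiple_of(align)?; // align the finger
//!     let end = start.checked_add(size)?;                // end of the new object
//!     if end > heap_len {
//!         return None; // no space left: out of memory
//!     }
//!     *next = end; // advance the finger
//!     Some(start) // heap index of the new object
//! }
//! ```
//!
//! For example, with `next == 1`, an 8-byte alignment, and a 24-byte object,
//! the object lands at heap index 8 and `next` advances to 32.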

use super::*;
use crate::{
    prelude::*,
    vm::{
        mmap::AlignedLength, ExternRefHostDataId, ExternRefHostDataTable, GarbageCollection,
        GcHeap, GcHeapObject, GcProgress, GcRootsIter, GcRuntime, Mmap, SendSyncUnsafeCell,
        TypedGcRef, VMGcHeader, VMGcRef,
    },
    GcHeapOutOfMemory,
};
use core::ptr::NonNull;
use core::{
    alloc::Layout,
    any::Any,
    cell::UnsafeCell,
    num::{NonZeroU32, NonZeroUsize},
};
use wasmtime_environ::{
    null::NullTypeLayouts, GcArrayLayout, GcStructLayout, GcTypeLayouts, VMGcKind,
    VMSharedTypeIndex,
};

/// The null collector.
#[derive(Default)]
pub struct NullCollector {
    layouts: NullTypeLayouts,
}

unsafe impl GcRuntime for NullCollector {
    fn layouts(&self) -> &dyn GcTypeLayouts {
        &self.layouts
    }

    fn new_gc_heap(&self) -> Result<Box<dyn GcHeap>> {
        let heap = NullHeap::new()?;
        Ok(Box::new(heap) as _)
    }
}
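
// At the embedder level this collector is selected through the public
// `wasmtime` API; a hedged sketch (assuming the `gc-null` Cargo feature and
// the public `Collector::Null` option):
//
//     let mut config = wasmtime::Config::new();
//     config.wasm_gc(true);
//     config.collector(wasmtime::Collector::Null);
//     let engine = wasmtime::Engine::new(&config)?;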

/// A GC heap for the null collector.
#[repr(C)]
struct NullHeap {
    /// Bump-allocation finger within `1..self.heap.len()`.
    ///
    /// NB: this is an `UnsafeCell` because it is written to by compiled Wasm
    /// code.
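    ///
    /// The finger starts at 1 rather than 0 because heap indices are
    /// `NonZeroU32`s: index 0 is reserved to encode a null `VMGcRef`.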
    next: SendSyncUnsafeCell<NonZeroU32>,

    /// The number of active no-gc scopes at the current moment.
    no_gc_count: usize,

    /// The actual GC heap.
    heap: Mmap<AlignedLength>,
}

/// The common header for all arrays in the null collector.
#[repr(C)]
struct VMNullArrayHeader {
    header: VMGcHeader,
    length: u32,
}

unsafe impl GcHeapObject for VMNullArrayHeader {
    #[inline]
    fn is(header: &VMGcHeader) -> bool {
        header.kind() == VMGcKind::ArrayRef
    }
}

impl VMNullArrayHeader {
    fn typed_ref<'a>(
        gc_heap: &NullHeap,
        array: &'a VMArrayRef,
    ) -> &'a TypedGcRef<VMNullArrayHeader> {
        let gc_ref = array.as_gc_ref();
        debug_assert!(gc_ref.is_typed::<VMNullArrayHeader>(gc_heap));
        gc_ref.as_typed_unchecked()
    }
}

/// The representation of an `externref` in the null collector.
#[repr(C)]
struct VMNullExternRef {
    header: VMGcHeader,
    host_data: ExternRefHostDataId,
}

unsafe impl GcHeapObject for VMNullExternRef {
    #[inline]
    fn is(header: &VMGcHeader) -> bool {
        header.kind() == VMGcKind::ExternRef
    }
}

impl VMNullExternRef {
    /// Convert a generic `externref` to a typed reference to our concrete
    /// `externref` type.
    fn typed_ref<'a>(
        gc_heap: &NullHeap,
        externref: &'a VMExternRef,
    ) -> &'a TypedGcRef<VMNullExternRef> {
        let gc_ref = externref.as_gc_ref();
        debug_assert!(gc_ref.is_typed::<VMNullExternRef>(gc_heap));
        gc_ref.as_typed_unchecked()
    }
}

fn oom() -> Error {
    GcHeapOutOfMemory::new(()).into()
}

impl NullHeap {
    /// Construct a new, default heap for the null collector.
    fn new() -> Result<Self> {
        Self::with_capacity(super::DEFAULT_GC_HEAP_CAPACITY)
    }

    /// Create a new null-collector heap with the given capacity.
    fn with_capacity(capacity: usize) -> Result<Self> {
        let heap = Mmap::with_at_least(capacity)?;
        Ok(Self {
            no_gc_count: 0,
            next: SendSyncUnsafeCell::new(NonZeroU32::new(1).unwrap()),
            heap,
        })
    }

    fn alloc(&mut self, mut header: VMGcHeader, layout: Layout) -> Result<VMGcRef> {
        debug_assert!(layout.size() >= core::mem::size_of::<VMGcHeader>());
        debug_assert!(layout.align() >= core::mem::align_of::<VMGcHeader>());

        // Make sure that the requested allocation's size fits in the GC
        // header's unused bits.
        let size = match u32::try_from(layout.size()).ok().and_then(|size| {
            if VMGcKind::value_fits_in_unused_bits(size) {
                Some(size)
            } else {
                None
            }
        }) {
            Some(size) => size,
            None => return Err(crate::Trap::AllocationTooLarge.into()),
        };

        let next = *self.next.get_mut();

        // Round the bump pointer up to the layout's requested alignment.
        let aligned = match u32::try_from(layout.align())
            .ok()
            .and_then(|align| next.get().checked_next_multiple_of(align))
        {
            Some(aligned) => aligned,
            None => return Err(oom()),
        };

        // Check whether the allocation fits in the heap space we have left.
        let end_of_object = match aligned.checked_add(size) {
            Some(end) => end,
            None => return Err(oom()),
        };
        if u32::try_from(self.heap.len())
            .ok()
            .map_or(true, |heap_len| end_of_object > heap_len)
        {
            return Err(oom());
        }

        // Update the bump pointer, write the header, and return the GC ref.
        *self.next.get_mut() = NonZeroU32::new(end_of_object).unwrap();

        let aligned = NonZeroU32::new(aligned).unwrap();
        let gc_ref = VMGcRef::from_heap_index(aligned).unwrap();

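        // Stash the object's size in the header's otherwise-unused bits so
        // that `object_size` below can recover it without a side table.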
        debug_assert_eq!(header.reserved_u27(), 0);
        header.set_reserved_u27(size);
        *self.header_mut(&gc_ref) = header;

        Ok(gc_ref)
    }
}

unsafe impl GcHeap for NullHeap {
    fn as_any(&self) -> &dyn Any {
        self as _
    }

    fn as_any_mut(&mut self) -> &mut dyn Any {
        self as _
    }

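    // No-gc scopes are pure bookkeeping in the null collector: `gc` below
    // asserts that a collection is never requested while any scope is active.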
    fn enter_no_gc_scope(&mut self) {
        self.no_gc_count += 1;
    }

    fn exit_no_gc_scope(&mut self) {
        self.no_gc_count -= 1;
    }

    fn heap_slice(&self) -> &[UnsafeCell<u8>] {
        let ptr = self.heap.as_ptr().cast();
        let len = self.heap.len();
        unsafe { core::slice::from_raw_parts(ptr, len) }
    }

    fn heap_slice_mut(&mut self) -> &mut [u8] {
        let ptr = self.heap.as_mut_ptr();
        let len = self.heap.len();
        unsafe { core::slice::from_raw_parts_mut(ptr, len) }
    }

    fn clone_gc_ref(&mut self, gc_ref: &VMGcRef) -> VMGcRef {
        gc_ref.unchecked_copy()
    }

    fn write_gc_ref(
        &mut self,
        _host_data_table: &mut ExternRefHostDataTable,
        destination: &mut Option<VMGcRef>,
        source: Option<&VMGcRef>,
    ) {
        *destination = source.map(|s| s.unchecked_copy());
    }

    fn expose_gc_ref_to_wasm(&mut self, _gc_ref: VMGcRef) {
        // Don't need to do anything special here.
    }

    fn need_gc_before_entering_wasm(&self, _num_gc_refs: NonZeroUsize) -> bool {
        // Never need to GC before entering Wasm.
        false
    }

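    // An `externref`'s host data is not stored in the GC heap itself: the
    // heap object records only an `ExternRefHostDataId`, which keys into a
    // side `ExternRefHostDataTable`.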
    fn alloc_externref(&mut self, host_data: ExternRefHostDataId) -> Result<Option<VMExternRef>> {
        let gc_ref = self.alloc(VMGcHeader::externref(), Layout::new::<VMNullExternRef>())?;
        self.index_mut::<VMNullExternRef>(gc_ref.as_typed_unchecked())
            .host_data = host_data;
        Ok(Some(gc_ref.into_externref_unchecked()))
    }

    fn externref_host_data(&self, externref: &VMExternRef) -> ExternRefHostDataId {
        let typed_ref = VMNullExternRef::typed_ref(self, externref);
        self.index(typed_ref).host_data
    }

    fn object_size(&self, gc_ref: &VMGcRef) -> usize {
        let size = self.header(gc_ref).reserved_u27();
        usize::try_from(size).unwrap()
    }

    fn header(&self, gc_ref: &VMGcRef) -> &VMGcHeader {
        self.index(gc_ref.as_typed_unchecked())
    }

    fn header_mut(&mut self, gc_ref: &VMGcRef) -> &mut VMGcHeader {
        self.index_mut(gc_ref.as_typed_unchecked())
    }

    fn alloc_raw(&mut self, header: VMGcHeader, layout: Layout) -> Result<Option<VMGcRef>> {
        self.alloc(header, layout).map(Some)
    }

    fn alloc_uninit_struct(
        &mut self,
        ty: VMSharedTypeIndex,
        layout: &GcStructLayout,
    ) -> Result<Option<VMStructRef>> {
        let gc_ref = self.alloc(
            VMGcHeader::from_kind_and_index(VMGcKind::StructRef, ty),
            layout.layout(),
        )?;
        Ok(Some(gc_ref.into_structref_unchecked()))
    }

    fn dealloc_uninit_struct(&mut self, _struct_ref: VMStructRef) {}

    fn alloc_uninit_array(
        &mut self,
        ty: VMSharedTypeIndex,
        length: u32,
        layout: &GcArrayLayout,
    ) -> Result<Option<VMArrayRef>> {
        let gc_ref = self.alloc(
            VMGcHeader::from_kind_and_index(VMGcKind::ArrayRef, ty),
            layout.layout(length),
        )?;
        self.index_mut::<VMNullArrayHeader>(gc_ref.as_typed_unchecked())
            .length = length;
        Ok(Some(gc_ref.into_arrayref_unchecked()))
    }

    fn dealloc_uninit_array(&mut self, _array_ref: VMArrayRef) {}

    fn array_len(&self, arrayref: &VMArrayRef) -> u32 {
        let arrayref = VMNullArrayHeader::typed_ref(self, arrayref);
        self.index(arrayref).length
    }

    fn gc<'a>(
        &'a mut self,
        _roots: GcRootsIter<'a>,
        _host_data_table: &'a mut ExternRefHostDataTable,
    ) -> Box<dyn GarbageCollection<'a> + 'a> {
        assert_eq!(self.no_gc_count, 0, "Cannot GC inside a no-GC scope!");
        Box::new(NullCollection {})
    }

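    // Expose a pointer to the bump finger (`self.next`) so that compiled
    // Wasm code can allocate inline by reading and updating it directly;
    // this is why `next` is wrapped in an `UnsafeCell`.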
    unsafe fn vmctx_gc_heap_data(&self) -> NonNull<u8> {
        NonNull::new(self.next.get()).unwrap().cast()
    }

    #[cfg(feature = "pooling-allocator")]
    fn reset(&mut self) {
        let NullHeap {
            next,
            no_gc_count,
            heap: _,
        } = self;

        *next.get_mut() = NonZeroU32::new(1).unwrap();
        *no_gc_count = 0;
    }
}

struct NullCollection {}

impl<'a> GarbageCollection<'a> for NullCollection {
    fn collect_increment(&mut self) -> GcProgress {
        GcProgress::Complete
    }
}
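
// Callers drive a collection to completion by looping on `collect_increment`;
// a rough sketch (the actual driver lives elsewhere in the crate):
//
//     let mut collection = heap.gc(roots, host_data_table);
//     while let GcProgress::Continue = collection.collect_increment() {
//         // Yield between increments when collecting asynchronously.
//     }
//
// The null collection reports `Complete` on the first increment: there is
// never anything to do.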

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn vm_gc_null_header_size_align() {
        assert_eq!(
            (wasmtime_environ::null::HEADER_SIZE as usize),
            core::mem::size_of::<VMGcHeader>()
        );
        assert_eq!(
            (wasmtime_environ::null::HEADER_ALIGN as usize),
            core::mem::align_of::<VMGcHeader>()
        );
    }

    #[test]
    fn vm_null_array_header_length_offset() {
        assert_eq!(
            wasmtime_environ::null::ARRAY_LENGTH_OFFSET,
            u32::try_from(core::mem::offset_of!(VMNullArrayHeader, length)).unwrap(),
        );
    }
}