embedding/allocator.rs

//! An allocator definition for this embedding.
//!
//! The Rust standard library and Wasmtime require a memory allocator to be
//! configured. For custom embeddings of Wasmtime this is likely already
//! defined elsewhere in the system, in which case that should be used. This
//! file contains an example implementation using the Rust `dlmalloc` crate
//! with memory created by `wasmtime_*` platform symbols. This provides a file
//! that manages memory without any extra runtime dependencies, but it is just
//! an example.
//!
//! Allocators in Rust are configured with the `#[global_allocator]` attribute
//! and an implementation of the `GlobalAlloc` trait. The same hooks should be
//! used when hooking up to an allocator defined elsewhere in the system.
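//!
//! As a minimal sketch of that alternative, assuming hypothetical
//! `system_alloc`/`system_free` symbols provided by the surrounding platform,
//! hooking up an external allocator could look something like:
//!
//! ```ignore
//! use core::alloc::{GlobalAlloc, Layout};
//!
//! unsafe extern "C" {
//!     fn system_alloc(size: usize, align: usize) -> *mut u8;
//!     fn system_free(ptr: *mut u8, size: usize, align: usize);
//! }
//!
//! struct SystemAllocator;
//!
//! unsafe impl GlobalAlloc for SystemAllocator {
//!     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
//!         // Defer entirely to the platform's existing allocator.
//!         unsafe { system_alloc(layout.size(), layout.align()) }
//!     }
//!
//!     unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
//!         unsafe { system_free(ptr, layout.size(), layout.align()) }
//!     }
//! }
//!
//! #[global_allocator]
//! static ALLOCATOR: SystemAllocator = SystemAllocator;
//! ```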

use alloc::alloc::{GlobalAlloc, Layout};
use core::cell::UnsafeCell;
use core::ops::{Deref, DerefMut};
use core::ptr;
use core::sync::atomic::{
    AtomicBool,
    Ordering::{Acquire, Release},
};
use dlmalloc::Dlmalloc;

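// Register dlmalloc, backed by the static `INITIAL_HEAP` below, as the global
// allocator for every Rust heap allocation in this embedding. The `Mutex` is
// the minimal `try_lock`-only lock defined at the bottom of this file.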
#[global_allocator]
static MALLOC: MyGlobalDmalloc = MyGlobalDmalloc {
    dlmalloc: Mutex::new(Dlmalloc::new_with_allocator(MyAllocator)),
};

struct MyGlobalDmalloc {
    dlmalloc: Mutex<Dlmalloc<MyAllocator>>,
}

struct MyAllocator;

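// Forward the standard `GlobalAlloc` entry points to dlmalloc. Note that
// `try_lock().unwrap()` panics if the lock is ever contended, so this relies
// on allocations never racing; a "real" embedding would swap in a real
// blocking mutex (see the `Mutex` type at the bottom of this file).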
unsafe impl GlobalAlloc for MyGlobalDmalloc {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        unsafe {
            self.dlmalloc
                .try_lock()
                .unwrap()
                .malloc(layout.size(), layout.align())
        }
    }

    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        unsafe {
            self.dlmalloc
                .try_lock()
                .unwrap()
                .calloc(layout.size(), layout.align())
        }
    }

    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        unsafe {
            self.dlmalloc
                .try_lock()
                .unwrap()
                .realloc(ptr, layout.size(), layout.align(), new_size)
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        unsafe {
            self.dlmalloc
                .try_lock()
                .unwrap()
                .free(ptr, layout.size(), layout.align())
        }
    }
}

#[cfg(not(feature = "wasi"))]
const INITIAL_HEAP_SIZE: usize = 64 * 1024;
// The wasi component requires a larger heap than the module tests.
#[cfg(feature = "wasi")]
const INITIAL_HEAP_SIZE: usize = 4 * 1024 * 1024;

static mut INITIAL_HEAP: [u8; INITIAL_HEAP_SIZE] = [0; INITIAL_HEAP_SIZE];
static mut INITIAL_HEAP_ALLOCATED: bool = false;

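// The system allocator handed to dlmalloc. It hands out the single static
// heap above exactly once and refuses everything else: the region is never
// grown, remapped, or released, so allocation fails once dlmalloc has
// exhausted `INITIAL_HEAP`.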
unsafe impl dlmalloc::Allocator for MyAllocator {
    fn alloc(&self, _size: usize) -> (*mut u8, usize, u32) {
        unsafe {
            if INITIAL_HEAP_ALLOCATED {
                (ptr::null_mut(), 0, 0)
            } else {
                INITIAL_HEAP_ALLOCATED = true;
                ((&raw mut INITIAL_HEAP).cast(), INITIAL_HEAP_SIZE, 0)
            }
        }
    }

    fn remap(&self, _ptr: *mut u8, _old: usize, _new: usize, _can_move: bool) -> *mut u8 {
        core::ptr::null_mut()
    }

    fn free_part(&self, _ptr: *mut u8, _old: usize, _new: usize) -> bool {
        false
    }

    fn free(&self, _ptr: *mut u8, _size: usize) -> bool {
        false
    }

    fn can_release_part(&self, _flags: u32) -> bool {
        false
    }

    fn allocates_zeros(&self) -> bool {
        true
    }

    fn page_size(&self) -> usize {
        4096
    }
}

// Simple mutex which only supports `try_lock` at this time. This would
// probably be replaced with a "real" mutex in a "real" embedding.
struct Mutex<T> {
    data: UnsafeCell<T>,
    locked: AtomicBool,
}

unsafe impl<T: Send> Send for Mutex<T> {}
unsafe impl<T: Send> Sync for Mutex<T> {}

impl<T> Mutex<T> {
    const fn new(val: T) -> Mutex<T> {
        Mutex {
            data: UnsafeCell::new(val),
            locked: AtomicBool::new(false),
        }
    }

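    // `swap(true, Acquire)` returns the previous value of the flag: `true`
    // means the lock is already held and acquisition fails, `false` means we
    // just took it. The matching `Release` store in `MutexGuard::drop`
    // publishes the protected data to the next locker.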
    fn try_lock(&self) -> Option<impl DerefMut<Target = T> + '_> {
        if self.locked.swap(true, Acquire) {
            None
        } else {
            Some(MutexGuard { lock: self })
        }
    }
}

struct MutexGuard<'a, T> {
    lock: &'a Mutex<T>,
}

impl<T> Deref for MutexGuard<'_, T> {
    type Target = T;

    fn deref(&self) -> &T {
        unsafe { &*self.lock.data.get() }
    }
}

impl<T> DerefMut for MutexGuard<'_, T> {
    fn deref_mut(&mut self) -> &mut T {
        unsafe { &mut *self.lock.data.get() }
    }
}

impl<T> Drop for MutexGuard<'_, T> {
    fn drop(&mut self) {
        self.lock.locked.store(false, Release);
    }
}