use alloc::alloc::{GlobalAlloc, Layout};
use core::cell::UnsafeCell;
use core::ops::{Deref, DerefMut};
use core::ptr;
use core::sync::atomic::{
    AtomicBool,
    Ordering::{Acquire, Release},
};
use dlmalloc::Dlmalloc;

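// The global allocator: a `dlmalloc` heap fed by the one-shot `MyAllocator`
// backend below and guarded by the minimal try-lock `Mutex` at the bottom of
// this listing.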
#[global_allocator]
static MALLOC: MyGlobalDmalloc = MyGlobalDmalloc {
    dlmalloc: Mutex::new(Dlmalloc::new_with_allocator(MyAllocator)),
};

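// `MyGlobalDmalloc` owns the dlmalloc state; the zero-sized `MyAllocator` is
// the backend dlmalloc asks for more memory.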
struct MyGlobalDmalloc {
    dlmalloc: Mutex<Dlmalloc<MyAllocator>>,
}

struct MyAllocator;

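// Each `GlobalAlloc` entry point takes the lock and forwards to the matching
// dlmalloc call. `try_lock().unwrap()` assumes allocation is never reentrant
// or contended (e.g. a single-threaded WebAssembly target); a second
// concurrent caller would panic rather than deadlock.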
unsafe impl GlobalAlloc for MyGlobalDmalloc {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.dlmalloc
            .try_lock()
            .unwrap()
            .malloc(layout.size(), layout.align())
    }

    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        self.dlmalloc
            .try_lock()
            .unwrap()
            .calloc(layout.size(), layout.align())
    }

    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        self.dlmalloc
            .try_lock()
            .unwrap()
            .realloc(ptr, layout.size(), layout.align(), new_size)
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.dlmalloc
            .try_lock()
            .unwrap()
            .free(ptr, layout.size(), layout.align())
    }
}

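// All the memory dlmalloc will ever manage: one fixed static buffer, sized
// per target, handed out exactly once.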
#[cfg(not(feature = "wasi"))]
const INITIAL_HEAP_SIZE: usize = 64 * 1024;
#[cfg(feature = "wasi")]
const INITIAL_HEAP_SIZE: usize = 4 * 1024 * 1024;

static mut INITIAL_HEAP: [u8; INITIAL_HEAP_SIZE] = [0; INITIAL_HEAP_SIZE];
static mut INITIAL_HEAP_ALLOCATED: bool = false;

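// The backend dlmalloc calls when it needs memory from the "system". The
// first request receives the entire static heap; every later request, and
// every attempt to grow, shrink, or release memory, is refused.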
unsafe impl dlmalloc::Allocator for MyAllocator {
    fn alloc(&self, _size: usize) -> (*mut u8, usize, u32) {
        unsafe {
            if INITIAL_HEAP_ALLOCATED {
                // The static heap has already been handed out; there is
                // nothing left to give.
                (ptr::null_mut(), 0, 0)
            } else {
                INITIAL_HEAP_ALLOCATED = true;
                ((&raw mut INITIAL_HEAP).cast(), INITIAL_HEAP_SIZE, 0)
            }
        }
    }

    fn remap(&self, _ptr: *mut u8, _old: usize, _new: usize, _can_move: bool) -> *mut u8 {
        ptr::null_mut()
    }

    fn free_part(&self, _ptr: *mut u8, _old: usize, _new: usize) -> bool {
        false
    }

    fn free(&self, _ptr: *mut u8, _size: usize) -> bool {
        false
    }

    fn can_release_part(&self, _flags: u32) -> bool {
        false
    }

    // `INITIAL_HEAP` is statically zeroed, so dlmalloc may skip zeroing in
    // `calloc`.
    fn allocates_zeros(&self) -> bool {
        true
    }

    fn page_size(&self) -> usize {
        4096
    }
}

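// A minimal lock for the allocator state: no blocking, no poisoning, just an
// atomic flag with a `try_lock` that fails instead of waiting.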
struct Mutex<T> {
    data: UnsafeCell<T>,
    locked: AtomicBool,
}

unsafe impl<T: Send> Send for Mutex<T> {}
unsafe impl<T: Send> Sync for Mutex<T> {}

impl<T> Mutex<T> {
    const fn new(val: T) -> Mutex<T> {
        Mutex {
            data: UnsafeCell::new(val),
            locked: AtomicBool::new(false),
        }
    }

    fn try_lock(&self) -> Option<impl DerefMut<Target = T> + '_> {
        // `swap` returns the previous value: `true` means the lock is already
        // held. `Acquire` pairs with the `Release` store in `Drop` below.
        if self.locked.swap(true, Acquire) {
            None
        } else {
            Some(MutexGuard { lock: self })
        }
    }
}

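// RAII guard: dereferences to the protected data and releases the lock on
// drop.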
struct MutexGuard<'a, T> {
    lock: &'a Mutex<T>,
}

impl<T> Deref for MutexGuard<'_, T> {
    type Target = T;

    fn deref(&self) -> &T {
        unsafe { &*self.lock.data.get() }
    }
}

impl<T> DerefMut for MutexGuard<'_, T> {
    fn deref_mut(&mut self) -> &mut T {
        unsafe { &mut *self.lock.data.get() }
    }
}

impl<T> Drop for MutexGuard<'_, T> {
    fn drop(&mut self) {
        // `Release` publishes the protected writes to the next `Acquire`
        // swap in `try_lock`.
        self.lock.locked.store(false, Release);
    }
}
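
// A minimal usage sketch (hypothetical, not part of the original listing):
// once `MALLOC` is registered via `#[global_allocator]`, any `alloc`
// container routes through it. Assumes `extern crate alloc` is in scope,
// which the imports above already require.
#[allow(dead_code)]
fn allocator_smoke_test() {
    // Grows through several reallocations, all served from INITIAL_HEAP.
    let mut v = alloc::vec::Vec::new();
    for i in 0..128u32 {
        v.push(i);
    }
    assert_eq!(v.len(), 128);
}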