cranelift_jit/memory/arena.rs

use std::io;
use std::mem::ManuallyDrop;
use std::ptr;

use cranelift_module::ModuleResult;

use super::{BranchProtection, JITMemoryProvider};

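/// Rounds `addr` up to the next multiple of `align`, which must be a power
/// of two.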
fn align_up(addr: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    (addr + align - 1) & !(align - 1)
}

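/// A page-aligned range within the arena with a single target protection,
/// bump-allocated from front to back. Segments stay read-write until
/// `finalize` switches them to `target_prot`.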
#[derive(Debug)]
struct Segment {
    ptr: *mut u8,
    len: usize,
    position: usize,
    target_prot: region::Protection,
    finalized: bool,
}

impl Segment {
    fn new(ptr: *mut u8, len: usize, target_prot: region::Protection) -> Self {
        debug_assert_eq!(ptr as usize % region::page::size(), 0);
        debug_assert_eq!(len % region::page::size(), 0);
        let mut segment = Segment {
            ptr,
            len,
            target_prot,
            position: 0,
            finalized: false,
        };
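        // New segments start out read-write so that code and data can be
        // copied in; `finalize` applies the target protection later.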
        segment.set_rw();
        segment
    }

    fn set_rw(&mut self) {
        unsafe {
            region::protect(self.ptr, self.len, region::Protection::READ_WRITE)
                .expect("unable to change memory protection for jit memory segment");
        }
    }

    fn finalize(&mut self, branch_protection: BranchProtection) {
        if self.finalized {
            return;
        }

        if self.target_prot == region::Protection::READ_EXECUTE {
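            // Executable segments go through a helper that also applies
            // branch protection (e.g. aarch64 BTI) and whatever icache
            // maintenance the platform needs for freshly written code.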
            super::set_readable_and_executable(self.ptr, self.len, branch_protection)
                .expect("unable to set memory protection for jit memory segment");
        } else {
            unsafe {
                region::protect(self.ptr, self.len, self.target_prot)
                    .expect("unable to change memory protection for jit memory segment");
            }
        }
        self.finalized = true;
    }

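    /// Bump-allocates `size` bytes at the given alignment. The caller must
    /// have checked `has_space_for` first.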
    fn allocate(&mut self, size: usize, align: usize) -> *mut u8 {
        assert!(self.has_space_for(size, align));
        self.position = align_up(self.position, align);
        let ptr = unsafe { self.ptr.add(self.position) };
        self.position += size;
        ptr
    }

    fn has_space_for(&self, size: usize, align: usize) -> bool {
        !self.finalized && align_up(self.position, align) + size <= self.len
    }
}

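/// A [`JITMemoryProvider`] that reserves one contiguous range of address
/// space up front and bump-allocates page-aligned segments out of it.
/// Allocations never move, and the distance between any two allocations is
/// bounded by the size of the reservation.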
pub struct ArenaMemoryProvider {
    alloc: ManuallyDrop<Option<region::Allocation>>,
    ptr: *mut u8,
    size: usize,
    position: usize,
    segments: Vec<Segment>,
}

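// Safety: the raw pointers refer to a mapping owned exclusively by this
// provider, so moving it to another thread does not introduce aliasing.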
unsafe impl Send for ArenaMemoryProvider {}

impl ArenaMemoryProvider {
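    /// Creates a new arena, reserving `reserve_size` bytes of address space
    /// (rounded up to a whole number of pages).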
    pub fn new_with_size(reserve_size: usize) -> Result<Self, region::Error> {
        let size = align_up(reserve_size, region::page::size());
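        // Reserve the whole range up front with `Protection::NONE`; pages
        // only become accessible once a segment covering them is set
        // read-write.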
        let mut alloc = region::alloc(size, region::Protection::NONE)?;
        let ptr = alloc.as_mut_ptr();

        Ok(Self {
            alloc: ManuallyDrop::new(Some(alloc)),
            segments: Vec::new(),
            ptr,
            size,
            position: 0,
        })
    }

    fn allocate(
        &mut self,
        size: usize,
        align: u64,
        protection: region::Protection,
    ) -> io::Result<*mut u8> {
        let align = usize::try_from(align).expect("alignment too big");
        assert!(
            align <= region::page::size(),
            "alignment over page size is not supported"
        );

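        // First, try to fit the allocation into an existing open segment
        // with the same target protection.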
        if let Some(segment) = self.segments.iter_mut().find(|seg| {
            seg.target_prot == protection && !seg.finalized && seg.has_space_for(size, align)
        }) {
            return Ok(segment.allocate(size, align));
        }

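        // Otherwise, if the most recent segment has the right protection and
        // is still open, grow it in place: the pages that follow it in the
        // reservation are still unused, and `set_rw` makes them accessible.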
        if let Some(segment) = self.segments.last_mut() {
            if segment.target_prot == protection && !segment.finalized {
                let additional_size = align_up(size, region::page::size());

                if self.position + additional_size <= self.size {
                    segment.len += additional_size;
                    segment.set_rw();
                    self.position += additional_size;
                    return Ok(segment.allocate(size, align));
                }
            }
        }

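        // No suitable segment; carve a fresh one out of the reservation.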
        self.allocate_segment(size, protection)?;
        let segment = self.segments.last_mut().unwrap();
        Ok(segment.allocate(size, align))
    }

    fn allocate_segment(
        &mut self,
        size: usize,
        target_prot: region::Protection,
    ) -> Result<(), io::Error> {
        let size = align_up(size, region::page::size());
        if self.position + size > self.size {
            return Err(io::Error::new(
                io::ErrorKind::Other,
                "pre-allocated jit memory region exhausted",
            ));
        }
        let ptr = unsafe { self.ptr.add(self.position) };
        self.position += size;
        self.segments.push(Segment::new(ptr, size, target_prot));
        Ok(())
    }

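    /// Switches all segments to their target protections and flushes the
    /// processor pipeline, after which the generated code is ready to run.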
    pub(crate) fn finalize(&mut self, branch_protection: BranchProtection) {
        for segment in &mut self.segments {
            segment.finalize(branch_protection);
        }

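        // Executing newly written code from other threads additionally
        // requires a pipeline flush on some architectures (e.g. aarch64).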
        wasmtime_jit_icache_coherence::pipeline_flush_mt().expect("Failed pipeline flush");
    }

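    /// Frees the entire reservation, including finalized segments.
    ///
    /// # Safety
    ///
    /// The caller must ensure that nothing in the arena is still referenced;
    /// any pointers into it are dangling afterwards.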
    pub(crate) unsafe fn free_memory(&mut self) {
        if self.ptr.is_null() {
            return;
        }
        self.segments.clear();
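        // Dropping the `region::Allocation` unmaps the reservation. `take`
        // here resolves to `Option::take` through the `ManuallyDrop` deref,
        // leaving the `ManuallyDrop` wrapper holding `None`.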
        let _: Option<region::Allocation> = self.alloc.take();
        self.ptr = ptr::null_mut();
    }
}

impl Drop for ArenaMemoryProvider {
    fn drop(&mut self) {
        if self.ptr.is_null() {
            return;
        }
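        // If any segment was finalized, pointers to code in the arena may
        // still be live, so the mapping is intentionally leaked here; it is
        // only released by an explicit call to `free_memory`.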
        let is_live = self.segments.iter().any(|seg| seg.finalized);
        if !is_live {
            unsafe { self.free_memory() };
        }
    }
}

impl JITMemoryProvider for ArenaMemoryProvider {
    fn allocate_readexec(&mut self, size: usize, align: u64) -> io::Result<*mut u8> {
        self.allocate(size, align, region::Protection::READ_EXECUTE)
    }

    fn allocate_readwrite(&mut self, size: usize, align: u64) -> io::Result<*mut u8> {
        self.allocate(size, align, region::Protection::READ_WRITE)
    }

    fn allocate_readonly(&mut self, size: usize, align: u64) -> io::Result<*mut u8> {
        self.allocate(size, align, region::Protection::READ)
    }

    unsafe fn free_memory(&mut self) {
        self.free_memory();
    }

    fn finalize(&mut self, branch_protection: BranchProtection) -> ModuleResult<()> {
        self.finalize(branch_protection);
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn alignment_ok() {
        let mut arena = ArenaMemoryProvider::new_with_size(1 << 20).unwrap();

        for align_log2 in 0..8 {
            let align = 1usize << align_log2;
            for size in 1..128 {
                let ptr = arena.allocate_readwrite(size, align as u64).unwrap();
                assert_eq!(ptr.addr() % align, 0);
            }
        }
    }

    #[test]
    #[cfg(all(target_pointer_width = "64", not(target_os = "windows")))]
    fn large_virtual_allocation() {
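        // Reserve 1 TiB of address space; since pages are only made
        // accessible on demand, this should succeed on 64-bit non-Windows
        // targets without committing memory up front.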
        let reserve_size = 1 << 40;
        let mut arena = ArenaMemoryProvider::new_with_size(reserve_size).unwrap();
        let ptr = arena.allocate_readwrite(1, 1).unwrap();
        assert_eq!(ptr.addr(), arena.ptr.addr());
        arena.finalize(BranchProtection::None);
        unsafe { ptr.write_volatile(42) };
        unsafe { arena.free_memory() };
    }

    #[test]
    fn over_capacity() {
        let mut arena = ArenaMemoryProvider::new_with_size(1 << 20).unwrap();
        let _ = arena.allocate_readwrite(900_000, 1).unwrap();
        let _ = arena.allocate_readwrite(200_000, 1).unwrap_err();
    }

    #[test]
    fn test_is_send() {
        fn assert_is_send<T: Send>() {}
        assert_is_send::<ArenaMemoryProvider>();
    }
}