// wasmtime/runtime/vm/mmap_vec.rs

1#[cfg(not(has_virtual_memory))]
2use crate::error::OutOfMemory;
3use crate::prelude::*;
4use crate::runtime::vm::send_sync_ptr::SendSyncPtr;
5#[cfg(has_virtual_memory)]
6use crate::runtime::vm::{Mmap, mmap::UnalignedLength};
7#[cfg(not(has_virtual_memory))]
8use alloc::alloc::Layout;
9use alloc::sync::Arc;
10use core::ops::{Deref, Range};
11use core::ptr::NonNull;
12#[cfg(feature = "std")]
13use std::fs::File;
14
/// A type which prefers to store backing memory in an OS-backed memory mapping
/// but can fall back to the regular memory allocator as well.
///
/// This type is used to store code in Wasmtime and manage read-only and
/// executable permissions of compiled images. This is created from either an
/// in-memory compilation or by deserializing an artifact from disk. Methods
/// are provided for managing VM permissions when the `signals-based-traps`
/// Cargo feature is enabled.
///
/// The length of an `MmapVec` is not guaranteed to be page-aligned. That means
/// that if the contents are not themselves page-aligned, which compiled images
/// are typically not, then the remaining bytes in the final page for
/// mmap-backed instances are unused.
///
/// Note that when `signals-based-traps` is disabled then this type is
/// backed by the regular memory allocator via `alloc` APIs. In such a
/// scenario this type does not support read-only or executable bits
/// and the methods are not available. However, the `CustomCodeMemory`
/// mechanism may be used by the embedder to set up and tear down
/// executable permissions on parts of this storage.
pub enum MmapVec {
    /// Backing storage obtained from the global allocator; used when the
    /// platform has no virtual-memory support.
    #[doc(hidden)]
    #[cfg(not(has_virtual_memory))]
    Alloc {
        // Start of the zero-initialized allocation made in `new_alloc`.
        base: SendSyncPtr<u8>,
        // The exact layout used for allocation; reused in `Drop` to
        // deallocate, and its `size()` doubles as the vector's length.
        layout: Layout,
    },
    /// Memory owned by the embedder (see `MmapVec::from_raw`); treated as
    /// read-only and never deallocated by this type (see the `Drop` impl).
    #[doc(hidden)]
    ExternallyOwned { memory: SendSyncPtr<[u8]> },
    /// An OS memory mapping; `len` is the accessible prefix of `mmap` and
    /// may be shorter than the page-aligned mapping itself.
    #[doc(hidden)]
    #[cfg(has_virtual_memory)]
    Mmap {
        mmap: Mmap<UnalignedLength>,
        len: usize,
    },
}
51
52impl MmapVec {
53    /// Consumes an existing `mmap` and wraps it up into an `MmapVec`.
54    ///
55    /// The returned `MmapVec` will have the `size` specified, which can be
56    /// smaller than the region mapped by the `Mmap`. The returned `MmapVec`
57    /// will only have at most `size` bytes accessible.
58    #[cfg(has_virtual_memory)]
59    fn new_mmap<M>(mmap: M, len: usize) -> MmapVec
60    where
61        M: Into<Mmap<UnalignedLength>>,
62    {
63        let mmap = mmap.into();
64        assert!(len <= mmap.len());
65        MmapVec::Mmap { mmap, len }
66    }
67
68    #[cfg(not(has_virtual_memory))]
69    fn new_alloc(len: usize, alignment: usize) -> Result<MmapVec, OutOfMemory> {
70        let layout = Layout::from_size_align(len, alignment)
71            .expect("Invalid size or alignment for MmapVec allocation");
72        match NonNull::new(unsafe { alloc::alloc::alloc_zeroed(layout.clone()) }) {
73            Some(ptr) => {
74                let base = SendSyncPtr::new(ptr);
75                Ok(MmapVec::Alloc { base, layout })
76            }
77            None => return Err(OutOfMemory::new(layout.size())),
78        }
79    }
80
81    fn new_externally_owned(memory: NonNull<[u8]>) -> MmapVec {
82        let memory = SendSyncPtr::new(memory);
83        MmapVec::ExternallyOwned { memory }
84    }
85
86    /// Creates a new zero-initialized `MmapVec` with the given `size`
87    /// and `alignment`.
88    ///
89    /// This commit will return a new `MmapVec` suitably sized to hold `size`
90    /// bytes. All bytes will be initialized to zero since this is a fresh OS
91    /// page allocation.
92    pub fn with_capacity_and_alignment(size: usize, alignment: usize) -> Result<MmapVec> {
93        #[cfg(has_virtual_memory)]
94        {
95            assert!(alignment <= crate::runtime::vm::host_page_size());
96            return Ok(MmapVec::new_mmap(Mmap::with_at_least(size)?, size));
97        }
98        #[cfg(not(has_virtual_memory))]
99        {
100            return Ok(MmapVec::new_alloc(size, alignment)?);
101        }
102    }
103
104    /// Creates a new `MmapVec` from the contents of an existing `slice`.
105    ///
106    /// A new `MmapVec` is allocated to hold the contents of `slice` and then
107    /// `slice` is copied into the new mmap. It's recommended to avoid this
108    /// method if possible to avoid the need to copy data around.
109    pub fn from_slice(slice: &[u8]) -> Result<MmapVec> {
110        MmapVec::from_slice_with_alignment(slice, 1)
111    }
112
113    /// Creates a new `MmapVec` from an existing memory region
114    ///
115    /// This method avoids the copy performed by [`Self::from_slice`] by
116    /// directly using the memory region provided. This must be done with
117    /// extreme care, however, as any concurrent modification of the provided
118    /// memory will cause undefined and likely very, very bad things to
119    /// happen.
120    ///
121    /// The memory provided is guaranteed to not be mutated by the runtime.
122    ///
123    /// # Safety
124    ///
125    /// As there is no copy here, the runtime will be making direct readonly use
126    /// of the provided memory. As such, outside writes to this memory region
127    /// will result in undefined and likely very undesirable behavior.
128    pub unsafe fn from_raw(memory: NonNull<[u8]>) -> Result<MmapVec> {
129        Ok(MmapVec::new_externally_owned(memory))
130    }
131
132    /// Creates a new `MmapVec` from the contents of an existing
133    /// `slice`, with a minimum alignment.
134    ///
135    /// `align` must be a power of two. This is useful when page
136    /// alignment is required when the system otherwise does not use
137    /// virtual memory but has a custom code publish handler.
138    ///
139    /// A new `MmapVec` is allocated to hold the contents of `slice` and then
140    /// `slice` is copied into the new mmap. It's recommended to avoid this
141    /// method if possible to avoid the need to copy data around.
142    pub fn from_slice_with_alignment(slice: &[u8], align: usize) -> Result<MmapVec> {
143        let mut result = MmapVec::with_capacity_and_alignment(slice.len(), align)?;
144        // SAFETY: The mmap hasn't been made readonly yet so this should be
145        // safe to call.
146        unsafe {
147            result.as_mut_slice().copy_from_slice(slice);
148        }
149        Ok(result)
150    }
151
152    /// Return `true` if the `MmapVec` support virtual memory operations
153    ///
154    /// In some cases, such as when using externally owned memory, the underlying
155    /// platform may support virtual memory but it still may not be legal
156    /// to perform virtual memory operations on this memory.
157    pub fn supports_virtual_memory(&self) -> bool {
158        match self {
159            #[cfg(has_virtual_memory)]
160            MmapVec::Mmap { .. } => true,
161            MmapVec::ExternallyOwned { .. } => false,
162            #[cfg(not(has_virtual_memory))]
163            MmapVec::Alloc { .. } => false,
164        }
165    }
166
167    /// Return true if this `MmapVec` is always readonly
168    ///
169    /// Attempting to get access to mutate readonly memory via
170    /// [`MmapVec::as_mut`] will result in a panic.  Note that this method
171    /// does not change with runtime changes to portions of the code memory
172    /// via `MmapVec::make_readonly` for platforms with virtual memory.
173    pub fn is_always_readonly(&self) -> bool {
174        match self {
175            #[cfg(has_virtual_memory)]
176            MmapVec::Mmap { .. } => false,
177            MmapVec::ExternallyOwned { .. } => true,
178            #[cfg(not(has_virtual_memory))]
179            MmapVec::Alloc { .. } => false,
180        }
181    }
182
183    /// Creates a new `MmapVec` which is the given `File` mmap'd into memory.
184    ///
185    /// This function will determine the file's size and map the full contents
186    /// into memory. This will return an error if the file is too large to be
187    /// fully mapped into memory.
188    ///
189    /// The file is mapped into memory with a "private mapping" meaning that
190    /// changes are not persisted back to the file itself and are only visible
191    /// within this process.
192    #[cfg(feature = "std")]
193    pub fn from_file(file: File) -> Result<MmapVec> {
194        let file = Arc::new(file);
195        let mmap = Mmap::from_file(Arc::clone(&file))
196            .with_context(move || format!("failed to create mmap for file {file:?}"))?;
197        let len = mmap.len();
198        Ok(MmapVec::new_mmap(mmap, len))
199    }
200
201    /// Makes the specified `range` within this `mmap` to be read/execute.
202    #[cfg(has_virtual_memory)]
203    pub unsafe fn make_executable(
204        &self,
205        range: Range<usize>,
206        enable_branch_protection: bool,
207    ) -> Result<()> {
208        let (mmap, len) = match self {
209            MmapVec::Mmap { mmap, len } => (mmap, *len),
210            MmapVec::ExternallyOwned { .. } => {
211                bail!("Unable to make externally owned memory executable");
212            }
213        };
214        assert!(range.start <= range.end);
215        assert!(range.end <= len);
216        unsafe { mmap.make_executable(range.start..range.end, enable_branch_protection) }
217    }
218
219    /// Makes the specified `range` within this `mmap` to be read-only.
220    #[cfg(has_virtual_memory)]
221    pub unsafe fn make_readonly(&self, range: Range<usize>) -> Result<()> {
222        let (mmap, len) = match self {
223            MmapVec::Mmap { mmap, len } => (mmap, *len),
224            MmapVec::ExternallyOwned { .. } => {
225                bail!("Unable to make externally owned memory readonly");
226            }
227        };
228        assert!(range.start <= range.end);
229        assert!(range.end <= len);
230        unsafe { mmap.make_readonly(range.start..range.end) }
231    }
232
233    /// Makes the specified `range` within this `mmap` to be
234    /// read-write (and not executable).
235    #[cfg(has_virtual_memory)]
236    pub unsafe fn make_readwrite(&self, range: Range<usize>) -> Result<()> {
237        let (mmap, len) = match self {
238            MmapVec::Mmap { mmap, len } => (mmap, *len),
239            MmapVec::ExternallyOwned { .. } => {
240                bail!("Unable to make externally owned memory read-write");
241            }
242        };
243        assert!(range.start <= range.end);
244        assert!(range.end <= len);
245        unsafe { mmap.make_readwrite(range.start..range.end) }
246    }
247
248    /// Returns the underlying file that this mmap is mapping, if present.
249    #[cfg(feature = "std")]
250    pub fn original_file(&self) -> Option<&Arc<File>> {
251        match self {
252            #[cfg(not(has_virtual_memory))]
253            MmapVec::Alloc { .. } => None,
254            MmapVec::ExternallyOwned { .. } => None,
255            #[cfg(has_virtual_memory)]
256            MmapVec::Mmap { mmap, .. } => mmap.original_file(),
257        }
258    }
259
260    /// Returns the bounds, in host memory, of where this mmap
261    /// image resides.
262    pub fn image_range(&self) -> Range<*const u8> {
263        let base = self.as_ptr();
264        let len = self.len();
265        base..base.wrapping_add(len)
266    }
267
268    /// Views this region of memory as a mutable slice.
269    ///
270    /// # Unsafety
271    ///
272    /// This method is only safe if `make_readonly` hasn't been called yet to
273    /// ensure that the memory is indeed writable.  For a MmapVec created from
274    /// a raw pointer using this memory as mutable is only safe if there are
275    /// no outside reads or writes to the memory region.
276    ///
277    /// Externally owned code is implicitly considered to be readonly and this
278    /// code will panic if called on externally owned memory.
279    pub unsafe fn as_mut_slice(&mut self) -> &mut [u8] {
280        match self {
281            #[cfg(not(has_virtual_memory))]
282            MmapVec::Alloc { base, layout } => unsafe {
283                core::slice::from_raw_parts_mut(base.as_mut(), layout.size())
284            },
285            MmapVec::ExternallyOwned { .. } => {
286                panic!("Mutating externally owned memory is prohibited");
287            }
288            #[cfg(has_virtual_memory)]
289            MmapVec::Mmap { mmap, len } => unsafe { mmap.slice_mut(0..*len) },
290        }
291    }
292
293    /// Create a copy of this `MmapVec` that can be separately
294    /// mutated.
295    #[cfg(feature = "debug")]
296    pub(crate) fn deep_clone(&self) -> Result<MmapVec> {
297        match self {
298            #[cfg(not(has_virtual_memory))]
299            MmapVec::Alloc { layout, .. } => {
300                MmapVec::from_slice_with_alignment(&self[..], layout.align())
301            }
302            MmapVec::ExternallyOwned { .. } => {
303                crate::bail!("Cannot clone an externally-owned code memory.");
304            }
305            #[cfg(has_virtual_memory)]
306            #[allow(
307                unused_variables,
308                reason = "`mmap` and `len` only used with `std` feature"
309            )]
310            MmapVec::Mmap { mmap, len } => {
311                #[cfg(feature = "std")]
312                if let Some(original_file) = mmap.original_file() {
313                    let mmap = Mmap::from_file(original_file.clone())?;
314                    return Ok(MmapVec::Mmap { mmap, len: *len });
315                }
316                MmapVec::from_slice_with_alignment(&self[..], crate::runtime::vm::host_page_size())
317            }
318        }
319    }
320}
321
322impl Deref for MmapVec {
323    type Target = [u8];
324
325    #[inline]
326    fn deref(&self) -> &[u8] {
327        match self {
328            #[cfg(not(has_virtual_memory))]
329            MmapVec::Alloc { base, layout } => unsafe {
330                core::slice::from_raw_parts(base.as_ptr(), layout.size())
331            },
332            MmapVec::ExternallyOwned { memory } => unsafe { memory.as_ref() },
333            #[cfg(has_virtual_memory)]
334            MmapVec::Mmap { mmap, len } => {
335                // SAFETY: all bytes for this mmap, which is owned by
336                // `MmapVec`, are always at least readable.
337                unsafe { mmap.slice(0..*len) }
338            }
339        }
340    }
341}
342
343impl Drop for MmapVec {
344    fn drop(&mut self) {
345        match self {
346            #[cfg(not(has_virtual_memory))]
347            MmapVec::Alloc { base, layout, .. } => unsafe {
348                alloc::alloc::dealloc(base.as_mut(), layout.clone());
349            },
350            MmapVec::ExternallyOwned { .. } => {
351                // Memory is allocated externally, nothing to do
352            }
353            #[cfg(has_virtual_memory)]
354            MmapVec::Mmap { .. } => {
355                // Drop impl on the `mmap` takes care of this case.
356            }
357        }
358    }
359}
360
#[cfg(test)]
mod tests {
    use super::MmapVec;

    /// Basic construction, zero-initialization, and mutation round-trip.
    #[test]
    fn smoke() {
        let mut vec = MmapVec::with_capacity_and_alignment(10, 1).unwrap();
        assert_eq!(vec.len(), 10);
        assert!(vec.iter().all(|&byte| byte == 0));

        // SAFETY: freshly created, so nothing has been made readonly yet.
        unsafe {
            let slice = vec.as_mut_slice();
            slice[0] = 1;
            slice[2] = 3;
        }
        assert!(vec.get(10).is_none());
        assert_eq!(vec[0], 1);
        assert_eq!(vec[2], 3);
    }

    /// A page-sized alignment request must yield a page-aligned base pointer.
    #[test]
    fn alignment() {
        let vec = MmapVec::with_capacity_and_alignment(10, 4096).unwrap();
        let addr = &vec[0] as *const _ as usize;
        assert_eq!(addr % 4096, 0);
    }
}