wasmtime/runtime/vm/mmap_vec.rs

use crate::prelude::*;
use crate::runtime::vm::send_sync_ptr::SendSyncPtr;
#[cfg(has_virtual_memory)]
use crate::runtime::vm::{mmap::UnalignedLength, Mmap};
#[cfg(not(has_virtual_memory))]
use alloc::alloc::Layout;
use alloc::sync::Arc;
use core::ops::{Deref, Range};
use core::ptr::NonNull;
#[cfg(feature = "std")]
use std::fs::File;

/// A type which prefers to store backing memory in an OS-backed memory mapping
/// but can fall back to the regular memory allocator as well.
///
/// This type is used to store code in Wasmtime and manage read-only and
/// executable permissions of compiled images. This is created from either an
/// in-memory compilation or by deserializing an artifact from disk. Methods
/// are provided for managing VM permissions when the `signals-based-traps`
/// Cargo feature is enabled.
///
/// The length of an `MmapVec` is not guaranteed to be page-aligned. That means
/// that if the contents are not themselves page-aligned, which compiled images
/// are typically not, then the remaining bytes in the final page for
/// mmap-backed instances are unused.
///
/// Note that when `signals-based-traps` is disabled then this type is
/// backed by the regular memory allocator via `alloc` APIs. In such a
/// scenario this type does not support read-only or executable bits
/// and the methods are not available. However, the `CustomCodeMemory`
/// mechanism may be used by the embedder to set up and tear down
/// executable permissions on parts of this storage.
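///
/// Below is a minimal usage sketch (illustrative only, not a doctest);
/// `compiled_bytes` stands in for caller-provided data:
///
/// ```ignore
/// // Copy a compiled image into an `MmapVec`, then (on platforms with
/// // virtual memory) freeze it as read-only once it will no longer change.
/// let image = MmapVec::from_slice(&compiled_bytes)?;
/// unsafe {
///     image.make_readonly(0..image.len())?;
/// }
/// ```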
pub enum MmapVec {
    #[doc(hidden)]
    #[cfg(not(has_virtual_memory))]
    Alloc {
        base: SendSyncPtr<u8>,
        layout: Layout,
    },
    #[doc(hidden)]
    ExternallyOwned { memory: SendSyncPtr<[u8]> },
    #[doc(hidden)]
    #[cfg(has_virtual_memory)]
    Mmap {
        mmap: Mmap<UnalignedLength>,
        len: usize,
    },
}

impl MmapVec {
    /// Consumes an existing `mmap` and wraps it up into an `MmapVec`.
    ///
    /// The returned `MmapVec` will have the `len` specified, which can be
    /// smaller than the region mapped by the `Mmap`. The returned `MmapVec`
    /// will only have at most `len` bytes accessible.
    #[cfg(has_virtual_memory)]
    fn new_mmap<M>(mmap: M, len: usize) -> MmapVec
    where
        M: Into<Mmap<UnalignedLength>>,
    {
        let mmap = mmap.into();
        assert!(len <= mmap.len());
        MmapVec::Mmap { mmap, len }
    }

    #[cfg(not(has_virtual_memory))]
    fn new_alloc(len: usize, alignment: usize) -> MmapVec {
        let layout = Layout::from_size_align(len, alignment)
            .expect("Invalid size or alignment for MmapVec allocation");
        let base = SendSyncPtr::new(
            NonNull::new(unsafe { alloc::alloc::alloc_zeroed(layout.clone()) })
                .expect("Allocation of MmapVec storage failed"),
        );
        MmapVec::Alloc { base, layout }
    }

    fn new_externally_owned(memory: NonNull<[u8]>) -> MmapVec {
        let memory = SendSyncPtr::new(memory);
        MmapVec::ExternallyOwned { memory }
    }

    /// Creates a new zero-initialized `MmapVec` with the given `size`
    /// and `alignment`.
    ///
    /// This method will return a new `MmapVec` suitably sized to hold `size`
    /// bytes. All bytes will be initialized to zero since this is a fresh OS
    /// page allocation.
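    ///
    /// A minimal sketch of intended usage (illustrative only, not a doctest):
    ///
    /// ```ignore
    /// // Allocate 4096 zeroed bytes with byte alignment.
    /// let vec = MmapVec::with_capacity_and_alignment(4096, 1)?;
    /// assert!(vec.iter().all(|byte| *byte == 0));
    /// ```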
    pub fn with_capacity_and_alignment(size: usize, alignment: usize) -> Result<MmapVec> {
        #[cfg(has_virtual_memory)]
        {
            assert!(alignment <= crate::runtime::vm::host_page_size());
            return Ok(MmapVec::new_mmap(Mmap::with_at_least(size)?, size));
        }
        #[cfg(not(has_virtual_memory))]
        {
            return Ok(MmapVec::new_alloc(size, alignment));
        }
    }

    /// Creates a new `MmapVec` from the contents of an existing `slice`.
    ///
    /// A new `MmapVec` is allocated to hold the contents of `slice` and then
    /// `slice` is copied into the new mmap. It's recommended to avoid this
    /// method if possible to avoid the need to copy data around.
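    ///
    /// A minimal sketch of intended usage (illustrative only, not a doctest):
    ///
    /// ```ignore
    /// let vec = MmapVec::from_slice(b"compiled image bytes")?;
    /// assert_eq!(&vec[..], b"compiled image bytes");
    /// ```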
    pub fn from_slice(slice: &[u8]) -> Result<MmapVec> {
        MmapVec::from_slice_with_alignment(slice, 1)
    }

    /// Creates a new `MmapVec` from an existing memory region.
    ///
    /// This method avoids the copy performed by [`Self::from_slice`] by
    /// directly using the memory region provided. This must be done with
    /// extreme care, however, as any concurrent modification of the provided
    /// memory will result in undefined behavior.
    ///
    /// The provided memory is guaranteed not to be mutated by the runtime.
    ///
    /// # Safety
    ///
    /// As there is no copy here, the runtime will make direct read-only use
    /// of the provided memory. As such, outside writes to this memory region
    /// will result in undefined behavior.
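    ///
    /// A minimal sketch of intended usage (illustrative only, not a doctest);
    /// the `'static` byte slice stands in for any memory the caller keeps
    /// alive and unmodified for the lifetime of the `MmapVec`:
    ///
    /// ```ignore
    /// static IMAGE: &[u8] = b"precompiled artifact bytes";
    /// let vec = unsafe { MmapVec::from_raw(core::ptr::NonNull::from(IMAGE))? };
    /// assert!(vec.is_always_readonly());
    /// ```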
    pub unsafe fn from_raw(memory: NonNull<[u8]>) -> Result<MmapVec> {
        Ok(MmapVec::new_externally_owned(memory))
    }

    /// Creates a new `MmapVec` from the contents of an existing
    /// `slice`, with a minimum alignment.
    ///
    /// `align` must be a power of two. This is useful when page
    /// alignment is required on a system that otherwise does not use
    /// virtual memory but has a custom code publish handler.
    ///
    /// A new `MmapVec` is allocated to hold the contents of `slice` and then
    /// `slice` is copied into the new mmap. It's recommended to avoid this
    /// method if possible to avoid the need to copy data around.
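    ///
    /// A minimal sketch of intended usage (illustrative only, not a doctest);
    /// the 4096-byte alignment is an example value, not a requirement:
    ///
    /// ```ignore
    /// let vec = MmapVec::from_slice_with_alignment(b"code bytes", 4096)?;
    /// assert_eq!(vec.as_ptr() as usize % 4096, 0);
    /// assert_eq!(&vec[..], b"code bytes");
    /// ```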
    pub fn from_slice_with_alignment(slice: &[u8], align: usize) -> Result<MmapVec> {
        let mut result = MmapVec::with_capacity_and_alignment(slice.len(), align)?;
        // SAFETY: The mmap hasn't been made readonly yet so this should be
        // safe to call.
        unsafe {
            result.as_mut_slice().copy_from_slice(slice);
        }
        Ok(result)
    }

    /// Returns `true` if the `MmapVec` supports virtual memory operations.
    ///
    /// In some cases, such as when using externally owned memory, the underlying
    /// platform may support virtual memory but it still may not be legal
    /// to perform virtual memory operations on this memory.
    pub fn supports_virtual_memory(&self) -> bool {
        match self {
            #[cfg(has_virtual_memory)]
            MmapVec::Mmap { .. } => true,
            MmapVec::ExternallyOwned { .. } => false,
            #[cfg(not(has_virtual_memory))]
            MmapVec::Alloc { .. } => false,
        }
    }

    /// Returns `true` if this `MmapVec` is always readonly.
    ///
    /// Attempting to mutate readonly memory via [`MmapVec::as_mut_slice`]
    /// will result in a panic. Note that this method's return value does not
    /// change when portions of the code memory are made readonly at runtime
    /// via `MmapVec::make_readonly` on platforms with virtual memory.
    pub fn is_always_readonly(&self) -> bool {
        match self {
            #[cfg(has_virtual_memory)]
            MmapVec::Mmap { .. } => false,
            MmapVec::ExternallyOwned { .. } => true,
            #[cfg(not(has_virtual_memory))]
            MmapVec::Alloc { .. } => false,
        }
    }

    /// Creates a new `MmapVec` which is the given `File` mmap'd into memory.
    ///
    /// This function will determine the file's size and map the full contents
    /// into memory. This will return an error if the file is too large to be
    /// fully mapped into memory.
    ///
    /// The file is mapped into memory with a "private mapping" meaning that
    /// changes are not persisted back to the file itself and are only visible
    /// within this process.
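    ///
    /// A minimal sketch of intended usage (illustrative only, not a doctest);
    /// the path is hypothetical:
    ///
    /// ```ignore
    /// let file = std::fs::File::open("module.cwasm")?;
    /// let vec = MmapVec::from_file(file)?;
    /// assert!(vec.original_file().is_some());
    /// ```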
    #[cfg(feature = "std")]
    pub fn from_file(file: File) -> Result<MmapVec> {
        let file = Arc::new(file);
        let mmap = Mmap::from_file(Arc::clone(&file))
            .with_context(move || format!("failed to create mmap for file {file:?}"))?;
        let len = mmap.len();
        Ok(MmapVec::new_mmap(mmap, len))
    }

    /// Makes the specified `range` within this `mmap` read/execute.
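    ///
    /// A minimal sketch of intended usage (illustrative only, not a doctest);
    /// `compiled_bytes` and `text_start..text_end` stand in for a caller-provided
    /// image and the executable portion the caller has determined within it:
    ///
    /// ```ignore
    /// let image = MmapVec::from_slice(&compiled_bytes)?;
    /// unsafe {
    ///     image.make_executable(text_start..text_end, false)?;
    /// }
    /// ```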
    #[cfg(has_virtual_memory)]
    pub unsafe fn make_executable(
        &self,
        range: Range<usize>,
        enable_branch_protection: bool,
    ) -> Result<()> {
        let (mmap, len) = match self {
            MmapVec::Mmap { mmap, len } => (mmap, *len),
            MmapVec::ExternallyOwned { .. } => {
                bail!("Unable to make externally owned memory executable");
            }
        };
        assert!(range.start <= range.end);
        assert!(range.end <= len);
        mmap.make_executable(range.start..range.end, enable_branch_protection)
    }

    /// Makes the specified `range` within this `mmap` read-only.
    #[cfg(has_virtual_memory)]
    pub unsafe fn make_readonly(&self, range: Range<usize>) -> Result<()> {
        let (mmap, len) = match self {
            MmapVec::Mmap { mmap, len } => (mmap, *len),
            MmapVec::ExternallyOwned { .. } => {
                bail!("Unable to make externally owned memory readonly");
            }
        };
        assert!(range.start <= range.end);
        assert!(range.end <= len);
        mmap.make_readonly(range.start..range.end)
    }

    /// Returns the underlying file that this mmap is mapping, if present.
    #[cfg(feature = "std")]
    pub fn original_file(&self) -> Option<&Arc<File>> {
        match self {
            #[cfg(not(has_virtual_memory))]
            MmapVec::Alloc { .. } => None,
            MmapVec::ExternallyOwned { .. } => None,
            #[cfg(has_virtual_memory)]
            MmapVec::Mmap { mmap, .. } => mmap.original_file(),
        }
    }

    /// Returns the bounds, in host memory, of where this mmap
    /// image resides.
    pub fn image_range(&self) -> Range<*const u8> {
        let base = self.as_ptr();
        let len = self.len();
        base..base.wrapping_add(len)
    }

    /// Views this region of memory as a mutable slice.
    ///
    /// # Safety
    ///
    /// This method is only safe if `make_readonly` hasn't been called yet,
    /// ensuring that the memory is indeed writable. For an `MmapVec` created
    /// from a raw pointer, using this memory as mutable is only safe if there
    /// are no outside reads of or writes to the memory region.
    ///
    /// Externally owned memory is implicitly considered readonly and this
    /// method will panic if called on externally owned memory.
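    ///
    /// A minimal sketch of intended usage (illustrative only, not a doctest):
    ///
    /// ```ignore
    /// let mut vec = MmapVec::with_capacity_and_alignment(8, 1)?;
    /// unsafe {
    ///     vec.as_mut_slice()[0] = 0xAA;
    /// }
    /// assert_eq!(vec[0], 0xAA);
    /// ```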
    pub unsafe fn as_mut_slice(&mut self) -> &mut [u8] {
        match self {
            #[cfg(not(has_virtual_memory))]
            MmapVec::Alloc { base, layout } => {
                core::slice::from_raw_parts_mut(base.as_mut(), layout.size())
            }
            MmapVec::ExternallyOwned { .. } => {
                panic!("Mutating externally owned memory is prohibited");
            }
            #[cfg(has_virtual_memory)]
            MmapVec::Mmap { mmap, len } => mmap.slice_mut(0..*len),
        }
    }
}

impl Deref for MmapVec {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        match self {
            #[cfg(not(has_virtual_memory))]
            MmapVec::Alloc { base, layout } => unsafe {
                core::slice::from_raw_parts(base.as_ptr(), layout.size())
            },
            MmapVec::ExternallyOwned { memory } => unsafe { memory.as_ref() },
            #[cfg(has_virtual_memory)]
            MmapVec::Mmap { mmap, len } => {
                // SAFETY: all bytes for this mmap, which is owned by
                // `MmapVec`, are always at least readable.
                unsafe { mmap.slice(0..*len) }
            }
        }
    }
}

impl Drop for MmapVec {
    fn drop(&mut self) {
        match self {
            #[cfg(not(has_virtual_memory))]
            MmapVec::Alloc { base, layout, .. } => unsafe {
                alloc::alloc::dealloc(base.as_mut(), layout.clone());
            },
            MmapVec::ExternallyOwned { .. } => {
                // Memory is allocated externally, nothing to do
            }
            #[cfg(has_virtual_memory)]
            MmapVec::Mmap { .. } => {
                // Drop impl on the `mmap` takes care of this case.
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::MmapVec;

    #[test]
    fn smoke() {
        let mut mmap = MmapVec::with_capacity_and_alignment(10, 1).unwrap();
        assert_eq!(mmap.len(), 10);
        assert_eq!(&mmap[..], &[0; 10]);

        unsafe {
            mmap.as_mut_slice()[0] = 1;
            mmap.as_mut_slice()[2] = 3;
        }
        assert!(mmap.get(10).is_none());
        assert_eq!(mmap[0], 1);
        assert_eq!(mmap[2], 3);
    }

    #[test]
    fn alignment() {
        let mmap = MmapVec::with_capacity_and_alignment(10, 4096).unwrap();
        let raw_ptr = &mmap[0] as *const _ as usize;
        assert_eq!(raw_ptr & (4096 - 1), 0);
    }
}