wasmtime/runtime/code_memory.rs

//! Memory management for executable code.

use crate::prelude::*;
use crate::runtime::vm::{libcalls, MmapVec};
use crate::Engine;
use alloc::sync::Arc;
use core::ops::Range;
use object::endian::Endianness;
use object::read::{elf::ElfFile64, Object, ObjectSection};
use object::{ObjectSymbol, SectionFlags};
use wasmtime_environ::{lookup_trap_code, obj, Trap};

/// Management of executable memory within a `MmapVec`
///
/// This type consumes ownership of a region of memory and will manage the
/// executable permissions of the contained JIT code as necessary.
pub struct CodeMemory {
    mmap: MmapVec,
    #[cfg(has_host_compiler_backend)]
    unwind_registration: Option<crate::runtime::vm::UnwindRegistration>,
    #[cfg(feature = "debug-builtins")]
    debug_registration: Option<crate::runtime::vm::GdbJitImageRegistration>,
    published: bool,
    enable_branch_protection: bool,
    needs_executable: bool,
    #[cfg(feature = "debug-builtins")]
    has_native_debug_info: bool,
    custom_code_memory: Option<Arc<dyn CustomCodeMemory>>,

    relocations: Vec<(usize, obj::LibCall)>,

    // Ranges within `self.mmap` of where the particular sections lie.
    text: Range<usize>,
    unwind: Range<usize>,
    trap_data: Range<usize>,
    wasm_data: Range<usize>,
    address_map_data: Range<usize>,
    stack_map_data: Range<usize>,
    func_name_data: Range<usize>,
    info_data: Range<usize>,
    wasm_dwarf: Range<usize>,
}

impl Drop for CodeMemory {
    fn drop(&mut self) {
        // If there is a custom code memory handler, restore the
        // original (non-executable) state of the memory.
        if let Some(mem) = self.custom_code_memory.as_ref() {
            if self.published && self.needs_executable {
                let text = self.text();
                mem.unpublish_executable(text.as_ptr(), text.len())
                    .expect("Executable memory unpublish failed");
            }
        }

        // Drop the registrations before `self.mmap` since they (implicitly) refer to it.
        #[cfg(has_host_compiler_backend)]
        let _ = self.unwind_registration.take();
        #[cfg(feature = "debug-builtins")]
        let _ = self.debug_registration.take();
    }
}

fn _assert() {
    fn _assert_send_sync<T: Send + Sync>() {}
    _assert_send_sync::<CodeMemory>();
}

/// Interface implemented by an embedder to provide custom
/// implementations of code-memory protection and execute permissions.
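///
/// # Example
///
/// A minimal sketch of an embedder-provided implementation. The
/// `my_os_set_rx`/`my_os_set_rw` calls are hypothetical placeholders for
/// whatever memory-protection primitive the platform actually offers:
///
/// ```ignore
/// struct MyCodeMemory;
///
/// impl CustomCodeMemory for MyCodeMemory {
///     fn required_alignment(&self) -> usize {
///         // Hypothetical: permissions flip at 4 KiB granularity.
///         4096
///     }
///
///     fn publish_executable(&self, ptr: *const u8, len: usize) -> anyhow::Result<()> {
///         // Flip RW -> RX, including any icache maintenance the
///         // platform needs.
///         unsafe { my_os_set_rx(ptr, len) }
///     }
///
///     fn unpublish_executable(&self, ptr: *const u8, len: usize) -> anyhow::Result<()> {
///         // Flip RX -> RW; no code runs from this region anymore.
///         unsafe { my_os_set_rw(ptr, len) }
///     }
/// }
/// ```
///
/// An implementation like this is installed through the engine's
/// configuration so that publishing a `CodeMemory` uses it instead of the
/// native virtual-memory path.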
pub trait CustomCodeMemory: Send + Sync {
    /// The minimal alignment granularity for an address region that
    /// can be made executable.
    ///
    /// Wasmtime does not assume the system page size for this because
    /// custom code-memory protection can be used when all other uses
    /// of virtual memory are disabled.
    fn required_alignment(&self) -> usize;

    /// Publish a region of memory as executable.
    ///
    /// This should update permissions from the default RW
    /// (readable/writable but not executable) to RX
    /// (readable/executable but not writable), enforcing W^X
    /// discipline.
    ///
    /// If the platform requires any data/instruction coherence
    /// action, that should be performed as part of this hook as well.
    ///
    /// `ptr` and `ptr.offset(len)` are guaranteed to be aligned as
    /// per `required_alignment()`.
    fn publish_executable(&self, ptr: *const u8, len: usize) -> anyhow::Result<()>;

    /// Unpublish a region of memory.
    ///
    /// This should perform the opposite effect of `publish_executable`,
    /// switching a range of memory back from RX (readable/executable)
    /// to RW (readable/writable). It is guaranteed that no code is
    /// running anymore from this region.
    ///
    /// `ptr` and `ptr.offset(len)` are guaranteed to be aligned as
    /// per `required_alignment()`.
    fn unpublish_executable(&self, ptr: *const u8, len: usize) -> anyhow::Result<()>;
}

impl CodeMemory {
    /// Creates a new `CodeMemory` by taking ownership of the provided
    /// `MmapVec`.
    ///
    /// The returned `CodeMemory` manages the internal `MmapVec` and the
    /// `publish` method is used to actually make the memory executable.
    pub fn new(engine: &Engine, mmap: MmapVec) -> Result<Self> {
        let obj = ElfFile64::<Endianness>::parse(&mmap[..])
            .map_err(obj::ObjectCrateErrorWrapper)
            .with_context(|| "failed to parse internal compilation artifact")?;

        let mut relocations = Vec::new();
        let mut text = 0..0;
        let mut unwind = 0..0;
        let mut enable_branch_protection = None;
        let mut needs_executable = true;
        #[cfg(feature = "debug-builtins")]
        let mut has_native_debug_info = false;
        let mut trap_data = 0..0;
        let mut wasm_data = 0..0;
        let mut address_map_data = 0..0;
        let mut stack_map_data = 0..0;
        let mut func_name_data = 0..0;
        let mut info_data = 0..0;
        let mut wasm_dwarf = 0..0;
        for section in obj.sections() {
            let data = section.data().map_err(obj::ObjectCrateErrorWrapper)?;
            let name = section.name().map_err(obj::ObjectCrateErrorWrapper)?;
            let range = subslice_range(data, &mmap);

            // Double-check that sections are all aligned properly.
            if section.align() != 0 && data.len() != 0 {
                if (data.as_ptr() as u64 - mmap.as_ptr() as u64) % section.align() != 0 {
                    bail!(
                        "section `{}` isn't aligned to {:#x}",
                        section.name().unwrap_or("ERROR"),
                        section.align()
                    );
                }
            }

            match name {
                obj::ELF_WASM_BTI => match data.len() {
                    1 => enable_branch_protection = Some(data[0] != 0),
                    _ => bail!("invalid `{name}` section"),
                },
                ".text" => {
                    text = range;

                    if let SectionFlags::Elf { sh_flags } = section.flags() {
                        if sh_flags & obj::SH_WASMTIME_NOT_EXECUTED != 0 {
                            needs_executable = false;
                        }
                    }

                    // The text section might have relocations for things like
                    // libcalls which need to be applied, so handle those here.
                    //
                    // Note that only a small subset of possible relocations are
                    // handled. Only those required by the compiler side of
                    // things are processed.
                    for (offset, reloc) in section.relocations() {
                        assert_eq!(reloc.kind(), object::RelocationKind::Absolute);
                        assert_eq!(reloc.encoding(), object::RelocationEncoding::Generic);
                        assert_eq!(usize::from(reloc.size()), core::mem::size_of::<usize>() * 8);
                        assert_eq!(reloc.addend(), 0);
                        let sym = match reloc.target() {
                            object::RelocationTarget::Symbol(id) => id,
                            other => panic!("unknown relocation target {other:?}"),
                        };
                        let sym = obj.symbol_by_index(sym).unwrap().name().unwrap();
                        let libcall = obj::LibCall::from_str(sym)
                            .unwrap_or_else(|| panic!("unknown symbol relocation: {sym}"));

                        let offset = usize::try_from(offset).unwrap();
                        relocations.push((offset, libcall));
                    }
                }
                #[cfg(has_host_compiler_backend)]
                crate::runtime::vm::UnwindRegistration::SECTION_NAME => unwind = range,
                obj::ELF_WASM_DATA => wasm_data = range,
                obj::ELF_WASMTIME_ADDRMAP => address_map_data = range,
                obj::ELF_WASMTIME_STACK_MAP => stack_map_data = range,
                obj::ELF_WASMTIME_TRAPS => trap_data = range,
                obj::ELF_NAME_DATA => func_name_data = range,
                obj::ELF_WASMTIME_INFO => info_data = range,
                obj::ELF_WASMTIME_DWARF => wasm_dwarf = range,
                #[cfg(feature = "debug-builtins")]
                ".debug_info" => has_native_debug_info = true,

                _ => log::debug!("ignoring section {name}"),
            }
        }
        // Take a mutable borrow of `unwind` even when unwind registration is
        // compiled out, to silence the unused-mut warning.
        #[cfg(not(has_host_compiler_backend))]
        let _ = &mut unwind;

        Ok(Self {
            mmap,
            #[cfg(has_host_compiler_backend)]
            unwind_registration: None,
            #[cfg(feature = "debug-builtins")]
            debug_registration: None,
            published: false,
            enable_branch_protection: enable_branch_protection
                .ok_or_else(|| anyhow!("missing `{}` section", obj::ELF_WASM_BTI))?,
            needs_executable,
            #[cfg(feature = "debug-builtins")]
            has_native_debug_info,
            custom_code_memory: engine.custom_code_memory().cloned(),
            text,
            unwind,
            trap_data,
            address_map_data,
            stack_map_data,
            func_name_data,
            wasm_dwarf,
            info_data,
            wasm_data,
            relocations,
        })
    }

    /// Returns a reference to the underlying `MmapVec` this memory owns.
    #[inline]
    pub fn mmap(&self) -> &MmapVec {
        &self.mmap
    }

    /// Returns the contents of the text section of the ELF executable this
    /// represents.
    #[inline]
    pub fn text(&self) -> &[u8] {
        &self.mmap[self.text.clone()]
    }

    /// Returns the contents of the `ELF_WASMTIME_DWARF` section.
    #[inline]
    pub fn wasm_dwarf(&self) -> &[u8] {
        &self.mmap[self.wasm_dwarf.clone()]
    }

    /// Returns the data in the `ELF_NAME_DATA` section.
    #[inline]
    pub fn func_name_data(&self) -> &[u8] {
        &self.mmap[self.func_name_data.clone()]
    }

    /// Returns the concatenated list of all data associated with this wasm
    /// module.
    ///
    /// This is used for initialization of memories and all data ranges stored
    /// in a `Module` are relative to the slice returned here.
    #[inline]
    pub fn wasm_data(&self) -> &[u8] {
        &self.mmap[self.wasm_data.clone()]
    }

    /// Returns the encoded address map section used to pass to
    /// `wasmtime_environ::lookup_file_pos`.
    #[inline]
    pub fn address_map_data(&self) -> &[u8] {
        &self.mmap[self.address_map_data.clone()]
    }

    /// Returns the encoded stack map section used to pass to
    /// `wasmtime_environ::StackMap::lookup`.
    pub fn stack_map_data(&self) -> &[u8] {
        &self.mmap[self.stack_map_data.clone()]
    }

    /// Returns the contents of the `ELF_WASMTIME_INFO` section, or an empty
    /// slice if it wasn't found.
    #[inline]
    pub fn wasmtime_info(&self) -> &[u8] {
        &self.mmap[self.info_data.clone()]
    }

    /// Returns the contents of the `ELF_WASMTIME_TRAPS` section, or an empty
    /// slice if it wasn't found.
    #[inline]
    pub fn trap_data(&self) -> &[u8] {
        &self.mmap[self.trap_data.clone()]
    }

    /// Publishes the internal ELF image to be ready for execution.
    ///
    /// This method can only be called once and will panic if called twice. It
    /// takes the ELF image that was parsed when this `CodeMemory` was created
    /// and does everything necessary to get it ready for execution, including:
    ///
    /// * Change page protections from read/write to read/execute.
    /// * Register unwinding information with the OS.
    /// * Register this image with the debugger if native DWARF is present.
    ///
    /// After this function executes all JIT code should be ready to execute.
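    ///
    /// A hedged sketch of the intended call sequence (this is an internal
    /// API; `engine` and `mmap` are assumed to already hold a compiled
    /// artifact):
    ///
    /// ```ignore
    /// let mut code = CodeMemory::new(&engine, mmap)?;
    /// code.publish()?;
    /// // The JIT code in `code.text()` is now executable.
    /// ```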
    pub fn publish(&mut self) -> Result<()> {
        assert!(!self.published);
        self.published = true;

        if self.text().is_empty() {
            return Ok(());
        }

        // The unsafety here comes from a few things:
        //
        // * We're actually updating some page protections to executable memory.
        //
        // * We're registering unwinding information which relies on the
        //   correctness of the information in the first place. This applies to
        //   both the actual unwinding tables as well as the validity of the
        //   pointers we pass in.
        unsafe {
            // First, if necessary, apply relocations. This can happen for
            // things like libcalls which happen late in the lowering process
            // that don't go through the Wasm-based libcalls layer that's
            // indirected through the `VMContext`. Note that most modules won't
            // have relocations, so this typically doesn't do anything.
            self.apply_relocations()?;

            // Next freeze the contents of this image by making all of the
            // memory readonly. Nothing after this point should ever be modified
            // so commit everything. For a compiled-in-memory image this will
            // mean IPIs to evict writable mappings from other cores. For
            // loaded-from-disk images this shouldn't result in IPIs so long as
            // there weren't any relocations because nothing should have
            // otherwise written to the image at any point either.
            //
            // Note that if virtual memory is disabled this is skipped because
            // we aren't able to make it readonly, but this is just a
            // defense-in-depth measure and isn't required for correctness.
            #[cfg(has_virtual_memory)]
            if self.mmap.supports_virtual_memory() {
                self.mmap.make_readonly(0..self.mmap.len())?;
            }

            // Switch the executable portion from readonly to read/execute.
            if self.needs_executable {
                if !self.custom_publish()? {
                    if !self.mmap.supports_virtual_memory() {
                        bail!("this target requires virtual memory to be enabled");
                    }

                    #[cfg(has_virtual_memory)]
                    {
                        let text = self.text();

                        use wasmtime_jit_icache_coherence as icache_coherence;

                        // Clear the newly allocated code from the icache if the
                        // processor requires it.
                        //
                        // Do this before marking the memory as R+X: technically we
                        // should be able to do it after, but some CPUs have had
                        // errata about doing this with read-only memory.
                        icache_coherence::clear_cache(text.as_ptr().cast(), text.len())
                            .expect("Failed cache clear");

                        self.mmap
                            .make_executable(self.text.clone(), self.enable_branch_protection)
                            .context("unable to make memory executable")?;

                        // Flush any in-flight instructions from the pipeline
                        icache_coherence::pipeline_flush_mt().expect("Failed pipeline flush");
                    }
                }
            }

            // With all our memory set up use the platform-specific
            // `UnwindRegistration` implementation to inform the general
            // runtime that there's unwinding information available for all
            // our just-published JIT functions.
            self.register_unwind_info()?;

            #[cfg(feature = "debug-builtins")]
            self.register_debug_image()?;
        }

        Ok(())
    }

    fn custom_publish(&mut self) -> Result<bool> {
        if let Some(mem) = self.custom_code_memory.as_ref() {
            let text = self.text();
            // The text section should be aligned to
            // `custom_code_memory.required_alignment()` due to a
            // combination of two invariants:
            //
            // - MmapVec aligns its start address, even in owned-Vec mode; and
            // - The text segment inside the ELF image will be aligned according
            //   to the platform's requirements.
            let text_addr = text.as_ptr() as usize;
            assert_eq!(text_addr & (mem.required_alignment() - 1), 0);

            // The custom code memory handler will ensure the
            // memory is executable and also handle icache
            // coherence.
            mem.publish_executable(text.as_ptr(), text.len())?;
            Ok(true)
        } else {
            Ok(false)
        }
    }

    unsafe fn apply_relocations(&mut self) -> Result<()> {
        if self.relocations.is_empty() {
            return Ok(());
        }

        if self.mmap.is_always_readonly() {
            bail!("Unable to apply relocations to readonly MmapVec");
        }

        for (offset, libcall) in self.relocations.iter() {
            let offset = self.text.start + offset;
            let libcall = match libcall {
                obj::LibCall::FloorF32 => libcalls::relocs::floorf32 as usize,
                obj::LibCall::FloorF64 => libcalls::relocs::floorf64 as usize,
                obj::LibCall::NearestF32 => libcalls::relocs::nearestf32 as usize,
                obj::LibCall::NearestF64 => libcalls::relocs::nearestf64 as usize,
                obj::LibCall::CeilF32 => libcalls::relocs::ceilf32 as usize,
                obj::LibCall::CeilF64 => libcalls::relocs::ceilf64 as usize,
                obj::LibCall::TruncF32 => libcalls::relocs::truncf32 as usize,
                obj::LibCall::TruncF64 => libcalls::relocs::truncf64 as usize,
                obj::LibCall::FmaF32 => libcalls::relocs::fmaf32 as usize,
                obj::LibCall::FmaF64 => libcalls::relocs::fmaf64 as usize,
                #[cfg(target_arch = "x86_64")]
                obj::LibCall::X86Pshufb => libcalls::relocs::x86_pshufb as usize,
                #[cfg(not(target_arch = "x86_64"))]
                obj::LibCall::X86Pshufb => unreachable!(),
            };

            self.mmap
                .as_mut_slice()
                .as_mut_ptr()
                .add(offset)
                .cast::<usize>()
                .write_unaligned(libcall);
        }
        Ok(())
    }

    unsafe fn register_unwind_info(&mut self) -> Result<()> {
        if self.unwind.len() == 0 {
            return Ok(());
        }
        #[cfg(has_host_compiler_backend)]
        {
            let text = self.text();
            let unwind_info = &self.mmap[self.unwind.clone()];
            let registration = crate::runtime::vm::UnwindRegistration::new(
                text.as_ptr(),
                unwind_info.as_ptr(),
                unwind_info.len(),
            )
            .context("failed to create unwind info registration")?;
            self.unwind_registration = Some(registration);
            return Ok(());
        }
        #[cfg(not(has_host_compiler_backend))]
        {
            bail!("should not have unwind info for non-native backend")
        }
    }

    #[cfg(feature = "debug-builtins")]
    fn register_debug_image(&mut self) -> Result<()> {
        if !self.has_native_debug_info {
            return Ok(());
        }

        // TODO-DebugInfo: we're copying the whole image here, which is pretty wasteful.
        // Use the existing memory by teaching code here about relocations in DWARF sections
        // and anything else necessary that is done in "create_gdbjit_image" right now.
        let image = self.mmap().to_vec();
        let text: &[u8] = self.text();
        let bytes = crate::debug::create_gdbjit_image(image, (text.as_ptr(), text.len()))?;
        let reg = crate::runtime::vm::GdbJitImageRegistration::register(bytes);
        self.debug_registration = Some(reg);
        Ok(())
    }

    /// Looks up the given offset within this module's text section and returns
    /// the trap code associated with that instruction, if there is one.
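    ///
    /// A hedged sketch of typical usage, assuming `pc` is a faulting
    /// instruction address already known to fall within `self.text()`:
    ///
    /// ```ignore
    /// let text_offset = pc - code.text().as_ptr() as usize;
    /// if let Some(trap) = code.lookup_trap_code(text_offset) {
    ///     // e.g. an out-of-bounds trap for a wasm load outside linear memory
    /// }
    /// ```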
    pub fn lookup_trap_code(&self, text_offset: usize) -> Option<Trap> {
        lookup_trap_code(self.trap_data(), text_offset)
    }
}

/// Returns the range of `inner` within `outer`, such that `outer[range]` is the
/// same as `inner`.
///
/// This function requires that `inner` is a sub-slice of `outer`; if that
/// isn't true then it will panic.
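///
/// For example (illustrative only):
///
/// ```ignore
/// let outer = [0u8; 8];
/// let inner = &outer[2..5];
/// assert_eq!(subslice_range(inner, &outer), 2..5);
/// ```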
fn subslice_range(inner: &[u8], outer: &[u8]) -> Range<usize> {
    if inner.len() == 0 {
        return 0..0;
    }

    assert!(outer.as_ptr() <= inner.as_ptr());
    assert!((&inner[inner.len() - 1] as *const _) <= (&outer[outer.len() - 1] as *const _));

    let start = inner.as_ptr() as usize - outer.as_ptr() as usize;
    start..start + inner.len()
}