// wasmtime/runtime/code_memory.rs
1//! Memory management for executable code.
2
3use crate::Engine;
4use crate::prelude::*;
5use crate::runtime::vm::MmapVec;
6use alloc::sync::Arc;
7use core::ops::Range;
8use object::read::elf::SectionTable;
9use object::{LittleEndian, SectionIndex, U32Bytes};
10use object::{
11    elf::{FileHeader64, SectionHeader64},
12    endian::Endianness,
13    read::elf::{FileHeader as _, SectionHeader as _},
14};
15use wasmtime_environ::StaticModuleIndex;
16use wasmtime_environ::{Trap, lookup_trap_code, obj};
17use wasmtime_unwinder::ExceptionTable;
18
19/// Management of executable memory within a `MmapVec`
20///
21/// This type consumes ownership of a region of memory and will manage the
22/// executable permissions of the contained JIT code as necessary.
pub struct CodeMemory {
    /// The backing memory owning the entire ELF image: the text section as
    /// well as all of the read-only metadata sections referenced below.
    mmap: MmapVec,
    /// OS-level unwind-info registration for the text section; present only
    /// after `publish` has registered it (and only on native backends).
    #[cfg(has_host_compiler_backend)]
    unwind_registration: Option<crate::runtime::vm::UnwindRegistration>,
    /// GDB JIT-interface registration for native DWARF debug info, created
    /// during `publish` when `.debug_info` was present in the artifact.
    #[cfg(feature = "debug-builtins")]
    debug_registration: Option<crate::runtime::vm::GdbJitImageRegistration>,
    /// Whether `publish` has been called (and not undone by `unpublish`).
    published: bool,
    /// Whether unwind/debug registration has happened; registration sticks
    /// across subsequent unpublish/publish cycles (see `unpublish`).
    registered: bool,
    /// Value of the `obj::ELF_WASM_BTI` section: whether branch protection
    /// should be enabled when making the text section executable.
    enable_branch_protection: bool,
    /// Whether the text section must actually be made executable; cleared
    /// when the `.text` section carries `SH_WASMTIME_NOT_EXECUTED`.
    needs_executable: bool,
    /// Whether a native `.debug_info` section was found in the artifact.
    #[cfg(feature = "debug-builtins")]
    has_native_debug_info: bool,
    /// Optional embedder-provided hook used instead of `mprotect`-style
    /// virtual-memory permission changes.
    custom_code_memory: Option<Arc<dyn CustomCodeMemory>>,

    // Ranges within `self.mmap` of where the particular sections lie.
    text: Range<usize>,
    unwind: Range<usize>,
    trap_data: Range<usize>,
    wasm_data: Range<usize>,
    address_map_data: Range<usize>,
    stack_map_data: Range<usize>,
    exception_data: Range<usize>,
    frame_tables_data: Range<usize>,
    func_name_data: Range<usize>,
    info_data: Range<usize>,
    wasm_dwarf: Range<usize>,
    wasm_bytecode: Range<usize>,
    wasm_bytecode_ends: Range<usize>,
}
52
53impl Drop for CodeMemory {
54    fn drop(&mut self) {
55        // If there is a custom code memory handler, restore the
56        // original (non-executable) state of the memory.
57        //
58        // We do this rather than invoking `unpublish()` because we
59        // want to skip the mprotect() if we natively own the mmap and
60        // are going to munmap soon anyway.
61        if let Some(mem) = self.custom_code_memory.as_ref() {
62            if self.published && self.needs_executable {
63                let text = self.text();
64                mem.unpublish_executable(text.as_ptr(), text.len())
65                    .expect("Executable memory unpublish failed");
66            }
67        }
68
69        // Drop the registrations before `self.mmap` since they (implicitly) refer to it.
70        #[cfg(has_host_compiler_backend)]
71        let _ = self.unwind_registration.take();
72        #[cfg(feature = "debug-builtins")]
73        let _ = self.debug_registration.take();
74    }
75}
76
77fn _assert() {
78    fn _assert_send_sync<T: Send + Sync>() {}
79    _assert_send_sync::<CodeMemory>();
80}
81
82/// Interface implemented by an embedder to provide custom
83/// implementations of code-memory protection and execute permissions.
/// Interface implemented by an embedder to provide custom
/// implementations of code-memory protection and execute permissions.
pub trait CustomCodeMemory: Send + Sync {
    /// The minimal alignment granularity for an address region that
    /// can be made executable.
    ///
    /// Wasmtime does not assume the system page size for this because
    /// custom code-memory protection can be used when all other uses
    /// of virtual memory are disabled.
    fn required_alignment(&self) -> usize;

    /// Publish a region of memory as executable.
    ///
    /// This should update permissions from the default RW
    /// (readable/writable but not executable) to RX
    /// (readable/executable but not writable), enforcing W^X
    /// discipline.
    ///
    /// If the platform requires any data/instruction coherence
    /// action, that should be performed as part of this hook as well.
    ///
    /// `ptr` and `ptr.offset(len)` are guaranteed to be aligned as
    /// per `required_alignment()`.
    fn publish_executable(&self, ptr: *const u8, len: usize) -> crate::Result<()>;

    /// Unpublish a region of memory.
    ///
    /// This should perform the opposite effect of `publish_executable`,
    /// switching a range of memory back from RX (readable/executable)
    /// to RW (readable/writable). It is guaranteed that no code is
    /// running anymore from this region.
    ///
    /// `ptr` and `ptr.offset(len)` are guaranteed to be aligned as
    /// per `required_alignment()`.
    fn unpublish_executable(&self, ptr: *const u8, len: usize) -> crate::Result<()>;
}
118
impl CodeMemory {
    /// Creates a new `CodeMemory` by taking ownership of the provided
    /// `MmapVec`.
    ///
    /// The returned `CodeMemory` manages the internal `MmapVec` and the
    /// `publish` method is used to actually make the memory executable.
    pub fn new(engine: &Engine, mmap: MmapVec) -> Result<Self> {
        // Parse the ELF file header and locate the section headers and the
        // section-name string table within the image.
        let mmap_data = &*mmap;
        let header = FileHeader64::<Endianness>::parse(mmap_data)
            .map_err(obj::ObjectCrateErrorWrapper)
            .context("failed to parse precompiled artifact as an ELF")?;
        let endian = header
            .endian()
            .context("failed to parse header endianness")?;

        let section_headers = header
            .section_headers(endian, mmap_data)
            .context("failed to parse section headers")?;
        let strings = header
            .section_strings(endian, mmap_data, section_headers)
            .context("failed to parse strings table")?;
        let sections = header
            .sections(endian, mmap_data)
            .context("failed to parse sections table")?;

        // Ranges below default to empty (`0..0`) and stay that way for any
        // section absent from the artifact.
        let mut text = 0..0;
        let mut unwind = 0..0;
        let mut enable_branch_protection = None;
        let mut needs_executable = true;
        #[cfg(feature = "debug-builtins")]
        let mut has_native_debug_info = false;
        let mut trap_data = 0..0;
        let mut exception_data = 0..0;
        let mut frame_tables_data = 0..0;
        let mut wasm_data = 0..0;
        let mut address_map_data = 0..0;
        let mut stack_map_data = 0..0;
        let mut func_name_data = 0..0;
        let mut info_data = 0..0;
        let mut wasm_dwarf = 0..0;
        let mut wasm_bytecode = 0..0;
        let mut wasm_bytecode_ends = 0..0;
        // Scan every section, recording where each known Wasmtime section
        // lies within the mmap.
        for section_header in sections.iter() {
            let data = section_header
                .data(endian, mmap_data)
                .map_err(obj::ObjectCrateErrorWrapper)?;
            let name = section_name(endian, strings, section_header)?;
            let range = subslice_range(data, &mmap);

            // Double-check that sections are all aligned properly.
            let section_align = usize::try_from(section_header.sh_addralign(endian))?;
            if section_align != 0 && data.len() != 0 {
                let section_offset = data.as_ptr().addr() - mmap.as_ptr().addr();
                ensure!(
                    section_offset % section_align == 0,
                    "section {name:?} isn't aligned to {section_align:#x}",
                );
            }

            // Check that we don't have any relocations, which would make
            // loading precompiled Wasm modules slower and also force them to
            // get paged into memory from disk.
            //
            // We avoid using things like Cranelift's `floor`, `ceil`,
            // etc... operators in the Wasm-to-CLIF translator specifically to
            // avoid having to do any relocations here. This also ensures that
            // all builtins use the same trampoline mechanism.
            //
            // We do, however, allow relocations in `.debug_*` DWARF sections.
            if let Some(target_section) = reloc_section_target(&sections, section_header, endian)? {
                let target_name = section_name(endian, strings, target_section)?;
                ensure!(
                    target_name.starts_with(".debug_"),
                    "section {target_name:?} has unexpected relocations \
                     (defined in section {name:?})",
                );
            }

            match name {
                obj::ELF_WASM_BTI => match data.len() {
                    // A single byte encodes whether branch protection is on.
                    1 => enable_branch_protection = Some(data[0] != 0),
                    _ => bail!("invalid {name:?} section"),
                },
                ".text" => {
                    text = range;

                    // An artifact may flag its text as never-executed (e.g.
                    // for non-native backends); skip mprotect in that case.
                    if section_header.sh_flags(endian) & obj::SH_WASMTIME_NOT_EXECUTED != 0 {
                        needs_executable = false;
                    }
                }
                #[cfg(has_host_compiler_backend)]
                crate::runtime::vm::UnwindRegistration::SECTION_NAME => unwind = range,
                obj::ELF_WASM_DATA => wasm_data = range,
                obj::ELF_WASMTIME_ADDRMAP => address_map_data = range,
                obj::ELF_WASMTIME_STACK_MAP => stack_map_data = range,
                obj::ELF_WASMTIME_TRAPS => trap_data = range,
                obj::ELF_WASMTIME_EXCEPTIONS => exception_data = range,
                obj::ELF_WASMTIME_FRAMES => frame_tables_data = range,
                obj::ELF_NAME_DATA => func_name_data = range,
                obj::ELF_WASMTIME_INFO => info_data = range,
                obj::ELF_WASMTIME_DWARF => wasm_dwarf = range,
                obj::ELF_WASMTIME_WASM_BYTECODE => wasm_bytecode = range,
                obj::ELF_WASMTIME_WASM_BYTECODE_ENDS => wasm_bytecode_ends = range,

                #[cfg(feature = "debug-builtins")]
                ".debug_info" => has_native_debug_info = true,

                // These sections are expected, but we do not need to retain any
                // info about them.
                "" | ".symtab" | ".strtab" | ".shstrtab" | ".xdata" | obj::ELF_WASM_ENGINE => {
                    log::debug!("ignoring section {name:?}")
                }
                _ if name.starts_with(".debug_") || name.starts_with(".rela.debug_") => {
                    log::debug!("ignoring debug section {name:?}")
                }

                _ => bail!("unexpected section {name:?} in Wasm compilation artifact"),
            }
        }

        // Silence unused `mut` warning.
        #[cfg(not(has_host_compiler_backend))]
        let _ = &mut unwind;

        // Ensure that the exception table is well-formed. This parser
        // construction is cheap: it reads the header and validates
        // ranges but nothing else. We do this only in debug-assertion
        // builds because we otherwise require for safety that the
        // compiled artifact is as-produced-by this version of
        // Wasmtime, and we should always produce a correct exception
        // table (i.e., we are not expecting untrusted data here).
        if cfg!(debug_assertions) {
            let _ = ExceptionTable::parse(&mmap[exception_data.clone()])?;
        }

        Ok(Self {
            mmap,
            #[cfg(has_host_compiler_backend)]
            unwind_registration: None,
            #[cfg(feature = "debug-builtins")]
            debug_registration: None,
            published: false,
            registered: false,
            // The BTI section is mandatory; its absence indicates a
            // malformed or incompatible artifact.
            enable_branch_protection: enable_branch_protection
                .ok_or_else(|| format_err!("missing `{}` section", obj::ELF_WASM_BTI))?,
            needs_executable,
            #[cfg(feature = "debug-builtins")]
            has_native_debug_info,
            custom_code_memory: engine.custom_code_memory().cloned(),
            text,
            unwind,
            trap_data,
            address_map_data,
            stack_map_data,
            exception_data,
            frame_tables_data,
            func_name_data,
            wasm_dwarf,
            info_data,
            wasm_data,
            wasm_bytecode,
            wasm_bytecode_ends,
        })
    }

    /// Returns a reference to the underlying `MmapVec` this memory owns.
    #[inline]
    pub fn mmap(&self) -> &MmapVec {
        &self.mmap
    }

    /// Returns the contents of the text section of the ELF executable this
    /// represents.
    #[inline]
    pub fn text(&self) -> &[u8] {
        &self.mmap[self.text.clone()]
    }

    /// Returns the contents of the `ELF_WASMTIME_DWARF` section.
    #[inline]
    pub fn wasm_dwarf(&self) -> &[u8] {
        &self.mmap[self.wasm_dwarf.clone()]
    }

    /// Returns the data in the `ELF_NAME_DATA` section.
    #[inline]
    pub fn func_name_data(&self) -> &[u8] {
        &self.mmap[self.func_name_data.clone()]
    }

    /// Returns the concatenated list of all data associated with this wasm
    /// module.
    ///
    /// This is used for initialization of memories and all data ranges stored
    /// in a `Module` are relative to the slice returned here.
    #[inline]
    pub fn wasm_data(&self) -> &[u8] {
        &self.mmap[self.wasm_data.clone()]
    }

    /// Returns the encoded address map section used to pass to
    /// `wasmtime_environ::lookup_file_pos`.
    #[inline]
    pub fn address_map_data(&self) -> &[u8] {
        &self.mmap[self.address_map_data.clone()]
    }

    /// Returns the encoded stack map section used to pass to
    /// `wasmtime_environ::StackMap::lookup`.
    pub fn stack_map_data(&self) -> &[u8] {
        &self.mmap[self.stack_map_data.clone()]
    }

    /// Returns the encoded exception-tables section to pass to
    /// `wasmtime_unwinder::ExceptionTable::parse`.
    pub fn exception_tables(&self) -> &[u8] {
        &self.mmap[self.exception_data.clone()]
    }

    /// Returns the encoded frame-tables section to pass to
    /// `wasmtime_environ::FrameTable::parse`.
    pub fn frame_tables(&self) -> &[u8] {
        &self.mmap[self.frame_tables_data.clone()]
    }

    /// Returns the concatenated Wasm bytecode section, or an empty slice if
    /// the artifact was not compiled with `guest-debug` enabled.
    pub fn wasm_bytecode(&self) -> &[u8] {
        &self.mmap[self.wasm_bytecode.clone()]
    }

    /// Returns the Wasm bytecode section end-offset array.
    pub fn wasm_bytecode_ends(&self) -> &[u8] {
        &self.mmap[self.wasm_bytecode_ends.clone()]
    }

    /// Returns the contents of the `ELF_WASMTIME_INFO` section, or an empty
    /// slice if it wasn't found.
    #[inline]
    pub fn wasmtime_info(&self) -> &[u8] {
        &self.mmap[self.info_data.clone()]
    }

    /// Returns the contents of the `ELF_WASMTIME_TRAPS` section, or an empty
    /// slice if it wasn't found.
    #[inline]
    pub fn trap_data(&self) -> &[u8] {
        &self.mmap[self.trap_data.clone()]
    }

    /// Returns the Wasm bytecode section end-offset for a given core
    /// module, or `None` if no bytecode is present.
    ///
    /// # Panics
    ///
    /// Panics if index is out-of-range.
    fn wasm_bytecode_end_for_module(&self, index: StaticModuleIndex) -> Option<usize> {
        if self.wasm_bytecode_ends().is_empty() {
            return None;
        }
        // The section is a packed array of little-endian u32 end offsets,
        // one per core module.
        let ends = self.wasm_bytecode_ends();
        let count = ends.len() / core::mem::size_of::<u32>();
        let (ends, _) = object::slice_from_bytes::<U32Bytes<LittleEndian>>(ends, count)
            .expect("Invalid alignment of `ends` section");
        let index = usize::try_from(index.as_u32()).unwrap();
        Some(usize::try_from(ends[index].get(LittleEndian)).unwrap())
    }

    /// Returns the Wasm bytecode for a core module in this
    /// artifact, or `None` if bytecode was not preserved.
    pub(crate) fn wasm_bytecode_for_module(&self, index: StaticModuleIndex) -> Option<&[u8]> {
        // Module `i`'s bytecode spans from module `i - 1`'s end offset (or 0
        // for the first module) up to module `i`'s own end offset.
        let start = if index.as_u32() == 0 {
            0
        } else {
            self.wasm_bytecode_end_for_module(StaticModuleIndex::from_u32(index.as_u32() - 1))?
        };
        let end = self.wasm_bytecode_end_for_module(index)?;
        Some(&self.wasm_bytecode()[start..end])
    }

    /// Publishes the internal ELF image to be ready for execution.
    ///
    /// This method can only be called when the image is not published (its
    /// default state) and will panic if called when already
    /// published. This will parse the ELF image from the original
    /// `MmapVec` and do everything necessary to get it ready for
    /// execution, including:
    ///
    /// * Change page protections from read/write to read/execute.
    /// * Register unwinding information with the OS
    /// * Register this image with the debugger if native DWARF is present
    ///
    /// After this function executes all JIT code should be ready to execute.
    ///
    /// The action may be reversed by calling [`Self::unpublish`], as long
    /// as that method's safety requirements are upheld.
    pub fn publish(&mut self) -> Result<()> {
        assert!(!self.published);
        self.published = true;

        if self.text().is_empty() {
            return Ok(());
        }

        // The unsafety here comes from a few things:
        //
        // * We're actually updating some page protections to executable memory.
        //
        // * We're registering unwinding information which relies on the
        //   correctness of the information in the first place. This applies to
        //   both the actual unwinding tables as well as the validity of the
        //   pointers we pass in itself.
        unsafe {
            // Next freeze the contents of this image by making all of the
            // memory readonly. Nothing after this point should ever be modified
            // so commit everything. For a compiled-in-memory image this will
            // mean IPIs to evict writable mappings from other cores. For
            // loaded-from-disk images this shouldn't result in IPIs so long as
            // there weren't any relocations because nothing should have
            // otherwise written to the image at any point either.
            //
            // Note that if virtual memory is disabled this is skipped because
            // we aren't able to make it readonly, but this is just a
            // defense-in-depth measure and isn't required for correctness.
            #[cfg(has_virtual_memory)]
            if self.mmap.supports_virtual_memory() {
                self.mmap.make_readonly(0..self.mmap.len())?;
            }

            // Switch the executable portion from readonly to read/execute.
            if self.needs_executable {
                if !self.custom_publish()? {
                    if !self.mmap.supports_virtual_memory() {
                        bail!("this target requires virtual memory to be enabled");
                    }
                    #[cfg(has_virtual_memory)]
                    self.mmap
                        .make_executable(self.text.clone(), self.enable_branch_protection)
                        .context("unable to make memory executable")?;
                }
            }

            // Registration only happens once; subsequent unpublish/publish
            // cycles keep the original registrations (see `unpublish`).
            if !self.registered {
                // With all our memory set up use the platform-specific
                // `UnwindRegistration` implementation to inform the general
                // runtime that there's unwinding information available for all
                // our just-published JIT functions.
                self.register_unwind_info()?;

                #[cfg(feature = "debug-builtins")]
                self.register_debug_image()?;
                self.registered = true;
            }
        }

        Ok(())
    }

    /// Attempts to publish the text section via the custom code-memory
    /// handler, if one is installed.
    ///
    /// Returns `Ok(true)` when a handler was present and performed the
    /// transition, `Ok(false)` when no handler is installed (the caller
    /// must then fall back to virtual-memory protection changes).
    fn custom_publish(&mut self) -> Result<bool> {
        if let Some(mem) = self.custom_code_memory.as_ref() {
            let text = self.text();
            // The text section should be aligned to
            // `custom_code_memory.required_alignment()` due to a
            // combination of two invariants:
            //
            // - MmapVec aligns its start address, even in owned-Vec mode; and
            // - The text segment inside the ELF image will be aligned according
            //   to the platform's requirements.
            let text_addr = text.as_ptr() as usize;
            assert_eq!(text_addr & (mem.required_alignment() - 1), 0);

            // The custom code memory handler will ensure the
            // memory is executable and also handle icache
            // coherence.
            mem.publish_executable(text.as_ptr(), text.len())?;
            Ok(true)
        } else {
            Ok(false)
        }
    }

    /// "Unpublish" code memory (transition it from executable to read/writable).
    ///
    /// This may be used to edit the code image, as long as the
    /// overall size of the memory remains the same. Note the hazards
    /// inherent in editing code that may have been executed: any
    /// stack frames with PC still active in this code must be
    /// suspended (e.g., called into a hostcall that is then invoking
    /// this method, or async-yielded) and any active PC values must
    /// point to valid instructions. Thus this is mostly useful for
    /// patching in-place at particular sites, such as by the use of
    /// Cranelift's `patchable_call` instruction.
    ///
    /// If this fails, then the memory remains executable.
    pub fn unpublish(&mut self) -> Result<()> {
        assert!(self.published);
        self.published = false;

        if self.text().is_empty() {
            return Ok(());
        }

        if self.custom_unpublish()? {
            return Ok(());
        }

        if !self.mmap.supports_virtual_memory() {
            bail!("this target requires virtual memory to be enabled");
        }

        // SAFETY: we are guaranteed by our own safety conditions that
        // we have exclusive access to this code and can change its
        // permissions (removing the execute bit) without causing
        // problems.
        #[cfg(has_virtual_memory)]
        unsafe {
            self.mmap.make_readwrite(0..self.mmap.len())?;
        }

        // Note that we do *not* unregister: we expect unpublish
        // to be used for temporary edits, so we want the
        // registration to "stick" after the initial publish and
        // not toggle in subsequent unpublish/publish cycles.

        Ok(())
    }

    /// Attempts to unpublish the text section via the custom code-memory
    /// handler, if one is installed.
    ///
    /// Returns `Ok(true)` when a handler was present and handled the
    /// transition, `Ok(false)` when no handler is installed.
    fn custom_unpublish(&mut self) -> Result<bool> {
        if let Some(mem) = self.custom_code_memory.as_ref() {
            let text = self.text();
            mem.unpublish_executable(text.as_ptr(), text.len())?;
            Ok(true)
        } else {
            Ok(false)
        }
    }

    /// Return a mutable borrow to the code, suitable for editing.
    ///
    /// Must not be published.
    ///
    /// # Panics
    ///
    /// This method panics if the code has been published (and not
    /// subsequently unpublished).
    pub fn text_mut(&mut self) -> &mut [u8] {
        assert!(!self.published);
        // SAFETY: we assert !published, which means we either have
        // not yet applied readonly + execute permissions, or we have
        // undone that and flipped back to read-write via unpublish.
        unsafe { &mut self.mmap.as_mut_slice()[self.text.clone()] }
    }

    /// Registers the unwind-info section (if any) with the platform's
    /// unwinder so stack traces can unwind through JIT frames.
    ///
    /// # Safety
    ///
    /// The unwind section's contents and the text pointer it describes
    /// must be valid; see the caller in `publish`.
    unsafe fn register_unwind_info(&mut self) -> Result<()> {
        if self.unwind.len() == 0 {
            return Ok(());
        }
        #[cfg(has_host_compiler_backend)]
        {
            let text = self.text();
            let unwind_info = &self.mmap[self.unwind.clone()];
            let registration = unsafe {
                crate::runtime::vm::UnwindRegistration::new(
                    text.as_ptr(),
                    unwind_info.as_ptr(),
                    unwind_info.len(),
                )
                .context("failed to create unwind info registration")?
            };
            self.unwind_registration = Some(registration);
            return Ok(());
        }
        #[cfg(not(has_host_compiler_backend))]
        {
            bail!("should not have unwind info for non-native backend")
        }
    }

    /// Registers this image with the debugger (GDB JIT interface) when the
    /// artifact carries native DWARF debug info.
    #[cfg(feature = "debug-builtins")]
    fn register_debug_image(&mut self) -> Result<()> {
        if !self.has_native_debug_info {
            return Ok(());
        }

        // TODO-DebugInfo: we're copying the whole image here, which is pretty wasteful.
        // Use the existing memory by teaching code here about relocations in DWARF sections
        // and anything else necessary that is done in "create_gdbjit_image" right now.
        let image = self.mmap().to_vec();
        let text: &[u8] = self.text();
        let bytes = crate::native_debug::create_gdbjit_image(image, (text.as_ptr(), text.len()))?;
        let reg = crate::runtime::vm::GdbJitImageRegistration::register(bytes);
        self.debug_registration = Some(reg);
        Ok(())
    }

    /// Looks up the given offset within this module's text section and returns
    /// the trap code associated with that instruction, if there is one.
    pub fn lookup_trap_code(&self, text_offset: usize) -> Option<Trap> {
        lookup_trap_code(self.trap_data(), text_offset)
    }

    /// Get the raw address range of this CodeMemory.
    pub(crate) fn raw_addr_range(&self) -> Range<usize> {
        let start = self.text().as_ptr().addr();
        let end = start + self.text().len();
        start..end
    }

    /// Create a "deep clone": a separate CodeMemory for the same code
    /// that can be patched or mutated independently. Also returns a
    /// "metadata and location" handle that can be registered with the
    /// global module registry and used for trap metadata lookups.
    #[cfg(feature = "debug")]
    pub(crate) fn deep_clone(self: &Arc<Self>, engine: &Engine) -> Result<CodeMemory> {
        let mmap = self.mmap.deep_clone()?;
        Self::new(engine, mmap)
    }
}
637
638fn section_name<'a>(
639    endian: Endianness,
640    strings: object::StringTable<'a>,
641    section_header: &SectionHeader64<Endianness>,
642) -> Result<&'a str> {
643    let name = section_header
644        .name(endian, strings)
645        .map_err(obj::ObjectCrateErrorWrapper)?;
646    Ok(str::from_utf8(name).context("invalid section name in Wasm compilation artifact")?)
647}
648
649fn is_reloc_section(section_header: &SectionHeader64<Endianness>, endian: Endianness) -> bool {
650    let sh_type = section_header.sh_type(endian);
651    matches!(
652        sh_type,
653        object::elf::SHT_REL | object::elf::SHT_RELA | object::elf::SHT_CREL
654    )
655}
656
657fn reloc_section_target<'a>(
658    sections: &'a SectionTable<'a, FileHeader64<Endianness>, &'a [u8]>,
659    section: &'a SectionHeader64<Endianness>,
660    endian: Endianness,
661) -> Result<Option<&'a SectionHeader64<Endianness>>> {
662    if !is_reloc_section(&section, endian) {
663        return Ok(None);
664    }
665
666    let sh_info = section.info_link(endian);
667
668    // Dynamic relocation.
669    if sh_info == SectionIndex(0) {
670        return Ok(None);
671    }
672
673    ensure!(
674        sh_info.0 < sections.len(),
675        "invalid ELF `sh_info` for relocation section",
676    );
677
678    Ok(Some(sections.section(sh_info)?))
679}
680
681/// Returns the range of `inner` within `outer`, such that `outer[range]` is the
682/// same as `inner`.
683///
684/// This method requires that `inner` is a sub-slice of `outer`, and if that
685/// isn't true then this method will panic.
/// Returns the range of `inner` within `outer`, such that `outer[range]` is the
/// same as `inner`.
///
/// This method requires that `inner` is a sub-slice of `outer`, and if that
/// isn't true then this method will panic.
fn subslice_range(inner: &[u8], outer: &[u8]) -> Range<usize> {
    // An empty slice carries no usable address (it may be dangling), so it
    // always maps to the empty range.
    if inner.is_empty() {
        return 0..0;
    }

    // Containment checks: `inner` must start at-or-after `outer`'s start and
    // its last byte must not lie past `outer`'s last byte.
    assert!(outer.as_ptr() <= inner.as_ptr());
    assert!((&inner[inner.len() - 1] as *const _) <= (&outer[outer.len() - 1] as *const _));

    let start = inner.as_ptr() as usize - outer.as_ptr() as usize;
    start..start + inner.len()
}