wasmtime/runtime/vm/traphandlers/backtrace.rs

//! Backtrace and stack walking functionality for Wasm.
//!
//! Walking the Wasm stack consists of
//!
//! 1. identifying sequences of contiguous Wasm frames on the stack
//!    (i.e. skipping over native host frames), and
//!
//! 2. walking the Wasm frames within such a sequence.
//!
//! To perform (1) we maintain the entry stack pointer (SP) and exit frame
//! pointer (FP) and program counter (PC) each time we call into Wasm and Wasm
//! calls into the host via trampolines (see
//! `crates/wasmtime/src/runtime/vm/trampolines`). The most recent entry is
//! stored in `VMStoreContext` and older entries are saved in
//! `CallThreadState`. This lets us identify ranges of contiguous Wasm frames on
//! the stack.
//!
//! To solve (2) and walk the Wasm frames within a region of contiguous Wasm
//! frames on the stack, we configure Cranelift's `preserve_frame_pointers =
//! true` setting. Then we can do simple frame pointer traversal starting at the
//! exit FP and stopping once we reach the entry SP (meaning that the next older
//! frame is a host frame).
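//!
//! As a rough sketch (illustrative only; `visit_frame` is a made-up name and
//! the word-sized frame layout is an assumption about the frame pointer
//! convention Cranelift preserves on supported architectures), that traversal
//! is equivalent to:
//!
//! ```ignore
//! let mut fp = last_wasm_exit_fp;
//! while fp != last_wasm_entry_fp {
//!     // Assumed layout: the saved previous FP lives at offset 0 from the
//!     // current FP and the return address lives one word above it.
//!     let pc = *((fp as *const usize).add(1));
//!     visit_frame(pc, fp);
//!     fp = *(fp as *const usize);
//! }
//! ```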

use crate::prelude::*;
use crate::runtime::store::StoreOpaque;
use crate::runtime::vm::{
    traphandlers::{tls, CallThreadState},
    Unwind, VMStoreContext,
};
use core::ops::ControlFlow;

/// A WebAssembly stack trace.
#[derive(Debug)]
pub struct Backtrace(Vec<Frame>);

/// A stack frame within a Wasm stack trace.
#[derive(Debug)]
pub struct Frame {
    pc: usize,
    #[cfg_attr(
        not(feature = "gc"),
        expect(dead_code, reason = "not worth #[cfg] annotations to remove")
    )]
    fp: usize,
}

impl Frame {
    /// Get this frame's program counter.
    pub fn pc(&self) -> usize {
        self.pc
    }

    /// Get this frame's frame pointer.
    #[cfg(feature = "gc")]
    pub fn fp(&self) -> usize {
        self.fp
    }
}

impl Backtrace {
    /// Returns an empty backtrace
    pub fn empty() -> Backtrace {
        Backtrace(Vec::new())
    }

    /// Capture the current Wasm stack in a backtrace.
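    ///
    /// A hypothetical usage sketch (this is a crate-internal API; the snippet
    /// is for illustration only and assumes a `store` is in scope):
    ///
    /// ```ignore
    /// let backtrace = Backtrace::new(store);
    /// for frame in backtrace.frames() {
    ///     // Each frame records the program counter of one Wasm frame.
    ///     log::trace!("wasm frame at pc = {:#x}", frame.pc());
    /// }
    /// ```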
    pub fn new(store: &StoreOpaque) -> Backtrace {
        let vm_store_context = store.vm_store_context();
        let unwind = store.unwinder();
        tls::with(|state| match state {
            Some(state) => unsafe {
                Self::new_with_trap_state(vm_store_context, unwind, state, None)
            },
            None => Backtrace(vec![]),
        })
    }

    /// Capture the current Wasm stack trace.
    ///
    /// If Wasm hit a trap, and we are calling this from the trap handler, then
    /// the Wasm exit trampoline didn't run, and we use the provided PC and FP
    /// instead of looking them up in `VMStoreContext`.
    pub(crate) unsafe fn new_with_trap_state(
        vm_store_context: *const VMStoreContext,
        unwind: &dyn Unwind,
        state: &CallThreadState,
        trap_pc_and_fp: Option<(usize, usize)>,
    ) -> Backtrace {
        let mut frames = vec![];
        Self::trace_with_trap_state(vm_store_context, unwind, state, trap_pc_and_fp, |frame| {
            frames.push(frame);
            ControlFlow::Continue(())
        });
        Backtrace(frames)
    }

    /// Walk the current Wasm stack, calling `f` for each frame we walk.
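    ///
    /// A hypothetical sketch of a caller (illustration only; `inspect_frame`
    /// is a made-up helper):
    ///
    /// ```ignore
    /// Backtrace::trace(store, |frame| {
    ///     // Visit each Wasm frame, e.g. to locate on-stack GC roots.
    ///     inspect_frame(frame.pc(), frame.fp());
    ///     ControlFlow::Continue(())
    /// });
    /// ```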
    #[cfg(feature = "gc")]
    pub fn trace(store: &StoreOpaque, f: impl FnMut(Frame) -> ControlFlow<()>) {
        let vm_store_context = store.vm_store_context();
        let unwind = store.unwinder();
        tls::with(|state| match state {
            Some(state) => unsafe {
                Self::trace_with_trap_state(vm_store_context, unwind, state, None, f)
            },
            None => {}
        });
    }

    /// Walk the current Wasm stack, calling `f` for each frame we walk.
    ///
    /// If Wasm hit a trap, and we are calling this from the trap handler, then
    /// the Wasm exit trampoline didn't run, and we use the provided PC and FP
    /// instead of looking them up in `VMStoreContext`.
    pub(crate) unsafe fn trace_with_trap_state(
        vm_store_context: *const VMStoreContext,
        unwind: &dyn Unwind,
        state: &CallThreadState,
        trap_pc_and_fp: Option<(usize, usize)>,
        mut f: impl FnMut(Frame) -> ControlFlow<()>,
    ) {
        log::trace!("====== Capturing Backtrace ======");

        let (last_wasm_exit_pc, last_wasm_exit_fp) = match trap_pc_and_fp {
            // If we exited Wasm by catching a trap, then the Wasm-to-host
            // trampoline did not get a chance to save the last Wasm PC and FP,
            // and we need to use the plumbed-through values instead.
            Some((pc, fp)) => {
                assert!(core::ptr::eq(
                    vm_store_context,
                    state.vm_store_context.as_ptr()
                ));
                (pc, fp)
            }
            // Either there is no Wasm currently on the stack, or we exited Wasm
            // through the Wasm-to-host trampoline.
            None => {
                let pc = *(*vm_store_context).last_wasm_exit_pc.get();
                let fp = *(*vm_store_context).last_wasm_exit_fp.get();
                (pc, fp)
            }
        };

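        // Build one (exit PC, exit FP, entry FP) triple per contiguous region
        // of Wasm frames on the stack: the most recent region's values come
        // from the `VMStoreContext` (or from the trap PC/FP above), and each
        // older region's values were saved into a `CallThreadState` when the
        // host re-entered Wasm. A zero PC marks the end of this store's
        // activations.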
        let activations = core::iter::once((
            last_wasm_exit_pc,
            last_wasm_exit_fp,
            *(*vm_store_context).last_wasm_entry_fp.get(),
        ))
        .chain(
            state
                .iter()
                .filter(|state| core::ptr::eq(vm_store_context, state.vm_store_context.as_ptr()))
                .map(|state| {
                    (
                        state.old_last_wasm_exit_pc(),
                        state.old_last_wasm_exit_fp(),
                        state.old_last_wasm_entry_fp(),
                    )
                }),
        )
        .take_while(|&(pc, fp, sp)| {
            if pc == 0 {
                debug_assert_eq!(fp, 0);
                debug_assert_eq!(sp, 0);
            }
            pc != 0
        });

        for (pc, fp, sp) in activations {
            if let ControlFlow::Break(()) = Self::trace_through_wasm(unwind, pc, fp, sp, &mut f) {
                log::trace!("====== Done Capturing Backtrace (closure break) ======");
                return;
            }
        }

        log::trace!("====== Done Capturing Backtrace (reached end of activations) ======");
    }

    /// Walk through a contiguous sequence of Wasm frames starting with the
    /// frame at the given PC and FP and ending at `trampoline_fp`.
    unsafe fn trace_through_wasm(
        unwind: &dyn Unwind,
        mut pc: usize,
        mut fp: usize,
        trampoline_fp: usize,
        mut f: impl FnMut(Frame) -> ControlFlow<()>,
    ) -> ControlFlow<()> {
        log::trace!("=== Tracing through contiguous sequence of Wasm frames ===");
        log::trace!("trampoline_fp = 0x{:016x}", trampoline_fp);
        log::trace!("   initial pc = 0x{:016x}", pc);
        log::trace!("   initial fp = 0x{:016x}", fp);

        // We already checked for this case in the `trace_with_trap_state`
        // caller.
        assert_ne!(pc, 0);
        assert_ne!(fp, 0);
        assert_ne!(trampoline_fp, 0);

        // This loop will walk the linked list of frame pointers starting at
        // `fp` and going up until `trampoline_fp`. We know that both `fp` and
        // `trampoline_fp` are "trusted values", i.e. generated and maintained
        // by Cranelift. This means that it should be safe to walk the linked
        // list of pointers and inspect Wasm frames.
        //
        // Note, though, that any frames outside of this range are not
        // guaranteed to have valid frame pointers. For example native code
        // might be using the frame pointer as a general purpose register. Thus
        // we need to be careful to only walk frame pointers in this one
        // contiguous linked list.
        //
        // To know when to stop iteration, note that all architectures' stacks
        // currently look something like this:
        //
        //     | ...               |
        //     | Native Frames     |
        //     | ...               |
        //     |-------------------|
        //     | ...               | <-- Trampoline FP            |
        //     | Trampoline Frame  |                              |
        //     | ...               | <-- Trampoline SP            |
        //     |-------------------|                            Stack
        //     | Return Address    |                            Grows
        //     | Previous FP       | <-- Wasm FP                Down
        //     | ...               |                              |
        //     | Wasm Frames       |                              |
        //     | ...               |                              V
        //
        // The trampoline records its own frame pointer (`trampoline_fp`),
        // which is guaranteed to be above all Wasm frames. To check when we've
        // reached the trampoline frame, it is therefore sufficient to check
        // when the next frame pointer is equal to `trampoline_fp`. Once that's
        // hit then we know that the entire linked list has been traversed.
        //
        // Note that this loop might not execute at all. For example, if the
        // entry trampoline called Wasm which `return_call`'d an imported
        // function (an exit trampoline), then `fp == trampoline_fp` on entry
        // to this function, meaning the loop body never runs.
        while fp != trampoline_fp {
            // At the start of each iteration of the loop, we know that `fp` is
            // a frame pointer from Wasm code. Therefore, we know it is not
            // being used as an extra general-purpose register, and it is safe
            // to dereference to get the PC and the next older frame pointer.
            //
            // The stack also grows down, and therefore any frame pointer we are
            // dealing with should be less than the frame pointer on entry to
            // Wasm. Finally also assert that it's aligned correctly as an
            // additional sanity check.
            assert!(trampoline_fp > fp, "{trampoline_fp:#x} > {fp:#x}");
            unwind.assert_fp_is_aligned(fp);

            log::trace!("--- Tracing through one Wasm frame ---");
            log::trace!("pc = {:p}", pc as *const ());
            log::trace!("fp = {:p}", fp as *const ());

            f(Frame { pc, fp })?;

            pc = unwind.get_next_older_pc_from_fp(fp);

            // We rely on this offset being zero for all supported architectures
            // in `crates/cranelift/src/component/compiler.rs` when we set the
            // Wasm exit FP. If this ever changes, we will need to update that
            // code as well!
            assert_eq!(unwind.next_older_fp_from_fp_offset(), 0);

            // Get the next older frame pointer from the current Wasm frame
            // pointer.
            let next_older_fp = *(fp as *mut usize).add(unwind.next_older_fp_from_fp_offset());

            // Because the stack always grows down, the older FP must be greater
            // than the current FP.
            assert!(next_older_fp > fp, "{next_older_fp:#x} > {fp:#x}");
            fp = next_older_fp;
        }

        log::trace!("=== Done tracing contiguous sequence of Wasm frames ===");
        ControlFlow::Continue(())
    }

    /// Iterate over the frames inside this backtrace.
    pub fn frames<'a>(
        &'a self,
    ) -> impl ExactSizeIterator<Item = &'a Frame> + DoubleEndedIterator + 'a {
        self.0.iter()
    }
}