wasmtime/runtime/vm/traphandlers/backtrace.rs

//! Backtrace and stack walking functionality for Wasm.
//!
//! Walking the Wasm stack consists of
//!
//! 1. identifying sequences of contiguous Wasm frames on the stack
//!    (i.e. skipping over native host frames), and
//!
//! 2. walking the Wasm frames within such a sequence.
//!
//! To perform (1) we maintain the entry stack pointer (SP) and the exit frame
//! pointer (FP) and program counter (PC) each time we call into Wasm and Wasm
//! calls into the host via trampolines (see
//! `crates/wasmtime/src/runtime/vm/trampolines`). The most recent entry is
//! stored in `VMStoreContext` and older entries are saved in
//! `CallThreadState`. This lets us identify ranges of contiguous Wasm frames on
//! the stack.
//!
//! To solve (2) and walk the Wasm frames within a region of contiguous Wasm
//! frames on the stack, we configure Cranelift's `preserve_frame_pointers =
//! true` setting. Then we can do simple frame pointer traversal starting at the
//! exit FP and stopping once we reach the entry SP (meaning that the next older
//! frame is a host frame).
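//!
//! As an illustrative pseudocode sketch only (the real traversal lives in
//! `wasmtime_unwinder`; `read_return_address` and `read_caller_fp` are
//! hypothetical stand-ins for the architecture-specific loads at fixed
//! offsets from FP):
//!
//! ```ignore
//! let mut pc = exit_pc;
//! let mut fp = exit_fp;
//! while fp != entry_sp {
//!     visit_frame(pc, fp);
//!     // Because frame pointers are preserved, each Wasm frame records its
//!     // return address and its caller's FP, so two loads advance the walk.
//!     pc = read_return_address(fp);
//!     fp = read_caller_fp(fp);
//! }
//! ```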

#[cfg(feature = "debug")]
use crate::StoreContextMut;
use crate::prelude::*;
use crate::runtime::store::StoreOpaque;
use crate::runtime::vm::stack_switching::VMStackChain;
use crate::runtime::vm::{
    Unwind, VMStoreContext,
    traphandlers::{CallThreadState, tls},
};
#[cfg(all(feature = "gc", feature = "stack-switching"))]
use crate::vm::stack_switching::{VMContRef, VMStackState};
use core::ops::ControlFlow;
use wasmtime_unwinder::Frame;

/// A WebAssembly stack trace.
#[derive(Debug)]
pub struct Backtrace(Vec<Frame>);

impl Backtrace {
    /// Returns an empty backtrace
    pub fn empty() -> Backtrace {
        Backtrace(Vec::new())
    }

    /// Capture the current Wasm stack in a backtrace.
    pub fn new(store: &StoreOpaque) -> Backtrace {
        let vm_store_context = store.vm_store_context();
        let unwind = store.unwinder();
        tls::with(|state| match state {
            Some(state) => unsafe {
                Self::new_with_trap_state(vm_store_context, unwind, state, None)
            },
            None => Backtrace(vec![]),
        })
    }

    /// Capture the current Wasm stack trace.
    ///
    /// If Wasm hit a trap and we are calling this from the trap handler, then
    /// the Wasm exit trampoline didn't run, and we use the provided PC and FP
    /// instead of looking them up in `VMStoreContext`.
    pub(crate) unsafe fn new_with_trap_state(
        vm_store_context: *const VMStoreContext,
        unwind: &dyn Unwind,
        state: &CallThreadState,
        trap_pc_and_fp: Option<(usize, usize)>,
    ) -> Backtrace {
        let mut frames = vec![];
        unsafe {
            Self::trace_with_trap_state(vm_store_context, unwind, state, trap_pc_and_fp, |frame| {
                frames.push(frame);
                ControlFlow::Continue(())
            });
        }
        Backtrace(frames)
    }

    /// Walk the current Wasm stack, calling `f` for each frame we walk.
    #[cfg(feature = "gc")]
    pub fn trace(store: &StoreOpaque, f: impl FnMut(Frame) -> ControlFlow<()>) {
        let vm_store_context = store.vm_store_context();
        let unwind = store.unwinder();
        tls::with(|state| match state {
            Some(state) => unsafe {
                Self::trace_with_trap_state(vm_store_context, unwind, state, None, f)
            },
            None => {}
        });
    }

    // Walk the stack of the given continuation, which must be suspended, and
    // all of its parent continuations (if any).
    #[cfg(all(feature = "gc", feature = "stack-switching"))]
    pub fn trace_suspended_continuation(
        store: &StoreOpaque,
        continuation: &VMContRef,
        f: impl FnMut(Frame) -> ControlFlow<()>,
    ) {
        log::trace!("====== Capturing Backtrace (suspended continuation) ======");

        assert_eq!(
            continuation.common_stack_information.state,
            VMStackState::Suspended
        );

        let unwind = store.unwinder();

        let pc = continuation.stack.control_context_instruction_pointer();
        let fp = continuation.stack.control_context_frame_pointer();
        let trampoline_fp = continuation
            .common_stack_information
            .limits
            .last_wasm_entry_fp;

        unsafe {
            // FIXME(frank-emrich) Casting from *const to *mut pointer is
            // terrible, but we won't actually modify any of the continuations
            // here.
            let stack_chain =
                VMStackChain::Continuation(continuation as *const VMContRef as *mut VMContRef);

            if let ControlFlow::Break(()) =
                Self::trace_through_continuations(unwind, stack_chain, pc, fp, trampoline_fp, f)
            {
                log::trace!("====== Done Capturing Backtrace (closure break) ======");
                return;
            }
        }

        log::trace!("====== Done Capturing Backtrace (reached end of stack chain) ======");
    }

    /// Walk the current Wasm stack, calling `f` for each frame we walk.
    ///
    /// If Wasm hit a trap and we are calling this from the trap handler, then
    /// the Wasm exit trampoline didn't run, and we use the provided PC and FP
    /// instead of looking them up in `VMStoreContext`.
    ///
    /// We define "current Wasm stack" here as "all activations
    /// associated with the given store". That is: if we have a stack like
    ///
    /// ```plain
    ///     host --> (Wasm functions in store A) --> host --> (Wasm functions in store B) --> host
    ///          --> (Wasm functions in store A) --> host --> call `trace_with_trap_state` with store A
    /// ```
    ///
    /// then we will see the first and third Wasm activations (those
    /// associated with store A), but not the one associated with store B. In
    /// essence, activations from another store might as well be some
    /// other opaque host code; we don't know anything about them.
    pub(crate) unsafe fn trace_with_trap_state(
        vm_store_context: *const VMStoreContext,
        unwind: &dyn Unwind,
        state: &CallThreadState,
        trap_pc_and_fp: Option<(usize, usize)>,
        mut f: impl FnMut(Frame) -> ControlFlow<()>,
    ) {
        log::trace!("====== Capturing Backtrace ======");

        let (last_wasm_exit_pc, last_wasm_exit_fp) = match trap_pc_and_fp {
            // If we exited Wasm by catching a trap, then the Wasm-to-host
            // trampoline did not get a chance to save the last Wasm PC and FP,
            // and we need to use the plumbed-through values instead.
            Some((pc, fp)) => {
                assert!(core::ptr::eq(
                    vm_store_context,
                    state.vm_store_context.as_ptr()
                ));
                (pc, fp)
            }
            // Either there is no Wasm currently on the stack, or we exited Wasm
            // through the Wasm-to-host trampoline.
            None => unsafe {
                let pc = *(*vm_store_context).last_wasm_exit_pc.get();
                let fp = (*vm_store_context).last_wasm_exit_fp();
                (pc, fp)
            },
        };

        let stack_chain = unsafe { (*(*vm_store_context).stack_chain.get()).clone() };

        // The first value in `activations` is for the most recently running
        // wasm. We thus provide the stack chain of `first_wasm_state` to
        // traverse the potential continuation stacks. For the subsequent
        // activations, we unconditionally use `None` as the corresponding stack
        // chain. This is justified because only the most recent execution of
        // wasm may execute off the initial stack (see comments in
        // `wasmtime::invoke_wasm_and_catch_traps` for details).
        let activations =
            core::iter::once((stack_chain, last_wasm_exit_pc, last_wasm_exit_fp, unsafe {
                *(*vm_store_context).last_wasm_entry_fp.get()
            }))
            .chain(
                state
                    .iter()
                    .flat_map(|state| state.iter())
                    .filter(|state| {
                        core::ptr::eq(vm_store_context, state.vm_store_context.as_ptr())
                    })
                    .map(|state| unsafe {
                        (
                            state.old_stack_chain(),
                            state.old_last_wasm_exit_pc(),
                            state.old_last_wasm_exit_fp(),
                            state.old_last_wasm_entry_fp(),
                        )
                    }),
            )
            .take_while(|(chain, pc, fp, sp)| {
                if *pc == 0 {
                    debug_assert_eq!(*fp, 0);
                    debug_assert_eq!(*sp, 0);
                } else {
                    debug_assert_ne!(chain.clone(), VMStackChain::Absent)
                }
                *pc != 0
            });

        for (chain, pc, fp, sp) in activations {
            let res =
                unsafe { Self::trace_through_continuations(unwind, chain, pc, fp, sp, &mut f) };
            if let ControlFlow::Break(()) = res {
                log::trace!("====== Done Capturing Backtrace (closure break) ======");
                return;
            }
        }

        log::trace!("====== Done Capturing Backtrace (reached end of activations) ======");
    }

    /// Traces through a sequence of stacks, creating a backtrace for each one,
    /// beginning at the given `pc` and `fp`.
    ///
    /// If `chain` is `InitialStack`, we are tracing through the initial stack,
    /// and this function behaves like `trace_through_wasm`.
    /// Otherwise, we can interpret `chain` as a linked list of stacks, which
    /// ends with the initial stack. We then trace through each of these stacks
    /// individually, up to (and including) the initial stack.
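    ///
    /// As an illustrative sketch (not normative; `continuations_vec` and
    /// `stack_limits_vec` refer to the vectors collected in the body of this
    /// function), the walks performed for a chain of continuations are:
    ///
    /// ```plain
    /// walk 0: pc, fp, trampoline_fp                      (currently running stack)
    /// walk 1: continuations_vec[0]'s control context,
    ///         bounded by stack_limits_vec[1]             (its parent stack)
    /// walk 2: continuations_vec[1]'s control context,
    ///         bounded by stack_limits_vec[2]             (grandparent stack)
    /// ...                                                (ending at the initial stack)
    /// ```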
    unsafe fn trace_through_continuations(
        unwind: &dyn Unwind,
        chain: VMStackChain,
        pc: usize,
        fp: usize,
        trampoline_fp: usize,
        mut f: impl FnMut(Frame) -> ControlFlow<()>,
    ) -> ControlFlow<()> {
        use crate::runtime::vm::stack_switching::{VMContRef, VMStackLimits};

        // Handle the stack that is currently running (which may be a
        // continuation or the initial stack).
        unsafe {
            wasmtime_unwinder::visit_frames(unwind, pc, fp, trampoline_fp, &mut f)?;
        }

        // Note that the rest of this function has no effect if `chain` is
        // `VMStackChain::InitialStack(_)` (i.e., there is only one stack to
        // trace through: the initial stack).

        assert_ne!(chain, VMStackChain::Absent);
        let stack_limits_vec: Vec<*mut VMStackLimits> =
            unsafe { chain.clone().into_stack_limits_iter().collect() };
        let continuations_vec: Vec<*mut VMContRef> =
            unsafe { chain.clone().into_continuation_iter().collect() };

        // The VMStackLimits of the currently running stack (whether that's a
        // continuation or the initial stack) contains undefined data; the
        // information about that stack is saved in the Store's
        // `VMStoreContext` and was already handled at the top of this
        // function. That's why we ignore `stack_limits_vec[0]`.
        //
        // Note that a continuation stack's control context stores
        // information about how to resume execution *in its parent*. Thus,
        // we combine the information from continuations_vec[i] with
        // stack_limits_vec[i + 1] below to get information about a
        // particular stack.
        //
        // There must be exactly one more `VMStackLimits` object than there
        // are continuations, because the initial stack has one, too.
        assert_eq!(stack_limits_vec.len(), continuations_vec.len() + 1);

        for i in 0..continuations_vec.len() {
            // The continuation whose control context we want to
            // access, to get information about how to continue
            // execution in its parent.
            let continuation = unsafe { &*continuations_vec[i] };

            // The stack limits describing the parent of `continuation`.
            let parent_limits = unsafe { &*stack_limits_vec[i + 1] };

            // The parent of `continuation`, if that parent is itself a
            // continuation (i.e., if `continuation` is not the last one in the
            // chain).
            let parent_continuation = continuations_vec.get(i + 1).map(|&c| unsafe { &*c });

            let fiber_stack = continuation.fiber_stack();
            let resume_pc = fiber_stack.control_context_instruction_pointer();
            let resume_fp = fiber_stack.control_context_frame_pointer();

            // If the parent is indeed a continuation, we know the
            // boundaries of its stack and can perform some extra debugging
            // checks.
            let parent_stack_range = parent_continuation.and_then(|p| p.fiber_stack().range());
            parent_stack_range.inspect(|parent_stack_range| {
                debug_assert!(parent_stack_range.contains(&resume_fp));
                debug_assert!(parent_stack_range.contains(&parent_limits.last_wasm_entry_fp));
                debug_assert!(parent_stack_range.contains(&parent_limits.stack_limit));
            });

            unsafe {
                wasmtime_unwinder::visit_frames(
                    unwind,
                    resume_pc,
                    resume_fp,
                    parent_limits.last_wasm_entry_fp,
                    &mut f,
                )?
            }
        }
        ControlFlow::Continue(())
    }

    /// Iterate over the frames inside this backtrace.
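    ///
    /// A minimal usage sketch (illustrative only, not a doc-test; it assumes
    /// a `&StoreOpaque` in scope and a `pc` accessor on
    /// `wasmtime_unwinder::Frame`):
    ///
    /// ```ignore
    /// let backtrace = Backtrace::new(store);
    /// for frame in backtrace.frames() {
    ///     log::trace!("wasm frame at pc={:#x}", frame.pc());
    /// }
    /// ```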
    pub fn frames<'a>(
        &'a self,
    ) -> impl ExactSizeIterator<Item = &'a Frame> + DoubleEndedIterator + 'a {
        self.0.iter()
    }
}

/// An iterator over one Wasm activation.
#[cfg(feature = "debug")]
pub(crate) struct CurrentActivationBacktrace<'a, T: 'static> {
    pub(crate) store: StoreContextMut<'a, T>,
    inner: Box<dyn Iterator<Item = Frame>>,
}

#[cfg(feature = "debug")]
impl<'a, T: 'static> CurrentActivationBacktrace<'a, T> {
    /// Return an iterator over the most recent Wasm activation.
    ///
    /// The iterator captures the store with a mutable borrow and keeps
    /// it available (via the public `store` field) at each frame. This
    /// ensures that the stack remains live while still providing a
    /// mutable store that may be needed to access items in the frame
    /// (e.g., to create new roots when reading out GC refs).
    ///
    /// This serves as an alternative to `Backtrace::trace()` and
    /// friends: it allows external iteration (and e.g. lazily walking
    /// through frames in a stack) rather than visiting via a closure.
    ///
    /// # Safety
    ///
    /// Although the iterator provides the mutable store as a public
    /// field, it *must not* be used to mutate the stack activation
    /// that this iterator is visiting. While the `store` technically
    /// owns the stack in question, the only way to do so with the
    /// current API would be to re-enter the Wasm activation. As long
    /// as this iterator is held and used while within host code called
    /// from that activation (which will ordinarily be ensured if the
    /// `store`'s lifetime came from the host entry point), everything
    /// will be sound.
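    ///
    /// A hypothetical usage sketch (illustrative only; it assumes host code
    /// invoked from an active Wasm frame holding a `StoreContextMut`, and a
    /// `pc` accessor on `Frame`):
    ///
    /// ```ignore
    /// // Within a host call made from Wasm:
    /// let iter = unsafe { CurrentActivationBacktrace::new(store) };
    /// for frame in iter {
    ///     // Frames of the most recent Wasm activation, walked lazily.
    ///     log::trace!("wasm frame at pc={:#x}", frame.pc());
    /// }
    /// ```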
    pub(crate) unsafe fn new(store: StoreContextMut<'a, T>) -> CurrentActivationBacktrace<'a, T> {
        // Get the initial exit FP, exit PC, and entry FP.
        let vm_store_context = store.0.vm_store_context();
        let exit_pc = unsafe { *(*vm_store_context).last_wasm_exit_pc.get() };
        let exit_fp = unsafe { (*vm_store_context).last_wasm_exit_fp() };
        let trampoline_fp = unsafe { *(*vm_store_context).last_wasm_entry_fp.get() };
        let inner: Box<dyn Iterator<Item = Frame>> = if exit_fp == 0 {
            // No activations on this Store; return an empty iterator.
            Box::new(core::iter::empty())
        } else {
            let unwind = store.0.unwinder();
            // Establish the iterator.
            Box::new(unsafe {
                wasmtime_unwinder::frame_iterator(unwind, exit_pc, exit_fp, trampoline_fp)
            })
        };

        CurrentActivationBacktrace { store, inner }
    }
}

#[cfg(feature = "debug")]
impl<'a, T: 'static> Iterator for CurrentActivationBacktrace<'a, T> {
    type Item = Frame;
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }
}