wasmtime/runtime/vm/traphandlers/backtrace.rs

//! Backtrace and stack walking functionality for Wasm.
//!
//! Walking the Wasm stack consists of
//!
//! 1. identifying sequences of contiguous Wasm frames on the stack
//!    (i.e. skipping over native host frames), and
//!
//! 2. walking the Wasm frames within such a sequence.
//!
//! To perform (1) we maintain the entry stack pointer (SP) and exit frame
//! pointer (FP) and program counter (PC) each time we call into Wasm and Wasm
//! calls into the host via trampolines (see
//! `crates/wasmtime/src/runtime/vm/trampolines`). The most recent entry is
//! stored in `VMStoreContext` and older entries are saved in
//! `CallThreadState`. This lets us identify ranges of contiguous Wasm frames on
//! the stack.
//!
//! To solve (2) and walk the Wasm frames within a region of contiguous Wasm
//! frames on the stack, we configure Cranelift's `preserve_frame_pointers =
//! true` setting. Then we can do simple frame pointer traversal starting at the
//! exit FP and stopping once we reach the entry SP (meaning that the next older
//! frame is a host frame).
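//!
//! For illustration only, the frame-pointer traversal described in (2) is
//! conceptually a loop like the following sketch. The real implementation
//! lives in `wasmtime_unwinder::visit_frames`; the `visit_frame` callback and
//! the 64-bit, 8-byte offset of the return address above the saved FP are
//! assumptions of this sketch, not guarantees of the unwinder:
//!
//! ```ignore
//! let (mut pc, mut fp) = (exit_pc, exit_fp);
//! while fp != entry_trampoline_fp {
//!     // Report the current Wasm frame.
//!     visit_frame(pc, fp);
//!     // With frame pointers preserved, `*fp` holds the caller's saved FP
//!     // and the word above it holds the return address into the caller,
//!     // which becomes the caller frame's PC.
//!     pc = unsafe { *((fp + 8) as *const usize) };
//!     fp = unsafe { *(fp as *const usize) };
//! }
//! ```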

use crate::prelude::*;
use crate::runtime::store::StoreOpaque;
use crate::runtime::vm::stack_switching::VMStackChain;
use crate::runtime::vm::{
    Unwind, VMStoreContext,
    traphandlers::{CallThreadState, tls},
};
#[cfg(all(feature = "gc", feature = "stack-switching"))]
use crate::vm::stack_switching::{VMContRef, VMStackState};
#[cfg(feature = "debug")]
use crate::{StoreContext, StoreContextMut};
use core::ops::ControlFlow;
use wasmtime_unwinder::Frame;

/// A WebAssembly stack trace.
#[derive(Debug)]
pub struct Backtrace(Vec<Frame>);

/// One activation: information sufficient to trace an activation's frames
/// for as long as that activation remains live on the stack.
pub(crate) struct Activation {
    exit_pc: usize,
    exit_fp: usize,
    entry_trampoline_fp: usize,
}

impl Backtrace {
    /// Returns an empty backtrace
    pub fn empty() -> Backtrace {
        Backtrace(Vec::new())
    }

    /// Capture the current Wasm stack in a backtrace.
    pub fn new(store: &StoreOpaque) -> Backtrace {
        let vm_store_context = store.vm_store_context();
        let unwind = store.unwinder();
        tls::with(|state| match state {
            Some(state) => unsafe {
                Self::new_with_trap_state(vm_store_context, unwind, state, None)
            },
            None => Backtrace(vec![]),
        })
    }

    /// Capture the current Wasm stack trace.
    ///
    /// If Wasm hit a trap and we are calling this from the trap handler, then the
    /// Wasm exit trampoline didn't run, and we use the provided PC and FP
    /// instead of looking them up in `VMStoreContext`.
    pub(crate) unsafe fn new_with_trap_state(
        vm_store_context: *const VMStoreContext,
        unwind: &dyn Unwind,
        state: &CallThreadState,
        trap_pc_and_fp: Option<(usize, usize)>,
    ) -> Backtrace {
        let mut frames = vec![];
        let f = |activation: Activation| unsafe {
            wasmtime_unwinder::visit_frames(
                unwind,
                activation.exit_pc,
                activation.exit_fp,
                activation.entry_trampoline_fp,
                |frame| {
                    frames.push(frame);
                    ControlFlow::Continue(())
                },
            )
        };
        unsafe {
            Self::trace_with_trap_state(vm_store_context, state, trap_pc_and_fp, f);
        }
        Backtrace(frames)
    }

    /// Walk the current Wasm stack, calling `f` for each frame we walk.
    #[cfg(feature = "gc")]
    pub fn trace(store: &StoreOpaque, mut f: impl FnMut(Frame) -> ControlFlow<()>) {
        let vm_store_context = store.vm_store_context();
        let unwind = store.unwinder();
        tls::with(|state| match state {
            Some(state) => unsafe {
                let f = |activation: Activation| {
                    wasmtime_unwinder::visit_frames(
                        unwind,
                        activation.exit_pc,
                        activation.exit_fp,
                        activation.entry_trampoline_fp,
                        &mut f,
                    )
                };
                Self::trace_with_trap_state(vm_store_context, state, None, f)
            },
            None => {}
        });
    }

    // Walk the stack of the given continuation, which must be suspended, and
    // all of its parent continuations (if any).
    #[cfg(all(feature = "gc", feature = "stack-switching"))]
    pub fn trace_suspended_continuation(
        store: &StoreOpaque,
        continuation: &VMContRef,
        mut f: impl FnMut(Frame) -> ControlFlow<()>,
    ) {
        log::trace!("====== Capturing Backtrace (suspended continuation) ======");

        assert_eq!(
            continuation.common_stack_information.state,
            VMStackState::Suspended
        );

        let unwind = store.unwinder();

        let pc = continuation.stack.control_context_instruction_pointer();
        let fp = continuation.stack.control_context_frame_pointer();
        let trampoline_fp = continuation
            .common_stack_information
            .limits
            .last_wasm_entry_fp;

        unsafe {
            // FIXME(frank-emrich) Casting from *const to *mut pointer is
            // terrible, but we won't actually modify any of the continuations
            // here.
            let stack_chain =
                VMStackChain::Continuation(continuation as *const VMContRef as *mut VMContRef);

            if let ControlFlow::Break(()) = Self::trace_through_continuations(
                stack_chain,
                pc,
                fp,
                trampoline_fp,
                |activation| {
                    wasmtime_unwinder::visit_frames(
                        unwind,
                        activation.exit_pc,
                        activation.exit_fp,
                        activation.entry_trampoline_fp,
                        &mut f,
                    )
                },
            ) {
                log::trace!("====== Done Capturing Backtrace (closure break) ======");
                return;
            }
        }

        log::trace!("====== Done Capturing Backtrace (reached end of stack chain) ======");
    }

    /// Walk the current Wasm stack, calling `f` for each frame we walk.
    ///
    /// If Wasm hit a trap and we are calling this from the trap handler, then the
    /// Wasm exit trampoline didn't run, and we use the provided PC and FP
    /// instead of looking them up in `VMStoreContext`.
    ///
    /// We define "current Wasm stack" here as "all activations
    /// associated with the given store". That is: if we have a stack like
    ///
    /// ```plain
    ///     host --> (Wasm functions in store A) --> host --> (Wasm functions in store B) --> host
    ///          --> (Wasm functions in store A) --> host --> call `trace_with_trap_state` with store A
    /// ```
    ///
    /// then we will see the first and third Wasm activations (those
    /// associated with store A), but not that with store B. In
    /// essence, activations from another store might as well be some
    /// other opaque host code; we don't know anything about it.
    pub(crate) unsafe fn trace_with_trap_state(
        vm_store_context: *const VMStoreContext,
        state: &CallThreadState,
        trap_pc_and_fp: Option<(usize, usize)>,
        mut f: impl FnMut(Activation) -> ControlFlow<()>,
    ) {
        log::trace!("====== Capturing Backtrace ======");

        let (last_wasm_exit_pc, last_wasm_exit_fp) = match trap_pc_and_fp {
            // If we exited Wasm by catching a trap, then the Wasm-to-host
            // trampoline did not get a chance to save the last Wasm PC and FP,
            // and we need to use the plumbed-through values instead.
            Some((pc, fp)) => {
                assert!(core::ptr::eq(
                    vm_store_context,
                    state.vm_store_context.as_ptr()
                ));
                (pc, fp)
            }
            // Either there is no Wasm currently on the stack, or we exited Wasm
            // through the Wasm-to-host trampoline.
            None => unsafe {
                let pc = *(*vm_store_context).last_wasm_exit_pc.get();
                let fp = (*vm_store_context).last_wasm_exit_fp();
                (pc, fp)
            },
        };

        let stack_chain = unsafe { (*(*vm_store_context).stack_chain.get()).clone() };

        // The first value in `activations` is for the most recently running
        // wasm. We thus pair it with the stack chain currently recorded in the
        // `VMStoreContext` to traverse the potential continuation stacks. For
        // the subsequent activations, we use the stack chain that was saved in
        // the corresponding `CallThreadState`. Note that only the most recent
        // execution of wasm may execute off the initial stack (see comments in
        // `wasmtime::invoke_wasm_and_catch_traps` for details).
        let activations =
            core::iter::once((stack_chain, last_wasm_exit_pc, last_wasm_exit_fp, unsafe {
                *(*vm_store_context).last_wasm_entry_fp.get()
            }))
            .chain(
                state
                    .iter()
                    .flat_map(|state| state.iter())
                    .filter(|state| {
                        core::ptr::eq(vm_store_context, state.vm_store_context.as_ptr())
                    })
                    .map(|state| unsafe {
                        (
                            state.old_stack_chain(),
                            state.old_last_wasm_exit_pc(),
                            state.old_last_wasm_exit_fp(),
                            state.old_last_wasm_entry_fp(),
                        )
                    }),
            )
            .take_while(|(chain, pc, fp, sp)| {
                if *pc == 0 {
                    debug_assert_eq!(*fp, 0);
                    debug_assert_eq!(*sp, 0);
                } else {
                    debug_assert_ne!(chain.clone(), VMStackChain::Absent)
                }
                *pc != 0
            });

        for (chain, exit_pc, exit_fp, entry_trampoline_fp) in activations {
            let res = unsafe {
                Self::trace_through_continuations(
                    chain,
                    exit_pc,
                    exit_fp,
                    entry_trampoline_fp,
                    &mut f,
                )
            };
            if let ControlFlow::Break(()) = res {
                log::trace!("====== Done Capturing Backtrace (closure break) ======");
                return;
            }
        }

        log::trace!("====== Done Capturing Backtrace (reached end of activations) ======");
    }

    /// Traces through a sequence of stacks, creating a backtrace for each one,
    /// beginning at the given `pc` and `fp`.
    ///
    /// If `chain` is `InitialStack`, we are tracing through the initial stack,
    /// and this function behaves like `trace_through_wasm`.
    /// Otherwise, we can interpret `chain` as a linked list of stacks, which
    /// ends with the initial stack. We then trace through each of these stacks
    /// individually, up to (and including) the initial stack.
    unsafe fn trace_through_continuations(
        chain: VMStackChain,
        exit_pc: usize,
        exit_fp: usize,
        entry_trampoline_fp: usize,
        mut f: impl FnMut(Activation) -> ControlFlow<()>,
    ) -> ControlFlow<()> {
        use crate::runtime::vm::stack_switching::{VMContRef, VMStackLimits};

        // Handle the stack that is currently running (which may be a
        // continuation or the initial stack).
        f(Activation {
            exit_pc,
            exit_fp,
            entry_trampoline_fp,
        })?;

        // Note that the rest of this function has no effect if `chain` is
        // `VMStackChain::InitialStack(_)` (i.e., there is only one stack to
        // trace through: the initial stack).

        assert_ne!(chain, VMStackChain::Absent);
        let stack_limits_vec: Vec<*mut VMStackLimits> =
            unsafe { chain.clone().into_stack_limits_iter().collect() };
        let continuations_vec: Vec<*mut VMContRef> =
            unsafe { chain.clone().into_continuation_iter().collect() };

        // The `VMStackLimits` of the currently running stack (whether that's a
        // continuation or the initial stack) contains undefined data; the
        // information about that stack is saved in the Store's
        // `VMStoreContext` and was already handled at the top of this
        // function. That's why we ignore `stack_limits_vec[0]`.
        //
        // Note that a continuation stack's control context stores
        // information about how to resume execution *in its parent*. Thus,
        // we combine the information from continuations_vec[i] with
        // stack_limits_vec[i + 1] below to get information about a
        // particular stack.
        //
        // There must be exactly one more `VMStackLimits` object than there
        // are continuations, due to the initial stack having one, too.
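        //
        // As an illustration only (a hypothetical chain with two
        // continuations, where `cont0` is the one currently running):
        //
        //   continuations_vec: [ cont0,          cont1          ]
        //   stack_limits_vec:  [ limits(cont0),  limits(cont1),  limits(initial) ]
        //
        // `cont0`'s control context says how to resume in `cont1`, so it is
        // paired with `stack_limits_vec[1]`, and `cont1`'s control context
        // says how to resume on the initial stack, so it is paired with
        // `stack_limits_vec[2]`.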
        assert_eq!(stack_limits_vec.len(), continuations_vec.len() + 1);

        for i in 0..continuations_vec.len() {
            // The continuation whose control context we want to
            // access, to get information about how to continue
            // execution in its parent.
            let continuation = unsafe { &*continuations_vec[i] };

            // The stack limits describing the parent of `continuation`.
            let parent_limits = unsafe { &*stack_limits_vec[i + 1] };

            // The parent of `continuation`, if that parent is itself a
            // continuation (i.e., if `continuation` is not the last one in the
            // chain).
            let parent_continuation = continuations_vec.get(i + 1).map(|&c| unsafe { &*c });

            let fiber_stack = continuation.fiber_stack();
            let resume_pc = fiber_stack.control_context_instruction_pointer();
            let resume_fp = fiber_stack.control_context_frame_pointer();

            // If the parent is indeed a continuation, we know the
            // boundaries of its stack and can perform some extra debugging
            // checks.
            let parent_stack_range = parent_continuation.and_then(|p| p.fiber_stack().range());
            parent_stack_range.inspect(|parent_stack_range| {
                debug_assert!(parent_stack_range.contains(&resume_fp));
                debug_assert!(parent_stack_range.contains(&parent_limits.last_wasm_entry_fp));
                debug_assert!(parent_stack_range.contains(&parent_limits.stack_limit));
            });

            f(Activation {
                exit_pc: resume_pc,
                exit_fp: resume_fp,
                entry_trampoline_fp: parent_limits.last_wasm_entry_fp,
            })?;
        }
        ControlFlow::Continue(())
    }

    /// Capture all Activations reachable from the current point
    /// within a hostcall.
    #[cfg(feature = "debug")]
    fn activations(store: &StoreOpaque) -> Vec<Activation> {
        let mut activations = vec![];
        let vm_store_context = store.vm_store_context();
        tls::with(|state| match state {
            Some(state) => unsafe {
                Self::trace_with_trap_state(vm_store_context, state, None, |act| {
                    activations.push(act);
                    ControlFlow::Continue(())
                });
            },
            None => {}
        });
        activations
    }

    /// Iterate over the frames inside this backtrace.
    pub fn frames<'a>(
        &'a self,
    ) -> impl ExactSizeIterator<Item = &'a Frame> + DoubleEndedIterator + 'a {
        self.0.iter()
    }
}

/// An iterator over the frames of one Wasm activation.
#[cfg(feature = "debug")]
struct ActivationBacktrace<'a, T: 'static> {
    pub(crate) store: StoreContextMut<'a, T>,
    inner: Box<dyn Iterator<Item = Frame>>,
}

#[cfg(feature = "debug")]
impl<'a, T: 'static> ActivationBacktrace<'a, T> {
    /// Return an iterator over a Wasm activation.
    ///
    /// The iterator captures the store with a mutable borrow, and
    /// then yields it back at each frame. This ensures that the stack
    /// remains live while still providing a mutable store that may be
    /// needed to access items in the frame (e.g., to create new roots
    /// when reading out GC refs).
    ///
    /// This serves as an alternative to `Backtrace::trace()` and
    /// friends: it allows external iteration (and e.g. lazily walking
    /// through frames in a stack) rather than visiting via a closure.
    pub(crate) fn new(
        store: StoreContextMut<'a, T>,
        activation: Activation,
    ) -> ActivationBacktrace<'a, T> {
        let inner: Box<dyn Iterator<Item = Frame>> = if activation.exit_fp == 0 {
            // No activations on this Store; return an empty iterator.
            Box::new(core::iter::empty())
        } else {
            let unwind = store.0.unwinder();
            // Establish the iterator.
            Box::new(unsafe {
                wasmtime_unwinder::frame_iterator(
                    unwind,
                    activation.exit_pc,
                    activation.exit_fp,
                    activation.entry_trampoline_fp,
                )
            })
        };

        ActivationBacktrace { store, inner }
    }
}

#[cfg(feature = "debug")]
impl<'a, T: 'static> Iterator for ActivationBacktrace<'a, T> {
    type Item = Frame;
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }
}

/// An iterator over all Wasm activations in a Store.
#[cfg(feature = "debug")]
pub(crate) struct StoreBacktrace<'a, T: 'static> {
    /// The current activation iterator, or the Store itself if there are no
    /// more activations.
    ///
    /// This is an `Option` so that we can move to the next activation while
    /// transferring ownership of the Store without deconstructing this whole
    /// iterator.
    current: Option<StoreOrActivationBacktrace<'a, T>>,
    activations: Vec<Activation>,
}

/// Either an iterator over a Wasm activation, or a `StoreContextMut`
/// if no activations are left.
#[cfg(feature = "debug")]
enum StoreOrActivationBacktrace<'a, T: 'static> {
    Store(StoreContextMut<'a, T>),
    Activation(ActivationBacktrace<'a, T>),
}

#[cfg(feature = "debug")]
impl<'a, T: 'static> StoreOrActivationBacktrace<'a, T> {
    fn is_activation(&self) -> bool {
        match self {
            Self::Activation(_) => true,
            _ => false,
        }
    }
}

#[cfg(feature = "debug")]
impl<'a, T: 'static> StoreBacktrace<'a, T> {
    /// Return an iterator over all Wasm activations in a Store, in
    /// invocation order.
    ///
    /// The iterator captures the store with a mutable borrow, and
    /// then yields it back at each frame. This ensures that the stack
    /// remains live while still providing a mutable store that may be
    /// needed to access items in the frame (e.g., to create new roots
    /// when reading out GC refs).
    ///
    /// This serves as an alternative to `Backtrace::trace()` and
    /// friends: it allows external iteration (and e.g. lazily walking
    /// through frames in a stack) rather than visiting via a closure.
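    ///
    /// As a sketch only (this is a crate-internal API; the `Frame::pc`
    /// accessor from `wasmtime_unwinder` is an assumption here), a hostcall
    /// holding a `StoreContextMut` might drive the iterator like so:
    ///
    /// ```ignore
    /// let mut bt = StoreBacktrace::new(store);
    /// while let Some(item) = bt.next() {
    ///     match item {
    ///         // A Wasm frame; `bt.store_mut()` is still available if the
    ///         // frame's contents need to be inspected.
    ///         FrameOrHostCode::Frame(frame) => log::trace!("wasm frame, pc={:#x}", frame.pc()),
    ///         // One or more host frames between two Wasm activations.
    ///         FrameOrHostCode::HostCode => log::trace!("<host code>"),
    ///     }
    /// }
    /// ```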
    pub(crate) fn new(store: StoreContextMut<'a, T>) -> StoreBacktrace<'a, T> {
        // Get all activations, in innermost-to-outermost order.
        use crate::store::AsStoreOpaque;
        let mut activations = Backtrace::activations(store.0.as_store_opaque());
        // Reverse to outermost-to-innermost so we can pop off the end.
        activations.reverse();
        // Create our inner state: either an activation iterator on
        // the innermost activation that owns the store, or a sentinel
        // if there are no activations.
        let current = match activations.pop() {
            Some(innermost) => {
                StoreOrActivationBacktrace::Activation(ActivationBacktrace::new(store, innermost))
            }
            None => StoreOrActivationBacktrace::Store(store),
        };
        StoreBacktrace {
            current: Some(current),
            activations,
        }
    }

    /// Get a shared borrow of the Store underlying this iteration.
    pub fn store(&self) -> StoreContext<'_, T> {
        match self.current.as_ref().unwrap() {
            StoreOrActivationBacktrace::Activation(activation) => StoreContext(activation.store.0),
            StoreOrActivationBacktrace::Store(store) => StoreContext(store.0),
        }
    }

    /// Get a mutable borrow of the Store underlying this iteration.
    pub fn store_mut(&mut self) -> StoreContextMut<'_, T> {
        match self.current.as_mut().unwrap() {
            StoreOrActivationBacktrace::Activation(activation) => {
                StoreContextMut(activation.store.0)
            }
            StoreOrActivationBacktrace::Store(store) => StoreContextMut(store.0),
        }
    }

    fn take_store(&mut self) -> StoreContextMut<'a, T> {
        match self.current.take().unwrap() {
            StoreOrActivationBacktrace::Activation(activation) => activation.store,
            StoreOrActivationBacktrace::Store(store) => store,
        }
    }

    /// Move to the next activation.
    fn next_activation(&mut self) {
        let activation = self.activations.pop();
        let store = self.take_store();
        self.current = Some(match activation {
            Some(activation) => {
                StoreOrActivationBacktrace::Activation(ActivationBacktrace::new(store, activation))
            }
            None => StoreOrActivationBacktrace::Store(store),
        });
    }
}

/// A single item in an iteration over a store's frames: either a Wasm
/// frame, or a sentinel representing host code between activations.
#[cfg(feature = "debug")]
pub enum FrameOrHostCode {
    /// A WebAssembly frame.
    Frame(Frame),
    /// Some number of host frames between Wasm activations.
    HostCode,
}

#[cfg(feature = "debug")]
impl<'a, T: 'static> Iterator for StoreBacktrace<'a, T> {
    type Item = FrameOrHostCode;
    fn next(&mut self) -> Option<Self::Item> {
        match self.current.as_mut().unwrap() {
            StoreOrActivationBacktrace::Store(_) => None,
            StoreOrActivationBacktrace::Activation(act) => match act.next() {
                Some(frame) => Some(FrameOrHostCode::Frame(frame)),
                None => {
                    self.next_activation();
                    // If there's another activation waiting, return
                    // HostCode between the two; otherwise, don't.
                    if self.current.as_ref().unwrap().is_activation() {
                        Some(FrameOrHostCode::HostCode)
                    } else {
                        None
                    }
                }
            },
        }
    }
}