wasmtime/runtime/debug.rs
1//! Debugging API.
2
3use crate::{
4 AnyRef, AsContext, AsContextMut, ExnRef, ExternRef, Func, Instance, Module, OwnedRooted,
5 StoreContext, StoreContextMut, Val,
6 store::{AutoAssertNoGc, StoreOpaque},
7 vm::{CurrentActivationBacktrace, VMContext},
8};
9use alloc::vec;
10use alloc::vec::Vec;
11use core::{ffi::c_void, ptr::NonNull};
12#[cfg(feature = "gc")]
13use wasmtime_environ::FrameTable;
14use wasmtime_environ::{
15 DefinedFuncIndex, FrameInstPos, FrameStackShape, FrameStateSlot, FrameStateSlotOffset,
16 FrameTableDescriptorIndex, FrameValType, FuncKey, Trap,
17};
18use wasmtime_unwinder::Frame;
19
20use super::store::AsStoreOpaque;
21
22impl<'a, T> StoreContextMut<'a, T> {
23 /// Provide an object that captures Wasm stack state, including
24 /// Wasm VM-level values (locals and operand stack).
25 ///
26 /// This object views all activations for the current store that
27 /// are on the stack. An activation is a contiguous sequence of
28 /// Wasm frames (called functions) that were called from host code
29 /// and called back out to host code. If there are activations
30 /// from multiple stores on the stack, for example if Wasm code in
31 /// one store calls out to host code which invokes another Wasm
32 /// function in another store, then the other stores are "opaque"
33 /// to our view here in the same way that host code is.
34 ///
35 /// Returns `None` if debug instrumentation is not enabled for
36 /// the engine containing this store.
37 pub fn debug_frames(self) -> Option<DebugFrameCursor<'a, T>> {
38 if !self.engine().tunables().debug_guest {
39 return None;
40 }
41
42 // SAFETY: This takes a mutable borrow of `self` (the
43 // `StoreOpaque`), which owns all active stacks in the
44 // store. We do not provide any API that could mutate the
45 // frames that we are walking on the `DebugFrameCursor`.
46 let iter = unsafe { CurrentActivationBacktrace::new(self) };
47 let mut view = DebugFrameCursor {
48 iter,
49 is_trapping_frame: false,
50 frames: vec![],
51 current: None,
52 };
53 view.move_to_parent(); // Load the first frame.
54 Some(view)
55 }
56}
57
/// A view of an active stack frame, with the ability to move up the
/// stack.
///
/// Created by [`StoreContextMut::debug_frames`]; see that method's
/// documentation for more information about which frames this view
/// will show.
pub struct DebugFrameCursor<'a, T: 'static> {
    /// Iterator over physical frames.
    ///
    /// This iterator owns the store while the view exists (accessible
    /// as `iter.store`).
    iter: CurrentActivationBacktrace<'a, T>,

    /// Is the next frame to be visited by the iterator a trapping
    /// frame?
    ///
    /// This alters how we interpret `pc`: for a trap, we look at the
    /// instruction that *starts* at `pc`, while for all frames
    /// further up the stack (i.e., at a callsite), we look at the
    /// instruction that *ends* at `pc`.
    is_trapping_frame: bool,

    /// Virtual frame queue: decoded from `iter`, not yet
    /// yielded. Innermost frame on top (last).
    ///
    /// This is only non-empty when there is more than one virtual
    /// frame in a physical frame (i.e., for inlining); thus, its size
    /// is bounded by our inlining depth.
    frames: Vec<VirtualFrame>,

    /// Currently focused virtual frame, or `None` once the cursor
    /// has moved past the outermost frame of the activation.
    current: Option<FrameData>,
}
90
impl<'a, T: 'static> DebugFrameCursor<'a, T> {
    /// Move up to the next frame in the activation.
    ///
    /// After the outermost frame has been visited, this leaves the
    /// cursor with no current frame (`done()` returns `true`).
    pub fn move_to_parent(&mut self) {
        // If there are no virtual frames to yield, take and decode
        // the next physical frame.
        //
        // Note that `if` rather than `while` here, and the assert
        // that we get some virtual frames back, enforce the invariant
        // that each physical frame decodes to at least one virtual
        // frame (i.e., there are no physical frames for interstitial
        // functions or other things that we completely ignore). If
        // this ever changes, we can remove the assert and convert
        // this to a loop that polls until it finds virtual frames.
        self.current = None;
        if self.frames.is_empty() {
            let Some(next_frame) = self.iter.next() else {
                return;
            };
            self.frames = VirtualFrame::decode(
                self.iter.store.0.as_store_opaque(),
                next_frame,
                self.is_trapping_frame,
            );
            debug_assert!(!self.frames.is_empty());
            // Clear the trapping-frame interpretation after the
            // first decode: frames further up the stack are stopped
            // at callsites (see the field docs on
            // `is_trapping_frame`).
            self.is_trapping_frame = false;
        }

        // Take a frame and focus it as the current one.
        self.current = self.frames.pop().map(|vf| FrameData::compute(vf));
    }

    /// Has the iterator reached the end of the activation?
    pub fn done(&self) -> bool {
        self.current.is_none()
    }

    /// Internal accessor for the currently focused frame's data.
    ///
    /// # Panics
    ///
    /// Panics if there is no current frame (i.e., `done()` returns
    /// `true`).
    fn frame_data(&self) -> &FrameData {
        self.current.as_ref().expect("No current frame")
    }

    /// Read the raw runtime `Instance` out of the current frame's
    /// debug state slot.
    fn raw_instance(&self) -> &crate::vm::Instance {
        // Read out the vmctx slot.

        // SAFETY: vmctx is always at offset 0 in the slot.
        // (See crates/cranelift/src/func_environ.rs in `update_stack_slot_vmctx()`.)
        let vmctx: *mut VMContext = unsafe { *(self.frame_data().slot_addr as *mut _) };
        let vmctx = NonNull::new(vmctx).expect("null vmctx in debug state slot");
        // SAFETY: the stored vmctx value is a valid instance in this
        // store; we only visit frames from this store in the
        // backtrace.
        let instance = unsafe { crate::vm::Instance::from_vmctx(vmctx) };
        // SAFETY: the instance pointer read above is valid.
        unsafe { instance.as_ref() }
    }

    /// Get the instance associated with the current frame.
    pub fn instance(&mut self) -> Instance {
        let instance = self.raw_instance();
        Instance::from_wasmtime(instance.id(), self.iter.store.0.as_store_opaque())
    }

    /// Get the module associated with the current frame, if any
    /// (i.e., not a container instance for a host-created entity).
    pub fn module(&self) -> Option<&Module> {
        let instance = self.raw_instance();
        instance.runtime_module()
    }

    /// Get the raw function index associated with the current frame, and the
    /// PC as an offset within its code section, if it is a Wasm
    /// function directly from the given `Module` (rather than a
    /// trampoline).
    pub fn wasm_function_index_and_pc(&self) -> Option<(DefinedFuncIndex, u32)> {
        let data = self.frame_data();
        let FuncKey::DefinedWasmFunction(module, func) = data.func_key else {
            return None;
        };
        // Sanity-check that the frame's function key agrees with the
        // module resolved via the frame's vmctx.
        debug_assert_eq!(
            module,
            self.module()
                .expect("module should be defined if this is a defined function")
                .env_module()
                .module_index
        );
        Some((func, data.wasm_pc))
    }

    /// Get the number of locals in this frame.
    pub fn num_locals(&self) -> u32 {
        u32::try_from(self.frame_data().locals.len()).unwrap()
    }

    /// Get the depth of the operand stack in this frame.
    pub fn num_stacks(&self) -> u32 {
        u32::try_from(self.frame_data().stack.len()).unwrap()
    }

    /// Get the type and value of the given local in this frame.
    ///
    /// # Panics
    ///
    /// Panics if the index is out-of-range (greater than or equal to
    /// `num_locals()`), or if there is no current frame.
    pub fn local(&mut self, index: u32) -> Val {
        let data = self.frame_data();
        let (offset, ty) = data.locals[usize::try_from(index).unwrap()];
        let slot_addr = data.slot_addr;
        // SAFETY: compiler produced metadata to describe this local
        // slot and stored a value of the correct type into it.
        unsafe { read_value(&mut self.iter.store.0, slot_addr, offset, ty) }
    }

    /// Get the type and value of the given operand-stack value in
    /// this frame.
    ///
    /// Index 0 corresponds to the bottom-of-stack, and higher indices
    /// from there are more recently pushed values. In other words,
    /// index order reads the Wasm virtual machine's abstract stack
    /// state left-to-right.
    ///
    /// # Panics
    ///
    /// Panics if the index is out-of-range (greater than or equal to
    /// `num_stacks()`), or if there is no current frame.
    pub fn stack(&mut self, index: u32) -> Val {
        let data = self.frame_data();
        let (offset, ty) = data.stack[usize::try_from(index).unwrap()];
        let slot_addr = data.slot_addr;
        // SAFETY: compiler produced metadata to describe this
        // operand-stack slot and stored a value of the correct type
        // into it.
        unsafe { read_value(&mut self.iter.store.0, slot_addr, offset, ty) }
    }
}
220
/// Internal data pre-computed for one stack frame.
///
/// This combines physical frame info (pc, fp) with the module this PC
/// maps to (yielding a frame table) and one frame as produced by the
/// program-point lookup (Wasm PC, frame descriptor index, stack
/// shape).
struct VirtualFrame {
    /// The frame pointer.
    fp: *const u8,
    /// The resolved module handle for the physical PC.
    ///
    /// The module for each inlined frame within the physical frame is
    /// resolved from the vmctx reachable for each such frame; this
    /// module is used only for looking up the frame table.
    module: Module,
    /// The Wasm PC for this frame.
    wasm_pc: u32,
    /// The frame descriptor for this frame.
    frame_descriptor: FrameTableDescriptorIndex,
    /// The stack shape for this frame.
    stack_shape: FrameStackShape,
}
242
impl VirtualFrame {
    /// Return virtual frames corresponding to a physical frame, from
    /// outermost to innermost.
    ///
    /// A single physical frame may yield multiple virtual frames
    /// when functions have been inlined.
    ///
    /// # Panics
    ///
    /// Panics if `frame`'s PC does not fall within a module known to
    /// `store`, or if that module lacks a frame table or a
    /// program-point record for the PC; all of these are expected to
    /// be present when debug instrumentation is enabled.
    fn decode(store: &mut StoreOpaque, frame: Frame, is_trapping_frame: bool) -> Vec<VirtualFrame> {
        let module = store
            .modules()
            .lookup_module_by_pc(frame.pc())
            .expect("Wasm frame PC does not correspond to a module");
        // The frame table is keyed by offsets from the start of the
        // module's text section, so translate the absolute PC into
        // such an offset.
        let base = module.code_object().code_memory().text().as_ptr() as usize;
        let pc = frame.pc().wrapping_sub(base);
        let table = module.frame_table().unwrap();
        let pc = u32::try_from(pc).expect("PC offset too large");
        // For a trapping frame the relevant instruction *starts* at
        // `pc`; for a frame stopped at a callsite the relevant
        // instruction *ends* at `pc` (the return address).
        let pos = if is_trapping_frame {
            FrameInstPos::Pre
        } else {
            FrameInstPos::Post
        };
        let program_points = table.find_program_point(pc, pos).expect("There must be a program point record in every frame when debug instrumentation is enabled");

        program_points
            .map(|(wasm_pc, frame_descriptor, stack_shape)| VirtualFrame {
                fp: core::ptr::with_exposed_provenance(frame.fp()),
                module: module.clone(),
                wasm_pc,
                frame_descriptor,
                stack_shape,
            })
            .collect()
    }
}
273
/// Data computed when we visit a given frame.
struct FrameData {
    /// Address of this frame's debug state slot: the frame pointer
    /// minus the slot-to-fp offset from the frame descriptor.
    slot_addr: *const u8,
    /// Key identifying the function this frame belongs to.
    func_key: FuncKey,
    /// The Wasm PC for this frame.
    wasm_pc: u32,
    /// Shape of locals in this frame.
    ///
    /// We need to store this locally because `DebugFrameCursor`
    /// cannot borrow the store: it needs a mut borrow, and an
    /// iterator cannot yield the same mut borrow multiple times
    /// because it cannot control the lifetime of the values it yields
    /// (the signature of `next()` does not bound the return value to
    /// the `&mut self` arg).
    locals: Vec<(FrameStateSlotOffset, FrameValType)>,
    /// Shape of the stack slots at this program point in this frame.
    ///
    /// In addition to the borrowing-related reason above, we also
    /// materialize this because we want to provide O(1) access to the
    /// stack by depth, and the frame slot descriptor stores info in a
    /// linked-list (actually DAG, with dedup'ing) way.
    stack: Vec<(FrameStateSlotOffset, FrameValType)>,
}
296
297impl FrameData {
298 fn compute(frame: VirtualFrame) -> Self {
299 let frame_table = frame.module.frame_table().unwrap();
300 // Parse the frame descriptor.
301 let (data, slot_to_fp_offset) = frame_table
302 .frame_descriptor(frame.frame_descriptor)
303 .unwrap();
304 let frame_state_slot = FrameStateSlot::parse(data).unwrap();
305 let slot_addr = frame
306 .fp
307 .wrapping_sub(usize::try_from(slot_to_fp_offset).unwrap());
308
309 // Materialize the stack shape so we have O(1) access to its
310 // elements, and so we don't need to keep the borrow to the
311 // module alive.
312 let mut stack = frame_state_slot
313 .stack(frame.stack_shape)
314 .collect::<Vec<_>>();
315 stack.reverse(); // Put top-of-stack last.
316
317 // Materialize the local offsets/types so we don't need to
318 // keep the borrow to the module alive.
319 let locals = frame_state_slot.locals().collect::<Vec<_>>();
320
321 FrameData {
322 slot_addr,
323 func_key: frame_state_slot.func_key(),
324 wasm_pc: frame.wasm_pc,
325 stack,
326 locals,
327 }
328 }
329}
330
/// Read the value at the given offset.
///
/// # Safety
///
/// The `offset` and `ty` must correspond to a valid value written
/// to the frame by generated code of the correct type. This will
/// be the case if this information comes from the frame tables
/// (as long as the frontend that generates the tables and
/// instrumentation is correct, and as long as the tables are
/// preserved through serialization).
unsafe fn read_value(
    store: &mut StoreOpaque,
    slot_base: *const u8,
    offset: FrameStateSlotOffset,
    ty: FrameValType,
) -> Val {
    // Compute the address of the value within the debug state slot.
    let address = unsafe { slot_base.offset(isize::try_from(offset.offset()).unwrap()) };

    // SAFETY: each case reads a value from memory that should be
    // valid according to our safety condition.
    match ty {
        FrameValType::I32 => {
            let value = unsafe { *(address as *const i32) };
            Val::I32(value)
        }
        FrameValType::I64 => {
            let value = unsafe { *(address as *const i64) };
            Val::I64(value)
        }
        // `Val::F32`/`Val::F64` carry raw bit patterns, so floats
        // are read as unsigned integers of the matching width.
        FrameValType::F32 => {
            let value = unsafe { *(address as *const u32) };
            Val::F32(value)
        }
        FrameValType::F64 => {
            let value = unsafe { *(address as *const u64) };
            Val::F64(value)
        }
        FrameValType::V128 => {
            let value = unsafe { *(address as *const u128) };
            Val::V128(value.into())
        }
        // GC references are stored in the slot as raw 32-bit values;
        // each is converted back into a handle within an
        // `AutoAssertNoGc` scope.
        FrameValType::AnyRef => {
            let mut nogc = AutoAssertNoGc::new(store);
            let value = unsafe { *(address as *const u32) };
            let value = AnyRef::_from_raw(&mut nogc, value);
            Val::AnyRef(value)
        }
        FrameValType::ExnRef => {
            let mut nogc = AutoAssertNoGc::new(store);
            let value = unsafe { *(address as *const u32) };
            let value = ExnRef::_from_raw(&mut nogc, value);
            Val::ExnRef(value)
        }
        FrameValType::ExternRef => {
            let mut nogc = AutoAssertNoGc::new(store);
            let value = unsafe { *(address as *const u32) };
            let value = ExternRef::_from_raw(&mut nogc, value);
            Val::ExternRef(value)
        }
        // Function references are stored as raw pointers.
        FrameValType::FuncRef => {
            let value = unsafe { *(address as *const *mut c_void) };
            let value = unsafe { Func::_from_raw(store, value) };
            Val::FuncRef(value)
        }
        FrameValType::ContRef => {
            unimplemented!("contref values are not implemented in the host API yet")
        }
    }
}
400
/// Compute raw pointers to all GC refs in the given frame.
// Note: ideally this would be an impl Iterator, but this is quite
// awkward because of the locally computed data (FrameStateSlot::parse
// structured result) within the closure borrowed by a nested closure.
#[cfg(feature = "gc")]
pub(crate) fn gc_refs_in_frame<'a>(ft: FrameTable<'a>, pc: u32, fp: *mut usize) -> Vec<*mut u32> {
    let fp = fp.cast::<u8>();
    let mut refs = Vec::new();

    // No program-point record means no GC refs to report.
    let Some(frames) = ft.find_program_point(pc, FrameInstPos::Post) else {
        return refs;
    };

    for (_wasm_pc, desc_index, stack_shape) in frames {
        let (desc_data, slot_to_fp_offset) = ft.frame_descriptor(desc_index).unwrap();
        // The debug state slot lives below the frame pointer by a
        // descriptor-provided offset.
        let frame_base = unsafe { fp.offset(-isize::try_from(slot_to_fp_offset).unwrap()) };
        let descriptor = FrameStateSlot::parse(desc_data).unwrap();
        for (offset, ty) in descriptor.stack_and_locals(stack_shape) {
            match ty {
                // Only reference-typed slots hold GC-managed values.
                FrameValType::AnyRef | FrameValType::ExnRef | FrameValType::ExternRef => {
                    let slot = unsafe {
                        frame_base
                            .offset(isize::try_from(offset.offset()).unwrap())
                            .cast::<u32>()
                    };
                    refs.push(slot);
                }
                // Funcrefs/contrefs and numeric/vector values are not
                // GC-managed.
                FrameValType::ContRef
                | FrameValType::FuncRef
                | FrameValType::I32
                | FrameValType::I64
                | FrameValType::F32
                | FrameValType::F64
                | FrameValType::V128 => {}
            }
        }
    }
    refs
}
436
// The cursor owns the store while it exists (via `iter.store`), so
// it can hand out a store context directly.
impl<'a, T: 'static> AsContext for DebugFrameCursor<'a, T> {
    type Data = T;
    fn as_context(&self) -> StoreContext<'_, Self::Data> {
        StoreContext(self.iter.store.0)
    }
}
// Mutable store access via the cursor's ownership of the store
// (`iter.store`).
impl<'a, T: 'static> AsContextMut for DebugFrameCursor<'a, T> {
    fn as_context_mut(&mut self) -> StoreContextMut<'_, Self::Data> {
        StoreContextMut(self.iter.store.0)
    }
}
448
/// One debug event that occurs when running Wasm code on a store with
/// a debug handler attached.
///
/// See [`DebugHandler`] for how these events are delivered.
#[derive(Debug)]
pub enum DebugEvent<'a> {
    /// An `anyhow::Error` was raised by a hostcall.
    HostcallError(&'a anyhow::Error),
    /// An exception is thrown and caught by Wasm. The current state
    /// is at the throw-point.
    CaughtExceptionThrown(OwnedRooted<ExnRef>),
    /// An exception was not caught and is escaping to the host.
    UncaughtExceptionThrown(OwnedRooted<ExnRef>),
    /// A Wasm trap occurred.
    Trap(Trap),
}
463
/// A handler for debug events.
///
/// This is an async callback that is invoked directly within the
/// context of a debug event that occurs, i.e., with the Wasm code
/// still on the stack. The callback can thus observe that stack, up
/// to the most recent entry to Wasm.[^1]
///
/// Because this callback receives a `StoreContextMut`, it has full
/// access to any state that any other hostcall has, including the
/// `T`. In that way, it is like an epoch-deadline callback or a
/// call-hook callback. It also "freezes" the entire store for the
/// duration of the debugger callback future.
///
/// In the future, we expect to provide an "externally async" API on
/// the `Store` that allows receiving a stream of debug events and
/// accessing the store mutably while frozen; that will need to
/// integrate with [`Store::run_concurrent`] to properly timeslice and
/// scope the mutable access to the store, and has not been built
/// yet. In the meantime, it should be possible to build a fully
/// functional debugger with this async-callback API by channeling
/// debug events out, and requests to read the store back in, over
/// message-passing channels between the callback and an external
/// debugger main loop.
///
/// Note that the `handle` hook may use its mutable store access to
/// invoke other Wasm code. Debug events will also be caught and will
/// cause further `handle` invocations during this recursive
/// invocation. It is up to the debugger to handle any implications of
/// this reentrancy (e.g., implications on a duplex channel protocol
/// with an event/continue handshake) if it does so.
///
/// Note also that this trait has `Clone` as a supertrait, and the
/// handler is cloned at every invocation as an artifact of the
/// internal ownership structure of Wasmtime: the handler itself is
/// owned by the store, but also receives a mutable borrow to the
/// whole store, so we need to clone it out to invoke it. It is
/// recommended that this trait be implemented by a type that is cheap
/// to clone: for example, a single `Arc` handle to debugger state.
///
/// [^1]: Providing visibility further than the most recent entry to
///       Wasm is not directly possible because it could see into
///       another async stack, and the stack that polls the future
///       running a particular Wasm invocation could change after each
///       suspend point in the handler.
pub trait DebugHandler: Clone + Send + Sync + 'static {
    /// The data expected on the store that this handler is attached
    /// to.
    type Data;

    /// Handle a debug event.
    ///
    /// Invoked with the store frozen and the triggering Wasm still on
    /// the stack; see the trait-level documentation for details.
    fn handle(
        &self,
        store: StoreContextMut<'_, Self::Data>,
        event: DebugEvent<'_>,
    ) -> impl Future<Output = ()> + Send;
}