wasmtime/runtime/store/func_refs.rs

//! Lifetime management of `VMFuncRef`s inside of stores, and filling in their
//! trampolines.

use crate::module::ModuleRegistry;
use crate::prelude::*;
use crate::runtime::vm::{SendSyncPtr, VMArrayCallHostFuncContext, VMFuncRef};
use alloc::sync::Arc;
use core::ptr::NonNull;

/// An arena of `VMFuncRef`s.
///
/// Allows a store to pin and own funcrefs so that it can patch in trampolines
/// for `VMFuncRef`s that are missing a `wasm_call` trampoline and need Wasm to
/// supply it.
#[derive(Default)]
pub struct FuncRefs {
    /// A bump allocation arena where we allocate `VMFuncRef`s such that they
    /// are pinned and owned.
    bump: SendSyncBump,

    /// Pointers into `self.bump` for entries that still need their `wasm_call`
    /// field filled in.
    with_holes: Vec<SendSyncPtr<VMFuncRef>>,

    /// Pinned `VMFuncRef`s that had their `wasm_call` field pre-patched when
    /// constructing an `InstancePre`, and which we need to keep alive for our
    /// owning store's lifetime.
    instance_pre_func_refs: Vec<Arc<[VMFuncRef]>>,
}
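
// A hedged sketch of the intended lifecycle, using hypothetical `store.*`
// field names purely for illustration:
//
//     // A host function's funcref is created before any module that could
//     // provide its `wasm_call` trampoline is known, so it is pushed into
//     // the arena with a hole:
//     let func_ref_ptr = unsafe { store.func_refs.push(func_ref) };
//
//     // Later, once modules have been registered with the store, the holes
//     // are patched from the registry's trampolines:
//     store.func_refs.fill(&store.modules);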

use send_sync_bump::SendSyncBump;
mod send_sync_bump {
    #[derive(Default)]
    pub struct SendSyncBump(bumpalo::Bump);

    impl SendSyncBump {
        pub fn alloc<T>(&mut self, val: T) -> &mut T {
            self.0.alloc(val)
        }
    }

    // Safety: We require `&mut self` on the only public method, which means it
    // is safe to send `&SendSyncBump` references across threads because they
    // can't actually do anything with it.
    unsafe impl Sync for SendSyncBump {}
}
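
// The name `SendSyncBump` suggests the wrapper is meant to be both `Send` and
// `Sync`: `bumpalo::Bump` is already `Send`, and the unsafe impl above adds
// `Sync`. A minimal compile-time check (not part of the original module) could
// look like:
//
//     fn _assert_send_sync<T: Send + Sync>() {}
//     fn _static_check() {
//         _assert_send_sync::<SendSyncBump>();
//     }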

impl FuncRefs {
    /// Push the given `VMFuncRef` into this arena, returning a pinned pointer
    /// to it.
    ///
    /// # Safety
    ///
    /// You may only access the return value on the same thread as this
    /// `FuncRefs` and only while the store holding this `FuncRefs` exists.
    pub unsafe fn push(&mut self, func_ref: VMFuncRef) -> NonNull<VMFuncRef> {
        debug_assert!(func_ref.wasm_call.is_none());
        // Debug assert that the vmctx is a `VMArrayCallHostFuncContext` as
        // that is the only kind that can have holes.
        let _ = unsafe { VMArrayCallHostFuncContext::from_opaque(func_ref.vmctx.as_non_null()) };

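        // Bump-allocate the funcref so it gets a stable address that lives as
        // long as this `FuncRefs` (and thus the owning store), then record it
        // as still having a `wasm_call` hole to fill in later.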
        let func_ref = self.bump.alloc(func_ref);
        let unpatched = SendSyncPtr::from(func_ref);
        let ret = unpatched.as_non_null();
        self.with_holes.push(unpatched);
        ret
    }

    /// Patch any `VMFuncRef::wasm_call`s that need filling in.
    pub fn fill(&mut self, modules: &ModuleRegistry) {
        self.with_holes.retain_mut(|f| {
            unsafe {
                let func_ref = f.as_mut();
                debug_assert!(func_ref.wasm_call.is_none());

                // Debug assert that the vmctx is a `VMArrayCallHostFuncContext` as
                // that is the only kind that can have holes.
                let _ = VMArrayCallHostFuncContext::from_opaque(func_ref.vmctx.as_non_null());

                func_ref.wasm_call = modules
                    .wasm_to_array_trampoline(func_ref.type_index)
                    .map(|f| f.into());
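                // `retain_mut` keeps entries for which the closure returns
                // `true`, so this funcref stays in `with_holes` only while its
                // `wasm_call` is still missing and a later `fill` can retry.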
                func_ref.wasm_call.is_none()
            }
        });
    }

    /// Push pre-patched `VMFuncRef`s from an `InstancePre`.
    pub fn push_instance_pre_func_refs(&mut self, func_refs: Arc<[VMFuncRef]>) {
        self.instance_pre_func_refs.push(func_refs);
    }
}