wasmtime/runtime/vm/
component.rs

1//! Runtime support for the component model in Wasmtime
2//!
3//! Currently this runtime support includes a `VMComponentContext` which is
4//! similar in purpose to `VMContext`. The context is read from
5//! cranelift-generated trampolines when entering the host from a wasm module.
6//! Eventually it's intended that module-to-module calls, which would be
7//! cranelift-compiled adapters, will use this `VMComponentContext` as well.
8
9use crate::prelude::*;
10use crate::runtime::vm::{
11    SendSyncPtr, VMArrayCallFunction, VMFuncRef, VMGlobalDefinition, VMMemoryDefinition,
12    VMOpaqueContext, VMStore, VMStoreRawPtr, VMWasmCallFunction, ValRaw, VmPtr, VmSafe,
13};
14use alloc::alloc::Layout;
15use alloc::sync::Arc;
16use core::any::Any;
17use core::marker;
18use core::mem;
19use core::mem::offset_of;
20use core::ops::Deref;
21use core::ptr::{self, NonNull};
22use sptr::Strict;
23use wasmtime_environ::component::*;
24use wasmtime_environ::{HostPtr, PrimaryMap, VMSharedTypeIndex};
25
/// Debug-build poison value written into not-yet-initialized pointer slots of
/// a `VMComponentContext` by `initialize_vmctx`. Accessors `debug_assert!`
/// against this value to catch use-before-initialization and setters assert
/// it's still present to catch double-initialization.
#[allow(clippy::cast_possible_truncation)] // it's intended this is truncated on
                                           // 32-bit platforms
const INVALID_PTR: usize = 0xdead_dead_beef_beef_u64 as usize;
29
30mod libcalls;
31mod resources;
32
33pub use self::resources::{CallContexts, ResourceTable, ResourceTables};
34
/// Runtime representation of a component instance and all state necessary for
/// the instance itself.
///
/// This type never exists by-value, but rather it's always behind a pointer.
/// The size of the allocation for `ComponentInstance` includes the trailing
/// `VMComponentContext` which is variably sized based on the `offsets`
/// contained within.
#[repr(C)]
pub struct ComponentInstance {
    /// Size and offset information for the trailing `VMComponentContext`.
    offsets: VMComponentOffsets<HostPtr>,

    /// For more information about this see the documentation on
    /// `Instance::vmctx_self_reference`.
    vmctx_self_reference: SendSyncPtr<VMComponentContext>,

    /// Runtime type information about this component.
    runtime_info: Arc<dyn ComponentRuntimeInfo>,

    /// State of resources for all `TypeResourceTableIndex` values for this
    /// component.
    ///
    /// This is paired with other information to create a `ResourceTables` which
    /// is how this field is manipulated.
    component_resource_tables: PrimaryMap<TypeResourceTableIndex, ResourceTable>,

    /// Storage for the type information about resources within this component
    /// instance.
    ///
    /// This is actually `Arc<PrimaryMap<ResourceIndex, ResourceType>>` but that
    /// can't be in this crate because `ResourceType` isn't here. Not using `dyn
    /// Any` is left as an exercise for a future refactoring.
    resource_types: Arc<dyn Any + Send + Sync>,

    /// Self-pointer back to `Store<T>` and its functions.
    store: VMStoreRawPtr,

    /// A zero-sized field which represents the end of the struct for the actual
    /// `VMComponentContext` to be allocated behind.
    ///
    /// Note: this must remain the final field since `from_vmctx` and
    /// `VMComponentContext::instance` rely on the context directly trailing
    /// the rest of this struct in memory.
    vmctx: VMComponentContext,
}
76
77/// Type signature for host-defined trampolines that are called from
78/// WebAssembly.
79///
80/// This function signature is invoked from a cranelift-compiled trampoline that
81/// adapts from the core wasm System-V ABI into the ABI provided here:
82///
83/// * `vmctx` - this is the first argument to the wasm import, and should always
84///   end up being a `VMComponentContext`.
85/// * `data` - this is the data pointer associated with the `VMLowering` for
86///   which this function pointer was registered.
87/// * `ty` - the type index, relative to the tables in `vmctx`, that is the
88///   type of the function being called.
89/// * `caller_instance` - the `RuntimeComponentInstanceIndex` representing the
90///   caller component instance, used to track the owner of an async host task.
91/// * `flags` - the component flags for may_enter/leave corresponding to the
92///   component instance that the lowering happened within.
93/// * `opt_memory` - this nullable pointer represents the memory configuration
94///   option for the canonical ABI options.
95/// * `opt_realloc` - this nullable pointer represents the realloc configuration
96///   option for the canonical ABI options.
97/// * `string_encoding` - this is the configured string encoding for the
98///   canonical ABI this lowering corresponds to.
99/// * `async_` - whether the caller is using the async ABI.
100/// * `args_and_results` - pointer to stack-allocated space in the caller where
101///   all the arguments are stored as well as where the results will be written
102///   to. The size and initialized bytes of this depends on the core wasm type
103///   signature that this callee corresponds to.
104/// * `nargs_and_results` - the size, in units of `ValRaw`, of
105///   `args_and_results`.
106///
107/// This function returns a `bool` which indicates whether the call succeeded
108/// or not. On failure this function records trap information in TLS which
109/// should be suitable for reading later.
110//
111// FIXME: 11 arguments is probably too many. The `data` through `string-encoding`
112// parameters should probably get packaged up into the `VMComponentContext`.
113// Needs benchmarking one way or another though to figure out what the best
114// balance is here.
pub type VMLoweringCallee = extern "C" fn(
    vmctx: NonNull<VMOpaqueContext>,
    data: NonNull<u8>,
    ty: u32,
    caller_instance: u32,
    flags: NonNull<VMGlobalDefinition>,
    // Nullable: raw pointer since the memory option may be absent.
    opt_memory: *mut VMMemoryDefinition,
    // Nullable: raw pointer since the realloc option may be absent.
    opt_realloc: *mut VMFuncRef,
    string_encoding: u8,
    async_: u8,
    args_and_results: NonNull<mem::MaybeUninit<ValRaw>>,
    nargs_and_results: usize,
) -> bool;
128
/// Structure describing a lowered host function stored within a
/// `VMComponentContext` per-lowering.
///
/// This is `repr(C)` and its fields are read at fixed offsets from the vmctx
/// (see `VMComponentOffsets::lowering_callee`/`lowering_data` usage in
/// `ComponentInstance::set_lowering`), so the layout is load-bearing.
#[derive(Copy, Clone)]
#[repr(C)]
pub struct VMLowering {
    /// The host function pointer that is invoked when this lowering is
    /// invoked.
    pub callee: VMLoweringCallee,
    /// The host data pointer (think void* pointer) to get passed to `callee`.
    pub data: VmPtr<u8>,
}

// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMLowering {}
143
/// This is a marker type to represent the underlying allocation of a
/// `VMComponentContext`.
///
/// This type is similar to `VMContext` for core wasm and is allocated once per
/// component instance in Wasmtime. While the static size of this type is 0 the
/// actual runtime size is variable depending on the shape of the component that
/// this corresponds to. This structure always trails a `ComponentInstance`
/// allocation and the allocation/lifetime of this allocation is managed by
/// `ComponentInstance`.
#[repr(C)]
// Set an appropriate alignment for this structure where the most-aligned value
// internally right now `VMGlobalDefinition` which has an alignment of 16 bytes.
#[repr(align(16))]
pub struct VMComponentContext {
    /// For more information about this see the equivalent field in `VMContext`
    ///
    /// The `PhantomPinned` marks this type `!Unpin` since the containing
    /// allocation is self-referential (see
    /// `ComponentInstance::vmctx_self_reference`).
    _marker: marker::PhantomPinned,
}
161
162impl ComponentInstance {
    /// Converts the `vmctx` provided into a `ComponentInstance` and runs the
    /// provided closure with that instance.
    ///
    /// The `ComponentInstance` header always immediately precedes its
    /// trailing `VMComponentContext` in the same allocation (see
    /// `alloc_layout` and `new_at`), so the instance is recovered by stepping
    /// backwards `size_of::<ComponentInstance>()` bytes from `vmctx`.
    ///
    /// # Unsafety
    ///
    /// This is `unsafe` because `vmctx` cannot be guaranteed to be a valid
    /// pointer and it cannot be proven statically that it's safe to get a
    /// mutable reference at this time to the instance from `vmctx`.
    pub unsafe fn from_vmctx<R>(
        vmctx: NonNull<VMComponentContext>,
        f: impl FnOnce(&mut ComponentInstance) -> R,
    ) -> R {
        // Back up over the fixed-size header; mirrors the layout established
        // in `new_at`.
        let mut ptr = vmctx
            .byte_sub(mem::size_of::<ComponentInstance>())
            .cast::<ComponentInstance>();
        f(ptr.as_mut())
    }
180
181    /// Returns the layout corresponding to what would be an allocation of a
182    /// `ComponentInstance` for the `offsets` provided.
183    ///
184    /// The returned layout has space for both the `ComponentInstance` and the
185    /// trailing `VMComponentContext`.
186    fn alloc_layout(offsets: &VMComponentOffsets<HostPtr>) -> Layout {
187        let size = mem::size_of::<Self>()
188            .checked_add(usize::try_from(offsets.size_of_vmctx()).unwrap())
189            .unwrap();
190        let align = mem::align_of::<Self>();
191        Layout::from_size_align(size, align).unwrap()
192    }
193
    /// Initializes an uninitialized pointer to a `ComponentInstance` in
    /// addition to its trailing `VMComponentContext`.
    ///
    /// The `ptr` provided must be valid for `alloc_size` bytes and will be
    /// entirely overwritten by this function call. The `offsets` correspond to
    /// the shape of the component being instantiated and `store` is a pointer
    /// back to the Wasmtime store for host functions to have access to.
    unsafe fn new_at(
        ptr: NonNull<ComponentInstance>,
        alloc_size: usize,
        offsets: VMComponentOffsets<HostPtr>,
        runtime_info: Arc<dyn ComponentRuntimeInfo>,
        resource_types: Arc<dyn Any + Send + Sync>,
        store: NonNull<dyn VMStore>,
    ) {
        // Double-check the caller handed us enough room for both the header
        // and the variably-sized `VMComponentContext` that trails it.
        assert!(alloc_size >= Self::alloc_layout(&offsets).size());

        // One empty table per `TypeResourceTableIndex` in this component.
        let num_tables = runtime_info.component().num_resource_tables;
        let mut component_resource_tables = PrimaryMap::with_capacity(num_tables);
        for _ in 0..num_tables {
            component_resource_tables.push(ResourceTable::default());
        }

        ptr::write(
            ptr.as_ptr(),
            ComponentInstance {
                offsets,
                // Record a pointer to the trailing `VMComponentContext` now so
                // that `vmctx()` can later reconstitute a pointer with
                // provenance over the whole context allocation.
                vmctx_self_reference: SendSyncPtr::new(
                    NonNull::new(
                        ptr.as_ptr()
                            .byte_add(mem::size_of::<ComponentInstance>())
                            .cast(),
                    )
                    .unwrap(),
                ),
                component_resource_tables,
                runtime_info,
                resource_types,
                store: VMStoreRawPtr(store),
                vmctx: VMComponentContext {
                    _marker: marker::PhantomPinned,
                },
            },
        );

        // Fill in the trailing context: magic, builtins, store context, flags,
        // and (in debug builds) poison values for lazily-set slots.
        (*ptr.as_ptr()).initialize_vmctx();
    }
241
    /// Returns a pointer to this instance's trailing `VMComponentContext`.
    ///
    /// The *address* is taken from `&self.vmctx` but the pointer's
    /// *provenance* comes from `vmctx_self_reference`, which spans the entire
    /// trailing context rather than just the zero-sized field; see the docs
    /// referenced on the `vmctx_self_reference` field for why.
    fn vmctx(&self) -> NonNull<VMComponentContext> {
        let addr = &raw const self.vmctx;
        let ret = Strict::with_addr(self.vmctx_self_reference.as_ptr(), Strict::addr(addr));
        NonNull::new(ret).unwrap()
    }
247
    /// Returns a read-only pointer `offset` bytes into this instance's
    /// trailing `VMComponentContext`.
    ///
    /// Offsets are expected to come from `self.offsets`; the `T: VmSafe`
    /// bound restricts access to types declared safe to store in a vmctx.
    unsafe fn vmctx_plus_offset<T: VmSafe>(&self, offset: u32) -> *const T {
        self.vmctx()
            .as_ptr()
            .byte_add(usize::try_from(offset).unwrap())
            .cast()
    }

    /// Mutable counterpart of `vmctx_plus_offset`.
    unsafe fn vmctx_plus_offset_mut<T: VmSafe>(&mut self, offset: u32) -> *mut T {
        self.vmctx()
            .as_ptr()
            .byte_add(usize::try_from(offset).unwrap())
            .cast()
    }
261
    /// Returns a pointer to the "may leave" flag for this instance specified
    /// for canonical lowering and lifting operations.
    #[inline]
    pub fn instance_flags(&self, instance: RuntimeComponentInstanceIndex) -> InstanceFlags {
        unsafe {
            // The `cast_mut` hands out a writable pointer from `&self`; all
            // mutation of the flags goes through this raw pointer (e.g. the
            // `write` in `initialize_vmctx`), not through `&mut` references.
            let ptr = self
                .vmctx_plus_offset::<VMGlobalDefinition>(self.offsets.instance_flags(instance))
                .cast_mut();
            InstanceFlags(SendSyncPtr::new(NonNull::new(ptr).unwrap()))
        }
    }
273
    /// Returns the store that this component was created with.
    ///
    /// Returned as a raw pointer; callers must ensure the store is still live
    /// before dereferencing.
    pub fn store(&self) -> *mut dyn VMStore {
        self.store.0.as_ptr()
    }
278
    /// Returns the runtime memory definition corresponding to the index of the
    /// memory provided.
    ///
    /// This can only be called after `idx` has been initialized at runtime
    /// during the instantiation process of a component.
    pub fn runtime_memory(&self, idx: RuntimeMemoryIndex) -> *mut VMMemoryDefinition {
        unsafe {
            let ret = *self.vmctx_plus_offset::<VmPtr<_>>(self.offsets.runtime_memory(idx));
            // In debug builds un-set slots hold `INVALID_PTR` (see
            // `initialize_vmctx`); trip here on use-before-initialization.
            debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
            ret.as_ptr()
        }
    }

    /// Returns the realloc pointer corresponding to the index provided.
    ///
    /// This can only be called after `idx` has been initialized at runtime
    /// during the instantiation process of a component.
    pub fn runtime_realloc(&self, idx: RuntimeReallocIndex) -> NonNull<VMFuncRef> {
        unsafe {
            let ret = *self.vmctx_plus_offset::<VmPtr<_>>(self.offsets.runtime_realloc(idx));
            // Trip on use-before-initialization in debug builds.
            debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
            ret.as_non_null()
        }
    }

    /// Returns the post-return pointer corresponding to the index provided.
    ///
    /// This can only be called after `idx` has been initialized at runtime
    /// during the instantiation process of a component.
    pub fn runtime_post_return(&self, idx: RuntimePostReturnIndex) -> NonNull<VMFuncRef> {
        unsafe {
            let ret = *self.vmctx_plus_offset::<VmPtr<_>>(self.offsets.runtime_post_return(idx));
            // Trip on use-before-initialization in debug builds.
            debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
            ret.as_non_null()
        }
    }

    /// Returns the host information for the lowered function at the index
    /// specified.
    ///
    /// This can only be called after `idx` has been initialized at runtime
    /// during the instantiation process of a component.
    pub fn lowering(&self, idx: LoweredIndex) -> VMLowering {
        unsafe {
            let ret = *self.vmctx_plus_offset::<VMLowering>(self.offsets.lowering(idx));
            // Both fields are poisoned in debug builds until `set_lowering`
            // runs; trip here on use-before-initialization.
            debug_assert!(ret.callee as usize != INVALID_PTR);
            debug_assert!(ret.data.as_ptr() as usize != INVALID_PTR);
            ret
        }
    }
329
    /// Returns the core wasm `funcref` corresponding to the trampoline
    /// specified.
    ///
    /// The returned function is suitable to pass directly to a wasm module
    /// instantiation and the function contains cranelift-compiled trampolines.
    ///
    /// This can only be called after `idx` has been initialized at runtime
    /// during the instantiation process of a component.
    pub fn trampoline_func_ref(&self, idx: TrampolineIndex) -> NonNull<VMFuncRef> {
        unsafe {
            let offset = self.offsets.trampoline_func_ref(idx);
            let ret = self.vmctx_plus_offset::<VMFuncRef>(offset);
            // `wasm_call` is an `Option<VmPtr<_>>`; transmuting to `usize`
            // exposes the raw bits to compare against the debug poison value.
            debug_assert!(
                mem::transmute::<Option<VmPtr<VMWasmCallFunction>>, usize>((*ret).wasm_call)
                    != INVALID_PTR
            );
            debug_assert!((*ret).vmctx.as_ptr() as usize != INVALID_PTR);
            NonNull::new(ret.cast_mut()).unwrap()
        }
    }
350
    /// Stores the runtime memory pointer at the index specified.
    ///
    /// This is intended to be called during the instantiation process of a
    /// component once a memory is available, which may not be until part-way
    /// through component instantiation.
    ///
    /// Note that it should be a property of the component model that the `ptr`
    /// here is never needed prior to it being configured here in the instance.
    pub fn set_runtime_memory(
        &mut self,
        idx: RuntimeMemoryIndex,
        ptr: NonNull<VMMemoryDefinition>,
    ) {
        unsafe {
            let storage = self.vmctx_plus_offset_mut::<VmPtr<VMMemoryDefinition>>(
                self.offsets.runtime_memory(idx),
            );
            // Write-once slot: in debug builds it must still hold the poison
            // value installed by `initialize_vmctx`.
            debug_assert!((*storage).as_ptr() as usize == INVALID_PTR);
            *storage = ptr.into();
        }
    }

    /// Same as `set_runtime_memory` but for realloc function pointers.
    pub fn set_runtime_realloc(&mut self, idx: RuntimeReallocIndex, ptr: NonNull<VMFuncRef>) {
        unsafe {
            let storage =
                self.vmctx_plus_offset_mut::<VmPtr<VMFuncRef>>(self.offsets.runtime_realloc(idx));
            // Write-once slot; see `set_runtime_memory`.
            debug_assert!((*storage).as_ptr() as usize == INVALID_PTR);
            *storage = ptr.into();
        }
    }

    /// Same as `set_runtime_memory` but for async callback function pointers.
    pub fn set_runtime_callback(&mut self, idx: RuntimeCallbackIndex, ptr: NonNull<VMFuncRef>) {
        unsafe {
            let storage =
                self.vmctx_plus_offset_mut::<VmPtr<VMFuncRef>>(self.offsets.runtime_callback(idx));
            // Write-once slot; see `set_runtime_memory`.
            debug_assert!((*storage).as_ptr() as usize == INVALID_PTR);
            *storage = ptr.into();
        }
    }

    /// Same as `set_runtime_memory` but for post-return function pointers.
    pub fn set_runtime_post_return(
        &mut self,
        idx: RuntimePostReturnIndex,
        ptr: NonNull<VMFuncRef>,
    ) {
        unsafe {
            let storage = self
                .vmctx_plus_offset_mut::<VmPtr<VMFuncRef>>(self.offsets.runtime_post_return(idx));
            // Write-once slot; see `set_runtime_memory`.
            debug_assert!((*storage).as_ptr() as usize == INVALID_PTR);
            *storage = ptr.into();
        }
    }
406
    /// Configures host runtime lowering information associated with imported
    /// functions for the `idx` specified.
    pub fn set_lowering(&mut self, idx: LoweredIndex, lowering: VMLowering) {
        unsafe {
            // Write-once slots: both the callee and data words must still hold
            // the debug poison value from `initialize_vmctx`.
            debug_assert!(
                *self.vmctx_plus_offset::<usize>(self.offsets.lowering_callee(idx)) == INVALID_PTR
            );
            debug_assert!(
                *self.vmctx_plus_offset::<usize>(self.offsets.lowering_data(idx)) == INVALID_PTR
            );
            *self.vmctx_plus_offset_mut(self.offsets.lowering(idx)) = lowering;
        }
    }
420
    /// Same as `set_lowering` but for the resource.drop functions.
    pub fn set_trampoline(
        &mut self,
        idx: TrampolineIndex,
        wasm_call: NonNull<VMWasmCallFunction>,
        array_call: NonNull<VMArrayCallFunction>,
        type_index: VMSharedTypeIndex,
    ) {
        unsafe {
            let offset = self.offsets.trampoline_func_ref(idx);
            // Write-once slot; must still hold the debug poison value.
            debug_assert!(*self.vmctx_plus_offset::<usize>(offset) == INVALID_PTR);
            // The funcref's `vmctx` is this component's own context so calls
            // through it can find their way back to this instance.
            let vmctx = VMOpaqueContext::from_vmcomponent(self.vmctx());
            *self.vmctx_plus_offset_mut(offset) = VMFuncRef {
                wasm_call: Some(wasm_call.into()),
                array_call: array_call.into(),
                type_index,
                vmctx: vmctx.into(),
            };
        }
    }
441
    /// Configures the destructor for a resource at the `idx` specified.
    ///
    /// This is required to be called for each resource as it's defined within a
    /// component during the instantiation process.
    pub fn set_resource_destructor(
        &mut self,
        idx: ResourceIndex,
        dtor: Option<NonNull<VMFuncRef>>,
    ) {
        unsafe {
            let offset = self.offsets.resource_destructor(idx);
            // Write-once slot; must still hold the debug poison value.
            debug_assert!(*self.vmctx_plus_offset::<usize>(offset) == INVALID_PTR);
            *self.vmctx_plus_offset_mut(offset) = dtor.map(VmPtr::from);
        }
    }
457
    /// Returns the destructor, if any, for `idx`.
    ///
    /// This is only valid to call after `set_resource_destructor`, or typically
    /// after instantiation.
    pub fn resource_destructor(&self, idx: ResourceIndex) -> Option<NonNull<VMFuncRef>> {
        unsafe {
            let offset = self.offsets.resource_destructor(idx);
            // Trip in debug builds if the slot was never initialized.
            debug_assert!(*self.vmctx_plus_offset::<usize>(offset) != INVALID_PTR);
            (*self.vmctx_plus_offset::<Option<VmPtr<VMFuncRef>>>(offset)).map(|p| p.as_non_null())
        }
    }
469
    /// One-time initialization of this instance's trailing
    /// `VMComponentContext`.
    ///
    /// Writes the magic value, the builtin libcalls pointer, the store
    /// context pointer, and the initial per-instance flags. In debug builds
    /// every lazily-initialized pointer slot is additionally poisoned with
    /// `INVALID_PTR` to back the `debug_assert!`s throughout this type.
    unsafe fn initialize_vmctx(&mut self) {
        *self.vmctx_plus_offset_mut(self.offsets.magic()) = VMCOMPONENT_MAGIC;
        *self.vmctx_plus_offset_mut(self.offsets.builtins()) =
            VmPtr::from(NonNull::from(&libcalls::VMComponentBuiltins::INIT));
        *self.vmctx_plus_offset_mut(self.offsets.vm_store_context()) =
            VmPtr::from(self.store.0.as_ref().vm_store_context_ptr());

        // Each runtime component instance starts out with its may_enter and
        // may_leave flags set.
        for i in 0..self.offsets.num_runtime_component_instances {
            let i = RuntimeComponentInstanceIndex::from_u32(i);
            let mut def = VMGlobalDefinition::new();
            *def.as_i32_mut() = FLAG_MAY_ENTER | FLAG_MAY_LEAVE;
            self.instance_flags(i).as_raw().write(def);
        }

        // In debug mode set non-null bad values to all "pointer looking" bits
        // and pieces related to lowering and such. This'll help detect any
        // erroneous usage and enable debug assertions above as well to prevent
        // loading these before they're configured or setting them twice.
        if cfg!(debug_assertions) {
            for i in 0..self.offsets.num_lowerings {
                let i = LoweredIndex::from_u32(i);
                let offset = self.offsets.lowering_callee(i);
                *self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
                let offset = self.offsets.lowering_data(i);
                *self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_trampolines {
                let i = TrampolineIndex::from_u32(i);
                let offset = self.offsets.trampoline_func_ref(i);
                *self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_runtime_memories {
                let i = RuntimeMemoryIndex::from_u32(i);
                let offset = self.offsets.runtime_memory(i);
                *self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_runtime_reallocs {
                let i = RuntimeReallocIndex::from_u32(i);
                let offset = self.offsets.runtime_realloc(i);
                *self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_runtime_callbacks {
                let i = RuntimeCallbackIndex::from_u32(i);
                let offset = self.offsets.runtime_callback(i);
                *self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_runtime_post_returns {
                let i = RuntimePostReturnIndex::from_u32(i);
                let offset = self.offsets.runtime_post_return(i);
                *self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_resources {
                let i = ResourceIndex::from_u32(i);
                let offset = self.offsets.resource_destructor(i);
                *self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
            }
        }
    }
528
    /// Returns a reference to the component type information for this
    /// instance, forwarded from the `ComponentRuntimeInfo` this was created
    /// with.
    pub fn component(&self) -> &Component {
        self.runtime_info.component()
    }

    /// Returns the type information that this instance is instantiated with.
    pub fn component_types(&self) -> &Arc<ComponentTypes> {
        self.runtime_info.component_types()
    }

    /// Get the canonical ABI's `realloc` function's runtime type.
    ///
    /// Returned as `dyn Any`; the caller is responsible for downcasting to
    /// the concrete type it expects.
    pub fn realloc_func_ty(&self) -> &Arc<dyn Any + Send + Sync> {
        self.runtime_info.realloc_func_type()
    }

    /// Returns a reference to the resource type information as a `dyn Any`.
    ///
    /// Wasmtime is the one which then downcasts this to the appropriate type.
    pub fn resource_types(&self) -> &Arc<dyn Any + Send + Sync> {
        &self.resource_types
    }
550
551    /// Returns whether the resource that `ty` points to is owned by the
552    /// instance that `ty` correspond to.
553    ///
554    /// This is used when lowering borrows to skip table management and instead
555    /// thread through the underlying representation directly.
556    pub fn resource_owned_by_own_instance(&self, ty: TypeResourceTableIndex) -> bool {
557        let resource = &self.component_types()[ty];
558        let component = self.component();
559        let idx = match component.defined_resource_index(resource.ty) {
560            Some(idx) => idx,
561            None => return false,
562        };
563        resource.instance == component.defined_resource_instances[idx]
564    }
565
    /// Implementation of the `resource.new` intrinsic for `i32`
    /// representations.
    ///
    /// Errors are propagated from the underlying `ResourceTables` operation.
    pub fn resource_new32(&mut self, resource: TypeResourceTableIndex, rep: u32) -> Result<u32> {
        self.resource_tables().resource_new(Some(resource), rep)
    }

    /// Implementation of the `resource.rep` intrinsic for `i32`
    /// representations.
    ///
    /// Errors are propagated from the underlying `ResourceTables` operation.
    pub fn resource_rep32(&mut self, resource: TypeResourceTableIndex, idx: u32) -> Result<u32> {
        self.resource_tables().resource_rep(Some(resource), idx)
    }

    /// Implementation of the `resource.drop` intrinsic.
    ///
    /// Errors are propagated from the underlying `ResourceTables` operation.
    pub fn resource_drop(
        &mut self,
        resource: TypeResourceTableIndex,
        idx: u32,
    ) -> Result<Option<u32>> {
        self.resource_tables().resource_drop(Some(resource), idx)
    }
586
    /// NB: this is intended to be a private method. This does not have
    /// `host_table` information at this time meaning it's only suitable for
    /// working with resources specified to this component which is currently
    /// all that this is used for.
    ///
    /// If necessary though it's possible to enhance the `Store` trait to thread
    /// through the relevant information and get `host_table` to be `Some` here.
    fn resource_tables(&mut self) -> ResourceTables<'_> {
        ResourceTables {
            host_table: None,
            // SAFETY: dereferences the raw store pointer captured at
            // instantiation, which is assumed to outlive this instance.
            calls: unsafe { (&mut *self.store()).component_calls() },
            tables: Some(&mut self.component_resource_tables),
        }
    }
601
    /// Returns the runtime state of resources associated with this component.
    ///
    /// This hands out direct mutable access to the per-type tables,
    /// bypassing the `ResourceTables` wrapper used internally.
    #[inline]
    pub fn component_resource_tables(
        &mut self,
    ) -> &mut PrimaryMap<TypeResourceTableIndex, ResourceTable> {
        &mut self.component_resource_tables
    }
609
610    /// Returns the destructor and instance flags for the specified resource
611    /// table type.
612    ///
613    /// This will lookup the origin definition of the `ty` table and return the
614    /// destructor/flags for that.
615    pub fn dtor_and_flags(
616        &self,
617        ty: TypeResourceTableIndex,
618    ) -> (Option<NonNull<VMFuncRef>>, Option<InstanceFlags>) {
619        let resource = self.component_types()[ty].ty;
620        let dtor = self.resource_destructor(resource);
621        let component = self.component();
622        let flags = component.defined_resource_index(resource).map(|i| {
623            let instance = component.defined_resource_instances[i];
624            self.instance_flags(instance)
625        });
626        (dtor, flags)
627    }
628
629    pub(crate) fn resource_transfer_own(
630        &mut self,
631        idx: u32,
632        src: TypeResourceTableIndex,
633        dst: TypeResourceTableIndex,
634    ) -> Result<u32> {
635        let mut tables = self.resource_tables();
636        let rep = tables.resource_lift_own(Some(src), idx)?;
637        tables.resource_lower_own(Some(dst), rep)
638    }
639
640    pub(crate) fn resource_transfer_borrow(
641        &mut self,
642        idx: u32,
643        src: TypeResourceTableIndex,
644        dst: TypeResourceTableIndex,
645    ) -> Result<u32> {
646        let dst_owns_resource = self.resource_owned_by_own_instance(dst);
647        let mut tables = self.resource_tables();
648        let rep = tables.resource_lift_borrow(Some(src), idx)?;
649        // Implement `lower_borrow`'s special case here where if a borrow's
650        // resource type is owned by `dst` then the destination receives the
651        // representation directly rather than a handle to the representation.
652        //
653        // This can perhaps become a different libcall in the future to avoid
654        // this check at runtime since we know at compile time whether the
655        // destination type owns the resource, but that's left as a future
656        // refactoring if truly necessary.
657        if dst_owns_resource {
658            return Ok(rep);
659        }
660        tables.resource_lower_borrow(Some(dst), rep)
661    }
662
    /// Enters a new call context in this component's resource tables; paired
    /// with `resource_exit_call`.
    pub(crate) fn resource_enter_call(&mut self) {
        self.resource_tables().enter_call()
    }

    /// Exits the current call context, with any error propagated from
    /// `ResourceTables::exit_call`.
    pub(crate) fn resource_exit_call(&mut self) -> Result<()> {
        self.resource_tables().exit_call()
    }
670
    /// Transfers a future handle from the `src` table to `dst`.
    ///
    /// Not yet implemented; currently panics via `todo!`.
    #[cfg(feature = "component-model-async")]
    pub(crate) fn future_transfer(
        &mut self,
        src_idx: u32,
        src: TypeFutureTableIndex,
        dst: TypeFutureTableIndex,
    ) -> Result<u32> {
        _ = (src_idx, src, dst);
        todo!()
    }

    /// Transfers a stream handle from the `src` table to `dst`.
    ///
    /// Not yet implemented; currently panics via `todo!`.
    #[cfg(feature = "component-model-async")]
    pub(crate) fn stream_transfer(
        &mut self,
        src_idx: u32,
        src: TypeStreamTableIndex,
        dst: TypeStreamTableIndex,
    ) -> Result<u32> {
        _ = (src_idx, src, dst);
        todo!()
    }

    /// Transfers an error-context handle from the `src` table to `dst`.
    ///
    /// Not yet implemented; currently panics via `todo!`.
    #[cfg(feature = "component-model-async")]
    pub(crate) fn error_context_transfer(
        &mut self,
        src_idx: u32,
        src: TypeComponentLocalErrorContextTableIndex,
        dst: TypeComponentLocalErrorContextTableIndex,
    ) -> Result<u32> {
        _ = (src_idx, src, dst);
        todo!()
    }
703}
704
impl VMComponentContext {
    /// Moves the `self` pointer backwards to the `ComponentInstance` pointer
    /// that this `VMComponentContext` trails.
    ///
    /// This is the inverse of `ComponentInstance::vmctx`: since `vmctx` is the
    /// final field of `ComponentInstance`, subtracting that field's offset
    /// recovers the start of the instance.
    pub fn instance(&self) -> *mut ComponentInstance {
        unsafe {
            (self as *const Self as *mut u8)
                .offset(-(offset_of!(ComponentInstance, vmctx) as isize))
                as *mut ComponentInstance
        }
    }
}
716
/// An owned version of `ComponentInstance` which is akin to
/// `Box<ComponentInstance>`.
///
/// This type can be dereferenced to `ComponentInstance` to access the
/// underlying methods.
pub struct OwnedComponentInstance {
    // Pointer to the `ComponentInstance + VMComponentContext` allocation
    // created in `OwnedComponentInstance::new`.
    ptr: SendSyncPtr<ComponentInstance>,
}
725
726impl OwnedComponentInstance {
    /// Allocates a new `ComponentInstance + VMComponentContext` pair on the
    /// heap with `malloc` and configures it for the `component` specified.
    pub fn new(
        runtime_info: Arc<dyn ComponentRuntimeInfo>,
        resource_types: Arc<dyn Any + Send + Sync>,
        store: NonNull<dyn VMStore>,
    ) -> OwnedComponentInstance {
        // Compute the combined layout of the fixed-size instance header plus
        // the trailing, component-shaped `VMComponentContext`.
        let component = runtime_info.component();
        let offsets = VMComponentOffsets::new(HostPtr, component);
        let layout = ComponentInstance::alloc_layout(&offsets);
        unsafe {
            // Technically it is not required to `alloc_zeroed` here. The
            // primary reason for doing this is because a component context
            // start is a "partly initialized" state where pointers and such are
            // configured as the instantiation process continues. The component
            // model should guarantee that we never access uninitialized memory
            // in the context, but to help protect against possible bugs a
            // zeroed allocation is done here to try to contain
            // use-before-initialized issues.
            let ptr = alloc::alloc::alloc_zeroed(layout) as *mut ComponentInstance;
            let ptr = NonNull::new(ptr).unwrap();

            ComponentInstance::new_at(
                ptr,
                layout.size(),
                offsets,
                runtime_info,
                resource_types,
                store,
            );

            let ptr = SendSyncPtr::new(ptr);
            OwnedComponentInstance { ptr }
        }
    }
762
763    // Note that this is technically unsafe due to the fact that it enables
764    // `mem::swap`-ing two component instances which would get all the offsets
765    // mixed up and cause issues. This is scoped to just this module though as a
766    // convenience to forward to `&mut` methods on `ComponentInstance`.
767    unsafe fn instance_mut(&mut self) -> &mut ComponentInstance {
768        &mut *self.ptr.as_ptr()
769    }
770
771    /// Returns the underlying component instance's raw pointer.
772    pub fn instance_ptr(&self) -> *mut ComponentInstance {
773        self.ptr.as_ptr()
774    }
775
776    /// See `ComponentInstance::set_runtime_memory`
777    pub fn set_runtime_memory(
778        &mut self,
779        idx: RuntimeMemoryIndex,
780        ptr: NonNull<VMMemoryDefinition>,
781    ) {
782        unsafe { self.instance_mut().set_runtime_memory(idx, ptr) }
783    }
784
785    /// See `ComponentInstance::set_runtime_realloc`
786    pub fn set_runtime_realloc(&mut self, idx: RuntimeReallocIndex, ptr: NonNull<VMFuncRef>) {
787        unsafe { self.instance_mut().set_runtime_realloc(idx, ptr) }
788    }
789
790    /// See `ComponentInstance::set_runtime_callback`
791    pub fn set_runtime_callback(&mut self, idx: RuntimeCallbackIndex, ptr: NonNull<VMFuncRef>) {
792        unsafe { self.instance_mut().set_runtime_callback(idx, ptr) }
793    }
794
795    /// See `ComponentInstance::set_runtime_post_return`
796    pub fn set_runtime_post_return(
797        &mut self,
798        idx: RuntimePostReturnIndex,
799        ptr: NonNull<VMFuncRef>,
800    ) {
801        unsafe { self.instance_mut().set_runtime_post_return(idx, ptr) }
802    }
803
804    /// See `ComponentInstance::set_lowering`
805    pub fn set_lowering(&mut self, idx: LoweredIndex, lowering: VMLowering) {
806        unsafe { self.instance_mut().set_lowering(idx, lowering) }
807    }
808
809    /// See `ComponentInstance::set_resource_drop`
810    pub fn set_trampoline(
811        &mut self,
812        idx: TrampolineIndex,
813        wasm_call: NonNull<VMWasmCallFunction>,
814        array_call: NonNull<VMArrayCallFunction>,
815        type_index: VMSharedTypeIndex,
816    ) {
817        unsafe {
818            self.instance_mut()
819                .set_trampoline(idx, wasm_call, array_call, type_index)
820        }
821    }
822
823    /// See `ComponentInstance::set_resource_destructor`
824    pub fn set_resource_destructor(
825        &mut self,
826        idx: ResourceIndex,
827        dtor: Option<NonNull<VMFuncRef>>,
828    ) {
829        unsafe { self.instance_mut().set_resource_destructor(idx, dtor) }
830    }
831
832    /// See `ComponentInstance::resource_types`
833    pub fn resource_types_mut(&mut self) -> &mut Arc<dyn Any + Send + Sync> {
834        unsafe { &mut (*self.ptr.as_ptr()).resource_types }
835    }
836}
837
838impl Deref for OwnedComponentInstance {
839    type Target = ComponentInstance;
840    fn deref(&self) -> &ComponentInstance {
841        unsafe { &*self.ptr.as_ptr() }
842    }
843}
844
impl Drop for OwnedComponentInstance {
    fn drop(&mut self) {
        // Note: `self.offsets` is reached via `Deref` into the heap
        // allocation itself, so the layout must be computed *before*
        // `drop_in_place` runs below — reading it afterwards would touch
        // already-dropped memory.
        let layout = ComponentInstance::alloc_layout(&self.offsets);
        unsafe {
            // Run the `ComponentInstance` destructor first, then free the
            // manually-managed allocation with the same layout used when it
            // was allocated in `OwnedComponentInstance::new`.
            ptr::drop_in_place(self.ptr.as_ptr());
            alloc::alloc::dealloc(self.ptr.as_ptr().cast(), layout);
        }
    }
}
854
855impl VMComponentContext {
856    /// Helper function to cast between context types using a debug assertion to
857    /// protect against some mistakes.
858    #[inline]
859    pub unsafe fn from_opaque(opaque: NonNull<VMOpaqueContext>) -> NonNull<VMComponentContext> {
860        // See comments in `VMContext::from_opaque` for this debug assert
861        debug_assert_eq!(opaque.as_ref().magic, VMCOMPONENT_MAGIC);
862        opaque.cast()
863    }
864}
865
866impl VMOpaqueContext {
867    /// Helper function to clearly indicate the cast desired
868    #[inline]
869    pub fn from_vmcomponent(ptr: NonNull<VMComponentContext>) -> NonNull<VMOpaqueContext> {
870        ptr.cast()
871    }
872}
873
#[allow(missing_docs)]
#[repr(transparent)]
#[derive(Copy, Clone)]
// Raw-pointer wrapper over a component instance's flags global (stored as a
// `VMGlobalDefinition` holding an `i32` bitset); accessor methods live on the
// `impl` below.
pub struct InstanceFlags(SendSyncPtr<VMGlobalDefinition>);
878
879#[allow(missing_docs)]
880impl InstanceFlags {
881    /// Wraps the given pointer as an `InstanceFlags`
882    ///
883    /// # Unsafety
884    ///
885    /// This is a raw pointer argument which needs to be valid for the lifetime
886    /// that `InstanceFlags` is used.
887    pub unsafe fn from_raw(ptr: NonNull<VMGlobalDefinition>) -> InstanceFlags {
888        InstanceFlags(SendSyncPtr::from(ptr))
889    }
890
891    #[inline]
892    pub unsafe fn may_leave(&self) -> bool {
893        *self.as_raw().as_ref().as_i32() & FLAG_MAY_LEAVE != 0
894    }
895
896    #[inline]
897    pub unsafe fn set_may_leave(&mut self, val: bool) {
898        if val {
899            *self.as_raw().as_mut().as_i32_mut() |= FLAG_MAY_LEAVE;
900        } else {
901            *self.as_raw().as_mut().as_i32_mut() &= !FLAG_MAY_LEAVE;
902        }
903    }
904
905    #[inline]
906    pub unsafe fn may_enter(&self) -> bool {
907        *self.as_raw().as_ref().as_i32() & FLAG_MAY_ENTER != 0
908    }
909
910    #[inline]
911    pub unsafe fn set_may_enter(&mut self, val: bool) {
912        if val {
913            *self.as_raw().as_mut().as_i32_mut() |= FLAG_MAY_ENTER;
914        } else {
915            *self.as_raw().as_mut().as_i32_mut() &= !FLAG_MAY_ENTER;
916        }
917    }
918
919    #[inline]
920    pub unsafe fn needs_post_return(&self) -> bool {
921        *self.as_raw().as_ref().as_i32() & FLAG_NEEDS_POST_RETURN != 0
922    }
923
924    #[inline]
925    pub unsafe fn set_needs_post_return(&mut self, val: bool) {
926        if val {
927            *self.as_raw().as_mut().as_i32_mut() |= FLAG_NEEDS_POST_RETURN;
928        } else {
929            *self.as_raw().as_mut().as_i32_mut() &= !FLAG_NEEDS_POST_RETURN;
930        }
931    }
932
933    #[inline]
934    pub fn as_raw(&self) -> NonNull<VMGlobalDefinition> {
935        self.0.as_non_null()
936    }
937}
938
/// Runtime information about a component stored locally for reflection.
pub trait ComponentRuntimeInfo: Send + Sync + 'static {
    /// Returns the type information about the compiled component.
    fn component(&self) -> &Component;

    /// Returns a handle to the tables of type information for this component.
    fn component_types(&self) -> &Arc<ComponentTypes>;

    /// Get the `wasmtime::FuncType` for the canonical ABI's `realloc` function.
    ///
    /// Returned as `dyn Any` — presumably downcast to the concrete function
    /// type by the caller; confirm against use sites.
    fn realloc_func_type(&self) -> &Arc<dyn Any + Send + Sync>;
}