wasmtime/runtime/component/func/
typed.rs

1use crate::component::func::{Func, LiftContext, LowerContext, Options};
2use crate::component::matching::InstanceType;
3use crate::component::storage::{storage_as_slice, storage_as_slice_mut};
4use crate::prelude::*;
5use crate::runtime::vm::component::ComponentInstance;
6use crate::runtime::vm::SendSyncPtr;
7use crate::{AsContextMut, StoreContext, StoreContextMut, ValRaw};
8use alloc::borrow::Cow;
9use alloc::sync::Arc;
10use core::fmt;
11use core::iter;
12use core::marker;
13use core::mem::{self, MaybeUninit};
14use core::ptr::NonNull;
15use core::str;
16use wasmtime_environ::component::{
17    CanonicalAbiInfo, ComponentTypes, InterfaceType, StringEncoding, VariantInfo, MAX_FLAT_PARAMS,
18    MAX_FLAT_RESULTS,
19};
20
21#[cfg(feature = "component-model-async")]
22use crate::component::concurrent::Promise;
23
/// A statically-typed version of [`Func`] which takes `Params` as input and
/// returns `Return`.
///
/// This is an efficient way to invoke a WebAssembly component where if the
/// inputs and output are statically known this can eschew the vast majority of
/// machinery and checks when calling WebAssembly. This is the most optimized
/// way to call a WebAssembly component.
///
/// Note that like [`Func`] this is a pointer within a [`Store`](crate::Store)
/// and usage will panic if used with the wrong store.
///
/// This type is primarily created with the [`Func::typed`] API.
///
/// See [`ComponentType`] for more information about supported types.
pub struct TypedFunc<Params, Return> {
    // The underlying untyped function handle; `Params`/`Return` exist only at
    // the type level (see `_marker` below) and are asserted at construction
    // time (e.g. via `Func::typed` or `new_unchecked`).
    func: Func,

    // The definition of this field is somewhat subtle and may be surprising.
    // Naively one might expect something like
    //
    //      _marker: marker::PhantomData<fn(Params) -> Return>,
    //
    // Since this is a function pointer after all. The problem with this
    // definition though is that it imposes the wrong variance on `Params` from
    // what we want. Abstractly a `fn(Params)` is able to store `Params` within
    // it meaning you can only give it `Params` that live longer than the
    // function pointer.
    //
    // With a component model function, however, we're always copying data from
    // the host into the guest, so we are never storing pointers to `Params`
    // into the guest outside the duration of a `call`, meaning we can actually
    // accept values in `TypedFunc::call` which live for a shorter duration
    // than the `Params` argument on the struct.
    //
    // This all means that we don't use a phantom function pointer, but instead
    // feign phantom storage here to get the variance desired.
    _marker: marker::PhantomData<(Params, Return)>,
}
62
// Manual `Copy` implementation: a `#[derive(Copy)]` would add unwanted
// `Params: Copy` / `Return: Copy` bounds, but both type parameters are purely
// phantom so the handle is always trivially copyable.
impl<Params, Return> Copy for TypedFunc<Params, Return> {}
64
65impl<Params, Return> Clone for TypedFunc<Params, Return> {
66    fn clone(&self) -> TypedFunc<Params, Return> {
67        *self
68    }
69}
70
impl<Params, Return> TypedFunc<Params, Return>
where
    Params: ComponentNamedList + Lower,
    Return: ComponentNamedList + Lift,
{
    /// Creates a new [`TypedFunc`] from the provided component [`Func`],
    /// unsafely asserting that the underlying function takes `Params` as
    /// input and returns `Return`.
    ///
    /// # Unsafety
    ///
    /// This is an unsafe function because it does not verify that the [`Func`]
    /// provided actually implements this signature. It's up to the caller to
    /// have performed some other sort of check to ensure that the signature is
    /// correct.
    pub unsafe fn new_unchecked(func: Func) -> TypedFunc<Params, Return> {
        TypedFunc {
            _marker: marker::PhantomData,
            func,
        }
    }

    /// Returns the underlying un-typed [`Func`] that this [`TypedFunc`]
    /// references.
    pub fn func(&self) -> &Func {
        &self.func
    }

    /// Calls the underlying WebAssembly component function using the provided
    /// `params` as input.
    ///
    /// This method is used to enter into a component. Execution happens within
    /// the `store` provided. The `params` are copied into WebAssembly memory
    /// as appropriate and a core wasm function is invoked.
    ///
    /// # Post-return
    ///
    /// In the component model each function can have a "post return" specified
    /// which allows cleaning up the arguments returned to the host. For example
    /// if WebAssembly returns a string to the host then it might be a uniquely
    /// allocated string which, after the host finishes processing it, needs to
    /// be deallocated in the wasm instance's own linear memory to prevent
    /// memory leaks in wasm itself. The `post-return` canonical abi option is
    /// used to configure this.
    ///
    /// To accommodate this feature of the component model after invoking a
    /// function via [`TypedFunc::call`] you must next invoke
    /// [`TypedFunc::post_return`]. Note that the return value of the function
    /// should be processed between these two function calls. The return value
    /// continues to be usable from an embedder's perspective after
    /// `post_return` is called, but after `post_return` is invoked it may no
    /// longer retain the same value that the wasm module originally returned.
    ///
    /// Also note that [`TypedFunc::post_return`] must be invoked irrespective
    /// of whether the canonical ABI option `post-return` was configured or not.
    /// This means that embedders must unconditionally call
    /// [`TypedFunc::post_return`] when a function returns. If this function
    /// call returns an error, however, then [`TypedFunc::post_return`] is not
    /// required.
    ///
    /// # Errors
    ///
    /// This function can return an error for a number of reasons:
    ///
    /// * If the wasm itself traps during execution.
    /// * If the wasm traps while copying arguments into memory.
    /// * If the wasm provides bad allocation pointers when copying arguments
    ///   into memory.
    /// * If the wasm returns a value which violates the canonical ABI.
    /// * If this function's instances cannot be entered, for example if the
    ///   instance is currently calling a host function.
    /// * If a previous function call occurred and the corresponding
    ///   `post_return` hasn't been invoked yet.
    ///
    /// In general there are many ways that things could go wrong when copying
    /// types in and out of a wasm module with the canonical ABI, and certain
    /// error conditions are specific to certain types. For example a
    /// WebAssembly module can't return an invalid `char`. When allocating space
    /// for this host to copy a string into the returned pointer must be
    /// in-bounds in memory.
    ///
    /// If an error happens then the error should contain detailed enough
    /// information to understand which part of the canonical ABI went wrong
    /// and what to inspect.
    ///
    /// # Panics
    ///
    /// Panics if this is called on a function in an asynchronous store. This
    /// only works with functions defined within a synchronous store. Also
    /// panics if `store` does not own this function.
    pub fn call(&self, store: impl AsContextMut, params: Params) -> Result<Return> {
        assert!(
            !store.as_context().async_support(),
            "must use `call_async` when async support is enabled on the config"
        );
        self.call_impl(store, params)
    }

    /// Exactly like [`Self::call`], except for use on asynchronous stores.
    ///
    /// # Panics
    ///
    /// Panics if this is called on a function in a synchronous store. This
    /// only works with functions defined within an asynchronous store. Also
    /// panics if `store` does not own this function.
    #[cfg(feature = "async")]
    pub async fn call_async<T>(
        &self,
        mut store: impl AsContextMut<Data = T>,
        params: Params,
    ) -> Result<Return>
    where
        T: Send,
        Params: Send + Sync,
        Return: Send + Sync,
    {
        let mut store = store.as_context_mut();
        assert!(
            store.0.async_support(),
            "cannot use `call_async` when async support is not enabled on the config"
        );
        // Delegate to the synchronous implementation, executed on a fiber via
        // `on_fiber` so that this method can be awaited.
        store
            .on_fiber(|store| self.call_impl(store, params))
            .await?
    }

    /// Start concurrent call to this function.
    ///
    /// Unlike [`Self::call`] and [`Self::call_async`] (both of which require
    /// exclusive access to the store until the completion of the call), calls
    /// made using this method may run concurrently with other calls to the same
    /// instance.
    #[cfg(feature = "component-model-async")]
    pub async fn call_concurrent<T: Send>(
        self,
        mut store: impl AsContextMut<Data = T>,
        params: Params,
    ) -> Result<Promise<Return>>
    where
        Params: Send + Sync + 'static,
        Return: Send + Sync + 'static,
    {
        let store = store.as_context_mut();
        assert!(
            store.0.async_support(),
            "cannot use `call_concurrent` when async support is not enabled on the config"
        );
        // Not implemented yet; discard `params` to silence the
        // unused-variable warning until the implementation lands.
        _ = params;
        todo!()
    }

    // Shared implementation of `call`/`call_async`: dispatches to the correct
    // stack-vs-heap lower/lift strategies based on the flattened sizes of
    // `Params` and `Return`.
    fn call_impl(&self, mut store: impl AsContextMut, params: Params) -> Result<Return> {
        let store = &mut store.as_context_mut();
        // Note that this is in theory simpler than it might read at this time.
        // Here we're doing a runtime dispatch on the `flatten_count` for the
        // params/results to see whether they're in-bounds. This creates 4 cases
        // to handle. In reality this is a highly optimizable branch where LLVM
        // will easily figure out that only one branch here is taken.
        //
        // Otherwise this current construction is done to ensure that the stack
        // space reserved for the params/results is always of the appropriate
        // size (as the params/results needed differ depending on the "flatten"
        // count)
        if Params::flatten_count() <= MAX_FLAT_PARAMS {
            if Return::flatten_count() <= MAX_FLAT_RESULTS {
                self.func.call_raw(
                    store,
                    &params,
                    Self::lower_stack_args,
                    Self::lift_stack_result,
                )
            } else {
                self.func.call_raw(
                    store,
                    &params,
                    Self::lower_stack_args,
                    Self::lift_heap_result,
                )
            }
        } else {
            if Return::flatten_count() <= MAX_FLAT_RESULTS {
                self.func.call_raw(
                    store,
                    &params,
                    Self::lower_heap_args,
                    Self::lift_stack_result,
                )
            } else {
                self.func.call_raw(
                    store,
                    &params,
                    Self::lower_heap_args,
                    Self::lift_heap_result,
                )
            }
        }
    }

    /// Lower parameters directly onto the stack specified by the `dst`
    /// location.
    ///
    /// This is only valid to call when the "flatten count" is small enough, or
    /// when the canonical ABI says arguments go through the stack rather than
    /// the heap.
    fn lower_stack_args<T>(
        cx: &mut LowerContext<'_, T>,
        params: &Params,
        ty: InterfaceType,
        dst: &mut MaybeUninit<Params::Lower>,
    ) -> Result<()> {
        assert!(Params::flatten_count() <= MAX_FLAT_PARAMS);
        params.lower(cx, ty, dst)?;
        Ok(())
    }

    /// Lower parameters onto a heap-allocated location.
    ///
    /// This is used when the stack space to be used for the arguments is above
    /// the `MAX_FLAT_PARAMS` threshold. Here the wasm's `realloc` function is
    /// invoked to allocate space and then parameters are stored at that heap
    /// pointer location.
    fn lower_heap_args<T>(
        cx: &mut LowerContext<'_, T>,
        params: &Params,
        ty: InterfaceType,
        dst: &mut MaybeUninit<ValRaw>,
    ) -> Result<()> {
        assert!(Params::flatten_count() > MAX_FLAT_PARAMS);

        // Memory must exist via validation if the arguments are stored on the
        // heap, so we can create a `MemoryMut` at this point. Afterwards
        // `realloc` is used to allocate space for all the arguments and then
        // they're all stored in linear memory.
        //
        // Note that `realloc` will bake in a check that the returned pointer is
        // in-bounds.
        let ptr = cx.realloc(0, 0, Params::ALIGN32, Params::SIZE32)?;
        params.store(cx, ty, ptr)?;

        // Note that the pointer here is stored as a 64-bit integer. This allows
        // this to work with either 32 or 64-bit memories. For a 32-bit memory
        // it'll just ignore the upper 32 zero bits, and for 64-bit memories
        // this'll have the full 64-bits. Note that for 32-bit memories the call
        // to `realloc` above guarantees that the `ptr` is in-bounds meaning
        // that we will know that the zero-extended upper bits of `ptr` are
        // guaranteed to be zero.
        //
        // This comment about 64-bit integers is also referred to below with
        // "WRITEPTR64".
        dst.write(ValRaw::i64(ptr as i64));

        Ok(())
    }

    /// Lift the result of a function directly from the stack result.
    ///
    /// This is only used when the result fits in the maximum number of stack
    /// slots.
    fn lift_stack_result(
        cx: &mut LiftContext<'_>,
        ty: InterfaceType,
        dst: &Return::Lower,
    ) -> Result<Return> {
        assert!(Return::flatten_count() <= MAX_FLAT_RESULTS);
        Return::lift(cx, ty, dst)
    }

    /// Lift the result of a function where the result is stored indirectly on
    /// the heap.
    fn lift_heap_result(
        cx: &mut LiftContext<'_>,
        ty: InterfaceType,
        dst: &ValRaw,
    ) -> Result<Return> {
        assert!(Return::flatten_count() > MAX_FLAT_RESULTS);
        // FIXME(#4311): needs to read an i64 for memory64
        let ptr = usize::try_from(dst.get_u32())?;
        if ptr % usize::try_from(Return::ALIGN32)? != 0 {
            bail!("return pointer not aligned");
        }

        // Bounds-check that the full `Return::SIZE32`-byte region starting at
        // `ptr` lies within linear memory before loading from it.
        let bytes = cx
            .memory()
            .get(ptr..)
            .and_then(|b| b.get(..Return::SIZE32))
            .ok_or_else(|| anyhow::anyhow!("pointer out of bounds of memory"))?;
        Return::load(cx, ty, bytes)
    }

    /// See [`Func::post_return`]
    pub fn post_return(&self, store: impl AsContextMut) -> Result<()> {
        self.func.post_return(store)
    }

    /// See [`Func::post_return_async`]
    #[cfg(feature = "async")]
    pub async fn post_return_async<T: Send>(
        &self,
        store: impl AsContextMut<Data = T>,
    ) -> Result<()> {
        self.func.post_return_async(store).await
    }
}
374
/// A trait representing a static list of named types that can be passed to or
/// returned from a [`TypedFunc`].
///
/// This trait is implemented for a number of tuple types and is not expected
/// to be implemented externally. The contents of this trait are hidden as it's
/// intended to be an implementation detail of Wasmtime. The contents of this
/// trait are not covered by Wasmtime's stability guarantees.
///
/// For more information about this trait see [`Func::typed`] and
/// [`TypedFunc`].
//
// Note that this is an `unsafe` trait, and the unsafety means that
// implementations of this trait must be correct or otherwise [`TypedFunc`]
// would not be memory safe. The main reason this is `unsafe` is the
// `typecheck` function which must operate correctly relative to the `AsTuple`
// interpretation of the implementor.
//
// Implementations are provided for tuple types; external implementations are
// not expected (see the documentation above).
pub unsafe trait ComponentNamedList: ComponentType {}
392
/// A trait representing types which can be passed to and read from components
/// with the canonical ABI.
///
/// This trait is implemented for Rust types which can be communicated to
/// components. The [`Func::typed`] and [`TypedFunc`] Rust items are the main
/// consumers of this trait.
///
/// Supported Rust types include:
///
/// | Component Model Type              | Rust Type                            |
/// |-----------------------------------|--------------------------------------|
/// | `{s,u}{8,16,32,64}`               | `{i,u}{8,16,32,64}`                  |
/// | `f{32,64}`                        | `f{32,64}`                           |
/// | `bool`                            | `bool`                               |
/// | `char`                            | `char`                               |
/// | `tuple<A, B>`                     | `(A, B)`                             |
/// | `option<T>`                       | `Option<T>`                          |
/// | `result`                          | `Result<(), ()>`                     |
/// | `result<T>`                       | `Result<T, ()>`                      |
/// | `result<_, E>`                    | `Result<(), E>`                      |
/// | `result<T, E>`                    | `Result<T, E>`                       |
/// | `string`                          | `String`, `&str`, or [`WasmStr`]     |
/// | `list<T>`                         | `Vec<T>`, `&[T]`, or [`WasmList`]    |
/// | `own<T>`, `borrow<T>`             | [`Resource<T>`] or [`ResourceAny`]   |
/// | `record`                          | [`#[derive(ComponentType)]`][d-cm]   |
/// | `variant`                         | [`#[derive(ComponentType)]`][d-cm]   |
/// | `enum`                            | [`#[derive(ComponentType)]`][d-cm]   |
/// | `flags`                           | [`flags!`][f-m]                      |
///
/// [`Resource<T>`]: crate::component::Resource
/// [`ResourceAny`]: crate::component::ResourceAny
/// [d-cm]: macro@crate::component::ComponentType
/// [f-m]: crate::component::flags
///
/// Rust standard library pointers such as `&T`, `Box<T>`, `Rc<T>`, and `Arc<T>`
/// additionally represent whatever type `T` represents in the component model.
/// Note that types such as `record`, `variant`, `enum`, and `flags` are
/// generated by the embedder at compile time. These macros derive
/// implementation of this trait for custom types to map to custom types in the
/// component model. Note that for `record`, `variant`, `enum`, and `flags`
/// those types are often generated by the
/// [`bindgen!`](crate::component::bindgen) macro from WIT definitions.
///
/// Types that implement [`ComponentType`] are used for `Params` and `Return`
/// in [`TypedFunc`] and [`Func::typed`].
///
/// The contents of this trait are hidden as it's intended to be an
/// implementation detail of Wasmtime. The contents of this trait are not
/// covered by Wasmtime's stability guarantees.
//
// Note that this is an `unsafe` trait as `TypedFunc`'s safety heavily relies on
// the correctness of the implementations of this trait. Some ways in which this
// trait must be correct to be safe are:
//
// * The `Lower` associated type must be a `ValRaw` sequence. It doesn't have to
//   literally be `[ValRaw; N]` but when laid out in memory it must be adjacent
//   `ValRaw` values and have a multiple of the size of `ValRaw` and the same
//   alignment.
//
// * The `lower` function must initialize the bits within `Lower` that are going
//   to be read by the trampoline that's used to enter core wasm. A trampoline
//   is passed `*mut Lower` and will read the canonical abi arguments in
//   sequence, so all of the bits must be correctly initialized.
//
// * The `size` and `align` functions must be correct for this value stored in
//   the canonical ABI. The `Cursor<T>` iteration of these bytes rely on this
//   for correctness as they otherwise eschew bounds-checking.
//
// There are likely some other correctness issues which aren't documented as
// well, this isn't intended to be an exhaustive list. It suffices to say,
// though, that correctness bugs in this trait implementation are highly likely
// to lead to security bugs, which again leads to the `unsafe` in the trait.
//
// Also note that this trait specifically is not sealed because we have a proc
// macro that generates implementations of this trait for external types in a
// `#[derive]`-like fashion.
pub unsafe trait ComponentType {
    /// Representation of the "lowered" form of this component value.
    ///
    /// Lowerings lower into core wasm values which are represented by `ValRaw`.
    /// This `Lower` type must be a list of `ValRaw` as either a literal array
    /// or a struct where every field is a `ValRaw`. This must be `Copy` (as
    /// `ValRaw` is `Copy`) and support all byte patterns. This being correct is
    /// one reason why the trait is unsafe.
    #[doc(hidden)]
    type Lower: Copy;

    /// The information about this type's canonical ABI (size/align/etc).
    #[doc(hidden)]
    const ABI: CanonicalAbiInfo;

    #[doc(hidden)]
    const SIZE32: usize = Self::ABI.size32 as usize;
    #[doc(hidden)]
    const ALIGN32: u32 = Self::ABI.align32;

    #[doc(hidden)]
    const IS_RUST_UNIT_TYPE: bool = false;

    /// Returns the number of core wasm abi values that will be used to
    /// represent this type in its lowered form.
    ///
    /// This divides the size of `Self::Lower` by the size of `ValRaw`.
    #[doc(hidden)]
    fn flatten_count() -> usize {
        // Validate that `Lower` is laid out as a plain sequence of `ValRaw`s,
        // a documented safety requirement of this trait (see above).
        assert!(mem::size_of::<Self::Lower>() % mem::size_of::<ValRaw>() == 0);
        assert!(mem::align_of::<Self::Lower>() == mem::align_of::<ValRaw>());
        mem::size_of::<Self::Lower>() / mem::size_of::<ValRaw>()
    }

    /// Performs a type-check to see whether this component value type matches
    /// the interface type `ty` provided.
    #[doc(hidden)]
    fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()>;
}
508
/// Internal helper trait for variant-shaped component types, describing the
/// canonical ABI layout of their cases.
#[doc(hidden)]
pub unsafe trait ComponentVariant: ComponentType {
    /// ABI information for each case, in order; presumably `None` marks a
    /// case with no payload — TODO(review): confirm against implementors.
    const CASES: &'static [Option<CanonicalAbiInfo>];
    /// Pre-computed layout information derived statically from `CASES`.
    const INFO: VariantInfo = VariantInfo::new_static(Self::CASES);
    /// Byte offset of the payload within this variant's canonical ABI layout.
    const PAYLOAD_OFFSET32: usize = Self::INFO.payload_offset32 as usize;
}
515
/// Host types which can be passed to WebAssembly components.
///
/// This trait is implemented for all types that can be passed to components
/// either as parameters of component exports or returns of component imports.
/// This trait represents the ability to convert from the native host
/// representation to the canonical ABI.
///
/// Built-in types to Rust such as `Option<T>` implement this trait as
/// appropriate. For a mapping of component model to Rust types see
/// [`ComponentType`].
///
/// For user-defined types, for example `record` types mapped to Rust `struct`s,
/// this crate additionally has
/// [`#[derive(Lower)]`](macro@crate::component::Lower).
///
/// Note that like [`ComponentType`] the definition of this trait is intended to
/// be an internal implementation detail of Wasmtime at this time. It's
/// recommended to use the `#[derive(Lower)]` implementation instead.
pub unsafe trait Lower: ComponentType {
    /// Performs the "lower" function in the canonical ABI.
    ///
    /// This method will lower the current value into a component. The `lower`
    /// function performs a "flat" lowering into the `dst` specified which is
    /// allowed to be uninitialized entering this method but is guaranteed to be
    /// fully initialized if the method returns `Ok(())`.
    ///
    /// The `cx` context provided is the context within which this lowering is
    /// happening. This contains information such as canonical options specified
    /// (e.g. string encodings, memories, etc), the store itself, along with
    /// type information.
    ///
    /// The `ty` parameter is the destination type that is being lowered into.
    /// For example this is the component's "view" of the type that is being
    /// lowered. This is guaranteed to have passed a `typecheck` earlier.
    ///
    /// This will only be called if `typecheck` passes for `Op::Lower`.
    #[doc(hidden)]
    fn lower<T>(
        &self,
        cx: &mut LowerContext<'_, T>,
        ty: InterfaceType,
        dst: &mut MaybeUninit<Self::Lower>,
    ) -> Result<()>;

    /// Performs the "store" operation in the canonical ABI.
    ///
    /// This function will store `self` into the linear memory described by
    /// `cx` at the `offset` provided.
    ///
    /// It is expected that `offset` is a valid offset in memory for
    /// `Self::SIZE32` bytes. At this time that's not an unsafe contract as it's
    /// always re-checked on all stores, but this is something that will need to
    /// be improved in the future to remove extra bounds checks. For now this
    /// function will panic if there's a bug and `offset` isn't valid within
    /// memory.
    ///
    /// The `ty` type information passed here is the same as the type
    /// information passed to `lower` above, and is the component's own view of
    /// what the resulting type should be.
    ///
    /// This will only be called if `typecheck` passes for `Op::Lower`.
    #[doc(hidden)]
    fn store<T>(
        &self,
        cx: &mut LowerContext<'_, T>,
        ty: InterfaceType,
        offset: usize,
    ) -> Result<()>;

    /// Provided method to lower a list of `Self` into memory.
    ///
    /// Requires that `offset` has already been checked for alignment and
    /// validity in terms of being in-bounds, otherwise this may panic.
    ///
    /// This is primarily here to get overridden for implementations of integers
    /// which can avoid some extra fluff and use a pattern that's more easily
    /// optimizable by LLVM.
    #[doc(hidden)]
    fn store_list<T>(
        cx: &mut LowerContext<'_, T>,
        ty: InterfaceType,
        mut offset: usize,
        items: &[Self],
    ) -> Result<()>
    where
        Self: Sized,
    {
        // Elements are stored contiguously at consecutive `SIZE32`-byte
        // strides, matching the canonical ABI layout of a list.
        for item in items {
            item.store(cx, ty, offset)?;
            offset += Self::SIZE32;
        }
        Ok(())
    }
}
610
/// Host types which can be created from the canonical ABI.
///
/// This is the mirror of the [`Lower`] trait where it represents the capability
/// of acquiring items from WebAssembly and passing them to the host.
///
/// Built-in types to Rust such as `Option<T>` implement this trait as
/// appropriate. For a mapping of component model to Rust types see
/// [`ComponentType`].
///
/// For user-defined types, for example `record` types mapped to Rust `struct`s,
/// this crate additionally has
/// [`#[derive(Lift)]`](macro@crate::component::Lift).
///
/// Note that like [`ComponentType`] the definition of this trait is intended to
/// be an internal implementation detail of Wasmtime at this time. It's
/// recommended to use the `#[derive(Lift)]` implementation instead.
pub unsafe trait Lift: Sized + ComponentType {
    /// Performs the "lift" operation in the canonical ABI.
    ///
    /// This function performs a "flat" lift operation from the `src` specified
    /// which is a sequence of core wasm values. The lifting operation will
    /// validate core wasm values and produce a `Self` on success.
    ///
    /// The `cx` provided contains contextual information such as the store
    /// that's being loaded from, canonical options, and type information.
    ///
    /// The `ty` parameter is the origin component's specification for what the
    /// type that is being lifted is. For example this is the record type or the
    /// resource type that is being lifted.
    ///
    /// This will only be called if `typecheck` passes for `Op::Lift`.
    #[doc(hidden)]
    fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self>;

    /// Performs the "load" operation in the canonical ABI.
    ///
    /// This will read the `bytes` provided, which are a sub-slice into the
    /// linear memory described by `cx`. The `bytes` array provided is
    /// guaranteed to be `Self::SIZE32` bytes large. All of memory is then also
    /// available through `cx` for bounds-checks and such as necessary for
    /// strings/lists.
    ///
    /// The `ty` argument is the type that's being loaded, as described by the
    /// original component.
    ///
    /// This will only be called if `typecheck` passes for `Op::Lift`.
    #[doc(hidden)]
    fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self>;

    /// Converts `list` into a `Vec<T>`, used in `Lift for Vec<T>`.
    ///
    /// This is primarily here to get overridden for implementations of integers
    /// which can avoid some extra fluff and use a pattern that's more easily
    /// optimizable by LLVM.
    #[doc(hidden)]
    fn load_list(cx: &mut LiftContext<'_>, list: &WasmList<Self>) -> Result<Vec<Self>>
    where
        Self: Sized,
    {
        // Indexes are always in range (`0..list.len`) so `get_from_store`
        // should never return `None`; per-element lifting errors are
        // propagated through the `Result` collection.
        (0..list.len)
            .map(|index| list.get_from_store(cx, index).unwrap())
            .collect()
    }
}
677
// Macro to help generate "forwarding implementations" of `ComponentType` to
// another type, used for wrappers in Rust like `&T`, `Box<T>`, etc. Note that
// these wrappers only implement lowering because lifting native Rust types
// cannot be done.
//
// Each `(generics) A => B` entry forwards `A`'s `ComponentType`
// implementation (lowered representation, ABI info, and typechecking) to
// `B`'s.
macro_rules! forward_type_impls {
    ($(($($generics:tt)*) $a:ty => $b:ty,)*) => ($(
        unsafe impl <$($generics)*> ComponentType for $a {
            type Lower = <$b as ComponentType>::Lower;

            const ABI: CanonicalAbiInfo = <$b as ComponentType>::ABI;

            #[inline]
            fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()> {
                <$b as ComponentType>::typecheck(ty, types)
            }
        }
    )*)
}
696
// Forward `ComponentType` for common Rust wrapper and owning types to the
// type they contain or dereference to.
forward_type_impls! {
    (T: ComponentType + ?Sized) &'_ T => T,
    (T: ComponentType + ?Sized) Box<T> => T,
    (T: ComponentType + ?Sized) alloc::rc::Rc<T> => T,
    (T: ComponentType + ?Sized) alloc::sync::Arc<T> => T,
    () String => str,
    (T: ComponentType) Vec<T> => [T],
}
705
// Macro to generate "forwarding implementations" of `Lower`, delegating both
// the flat `lower` and the in-memory `store` operations from `$a` to `$b`.
// This is the `Lower` counterpart of `forward_type_impls!`.
macro_rules! forward_lowers {
    ($(($($generics:tt)*) $a:ty => $b:ty,)*) => ($(
        unsafe impl <$($generics)*> Lower for $a {
            fn lower<U>(
                &self,
                cx: &mut LowerContext<'_, U>,
                ty: InterfaceType,
                dst: &mut MaybeUninit<Self::Lower>,
            ) -> Result<()> {
                <$b as Lower>::lower(self, cx, ty, dst)
            }

            fn store<U>(
                &self,
                cx: &mut LowerContext<'_, U>,
                ty: InterfaceType,
                offset: usize,
            ) -> Result<()> {
                <$b as Lower>::store(self, cx, ty, offset)
            }
        }
    )*)
}
729
// The same set of wrapper types also forwards its `Lower` implementation to
// the wrapped type.
forward_lowers! {
    (T: Lower + ?Sized) &'_ T => T,
    (T: Lower + ?Sized) Box<T> => T,
    (T: Lower + ?Sized) alloc::rc::Rc<T> => T,
    (T: Lower + ?Sized) alloc::sync::Arc<T> => T,
    () String => str,
    (T: Lower) Vec<T> => [T],
}
738
// Macro to generate `Lift` for owned string containers: first lift a
// `WasmStr` view of the guest string, then decode/validate it out of linear
// memory with `to_str_from_memory` and convert into the owned container.
macro_rules! forward_string_lifts {
    ($($a:ty,)*) => ($(
        unsafe impl Lift for $a {
            #[inline]
            fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
                Ok(<WasmStr as Lift>::lift(cx, ty, src)?.to_str_from_memory(cx.memory())?.into())
            }

            #[inline]
            fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
                Ok(<WasmStr as Lift>::load(cx, ty, bytes)?.to_str_from_memory(cx.memory())?.into())
            }
        }
    )*)
}
754
// Owned string containers that can be lifted from a guest string.
forward_string_lifts! {
    Box<str>,
    alloc::rc::Rc<str>,
    alloc::sync::Arc<str>,
    String,
}
761
// Macro to generate `Lift` for owned list containers: first lift a
// `WasmList<T>` view of the guest list, then decode all elements via
// `T::load_list` and convert the resulting `Vec<T>` into the container.
macro_rules! forward_list_lifts {
    ($($a:ty,)*) => ($(
        unsafe impl <T: Lift> Lift for $a {
            fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
                let list = <WasmList::<T> as Lift>::lift(cx, ty, src)?;
                Ok(T::load_list(cx, &list)?.into())
            }

            fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
                let list = <WasmList::<T> as Lift>::load(cx, ty, bytes)?;
                Ok(T::load_list(cx, &list)?.into())
            }
        }
    )*)
}
777
// Owned list containers that can be lifted from a guest list.
forward_list_lifts! {
    Box<[T]>,
    alloc::rc::Rc<[T]>,
    alloc::sync::Arc<[T]>,
    Vec<T>,
}
784
// Macro to help generate `ComponentType` implementations for primitive types
// such as integers, char, bool, etc.
//
// Each entry reads as
//
//     rust_type = InterfaceTypeVariant in valraw_field/valraw_getter with abi:ABI,
//
// where `valraw_field`/`valraw_getter` name the `ValRaw` constructor and
// accessor used for the flat representation on the stack.
macro_rules! integers {
    ($($primitive:ident = $ty:ident in $field:ident/$get:ident with abi:$abi:ident,)*) => ($(
        unsafe impl ComponentType for $primitive {
            type Lower = ValRaw;

            const ABI: CanonicalAbiInfo = CanonicalAbiInfo::$abi;

            fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
                match ty {
                    InterfaceType::$ty => Ok(()),
                    other => bail!("expected `{}` found `{}`", desc(&InterfaceType::$ty), desc(other))
                }
            }
        }

        unsafe impl Lower for $primitive {
            #[inline]
            #[allow(trivial_numeric_casts)]
            fn lower<T>(
                &self,
                _cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                dst: &mut MaybeUninit<Self::Lower>,
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                // Widen to the `ValRaw` field's width when lowering onto the
                // stack (a no-op cast for same-width types).
                dst.write(ValRaw::$field(*self as $field));
                Ok(())
            }

            #[inline]
            fn store<T>(
                &self,
                cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                offset: usize,
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                debug_assert!(offset % Self::SIZE32 == 0);
                // Linear memory is always little-endian.
                *cx.get(offset) = self.to_le_bytes();
                Ok(())
            }

            // Bulk store of a whole slice of integers, overriding the default
            // element-at-a-time implementation.
            fn store_list<T>(
                cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                offset: usize,
                items: &[Self],
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));

                // Double-check that the CM alignment is at least the host's
                // alignment for this type which should be true for all
                // platforms.
                assert!((Self::ALIGN32 as usize) >= mem::align_of::<Self>());

                // Slice `cx`'s memory to the window that we'll be modifying.
                // This should all have already been verified in terms of
                // alignment and sizing meaning that these assertions here are
                // not truly necessary but are instead double-checks.
                //
                // Note that we're casting a `[u8]` slice to `[Self]` with
                // `align_to_mut` which is not safe in general but is safe in
                // our specific case as all `u8` patterns are valid `Self`
                // patterns since `Self` is an integral type.
                let dst = &mut cx.as_slice_mut()[offset..][..items.len() * Self::SIZE32];
                let (before, middle, end) = unsafe { dst.align_to_mut::<Self>() };
                assert!(before.is_empty() && end.is_empty());
                assert_eq!(middle.len(), items.len());

                // And with all that out of the way perform the copying loop.
                // This is not a `copy_from_slice` because endianness needs to
                // be handled here, but LLVM should pretty easily transform this
                // into a memcpy on little-endian platforms.
                for (dst, src) in middle.iter_mut().zip(items) {
                    *dst = src.to_le();
                }
                Ok(())
            }
        }

        unsafe impl Lift for $primitive {
            #[inline]
            #[allow(trivial_numeric_casts, clippy::cast_possible_truncation)]
            fn lift(_cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                // Narrow back from the `ValRaw` field's width, undoing the
                // widening performed in `lower`.
                Ok(src.$get() as $primitive)
            }

            #[inline]
            fn load(_cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                debug_assert!((bytes.as_ptr() as usize) % Self::SIZE32 == 0);
                Ok($primitive::from_le_bytes(bytes.try_into().unwrap()))
            }

            // Bulk decode of a whole list, converting each element from its
            // little-endian in-memory representation.
            fn load_list(cx: &mut LiftContext<'_>, list: &WasmList<Self>) -> Result<Vec<Self>> {
                Ok(
                    list._as_le_slice(cx.memory())
                        .iter()
                        .map(|i| Self::from_le(*i))
                        .collect(),
                )
            }
        }
    )*)
}
893
// Note that 8- and 16-bit integers use the 32-bit `ValRaw` fields for their
// flat representation, matching the widening performed in `lower`.
integers! {
    i8 = S8 in i32/get_i32 with abi:SCALAR1,
    u8 = U8 in u32/get_u32 with abi:SCALAR1,
    i16 = S16 in i32/get_i32 with abi:SCALAR2,
    u16 = U16 in u32/get_u32 with abi:SCALAR2,
    i32 = S32 in i32/get_i32 with abi:SCALAR4,
    u32 = U32 in u32/get_u32 with abi:SCALAR4,
    i64 = S64 in i64/get_i64 with abi:SCALAR8,
    u64 = U64 in u64/get_u64 with abi:SCALAR8,
}
904
// Macro to generate `ComponentType`/`Lower`/`Lift` implementations for the
// float primitives, which travel through the canonical ABI as their raw bit
// patterns (`to_bits`/`from_bits`).
macro_rules! floats {
    ($($float:ident/$get_float:ident = $ty:ident with abi:$abi:ident)*) => ($(const _: () = {
        unsafe impl ComponentType for $float {
            type Lower = ValRaw;

            const ABI: CanonicalAbiInfo = CanonicalAbiInfo::$abi;

            fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
                match ty {
                    InterfaceType::$ty => Ok(()),
                    other => bail!("expected `{}` found `{}`", desc(&InterfaceType::$ty), desc(other))
                }
            }
        }

        unsafe impl Lower for $float {
            #[inline]
            fn lower<T>(
                &self,
                _cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                dst: &mut MaybeUninit<Self::Lower>,
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                // Lower the raw bit pattern so no float conversion happens.
                dst.write(ValRaw::$float(self.to_bits()));
                Ok(())
            }

            #[inline]
            fn store<T>(
                &self,
                cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                offset: usize,
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                debug_assert!(offset % Self::SIZE32 == 0);
                // Linear memory is always little-endian.
                let ptr = cx.get(offset);
                *ptr = self.to_bits().to_le_bytes();
                Ok(())
            }

            // Bulk store of a whole slice of floats, overriding the default
            // element-at-a-time implementation.
            fn store_list<T>(
                cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                offset: usize,
                items: &[Self],
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));

                // Double-check that the CM alignment is at least the host's
                // alignment for this type which should be true for all
                // platforms.
                assert!((Self::ALIGN32 as usize) >= mem::align_of::<Self>());

                // Slice `cx`'s memory to the window that we'll be modifying.
                // This should all have already been verified in terms of
                // alignment and sizing meaning that these assertions here are
                // not truly necessary but are instead double-checks.
                let dst = &mut cx.as_slice_mut()[offset..][..items.len() * Self::SIZE32];
                assert!(dst.as_ptr().cast::<Self>().is_aligned());

                // And with all that out of the way perform the copying loop.
                // This is not a `copy_from_slice` because endianness needs to
                // be handled here, but LLVM should pretty easily transform this
                // into a memcpy on little-endian platforms.
                // TODO use `as_chunks` when https://github.com/rust-lang/rust/issues/74985
                // is stabilized
                for (dst, src) in iter::zip(dst.chunks_exact_mut(Self::SIZE32), items) {
                    let dst: &mut [u8; Self::SIZE32] = dst.try_into().unwrap();
                    *dst = src.to_le_bytes();
                }
                Ok(())
            }
        }

        unsafe impl Lift for $float {
            #[inline]
            fn lift(_cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                // Reconstitute the float from the raw bits lowered above.
                Ok($float::from_bits(src.$get_float()))
            }

            #[inline]
            fn load(_cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                debug_assert!((bytes.as_ptr() as usize) % Self::SIZE32 == 0);
                Ok($float::from_le_bytes(bytes.try_into().unwrap()))
            }

            // Bulk decode of a whole list directly from linear memory.
            fn load_list(cx: &mut LiftContext<'_>, list: &WasmList<Self>) -> Result<Vec<Self>> where Self: Sized {
                // See comments in `WasmList::get` for the panicking indexing
                let byte_size = list.len * mem::size_of::<Self>();
                let bytes = &cx.memory()[list.ptr..][..byte_size];

                // The canonical ABI requires that everything is aligned to its
                // own size, so this should be an aligned array.
                assert!(bytes.as_ptr().cast::<Self>().is_aligned());

                // Copy the resulting slice to a new Vec, handling endianness
                // in the process
                // TODO use `as_chunks` when https://github.com/rust-lang/rust/issues/74985
                // is stabilized
                Ok(
                    bytes
                        .chunks_exact(Self::SIZE32)
                        .map(|i| $float::from_le_bytes(i.try_into().unwrap()))
                        .collect()
                )
            }
        }
    };)*)
}
1018
// `f32`/`f64` correspond to the component model `float32`/`float64` types.
floats! {
    f32/get_f32 = Float32 with abi:SCALAR4
    f64/get_f64 = Float64 with abi:SCALAR8
}
1023
1024unsafe impl ComponentType for bool {
1025    type Lower = ValRaw;
1026
1027    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::SCALAR1;
1028
1029    fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
1030        match ty {
1031            InterfaceType::Bool => Ok(()),
1032            other => bail!("expected `bool` found `{}`", desc(other)),
1033        }
1034    }
1035}
1036
1037unsafe impl Lower for bool {
1038    fn lower<T>(
1039        &self,
1040        _cx: &mut LowerContext<'_, T>,
1041        ty: InterfaceType,
1042        dst: &mut MaybeUninit<Self::Lower>,
1043    ) -> Result<()> {
1044        debug_assert!(matches!(ty, InterfaceType::Bool));
1045        dst.write(ValRaw::i32(*self as i32));
1046        Ok(())
1047    }
1048
1049    fn store<T>(
1050        &self,
1051        cx: &mut LowerContext<'_, T>,
1052        ty: InterfaceType,
1053        offset: usize,
1054    ) -> Result<()> {
1055        debug_assert!(matches!(ty, InterfaceType::Bool));
1056        debug_assert!(offset % Self::SIZE32 == 0);
1057        cx.get::<1>(offset)[0] = *self as u8;
1058        Ok(())
1059    }
1060}
1061
1062unsafe impl Lift for bool {
1063    #[inline]
1064    fn lift(_cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
1065        debug_assert!(matches!(ty, InterfaceType::Bool));
1066        match src.get_i32() {
1067            0 => Ok(false),
1068            _ => Ok(true),
1069        }
1070    }
1071
1072    #[inline]
1073    fn load(_cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
1074        debug_assert!(matches!(ty, InterfaceType::Bool));
1075        match bytes[0] {
1076            0 => Ok(false),
1077            _ => Ok(true),
1078        }
1079    }
1080}
1081
1082unsafe impl ComponentType for char {
1083    type Lower = ValRaw;
1084
1085    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::SCALAR4;
1086
1087    fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
1088        match ty {
1089            InterfaceType::Char => Ok(()),
1090            other => bail!("expected `char` found `{}`", desc(other)),
1091        }
1092    }
1093}
1094
1095unsafe impl Lower for char {
1096    #[inline]
1097    fn lower<T>(
1098        &self,
1099        _cx: &mut LowerContext<'_, T>,
1100        ty: InterfaceType,
1101        dst: &mut MaybeUninit<Self::Lower>,
1102    ) -> Result<()> {
1103        debug_assert!(matches!(ty, InterfaceType::Char));
1104        dst.write(ValRaw::u32(u32::from(*self)));
1105        Ok(())
1106    }
1107
1108    #[inline]
1109    fn store<T>(
1110        &self,
1111        cx: &mut LowerContext<'_, T>,
1112        ty: InterfaceType,
1113        offset: usize,
1114    ) -> Result<()> {
1115        debug_assert!(matches!(ty, InterfaceType::Char));
1116        debug_assert!(offset % Self::SIZE32 == 0);
1117        *cx.get::<4>(offset) = u32::from(*self).to_le_bytes();
1118        Ok(())
1119    }
1120}
1121
1122unsafe impl Lift for char {
1123    #[inline]
1124    fn lift(_cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
1125        debug_assert!(matches!(ty, InterfaceType::Char));
1126        Ok(char::try_from(src.get_u32())?)
1127    }
1128
1129    #[inline]
1130    fn load(_cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
1131        debug_assert!(matches!(ty, InterfaceType::Char));
1132        debug_assert!((bytes.as_ptr() as usize) % Self::SIZE32 == 0);
1133        let bits = u32::from_le_bytes(bytes.try_into().unwrap());
1134        Ok(char::try_from(bits)?)
1135    }
1136}
1137
// FIXME(#4311): these probably need different constants for memory64
//
// Bit 31 of a returned length tags a `latin1+utf16` string as utf16-encoded;
// string lengths are capped at 2^31 - 1 so the tag bit stays free.
const UTF16_TAG: usize = 1 << 31;
const MAX_STRING_BYTE_LENGTH: usize = (1 << 31) - 1;
1141
1142// Note that this is similar to `ComponentType for WasmStr` except it can only
1143// be used for lowering, not lifting.
1144unsafe impl ComponentType for str {
1145    type Lower = [ValRaw; 2];
1146
1147    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::POINTER_PAIR;
1148
1149    fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
1150        match ty {
1151            InterfaceType::String => Ok(()),
1152            other => bail!("expected `string` found `{}`", desc(other)),
1153        }
1154    }
1155}
1156
unsafe impl Lower for str {
    fn lower<T>(
        &self,
        cx: &mut LowerContext<'_, T>,
        ty: InterfaceType,
        dst: &mut MaybeUninit<[ValRaw; 2]>,
    ) -> Result<()> {
        debug_assert!(matches!(ty, InterfaceType::String));
        // Copy the string into guest memory first, then lower the resulting
        // (ptr, len) pair into the two stack slots.
        let (ptr, len) = lower_string(cx, self)?;
        // See "WRITEPTR64" above for why this is always storing a 64-bit
        // integer.
        map_maybe_uninit!(dst[0]).write(ValRaw::i64(ptr as i64));
        map_maybe_uninit!(dst[1]).write(ValRaw::i64(len as i64));
        Ok(())
    }

    fn store<T>(
        &self,
        cx: &mut LowerContext<'_, T>,
        ty: InterfaceType,
        offset: usize,
    ) -> Result<()> {
        debug_assert!(matches!(ty, InterfaceType::String));
        debug_assert!(offset % (Self::ALIGN32 as usize) == 0);
        let (ptr, len) = lower_string(cx, self)?;
        // The (ptr, len) pair is stored as two little-endian 32-bit integers.
        // FIXME(#4311): needs memory64 handling
        *cx.get(offset + 0) = u32::try_from(ptr).unwrap().to_le_bytes();
        *cx.get(offset + 4) = u32::try_from(len).unwrap().to_le_bytes();
        Ok(())
    }
}
1188
/// Copies the host string `string` into the guest's linear memory using the
/// guest's configured string encoding and `realloc`, returning the
/// `(ptr, len)` pair describing the final allocation.
///
/// Note that the returned `len` counts code units for the encoding in use
/// (bytes for utf-8/latin1, 16-bit units for utf-16) and, for the
/// latin1+utf16 encoding, carries `UTF16_TAG` when utf16 was selected.
fn lower_string<T>(cx: &mut LowerContext<'_, T>, string: &str) -> Result<(usize, usize)> {
    // Note that in general the wasm module can't assume anything about what the
    // host strings are encoded as. Additionally hosts are allowed to have
    // differently-encoded strings at runtime. Finally when copying a string
    // into wasm it's somewhat strict in the sense that the various patterns of
    // allocation and such are already dictated for us.
    //
    // In general what this means is that when copying a string from the host
    // into the destination we need to follow one of the cases of copying into
    // WebAssembly. It doesn't particularly matter which case as long as it ends
    // up in the right encoding. For example a destination encoding of
    // latin1+utf16 has a number of ways to get copied into and we do something
    // here that isn't the default "utf8 to latin1+utf16" since we have access
    // to simd-accelerated helpers in the `encoding_rs` crate. This is ok though
    // because we can fake that the host string was already stored in latin1
    // format and follow that copy pattern instead.
    match cx.options.string_encoding() {
        // This corresponds to `store_string_copy` in the canonical ABI where
        // the host's representation is utf-8 and the wasm module wants utf-8 so
        // a copy is all that's needed (and the `realloc` can be precise for the
        // initial memory allocation).
        StringEncoding::Utf8 => {
            if string.len() > MAX_STRING_BYTE_LENGTH {
                bail!(
                    "string length of {} too large to copy into wasm",
                    string.len()
                );
            }
            let ptr = cx.realloc(0, 0, 1, string.len())?;
            cx.as_slice_mut()[ptr..][..string.len()].copy_from_slice(string.as_bytes());
            Ok((ptr, string.len()))
        }

        // This corresponds to `store_utf8_to_utf16` in the canonical ABI. Here
        // an over-large allocation is performed and then shrunk afterwards if
        // necessary.
        StringEncoding::Utf16 => {
            // Worst case: each utf-8 byte becomes one 16-bit code unit.
            let size = string.len() * 2;
            if size > MAX_STRING_BYTE_LENGTH {
                bail!(
                    "string length of {} too large to copy into wasm",
                    string.len()
                );
            }
            let mut ptr = cx.realloc(0, 0, 2, size)?;
            let mut copied = 0;
            let bytes = &mut cx.as_slice_mut()[ptr..][..size];
            for (u, bytes) in string.encode_utf16().zip(bytes.chunks_mut(2)) {
                let u_bytes = u.to_le_bytes();
                bytes[0] = u_bytes[0];
                bytes[1] = u_bytes[1];
                copied += 1;
            }
            // Shrink the allocation if fewer code units were produced than
            // the worst case accounted for.
            if (copied * 2) < size {
                ptr = cx.realloc(ptr, size, 2, copied * 2)?;
            }
            Ok((ptr, copied))
        }

        StringEncoding::CompactUtf16 => {
            // This corresponds to `store_string_to_latin1_or_utf16`
            let bytes = string.as_bytes();
            let mut iter = string.char_indices();
            let mut ptr = cx.realloc(0, 0, 2, bytes.len())?;
            let mut dst = &mut cx.as_slice_mut()[ptr..][..bytes.len()];
            // `result` counts code units written so far: bytes while in
            // latin1 mode, 16-bit units after switching to utf16.
            let mut result = 0;
            while let Some((i, ch)) = iter.next() {
                // Test if this `char` fits into the latin1 encoding.
                if let Ok(byte) = u8::try_from(u32::from(ch)) {
                    dst[result] = byte;
                    result += 1;
                    continue;
                }

                // .. if utf16 is forced to be used then the allocation is
                // bumped up to the maximum size.
                let worst_case = bytes
                    .len()
                    .checked_mul(2)
                    .ok_or_else(|| anyhow!("byte length overflow"))?;
                if worst_case > MAX_STRING_BYTE_LENGTH {
                    bail!("byte length too large");
                }
                ptr = cx.realloc(ptr, bytes.len(), 2, worst_case)?;
                dst = &mut cx.as_slice_mut()[ptr..][..worst_case];

                // Previously encoded latin1 bytes are inflated to their 16-bit
                // size for utf16
                for i in (0..result).rev() {
                    dst[2 * i] = dst[i];
                    dst[2 * i + 1] = 0;
                }

                // and then the remainder of the string is encoded.
                for (u, bytes) in string[i..]
                    .encode_utf16()
                    .zip(dst[2 * result..].chunks_mut(2))
                {
                    let u_bytes = u.to_le_bytes();
                    bytes[0] = u_bytes[0];
                    bytes[1] = u_bytes[1];
                    result += 1;
                }
                // Shrink to the exact number of 16-bit units produced.
                if worst_case > 2 * result {
                    ptr = cx.realloc(ptr, worst_case, 2, 2 * result)?;
                }
                return Ok((ptr, result | UTF16_TAG));
            }
            // Pure latin1 path: shrink the allocation to the exact byte count.
            if result < bytes.len() {
                ptr = cx.realloc(ptr, bytes.len(), 2, result)?;
            }
            Ok((ptr, result))
        }
    }
}
1304
1305/// Representation of a string located in linear memory in a WebAssembly
1306/// instance.
1307///
1308/// This type can be used in place of `String` and `str` for string-taking APIs
1309/// in some situations. The purpose of this type is to represent a range of
1310/// validated bytes within a component but does not actually copy the bytes. The
1311/// primary method, [`WasmStr::to_str`], attempts to return a reference to the
1312/// string directly located in the component's memory, avoiding a copy into the
1313/// host if possible.
1314///
1315/// The downside of this type, however, is that accessing a string requires a
1316/// [`Store`](crate::Store) pointer (via [`StoreContext`]). Bindings generated
1317/// by [`bindgen!`](crate::component::bindgen), for example, do not have access
1318/// to [`StoreContext`] and thus can't use this type.
1319///
1320/// This is intended for more advanced use cases such as defining functions
1321/// directly in a [`Linker`](crate::component::Linker). It's expected that in
1322/// the future [`bindgen!`](crate::component::bindgen) will also have a way to
1323/// use this type.
1324///
1325/// This type is used with [`TypedFunc`], for example, when WebAssembly returns
1326/// a string. This type cannot be used to give a string to WebAssembly, instead
1327/// `&str` should be used for that (since it's coming from the host).
1328///
1329/// Note that this type represents an in-bounds string in linear memory, but it
1330/// does not represent a valid string (e.g. valid utf-8). Validation happens
1331/// when [`WasmStr::to_str`] is called.
1332///
1333/// Also note that this type does not implement [`Lower`], it only implements
1334/// [`Lift`].
pub struct WasmStr {
    // Byte offset of the string data within the component's linear memory.
    ptr: usize,
    // Length in code units as produced by the canonical ABI; for the
    // latin1+utf16 encoding the `UTF16_TAG` bit is carried in this value.
    len: usize,
    // Copy of the instance's canonical options (memory, string encoding)
    // needed to locate and decode the string later.
    options: Options,
}
1340
impl WasmStr {
    // Validates that the string's bytes are in-bounds of the instance's
    // linear memory and records the raw pointer/length plus the canonical
    // options needed to decode the string later.
    fn new(ptr: usize, len: usize, cx: &mut LiftContext<'_>) -> Result<WasmStr> {
        // `len` counts code units, so translate it to a byte length based on
        // the configured string encoding, watching for overflow.
        let byte_len = match cx.options.string_encoding() {
            StringEncoding::Utf8 => Some(len),
            StringEncoding::Utf16 => len.checked_mul(2),
            StringEncoding::CompactUtf16 => {
                // The tag bit distinguishes latin1 (1 byte/unit) from utf16
                // (2 bytes/unit).
                if len & UTF16_TAG == 0 {
                    Some(len)
                } else {
                    (len ^ UTF16_TAG).checked_mul(2)
                }
            }
        };
        match byte_len.and_then(|len| ptr.checked_add(len)) {
            Some(n) if n <= cx.memory().len() => {}
            _ => bail!("string pointer/length out of bounds of memory"),
        }
        Ok(WasmStr {
            ptr,
            len,
            options: *cx.options,
        })
    }

    /// Returns the underlying string that this cursor points to.
    ///
    /// Note that this will internally decode the string from the wasm's
    /// encoding to utf-8 and additionally perform validation.
    ///
    /// The `store` provided must be the store where this string lives to
    /// access the correct memory.
    ///
    /// # Errors
    ///
    /// Returns an error if the string wasn't encoded correctly (e.g. invalid
    /// utf-8).
    ///
    /// # Panics
    ///
    /// Panics if this string is not owned by `store`.
    //
    // TODO: should add accessors for specifically utf-8 and utf-16 that perhaps
    // in an opt-in basis don't do validation. Additionally there should be some
    // method that returns `[u16]` after validating to avoid the utf16-to-utf8
    // transcode.
    pub fn to_str<'a, T: 'a>(&self, store: impl Into<StoreContext<'a, T>>) -> Result<Cow<'a, str>> {
        let store = store.into().0;
        let memory = self.options.memory(store);
        self.to_str_from_memory(memory)
    }

    // Decodes the string out of `memory` (which must be this string's linear
    // memory) according to the configured encoding.
    fn to_str_from_memory<'a>(&self, memory: &'a [u8]) -> Result<Cow<'a, str>> {
        match self.options.string_encoding() {
            StringEncoding::Utf8 => self.decode_utf8(memory),
            StringEncoding::Utf16 => self.decode_utf16(memory, self.len),
            StringEncoding::CompactUtf16 => {
                // The tag bit selects between latin1 and utf16; strip it
                // before decoding when set.
                if self.len & UTF16_TAG == 0 {
                    self.decode_latin1(memory)
                } else {
                    self.decode_utf16(memory, self.len ^ UTF16_TAG)
                }
            }
        }
    }

    fn decode_utf8<'a>(&self, memory: &'a [u8]) -> Result<Cow<'a, str>> {
        // Note that bounds-checking already happen in construction of `WasmStr`
        // so this is never expected to panic. This could theoretically be
        // unchecked indexing if we're feeling wild enough.
        Ok(str::from_utf8(&memory[self.ptr..][..self.len])?.into())
    }

    fn decode_utf16<'a>(&self, memory: &'a [u8], len: usize) -> Result<Cow<'a, str>> {
        // See notes in `decode_utf8` for why this is panicking indexing.
        let memory = &memory[self.ptr..][..len * 2];
        Ok(core::char::decode_utf16(
            memory
                .chunks(2)
                .map(|chunk| u16::from_le_bytes(chunk.try_into().unwrap())),
        )
        .collect::<Result<String, _>>()?
        .into())
    }

    fn decode_latin1<'a>(&self, memory: &'a [u8]) -> Result<Cow<'a, str>> {
        // See notes in `decode_utf8` for why this is panicking indexing.
        Ok(encoding_rs::mem::decode_latin1(
            &memory[self.ptr..][..self.len],
        ))
    }
}
1432
1433// Note that this is similar to `ComponentType for str` except it can only be
1434// used for lifting, not lowering.
1435unsafe impl ComponentType for WasmStr {
1436    type Lower = <str as ComponentType>::Lower;
1437
1438    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::POINTER_PAIR;
1439
1440    fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
1441        match ty {
1442            InterfaceType::String => Ok(()),
1443            other => bail!("expected `string` found `{}`", desc(other)),
1444        }
1445    }
1446}
1447
unsafe impl Lift for WasmStr {
    #[inline]
    fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
        debug_assert!(matches!(ty, InterfaceType::String));
        // FIXME(#4311): needs memory64 treatment
        // Read the (ptr, len) pair from the two stack values; bounds
        // validation happens in `WasmStr::new`.
        let ptr = src[0].get_u32();
        let len = src[1].get_u32();
        let (ptr, len) = (usize::try_from(ptr)?, usize::try_from(len)?);
        WasmStr::new(ptr, len, cx)
    }

    #[inline]
    fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
        debug_assert!(matches!(ty, InterfaceType::String));
        debug_assert!((bytes.as_ptr() as usize) % (Self::ALIGN32 as usize) == 0);
        // FIXME(#4311): needs memory64 treatment
        // The in-memory representation is two little-endian 32-bit integers.
        let ptr = u32::from_le_bytes(bytes[..4].try_into().unwrap());
        let len = u32::from_le_bytes(bytes[4..].try_into().unwrap());
        let (ptr, len) = (usize::try_from(ptr)?, usize::try_from(len)?);
        WasmStr::new(ptr, len, cx)
    }
}
1470
1471unsafe impl<T> ComponentType for [T]
1472where
1473    T: ComponentType,
1474{
1475    type Lower = [ValRaw; 2];
1476
1477    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::POINTER_PAIR;
1478
1479    fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()> {
1480        match ty {
1481            InterfaceType::List(t) => T::typecheck(&types.types[*t].element, types),
1482            other => bail!("expected `list` found `{}`", desc(other)),
1483        }
1484    }
1485}
1486
unsafe impl<T> Lower for [T]
where
    T: Lower,
{
    /// Lowers this list into a fresh guest allocation, writing the resulting
    /// (pointer, length) pair into the flat `dst` storage.
    fn lower<U>(
        &self,
        cx: &mut LowerContext<'_, U>,
        ty: InterfaceType,
        dst: &mut MaybeUninit<[ValRaw; 2]>,
    ) -> Result<()> {
        let elem = match ty {
            InterfaceType::List(i) => cx.types[i].element,
            _ => bad_type_info(),
        };
        // Copies the elements into guest memory and reports where they landed.
        let (ptr, len) = lower_list(cx, elem, self)?;
        // See "WRITEPTR64" above for why this is always storing a 64-bit
        // integer.
        map_maybe_uninit!(dst[0]).write(ValRaw::i64(ptr as i64));
        map_maybe_uninit!(dst[1]).write(ValRaw::i64(len as i64));
        Ok(())
    }

    /// Stores this list into linear memory at `offset` as two little-endian
    /// 32-bit values (pointer then length), with the elements themselves
    /// placed in a fresh guest allocation by `lower_list`.
    fn store<U>(
        &self,
        cx: &mut LowerContext<'_, U>,
        ty: InterfaceType,
        offset: usize,
    ) -> Result<()> {
        let elem = match ty {
            InterfaceType::List(i) => cx.types[i].element,
            _ => bad_type_info(),
        };
        debug_assert!(offset % (Self::ALIGN32 as usize) == 0);
        let (ptr, len) = lower_list(cx, elem, self)?;
        // The pointer/length returned by `lower_list` came from a 32-bit
        // guest, so these `try_from` conversions are not expected to fail.
        *cx.get(offset + 0) = u32::try_from(ptr).unwrap().to_le_bytes();
        *cx.get(offset + 4) = u32::try_from(len).unwrap().to_le_bytes();
        Ok(())
    }
}
1526
// FIXME: this is not a memcpy for `T` where `T` is something like `u8`.
//
// Some attempts to fix this have proved not fruitful. In isolation an attempt
// was made where:
//
// * `MemoryMut` stored a `*mut [u8]` as its "last view" of memory to avoid
//   reloading the base pointer constantly. This view is reset on `realloc`.
// * The bounds-checks in `MemoryMut::get` were removed (replaced with unsafe
//   indexing)
//
// Even then though this didn't correctly vectorized for `Vec<u8>`. It's not
// entirely clear why but it appeared that it's related to reloading the base
// pointer to memory (I guess from `MemoryMut` itself?). Overall I'm not really
// clear on what's happening there, but this is surely going to be a performance
// bottleneck in the future.
/// Lowers `list` into a fresh allocation in guest memory made with the
/// canonical-ABI `realloc` function.
///
/// Returns the guest address of the allocation along with the element count
/// (`list.len()`), i.e. the (pointer, length) pair that represents the list.
fn lower_list<T, U>(
    cx: &mut LowerContext<'_, U>,
    ty: InterfaceType,
    list: &[T],
) -> Result<(usize, usize)>
where
    T: Lower,
{
    // Guard against overflow when computing the total byte size of the
    // allocation.
    let elem_size = T::SIZE32;
    let size = list
        .len()
        .checked_mul(elem_size)
        .ok_or_else(|| anyhow!("size overflow copying a list"))?;
    // `realloc(0, 0, ..)` is the canonical ABI's "malloc" shape.
    let ptr = cx.realloc(0, 0, T::ALIGN32, size)?;
    T::store_list(cx, ty, ptr, list)?;
    Ok((ptr, list.len()))
}
1559
/// Representation of a list of values that are owned by a WebAssembly instance.
///
/// For some more commentary about the rationale for this type see the
/// documentation of [`WasmStr`]. In summary this type can avoid a copy when
/// passing data to the host in some situations but is additionally more
/// cumbersome to use by requiring a [`Store`](crate::Store) to be provided.
///
/// This type is used whenever a `(list T)` is returned from a [`TypedFunc`],
/// for example. This type represents a list of values that are stored in linear
/// memory which are waiting to be read.
///
/// Note that this type represents only a valid range of bytes for the list
/// itself, it does not represent validity of the elements themselves and that's
/// performed when they're iterated.
///
/// Note that this type does not implement the [`Lower`] trait, only [`Lift`].
pub struct WasmList<T> {
    // Byte offset into the instance's linear memory where the elements start.
    ptr: usize,
    // Number of elements (not bytes) in the list.
    len: usize,
    // Canonical-ABI options captured from the lift context that produced this
    // list, used later to re-enter the same memory/instance.
    options: Options,
    // Component-model type of each element, consulted when lazily lifting
    // individual items.
    elem: InterfaceType,
    // NB: it would probably be more efficient to store a non-atomic index-style
    // reference to something inside a `StoreOpaque`, but that's not easily
    // available at this time, so it's left as a future exercise.
    types: Arc<ComponentTypes>,
    // Instance whose linear memory `ptr` points into.
    instance: SendSyncPtr<ComponentInstance>,
    // No `T` values are stored inline; elements are lifted on demand.
    _marker: marker::PhantomData<T>,
}
1588
1589impl<T: Lift> WasmList<T> {
1590    fn new(
1591        ptr: usize,
1592        len: usize,
1593        cx: &mut LiftContext<'_>,
1594        elem: InterfaceType,
1595    ) -> Result<WasmList<T>> {
1596        match len
1597            .checked_mul(T::SIZE32)
1598            .and_then(|len| ptr.checked_add(len))
1599        {
1600            Some(n) if n <= cx.memory().len() => {}
1601            _ => bail!("list pointer/length out of bounds of memory"),
1602        }
1603        if ptr % usize::try_from(T::ALIGN32)? != 0 {
1604            bail!("list pointer is not aligned")
1605        }
1606        Ok(WasmList {
1607            ptr,
1608            len,
1609            options: *cx.options,
1610            elem,
1611            types: cx.types.clone(),
1612            instance: SendSyncPtr::new(NonNull::new(cx.instance_ptr()).unwrap()),
1613            _marker: marker::PhantomData,
1614        })
1615    }
1616
1617    /// Returns the item length of this vector
1618    #[inline]
1619    pub fn len(&self) -> usize {
1620        self.len
1621    }
1622
1623    /// Gets the `n`th element of this list.
1624    ///
1625    /// Returns `None` if `index` is out of bounds. Returns `Some(Err(..))` if
1626    /// the value couldn't be decoded (it was invalid). Returns `Some(Ok(..))`
1627    /// if the value is valid.
1628    ///
1629    /// # Panics
1630    ///
1631    /// This function will panic if the string did not originally come from the
1632    /// `store` specified.
1633    //
1634    // TODO: given that interface values are intended to be consumed in one go
1635    // should we even expose a random access iteration API? In theory all
1636    // consumers should be validating through the iterator.
1637    pub fn get(&self, mut store: impl AsContextMut, index: usize) -> Option<Result<T>> {
1638        let store = store.as_context_mut().0;
1639        self.options.store_id().assert_belongs_to(store.id());
1640        // This should be safe because the unsafety lies in the `self.instance`
1641        // pointer passed in has previously been validated by the lifting
1642        // context this was originally created within and with the check above
1643        // this is guaranteed to be the same store. This means that this should
1644        // be carrying over the original assertion from the original creation of
1645        // the lifting context that created this type.
1646        let mut cx =
1647            unsafe { LiftContext::new(store, &self.options, &self.types, self.instance.as_ptr()) };
1648        self.get_from_store(&mut cx, index)
1649    }
1650
1651    fn get_from_store(&self, cx: &mut LiftContext<'_>, index: usize) -> Option<Result<T>> {
1652        if index >= self.len {
1653            return None;
1654        }
1655        // Note that this is using panicking indexing and this is expected to
1656        // never fail. The bounds-checking here happened during the construction
1657        // of the `WasmList` itself which means these should always be in-bounds
1658        // (and wasm memory can only grow). This could theoretically be
1659        // unchecked indexing if we're confident enough and it's actually a perf
1660        // issue one day.
1661        let bytes = &cx.memory()[self.ptr + index * T::SIZE32..][..T::SIZE32];
1662        Some(T::load(cx, self.elem, bytes))
1663    }
1664
1665    /// Returns an iterator over the elements of this list.
1666    ///
1667    /// Each item of the list may fail to decode and is represented through the
1668    /// `Result` value of the iterator.
1669    pub fn iter<'a, U: 'a>(
1670        &'a self,
1671        store: impl Into<StoreContextMut<'a, U>>,
1672    ) -> impl ExactSizeIterator<Item = Result<T>> + 'a {
1673        let store = store.into().0;
1674        self.options.store_id().assert_belongs_to(store.id());
1675        // See comments about unsafety in the `get` method.
1676        let mut cx =
1677            unsafe { LiftContext::new(store, &self.options, &self.types, self.instance.as_ptr()) };
1678        (0..self.len).map(move |i| self.get_from_store(&mut cx, i).unwrap())
1679    }
1680}
1681
// Generates `as_le_slice` accessors on `WasmList<$i>` for each primitive
// integer type `$i`, giving zero-copy access to the little-endian element
// bytes in linear memory.
macro_rules! raw_wasm_list_accessors {
    ($($i:ident)*) => ($(
        impl WasmList<$i> {
            /// Get access to the raw underlying memory for this list.
            ///
            /// This method will return a direct slice into the original wasm
            /// module's linear memory where the data for this slice is stored.
            /// This allows the embedder to have efficient access to the
            /// underlying memory if needed and avoid copies and such if
            /// desired.
            ///
            /// Note that multi-byte integers are stored in little-endian format
            /// so portable processing of this slice must be aware of the host's
            /// byte-endianness. The `from_le` constructors in the Rust standard
            /// library should be suitable for converting from little-endian.
            ///
            /// # Panics
            ///
            /// Panics if the `store` provided is not the one from which this
            /// slice originated.
            pub fn as_le_slice<'a, T: 'a>(&self, store: impl Into<StoreContext<'a, T>>) -> &'a [$i] {
                let memory = self.options.memory(store.into().0);
                self._as_le_slice(memory)
            }

            // Monomorphic helper: reinterprets the list's byte range of
            // `all_of_memory` as a slice of `$i`.
            fn _as_le_slice<'a>(&self, all_of_memory: &'a [u8]) -> &'a [$i] {
                // See comments in `WasmList::get` for the panicking indexing
                let byte_size = self.len * mem::size_of::<$i>();
                let bytes = &all_of_memory[self.ptr..][..byte_size];

                // The canonical ABI requires that everything is aligned to its
                // own size, so this should be an aligned array. Furthermore the
                // alignment of primitive integers for hosts should be smaller
                // than or equal to the size of the primitive itself, meaning
                // that a wasm canonical-abi-aligned list is also aligned for
                // the host. That should mean that the head/tail slices here are
                // empty.
                //
                // Also note that the `unsafe` here is needed since the type
                // we're aligning to isn't guaranteed to be valid, but in our
                // case it's just integers and bytes so this should be safe.
                unsafe {
                    let (head, body, tail) = bytes.align_to::<$i>();
                    assert!(head.is_empty() && tail.is_empty());
                    body
                }
            }
        }
    )*)
}

// Accessors are only provided for primitive integers where a byte-level
// reinterpretation of memory is valid.
raw_wasm_list_accessors! {
    i8 i16 i32 i64
    u8 u16 u32 u64
}
1737
// Note that this is similar to `ComponentType for str` except it can only be
// used for lifting, not lowering.
unsafe impl<T: ComponentType> ComponentType for WasmList<T> {
    // Same two-`ValRaw` (pointer, length) flat representation as `[T]`.
    type Lower = <[T] as ComponentType>::Lower;

    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::POINTER_PAIR;

    fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()> {
        // Type-checking is identical to the host slice type `[T]`.
        <[T] as ComponentType>::typecheck(ty, types)
    }
}
1749
unsafe impl<T: Lift> Lift for WasmList<T> {
    /// Lifts a lazily-decoded list from its flat (pointer, length) core-wasm
    /// representation; element validation is deferred until iteration.
    fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
        let elem = match ty {
            InterfaceType::List(i) => cx.types[i].element,
            _ => bad_type_info(),
        };
        // FIXME(#4311): needs memory64 treatment
        let ptr = src[0].get_u32();
        let len = src[1].get_u32();
        let (ptr, len) = (usize::try_from(ptr)?, usize::try_from(len)?);
        // `WasmList::new` performs the bounds and alignment validation.
        WasmList::new(ptr, len, cx, elem)
    }

    /// Same as `lift` above except the (pointer, length) pair is read from an
    /// 8-byte little-endian record in linear memory.
    fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
        let elem = match ty {
            InterfaceType::List(i) => cx.types[i].element,
            _ => bad_type_info(),
        };
        debug_assert!((bytes.as_ptr() as usize) % (Self::ALIGN32 as usize) == 0);
        // FIXME(#4311): needs memory64 treatment
        let ptr = u32::from_le_bytes(bytes[..4].try_into().unwrap());
        let len = u32::from_le_bytes(bytes[4..].try_into().unwrap());
        let (ptr, len) = (usize::try_from(ptr)?, usize::try_from(len)?);
        WasmList::new(ptr, len, cx, elem)
    }
}
1776
1777/// Verify that the given wasm type is a tuple with the expected fields in the right order.
1778fn typecheck_tuple(
1779    ty: &InterfaceType,
1780    types: &InstanceType<'_>,
1781    expected: &[fn(&InterfaceType, &InstanceType<'_>) -> Result<()>],
1782) -> Result<()> {
1783    match ty {
1784        InterfaceType::Tuple(t) => {
1785            let tuple = &types.types[*t];
1786            if tuple.types.len() != expected.len() {
1787                bail!(
1788                    "expected {}-tuple, found {}-tuple",
1789                    expected.len(),
1790                    tuple.types.len()
1791                );
1792            }
1793            for (ty, check) in tuple.types.iter().zip(expected) {
1794                check(ty, types)?;
1795            }
1796            Ok(())
1797        }
1798        other => bail!("expected `tuple` found `{}`", desc(other)),
1799    }
1800}
1801
1802/// Verify that the given wasm type is a record with the expected fields in the right order and with the right
1803/// names.
1804pub fn typecheck_record(
1805    ty: &InterfaceType,
1806    types: &InstanceType<'_>,
1807    expected: &[(&str, fn(&InterfaceType, &InstanceType<'_>) -> Result<()>)],
1808) -> Result<()> {
1809    match ty {
1810        InterfaceType::Record(index) => {
1811            let fields = &types.types[*index].fields;
1812
1813            if fields.len() != expected.len() {
1814                bail!(
1815                    "expected record of {} fields, found {} fields",
1816                    expected.len(),
1817                    fields.len()
1818                );
1819            }
1820
1821            for (field, &(name, check)) in fields.iter().zip(expected) {
1822                check(&field.ty, types)
1823                    .with_context(|| format!("type mismatch for field {name}"))?;
1824
1825                if field.name != name {
1826                    bail!("expected record field named {}, found {}", name, field.name);
1827                }
1828            }
1829
1830            Ok(())
1831        }
1832        other => bail!("expected `record` found `{}`", desc(other)),
1833    }
1834}
1835
1836/// Verify that the given wasm type is a variant with the expected cases in the right order and with the right
1837/// names.
1838pub fn typecheck_variant(
1839    ty: &InterfaceType,
1840    types: &InstanceType<'_>,
1841    expected: &[(
1842        &str,
1843        Option<fn(&InterfaceType, &InstanceType<'_>) -> Result<()>>,
1844    )],
1845) -> Result<()> {
1846    match ty {
1847        InterfaceType::Variant(index) => {
1848            let cases = &types.types[*index].cases;
1849
1850            if cases.len() != expected.len() {
1851                bail!(
1852                    "expected variant of {} cases, found {} cases",
1853                    expected.len(),
1854                    cases.len()
1855                );
1856            }
1857
1858            for ((case_name, case_ty), &(name, check)) in cases.iter().zip(expected) {
1859                if *case_name != name {
1860                    bail!("expected variant case named {name}, found {case_name}");
1861                }
1862
1863                match (check, case_ty) {
1864                    (Some(check), Some(ty)) => check(ty, types)
1865                        .with_context(|| format!("type mismatch for case {name}"))?,
1866                    (None, None) => {}
1867                    (Some(_), None) => {
1868                        bail!("case `{name}` has no type but one was expected")
1869                    }
1870                    (None, Some(_)) => {
1871                        bail!("case `{name}` has a type but none was expected")
1872                    }
1873                }
1874            }
1875
1876            Ok(())
1877        }
1878        other => bail!("expected `variant` found `{}`", desc(other)),
1879    }
1880}
1881
1882/// Verify that the given wasm type is a enum with the expected cases in the right order and with the right
1883/// names.
1884pub fn typecheck_enum(
1885    ty: &InterfaceType,
1886    types: &InstanceType<'_>,
1887    expected: &[&str],
1888) -> Result<()> {
1889    match ty {
1890        InterfaceType::Enum(index) => {
1891            let names = &types.types[*index].names;
1892
1893            if names.len() != expected.len() {
1894                bail!(
1895                    "expected enum of {} names, found {} names",
1896                    expected.len(),
1897                    names.len()
1898                );
1899            }
1900
1901            for (name, expected) in names.iter().zip(expected) {
1902                if name != expected {
1903                    bail!("expected enum case named {}, found {}", expected, name);
1904                }
1905            }
1906
1907            Ok(())
1908        }
1909        other => bail!("expected `enum` found `{}`", desc(other)),
1910    }
1911}
1912
1913/// Verify that the given wasm type is a flags type with the expected flags in the right order and with the right
1914/// names.
1915pub fn typecheck_flags(
1916    ty: &InterfaceType,
1917    types: &InstanceType<'_>,
1918    expected: &[&str],
1919) -> Result<()> {
1920    match ty {
1921        InterfaceType::Flags(index) => {
1922            let names = &types.types[*index].names;
1923
1924            if names.len() != expected.len() {
1925                bail!(
1926                    "expected flags type with {} names, found {} names",
1927                    expected.len(),
1928                    names.len()
1929                );
1930            }
1931
1932            for (name, expected) in names.iter().zip(expected) {
1933                if name != expected {
1934                    bail!("expected flag named {}, found {}", expected, name);
1935                }
1936            }
1937
1938            Ok(())
1939        }
1940        other => bail!("expected `flags` found `{}`", desc(other)),
1941    }
1942}
1943
/// Format the specified bitflags using the specified names for debugging.
///
/// Flag `i` is considered set when bit `i % 32` of `bits[i / 32]` is one; the
/// set flags are printed `|`-separated and wrapped in parentheses.
pub fn format_flags(bits: &[u32], names: &[&str], f: &mut fmt::Formatter) -> fmt::Result {
    f.write_str("(")?;
    // The separator is empty before the first set flag is printed and `|`
    // afterwards, which avoids tracking a "have we written yet" boolean.
    let mut separator = "";
    for (index, name) in names.iter().enumerate() {
        let word = bits[index / 32];
        if (word >> (index % 32)) & 1 == 0 {
            continue;
        }
        f.write_str(separator)?;
        f.write_str(name)?;
        separator = "|";
    }
    f.write_str(")")
}
1961
1962unsafe impl<T> ComponentType for Option<T>
1963where
1964    T: ComponentType,
1965{
1966    type Lower = TupleLower<<u32 as ComponentType>::Lower, T::Lower>;
1967
1968    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::variant_static(&[None, Some(T::ABI)]);
1969
1970    fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()> {
1971        match ty {
1972            InterfaceType::Option(t) => T::typecheck(&types.types[*t].ty, types),
1973            other => bail!("expected `option` found `{}`", desc(other)),
1974        }
1975    }
1976}
1977
unsafe impl<T> ComponentVariant for Option<T>
where
    T: ComponentType,
{
    // `option<T>` is a two-case variant: `none` has no payload and `some`
    // carries a `T`.
    const CASES: &'static [Option<CanonicalAbiInfo>] = &[None, Some(T::ABI)];
}
1984
unsafe impl<T> Lower for Option<T>
where
    T: Lower,
{
    /// Lowers into flat storage: a 0/1 discriminant in `A1` followed by the
    /// payload (or zeros for `None`) in `A2`.
    fn lower<U>(
        &self,
        cx: &mut LowerContext<'_, U>,
        ty: InterfaceType,
        dst: &mut MaybeUninit<Self::Lower>,
    ) -> Result<()> {
        let payload = match ty {
            InterfaceType::Option(ty) => cx.types[ty].ty,
            _ => bad_type_info(),
        };
        match self {
            None => {
                map_maybe_uninit!(dst.A1).write(ValRaw::i32(0));
                // Note that this is unsafe as we're writing an arbitrary
                // bit-pattern to an arbitrary type, but part of the unsafe
                // contract of the `ComponentType` trait is that we can assign
                // any bit-pattern. By writing all zeros here we're ensuring
                // that the core wasm arguments this translates to will all be
                // zeros (as the canonical ABI requires).
                unsafe {
                    map_maybe_uninit!(dst.A2).as_mut_ptr().write_bytes(0u8, 1);
                }
            }
            Some(val) => {
                map_maybe_uninit!(dst.A1).write(ValRaw::i32(1));
                val.lower(cx, payload, map_maybe_uninit!(dst.A2))?;
            }
        }
        Ok(())
    }

    /// Stores into linear memory: a one-byte 0/1 discriminant at `offset`
    /// followed, for `Some`, by the payload at the ABI-computed payload
    /// offset. For `None` the payload area is left untouched.
    fn store<U>(
        &self,
        cx: &mut LowerContext<'_, U>,
        ty: InterfaceType,
        offset: usize,
    ) -> Result<()> {
        debug_assert!(offset % (Self::ALIGN32 as usize) == 0);
        let payload = match ty {
            InterfaceType::Option(ty) => cx.types[ty].ty,
            _ => bad_type_info(),
        };
        match self {
            None => {
                cx.get::<1>(offset)[0] = 0;
            }
            Some(val) => {
                cx.get::<1>(offset)[0] = 1;
                val.store(cx, payload, offset + (Self::INFO.payload_offset32 as usize))?;
            }
        }
        Ok(())
    }
}
2043
unsafe impl<T> Lift for Option<T>
where
    T: Lift,
{
    /// Lifts from flat storage: reads the 0/1 discriminant from `A1` and, when
    /// set, lifts the payload from `A2`. Any other discriminant is a trap.
    fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
        let payload = match ty {
            InterfaceType::Option(ty) => cx.types[ty].ty,
            _ => bad_type_info(),
        };
        Ok(match src.A1.get_i32() {
            0 => None,
            1 => Some(T::lift(cx, payload, &src.A2)?),
            _ => bail!("invalid option discriminant"),
        })
    }

    /// Lifts from linear memory: a one-byte discriminant followed by the
    /// payload at the ABI-computed payload offset.
    fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
        debug_assert!((bytes.as_ptr() as usize) % (Self::ALIGN32 as usize) == 0);
        let payload_ty = match ty {
            InterfaceType::Option(ty) => cx.types[ty].ty,
            _ => bad_type_info(),
        };
        let discrim = bytes[0];
        let payload = &bytes[Self::INFO.payload_offset32 as usize..];
        match discrim {
            0 => Ok(None),
            1 => Ok(Some(T::load(cx, payload_ty, payload)?)),
            _ => bail!("invalid option discriminant"),
        }
    }
}
2075
/// Flat representation of a component-model `result<T, E>`: a discriminant
/// followed by storage large enough for either payload.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct ResultLower<T: Copy, E: Copy> {
    // Discriminant written by `Lower for Result`: 0 for `Ok`, 1 for `Err`.
    tag: ValRaw,
    payload: ResultLowerPayload<T, E>,
}

/// Overlapped storage for the two possible payloads of a `result`; the active
/// member is selected by `ResultLower::tag`.
#[derive(Clone, Copy)]
#[repr(C)]
union ResultLowerPayload<T: Copy, E: Copy> {
    ok: T,
    err: E,
}
2089
unsafe impl<T, E> ComponentType for Result<T, E>
where
    T: ComponentType,
    E: ComponentType,
{
    type Lower = ResultLower<T::Lower, E::Lower>;

    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::variant_static(&[Some(T::ABI), Some(E::ABI)]);

    fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()> {
        match ty {
            InterfaceType::Result(r) => {
                let result = &types.types[*r];
                match &result.ok {
                    // The component type has an `ok` payload: it must match `T`.
                    Some(ty) => T::typecheck(ty, types)?,
                    // No `ok` payload on the component side only lines up when
                    // the host side is the unit type `()`.
                    None if T::IS_RUST_UNIT_TYPE => {}
                    // NOTE(review): this fires when the Rust type expects a
                    // payload but the component `result` has none; the message
                    // wording reads inverted — confirm intent.
                    None => bail!("expected no `ok` type"),
                }
                match &result.err {
                    Some(ty) => E::typecheck(ty, types)?,
                    None if E::IS_RUST_UNIT_TYPE => {}
                    // NOTE(review): same wording concern as the `ok` arm above.
                    None => bail!("expected no `err` type"),
                }
                Ok(())
            }
            other => bail!("expected `result` found `{}`", desc(other)),
        }
    }
}
2119
/// Lowers the payload of a variant into the storage for the entire payload,
/// handling writing zeros at the end of the representation if this payload is
/// smaller than the entire flat representation.
///
/// * `payload` - the flat storage space for the entire payload of the variant
/// * `typed_payload` - projection from the payload storage space to the
///   individual storage space for this variant.
/// * `lower` - lowering operation used to initialize the `typed_payload` return
///   value.
///
/// For more information on this se the comments in the `Lower for Result`
/// implementation below.
///
/// # Safety
///
/// NOTE(review): callers must uphold the requirements of `storage_as_slice` /
/// `storage_as_slice_mut`, i.e. `P` and `T` must be flat `ValRaw`-based
/// storage where `T`'s slots are a prefix of `P`'s — confirm against those
/// helpers' documentation.
pub unsafe fn lower_payload<P, T>(
    payload: &mut MaybeUninit<P>,
    typed_payload: impl FnOnce(&mut MaybeUninit<P>) -> &mut MaybeUninit<T>,
    lower: impl FnOnce(&mut MaybeUninit<T>) -> Result<()>,
) -> Result<()> {
    // First initialize this variant's own slots via the caller's projection.
    let typed = typed_payload(payload);
    lower(typed)?;

    // Then zero every remaining slot of the full payload so no uninitialized
    // host memory leaks into the guest.
    let typed_len = storage_as_slice(typed).len();
    let payload = storage_as_slice_mut(payload);
    for slot in payload[typed_len..].iter_mut() {
        *slot = ValRaw::u64(0);
    }
    Ok(())
}
2147
unsafe impl<T, E> ComponentVariant for Result<T, E>
where
    T: ComponentType,
    E: ComponentType,
{
    // `result<T, E>` is a two-case variant where both cases carry a payload:
    // `ok` with `T` and `error` with `E`.
    const CASES: &'static [Option<CanonicalAbiInfo>] = &[Some(T::ABI), Some(E::ABI)];
}
2155
unsafe impl<T, E> Lower for Result<T, E>
where
    T: Lower,
    E: Lower,
{
    /// Lowers into flat storage: a 0/1 tag followed by the union payload, with
    /// unused trailing payload slots zeroed by `lower_payload`.
    fn lower<U>(
        &self,
        cx: &mut LowerContext<'_, U>,
        ty: InterfaceType,
        dst: &mut MaybeUninit<Self::Lower>,
    ) -> Result<()> {
        // `ok`/`err` here are the component-level payload types; either may be
        // absent, in which case nothing is lowered for that case's payload.
        let (ok, err) = match ty {
            InterfaceType::Result(ty) => {
                let ty = &cx.types[ty];
                (ty.ok, ty.err)
            }
            _ => bad_type_info(),
        };

        // This implementation of `Lower::lower`, if you're reading these from
        // the top of this file, is the first location that the "join" logic of
        // the component model's canonical ABI encountered. The rough problem is
        // that let's say we have a component model type of the form:
        //
        //      (result u64 (error (tuple f32 u16)))
        //
        // The flat representation of this is actually pretty tricky. Currently
        // it is:
        //
        //      i32 i64 i32
        //
        // The first `i32` is the discriminant for the `result`, and the payload
        // is represented by `i64 i32`. The "ok" variant will only use the `i64`
        // and the "err" variant will use both `i64` and `i32`.
        //
        // In the "ok" variant the first issue is encountered. The size of one
        // variant may not match the size of the other variants. All variants
        // start at the "front" but when lowering a type we need to be sure to
        // initialize the later variants (lest we leak random host memory into
        // the guest module). Due to how the `Lower` type is represented as a
        // `union` of all the variants what ends up happening here is that
        // internally within the `lower_payload` after the typed payload is
        // lowered the remaining bits of the payload that weren't initialized
        // are all set to zero. This will guarantee that we'll write to all the
        // slots for each variant.
        //
        // The "err" variant encounters the second issue, however, which is that
        // the flat representation for each type may differ between payloads. In
        // the "ok" arm an `i64` is written, but the `lower` implementation for
        // the "err" arm will write an `f32` and then an `i32`. For this
        // implementation of `lower` to be valid the `f32` needs to get inflated
        // to an `i64` with zero-padding in the upper bits. What may be
        // surprising, however, is that none of this is handled in this file.
        // This implementation looks like it's blindly deferring to `E::lower`
        // and hoping it does the right thing.
        //
        // In reality, however, the correctness of variant lowering relies on
        // two subtle details of the `ValRaw` implementation in Wasmtime:
        //
        // 1. First the `ValRaw` value always contains little-endian values.
        //    This means that if a `u32` is written, a `u64` is read, and then
        //    the `u64` has its upper bits truncated the original value will
        //    always be retained. This is primarily here for big-endian
        //    platforms where if it weren't little endian then the opposite
        //    would occur and the wrong value would be read.
        //
        // 2. Second, and perhaps even more subtly, the `ValRaw` constructors
        //    for 32-bit types actually always initialize 64-bits of the
        //    `ValRaw`. In the component model flat ABI only 32 and 64-bit types
        //    are used so 64-bits is big enough to contain everything. This
        //    means that when a `ValRaw` is written into the destination it will
        //    always, whether it's needed or not, be "ready" to get extended up
        //    to 64-bits.
        //
        // Put together these two subtle guarantees means that all `Lower`
        // implementations can be written "naturally" as one might naively
        // expect. Variants will, on each arm, zero out remaining fields and all
        // writes to the flat representation will automatically be 64-bit writes
        // meaning that if the value is read as a 64-bit value, which isn't
        // known at the time of the write, it'll still be correct.
        match self {
            Ok(e) => {
                map_maybe_uninit!(dst.tag).write(ValRaw::i32(0));
                unsafe {
                    lower_payload(
                        map_maybe_uninit!(dst.payload),
                        |payload| map_maybe_uninit!(payload.ok),
                        |dst| match ok {
                            Some(ok) => e.lower(cx, ok, dst),
                            None => Ok(()),
                        },
                    )
                }
            }
            Err(e) => {
                map_maybe_uninit!(dst.tag).write(ValRaw::i32(1));
                unsafe {
                    lower_payload(
                        map_maybe_uninit!(dst.payload),
                        |payload| map_maybe_uninit!(payload.err),
                        |dst| match err {
                            Some(err) => e.lower(cx, err, dst),
                            None => Ok(()),
                        },
                    )
                }
            }
        }
    }

    /// Stores into linear memory: a one-byte 0/1 discriminant at `offset`
    /// followed, when the case has a payload type, by that payload at the
    /// ABI-computed payload offset.
    fn store<U>(
        &self,
        cx: &mut LowerContext<'_, U>,
        ty: InterfaceType,
        offset: usize,
    ) -> Result<()> {
        let (ok, err) = match ty {
            InterfaceType::Result(ty) => {
                let ty = &cx.types[ty];
                (ty.ok, ty.err)
            }
            _ => bad_type_info(),
        };
        debug_assert!(offset % (Self::ALIGN32 as usize) == 0);
        let payload_offset = Self::INFO.payload_offset32 as usize;
        match self {
            Ok(e) => {
                cx.get::<1>(offset)[0] = 0;
                // Only store a payload if the component type declares one.
                if let Some(ok) = ok {
                    e.store(cx, ok, offset + payload_offset)?;
                }
            }
            Err(e) => {
                cx.get::<1>(offset)[0] = 1;
                if let Some(err) = err {
                    e.store(cx, err, offset + payload_offset)?;
                }
            }
        }
        Ok(())
    }
}
2298
unsafe impl<T, E> Lift for Result<T, E>
where
    T: Lift,
    E: Lift,
{
    /// Lifts a `result<ok, err>` from its flat (stack) representation: the
    /// `tag` `ValRaw` selects which arm, and the payload union is read as
    /// the corresponding type.
    #[inline]
    fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
        let (ok, err) = match ty {
            InterfaceType::Result(ty) => {
                let ty = &cx.types[ty];
                (ty.ok, ty.err)
            }
            _ => bad_type_info(),
        };
        // Note that this implementation specifically isn't trying to actually
        // reinterpret or alter the bits of `lower` depending on which variant
        // we're lifting. This ends up all working out because the value is
        // stored in little-endian format.
        //
        // When stored in little-endian format the `{T,E}::Lower`, when each
        // individual `ValRaw` is read, means that if an i64 value, extended
        // from an i32 value, was stored then when the i32 value is read it'll
        // automatically ignore the upper bits.
        //
        // This "trick" allows us to seamlessly pass through the `Self::Lower`
        // representation into the lifting/lowering without trying to handle
        // "join"ed types as per the canonical ABI. It just so happens that i64
        // bits will naturally be reinterpreted as f64. Additionally if the
        // joined type is i64 but only the lower bits are read that's ok and we
        // don't need to validate the upper bits.
        //
        // This is largely enabled by WebAssembly/component-model#35 where no
        // validation needs to be performed for ignored bits and bytes here.
        Ok(match src.tag.get_i32() {
            // SAFETY: reading either union field is sound per the rationale
            // above — `Lower` implementations fully initialize the payload
            // (zeroing unused fields), and little-endian storage means any
            // narrower read of a wider write is well-defined.
            0 => Ok(unsafe { lift_option(cx, ok, &src.payload.ok)? }),
            // SAFETY: as above for the `err` union field.
            1 => Err(unsafe { lift_option(cx, err, &src.payload.err)? }),
            _ => bail!("invalid expected discriminant"),
        })
    }

    /// Lifts a `result<ok, err>` from its in-memory representation: one
    /// discriminant byte followed by the payload at `payload_offset32`.
    #[inline]
    fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
        debug_assert!((bytes.as_ptr() as usize) % (Self::ALIGN32 as usize) == 0);
        // The discriminant is the first byte; the payload follows after
        // alignment padding accounted for by `payload_offset32`.
        let discrim = bytes[0];
        let payload = &bytes[Self::INFO.payload_offset32 as usize..];
        let (ok, err) = match ty {
            InterfaceType::Result(ty) => {
                let ty = &cx.types[ty];
                (ty.ok, ty.err)
            }
            _ => bad_type_info(),
        };
        match discrim {
            // Each arm only reads that payload's own size out of the shared
            // payload area, which is sized for the larger of the two.
            0 => Ok(Ok(load_option(cx, ok, &payload[..T::SIZE32])?)),
            1 => Ok(Err(load_option(cx, err, &payload[..E::SIZE32])?)),
            _ => bail!("invalid expected discriminant"),
        }
    }
}
2358
2359fn lift_option<T>(cx: &mut LiftContext<'_>, ty: Option<InterfaceType>, src: &T::Lower) -> Result<T>
2360where
2361    T: Lift,
2362{
2363    match ty {
2364        Some(ty) => T::lift(cx, ty, src),
2365        None => Ok(empty_lift()),
2366    }
2367}
2368
2369fn load_option<T>(cx: &mut LiftContext<'_>, ty: Option<InterfaceType>, bytes: &[u8]) -> Result<T>
2370where
2371    T: Lift,
2372{
2373    match ty {
2374        Some(ty) => T::load(cx, ty, bytes),
2375        None => Ok(empty_lift()),
2376    }
2377}
2378
// Materializes a value of a unit-like type "out of thin air" for arms of a
// `result`/`option` that carry no payload.
fn empty_lift<T>() -> T
where
    T: Lift,
{
    // Guard the unsafe below: only unit-like, genuinely zero-sized Rust
    // types may be conjured without reading any actual bytes.
    assert!(T::IS_RUST_UNIT_TYPE);
    assert_eq!(mem::size_of::<T>(), 0);
    // SAFETY: `T` is zero-sized (asserted above), so there are no bytes to
    // initialize and `assume_init` on uninitialized storage is sound.
    unsafe { MaybeUninit::uninit().assume_init() }
}
2387
/// Helper structure to define `Lower` for tuples below.
///
/// Uses default type parameters to have fields be zero-sized and not present
/// in memory for smaller tuple values.
#[allow(non_snake_case)]
#[doc(hidden)]
#[derive(Clone, Copy)]
#[repr(C)]
pub struct TupleLower<
    T1 = (),
    T2 = (),
    T3 = (),
    T4 = (),
    T5 = (),
    T6 = (),
    T7 = (),
    T8 = (),
    T9 = (),
    T10 = (),
    T11 = (),
    T12 = (),
    T13 = (),
    T14 = (),
    T15 = (),
    T16 = (),
    T17 = (),
> {
    // NB: these names match the names in `for_each_function_signature!`
    A1: T1,
    A2: T2,
    A3: T3,
    A4: T4,
    A5: T5,
    A6: T6,
    A7: T7,
    A8: T8,
    A9: T9,
    A10: T10,
    A11: T11,
    A12: T12,
    A13: T13,
    A14: T14,
    A15: T15,
    A16: T16,
    A17: T17,
    // Zero-length array member: contributes no bytes to the layout but, per
    // `#[repr(C)]`, raises the struct's alignment to that of `ValRaw` —
    // NOTE(review): presumably so that even a zero-field tuple's lowered
    // representation is `ValRaw`-aligned; confirm against the flat-ABI
    // storage helpers.
    _align_tuple_lower0_correctly: [ValRaw; 0],
}
2435
// Implements `ComponentType`, `Lower`, `Lift`, and `ComponentNamedList` for
// a Rust tuple of `$n` elements. Invoked once per arity via
// `for_each_function_signature!` below; `$t` binds one identifier per tuple
// element, used both as a type parameter and as a pattern-binding name.
macro_rules! impl_component_ty_for_tuples {
    ($n:tt $($t:ident)*) => {
        #[allow(non_snake_case)]
        unsafe impl<$($t,)*> ComponentType for ($($t,)*)
            where $($t: ComponentType),*
        {
            // The flat representation is the record-like `TupleLower` with
            // one field per element (remaining fields default to `()`).
            type Lower = TupleLower<$($t::Lower),*>;

            // A tuple's canonical ABI is that of a record of its fields.
            const ABI: CanonicalAbiInfo = CanonicalAbiInfo::record_static(&[
                $($t::ABI),*
            ]);

            // Only the zero-element tuple `()` is the Rust "unit" type: the
            // repetition below runs once per element, so any non-empty tuple
            // flips `_is_unit` to `false` (the `let _anything…` binding
            // exists purely so the macro repetition has `$t` to expand).
            const IS_RUST_UNIT_TYPE: bool = {
                let mut _is_unit = true;
                $(
                    let _anything_to_bind_the_macro_variable = $t::IS_RUST_UNIT_TYPE;
                    _is_unit = false;
                )*
                _is_unit
            };

            // Delegates to `typecheck_tuple` with one element-typecheck
            // function per tuple element, in order.
            fn typecheck(
                ty: &InterfaceType,
                types: &InstanceType<'_>,
            ) -> Result<()> {
                typecheck_tuple(ty, types, &[$($t::typecheck),*])
            }
        }

        #[allow(non_snake_case)]
        unsafe impl<$($t,)*> Lower for ($($t,)*)
            where $($t: Lower),*
        {
            // Lowers each element into its corresponding `TupleLower` field,
            // pairing elements with their component types in order.
            fn lower<U>(
                &self,
                cx: &mut LowerContext<'_, U>,
                ty: InterfaceType,
                _dst: &mut MaybeUninit<Self::Lower>,
            ) -> Result<()> {
                let types = match ty {
                    InterfaceType::Tuple(t) => &cx.types[t].types,
                    _ => bad_type_info(),
                };
                let ($($t,)*) = self;
                let mut _types = types.iter();
                $(
                    // A type-count mismatch here indicates broken type
                    // information, hence `bad_type_info` rather than `Err`.
                    let ty = *_types.next().unwrap_or_else(bad_type_info);
                    $t.lower(cx, ty, map_maybe_uninit!(_dst.$t))?;
                )*
                Ok(())
            }

            // Stores each element into linear memory, advancing `_offset`
            // per the canonical ABI's field size/alignment rules.
            fn store<U>(
                &self,
                cx: &mut LowerContext<'_, U>,
                ty: InterfaceType,
                mut _offset: usize,
            ) -> Result<()> {
                debug_assert!(_offset % (Self::ALIGN32 as usize) == 0);
                let types = match ty {
                    InterfaceType::Tuple(t) => &cx.types[t].types,
                    _ => bad_type_info(),
                };
                let ($($t,)*) = self;
                let mut _types = types.iter();
                $(
                    let ty = *_types.next().unwrap_or_else(bad_type_info);
                    $t.store(cx, ty, $t::ABI.next_field32_size(&mut _offset))?;
                )*
                Ok(())
            }
        }

        #[allow(non_snake_case)]
        unsafe impl<$($t,)*> Lift for ($($t,)*)
            where $($t: Lift),*
        {
            // Lifts each element from its corresponding `TupleLower` field.
            #[inline]
            fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, _src: &Self::Lower) -> Result<Self> {
                let types = match ty {
                    InterfaceType::Tuple(t) => &cx.types[t].types,
                    _ => bad_type_info(),
                };
                let mut _types = types.iter();
                Ok(($(
                    $t::lift(
                        cx,
                        *_types.next().unwrap_or_else(bad_type_info),
                        &_src.$t,
                    )?,
                )*))
            }

            // Loads each element from its field's byte range within `bytes`,
            // computing per-field offsets via the canonical ABI.
            #[inline]
            fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
                debug_assert!((bytes.as_ptr() as usize) % (Self::ALIGN32 as usize) == 0);
                let types = match ty {
                    InterfaceType::Tuple(t) => &cx.types[t].types,
                    _ => bad_type_info(),
                };
                let mut _types = types.iter();
                let mut _offset = 0;
                $(
                    let ty = *_types.next().unwrap_or_else(bad_type_info);
                    let $t = $t::load(cx, ty, &bytes[$t::ABI.next_field32_size(&mut _offset)..][..$t::SIZE32])?;
                )*
                Ok(($($t,)*))
            }
        }

        // Tuples additionally serve as named parameter/result lists for
        // typed function calls; no methods are required.
        #[allow(non_snake_case)]
        unsafe impl<$($t,)*> ComponentNamedList for ($($t,)*)
            where $($t: ComponentType),*
        {}
    };
}
2552
2553for_each_function_signature!(impl_component_ty_for_tuples);
2554
2555pub fn desc(ty: &InterfaceType) -> &'static str {
2556    match ty {
2557        InterfaceType::U8 => "u8",
2558        InterfaceType::S8 => "s8",
2559        InterfaceType::U16 => "u16",
2560        InterfaceType::S16 => "s16",
2561        InterfaceType::U32 => "u32",
2562        InterfaceType::S32 => "s32",
2563        InterfaceType::U64 => "u64",
2564        InterfaceType::S64 => "s64",
2565        InterfaceType::Float32 => "f32",
2566        InterfaceType::Float64 => "f64",
2567        InterfaceType::Bool => "bool",
2568        InterfaceType::Char => "char",
2569        InterfaceType::String => "string",
2570        InterfaceType::List(_) => "list",
2571        InterfaceType::Tuple(_) => "tuple",
2572        InterfaceType::Option(_) => "option",
2573        InterfaceType::Result(_) => "result",
2574
2575        InterfaceType::Record(_) => "record",
2576        InterfaceType::Variant(_) => "variant",
2577        InterfaceType::Flags(_) => "flags",
2578        InterfaceType::Enum(_) => "enum",
2579        InterfaceType::Own(_) => "owned resource",
2580        InterfaceType::Borrow(_) => "borrowed resource",
2581        InterfaceType::Future(_) => "future",
2582        InterfaceType::Stream(_) => "stream",
2583        InterfaceType::ErrorContext(_) => "error-context",
2584    }
2585}
2586
/// Diverges with a panic when runtime type information is inconsistent with
/// the static Rust type being lowered or lifted.
///
/// Reaching this path signals an internal inconsistency rather than a
/// recoverable user error, hence a panic instead of a `Result`.
#[cold]
#[doc(hidden)]
pub fn bad_type_info<T>() -> T {
    // A checked panic keeps callers entirely safe. Something like
    // `unreachable_unchecked` could shave this branch if it ever shows up in
    // profiles, but that would spread `unsafe` obligations to every caller,
    // so it's deliberately avoided for now.
    panic!("bad type information detected");
}