// wasmtime/runtime/component/func/typed.rs

1use crate::component::func::{Func, LiftContext, LowerContext, Options};
2use crate::component::matching::InstanceType;
3use crate::component::storage::{storage_as_slice, storage_as_slice_mut};
4use crate::prelude::*;
5use crate::runtime::vm::SendSyncPtr;
6use crate::runtime::vm::component::ComponentInstance;
7use crate::{AsContextMut, StoreContext, StoreContextMut, ValRaw};
8use alloc::borrow::Cow;
9use alloc::sync::Arc;
10use core::fmt;
11use core::iter;
12use core::marker;
13use core::mem::{self, MaybeUninit};
14use core::ptr::NonNull;
15use core::str;
16use wasmtime_environ::component::{
17    CanonicalAbiInfo, ComponentTypes, InterfaceType, MAX_FLAT_PARAMS, MAX_FLAT_RESULTS,
18    StringEncoding, VariantInfo,
19};
20
21#[cfg(feature = "component-model-async")]
22use crate::component::concurrent::Promise;
23
/// A statically-typed version of [`Func`] which takes `Params` as input and
/// returns `Return`.
///
/// This is an efficient way to invoke a WebAssembly component where if the
/// inputs and output are statically known this can eschew the vast majority of
/// machinery and checks when calling WebAssembly. This is the most optimized
/// way to call a WebAssembly component.
///
/// Note that like [`Func`] this is a pointer within a [`Store`](crate::Store)
/// and usage will panic if used with the wrong store.
///
/// This type is primarily created with the [`Func::typed`] API.
///
/// See [`ComponentType`] for more information about supported types.
pub struct TypedFunc<Params, Return> {
    // The type-erased function this typed wrapper refers to; all calls
    // ultimately delegate to it.
    func: Func,

    // The definition of this field is somewhat subtle and may be surprising.
    // Naively one might expect something like
    //
    //      _marker: marker::PhantomData<fn(Params) -> Return>,
    //
    // Since this is a function pointer after all. The problem with this
    // definition though is that it imposes the wrong variance on `Params` from
    // what we want. Abstractly a `fn(Params)` is able to store `Params` within
    // it meaning you can only give it `Params` that live longer than the
    // function pointer.
    //
    // With a component model function, however, we're always copying data from
    // the host into the guest, so we are never storing pointers to `Params`
    // into the guest outside the duration of a `call`, meaning we can actually
    // accept values in `TypedFunc::call` which live for a shorter duration
    // than the `Params` argument on the struct.
    //
    // This all means that we don't use a phantom function pointer, but instead
    // feign phantom storage here to get the variance desired.
    _marker: marker::PhantomData<(Params, Return)>,
}
62
// `Copy`/`Clone` are implemented manually rather than derived so that they're
// available regardless of whether `Params`/`Return` themselves implement
// them (a derive would add `Params: Copy`/`Return: Copy` bounds).
impl<Params, Return> Copy for TypedFunc<Params, Return> {}

impl<Params, Return> Clone for TypedFunc<Params, Return> {
    fn clone(&self) -> TypedFunc<Params, Return> {
        // `TypedFunc` is `Copy`, so cloning is a bitwise copy.
        *self
    }
}
70
71impl<Params, Return> TypedFunc<Params, Return>
72where
73    Params: ComponentNamedList + Lower,
74    Return: ComponentNamedList + Lift,
75{
76    /// Creates a new [`TypedFunc`] from the provided component [`Func`],
77    /// unsafely asserting that the underlying function takes `Params` as
78    /// input and returns `Return`.
79    ///
80    /// # Unsafety
81    ///
82    /// This is an unsafe function because it does not verify that the [`Func`]
83    /// provided actually implements this signature. It's up to the caller to
84    /// have performed some other sort of check to ensure that the signature is
85    /// correct.
86    pub unsafe fn new_unchecked(func: Func) -> TypedFunc<Params, Return> {
87        TypedFunc {
88            _marker: marker::PhantomData,
89            func,
90        }
91    }
92
93    /// Returns the underlying un-typed [`Func`] that this [`TypedFunc`]
94    /// references.
95    pub fn func(&self) -> &Func {
96        &self.func
97    }
98
99    /// Calls the underlying WebAssembly component function using the provided
100    /// `params` as input.
101    ///
102    /// This method is used to enter into a component. Execution happens within
103    /// the `store` provided. The `params` are copied into WebAssembly memory
104    /// as appropriate and a core wasm function is invoked.
105    ///
106    /// # Post-return
107    ///
108    /// In the component model each function can have a "post return" specified
109    /// which allows cleaning up the arguments returned to the host. For example
110    /// if WebAssembly returns a string to the host then it might be a uniquely
111    /// allocated string which, after the host finishes processing it, needs to
112    /// be deallocated in the wasm instance's own linear memory to prevent
113    /// memory leaks in wasm itself. The `post-return` canonical abi option is
114    /// used to configured this.
115    ///
116    /// To accommodate this feature of the component model after invoking a
117    /// function via [`TypedFunc::call`] you must next invoke
118    /// [`TypedFunc::post_return`]. Note that the return value of the function
119    /// should be processed between these two function calls. The return value
120    /// continues to be usable from an embedder's perspective after
121    /// `post_return` is called, but after `post_return` is invoked it may no
122    /// longer retain the same value that the wasm module originally returned.
123    ///
124    /// Also note that [`TypedFunc::post_return`] must be invoked irrespective
125    /// of whether the canonical ABI option `post-return` was configured or not.
126    /// This means that embedders must unconditionally call
127    /// [`TypedFunc::post_return`] when a function returns. If this function
128    /// call returns an error, however, then [`TypedFunc::post_return`] is not
129    /// required.
130    ///
131    /// # Errors
132    ///
133    /// This function can return an error for a number of reasons:
134    ///
135    /// * If the wasm itself traps during execution.
136    /// * If the wasm traps while copying arguments into memory.
137    /// * If the wasm provides bad allocation pointers when copying arguments
138    ///   into memory.
139    /// * If the wasm returns a value which violates the canonical ABI.
140    /// * If this function's instances cannot be entered, for example if the
141    ///   instance is currently calling a host function.
142    /// * If a previous function call occurred and the corresponding
143    ///   `post_return` hasn't been invoked yet.
144    ///
145    /// In general there are many ways that things could go wrong when copying
146    /// types in and out of a wasm module with the canonical ABI, and certain
147    /// error conditions are specific to certain types. For example a
148    /// WebAssembly module can't return an invalid `char`. When allocating space
149    /// for this host to copy a string into the returned pointer must be
150    /// in-bounds in memory.
151    ///
152    /// If an error happens then the error should contain detailed enough
153    /// information to understand which part of the canonical ABI went wrong
154    /// and what to inspect.
155    ///
156    /// # Panics
157    ///
158    /// Panics if this is called on a function in an asynchronous store. This
159    /// only works with functions defined within a synchronous store. Also
160    /// panics if `store` does not own this function.
161    pub fn call(&self, store: impl AsContextMut, params: Params) -> Result<Return> {
162        assert!(
163            !store.as_context().async_support(),
164            "must use `call_async` when async support is enabled on the config"
165        );
166        self.call_impl(store, params)
167    }
168
169    /// Exactly like [`Self::call`], except for use on asynchronous stores.
170    ///
171    /// # Panics
172    ///
173    /// Panics if this is called on a function in a synchronous store. This
174    /// only works with functions defined within an asynchronous store. Also
175    /// panics if `store` does not own this function.
176    #[cfg(feature = "async")]
177    pub async fn call_async(
178        &self,
179        mut store: impl AsContextMut<Data: Send>,
180        params: Params,
181    ) -> Result<Return>
182    where
183        Params: Send + Sync,
184        Return: Send + Sync,
185    {
186        let mut store = store.as_context_mut();
187        assert!(
188            store.0.async_support(),
189            "cannot use `call_async` when async support is not enabled on the config"
190        );
191        store
192            .on_fiber(|store| self.call_impl(store, params))
193            .await?
194    }
195
196    /// Start concurrent call to this function.
197    ///
198    /// Unlike [`Self::call`] and [`Self::call_async`] (both of which require
199    /// exclusive access to the store until the completion of the call), calls
200    /// made using this method may run concurrently with other calls to the same
201    /// instance.
202    #[cfg(feature = "component-model-async")]
203    pub async fn call_concurrent(
204        self,
205        mut store: impl AsContextMut<Data: Send>,
206        params: Params,
207    ) -> Result<Promise<Return>>
208    where
209        Params: Send + Sync + 'static,
210        Return: Send + Sync + 'static,
211    {
212        let store = store.as_context_mut();
213        assert!(
214            store.0.async_support(),
215            "cannot use `call_concurrent` when async support is not enabled on the config"
216        );
217        _ = params;
218        todo!()
219    }
220
221    fn call_impl(&self, mut store: impl AsContextMut, params: Params) -> Result<Return> {
222        let store = &mut store.as_context_mut();
223        // Note that this is in theory simpler than it might read at this time.
224        // Here we're doing a runtime dispatch on the `flatten_count` for the
225        // params/results to see whether they're inbounds. This creates 4 cases
226        // to handle. In reality this is a highly optimizable branch where LLVM
227        // will easily figure out that only one branch here is taken.
228        //
229        // Otherwise this current construction is done to ensure that the stack
230        // space reserved for the params/results is always of the appropriate
231        // size (as the params/results needed differ depending on the "flatten"
232        // count)
233        if Params::flatten_count() <= MAX_FLAT_PARAMS {
234            if Return::flatten_count() <= MAX_FLAT_RESULTS {
235                self.func.call_raw(
236                    store,
237                    &params,
238                    Self::lower_stack_args,
239                    Self::lift_stack_result,
240                )
241            } else {
242                self.func.call_raw(
243                    store,
244                    &params,
245                    Self::lower_stack_args,
246                    Self::lift_heap_result,
247                )
248            }
249        } else {
250            if Return::flatten_count() <= MAX_FLAT_RESULTS {
251                self.func.call_raw(
252                    store,
253                    &params,
254                    Self::lower_heap_args,
255                    Self::lift_stack_result,
256                )
257            } else {
258                self.func.call_raw(
259                    store,
260                    &params,
261                    Self::lower_heap_args,
262                    Self::lift_heap_result,
263                )
264            }
265        }
266    }
267
268    /// Lower parameters directly onto the stack specified by the `dst`
269    /// location.
270    ///
271    /// This is only valid to call when the "flatten count" is small enough, or
272    /// when the canonical ABI says arguments go through the stack rather than
273    /// the heap.
274    fn lower_stack_args<T>(
275        cx: &mut LowerContext<'_, T>,
276        params: &Params,
277        ty: InterfaceType,
278        dst: &mut MaybeUninit<Params::Lower>,
279    ) -> Result<()> {
280        assert!(Params::flatten_count() <= MAX_FLAT_PARAMS);
281        params.lower(cx, ty, dst)?;
282        Ok(())
283    }
284
285    /// Lower parameters onto a heap-allocated location.
286    ///
287    /// This is used when the stack space to be used for the arguments is above
288    /// the `MAX_FLAT_PARAMS` threshold. Here the wasm's `realloc` function is
289    /// invoked to allocate space and then parameters are stored at that heap
290    /// pointer location.
291    fn lower_heap_args<T>(
292        cx: &mut LowerContext<'_, T>,
293        params: &Params,
294        ty: InterfaceType,
295        dst: &mut MaybeUninit<ValRaw>,
296    ) -> Result<()> {
297        assert!(Params::flatten_count() > MAX_FLAT_PARAMS);
298
299        // Memory must exist via validation if the arguments are stored on the
300        // heap, so we can create a `MemoryMut` at this point. Afterwards
301        // `realloc` is used to allocate space for all the arguments and then
302        // they're all stored in linear memory.
303        //
304        // Note that `realloc` will bake in a check that the returned pointer is
305        // in-bounds.
306        let ptr = cx.realloc(0, 0, Params::ALIGN32, Params::SIZE32)?;
307        params.store(cx, ty, ptr)?;
308
309        // Note that the pointer here is stored as a 64-bit integer. This allows
310        // this to work with either 32 or 64-bit memories. For a 32-bit memory
311        // it'll just ignore the upper 32 zero bits, and for 64-bit memories
312        // this'll have the full 64-bits. Note that for 32-bit memories the call
313        // to `realloc` above guarantees that the `ptr` is in-bounds meaning
314        // that we will know that the zero-extended upper bits of `ptr` are
315        // guaranteed to be zero.
316        //
317        // This comment about 64-bit integers is also referred to below with
318        // "WRITEPTR64".
319        dst.write(ValRaw::i64(ptr as i64));
320
321        Ok(())
322    }
323
324    /// Lift the result of a function directly from the stack result.
325    ///
326    /// This is only used when the result fits in the maximum number of stack
327    /// slots.
328    fn lift_stack_result(
329        cx: &mut LiftContext<'_>,
330        ty: InterfaceType,
331        dst: &Return::Lower,
332    ) -> Result<Return> {
333        assert!(Return::flatten_count() <= MAX_FLAT_RESULTS);
334        Return::lift(cx, ty, dst)
335    }
336
337    /// Lift the result of a function where the result is stored indirectly on
338    /// the heap.
339    fn lift_heap_result(
340        cx: &mut LiftContext<'_>,
341        ty: InterfaceType,
342        dst: &ValRaw,
343    ) -> Result<Return> {
344        assert!(Return::flatten_count() > MAX_FLAT_RESULTS);
345        // FIXME(#4311): needs to read an i64 for memory64
346        let ptr = usize::try_from(dst.get_u32())?;
347        if ptr % usize::try_from(Return::ALIGN32)? != 0 {
348            bail!("return pointer not aligned");
349        }
350
351        let bytes = cx
352            .memory()
353            .get(ptr..)
354            .and_then(|b| b.get(..Return::SIZE32))
355            .ok_or_else(|| anyhow::anyhow!("pointer out of bounds of memory"))?;
356        Return::load(cx, ty, bytes)
357    }
358
359    /// See [`Func::post_return`]
360    pub fn post_return(&self, store: impl AsContextMut) -> Result<()> {
361        self.func.post_return(store)
362    }
363
364    /// See [`Func::post_return_async`]
365    #[cfg(feature = "async")]
366    pub async fn post_return_async<T: Send>(
367        &self,
368        store: impl AsContextMut<Data = T>,
369    ) -> Result<()> {
370        self.func.post_return_async(store).await
371    }
372}
373
/// A trait representing a static list of named types that can be passed to or
/// returned from a [`TypedFunc`].
///
/// This trait is implemented for a number of tuple types and is not expected
/// to be implemented externally. The contents of this trait are hidden as it's
/// intended to be an implementation detail of Wasmtime. The contents of this
/// trait are not covered by Wasmtime's stability guarantees.
///
/// For more information about this trait see [`Func::typed`] and
/// [`TypedFunc`].
//
// Note that this is an `unsafe` trait, and the unsafety means that
// implementations of this trait must be correct or otherwise [`TypedFunc`]
// would not be memory safe. The main reason this is `unsafe` is the
// `typecheck` function which must operate correctly relative to the `AsTuple`
// interpretation of the implementor.
//
// This is a pure marker trait: all behavior comes from the `ComponentType`
// supertrait; implementing it only asserts "this type is a named list".
pub unsafe trait ComponentNamedList: ComponentType {}
391
/// A trait representing types which can be passed to and read from components
/// with the canonical ABI.
///
/// This trait is implemented for Rust types which can be communicated to
/// components. The [`Func::typed`] and [`TypedFunc`] Rust items are the main
/// consumers of this trait.
///
/// Supported Rust types include:
///
/// | Component Model Type              | Rust Type                            |
/// |-----------------------------------|--------------------------------------|
/// | `{s,u}{8,16,32,64}`               | `{i,u}{8,16,32,64}`                  |
/// | `f{32,64}`                        | `f{32,64}`                           |
/// | `bool`                            | `bool`                               |
/// | `char`                            | `char`                               |
/// | `tuple<A, B>`                     | `(A, B)`                             |
/// | `option<T>`                       | `Option<T>`                          |
/// | `result`                          | `Result<(), ()>`                     |
/// | `result<T>`                       | `Result<T, ()>`                      |
/// | `result<_, E>`                    | `Result<(), E>`                      |
/// | `result<T, E>`                    | `Result<T, E>`                       |
/// | `string`                          | `String`, `&str`, or [`WasmStr`]     |
/// | `list<T>`                         | `Vec<T>`, `&[T]`, or [`WasmList`]    |
/// | `own<T>`, `borrow<T>`             | [`Resource<T>`] or [`ResourceAny`]   |
/// | `record`                          | [`#[derive(ComponentType)]`][d-cm]   |
/// | `variant`                         | [`#[derive(ComponentType)]`][d-cm]   |
/// | `enum`                            | [`#[derive(ComponentType)]`][d-cm]   |
/// | `flags`                           | [`flags!`][f-m]                      |
///
/// [`Resource<T>`]: crate::component::Resource
/// [`ResourceAny`]: crate::component::ResourceAny
/// [d-cm]: macro@crate::component::ComponentType
/// [f-m]: crate::component::flags
///
/// Rust standard library pointers such as `&T`, `Box<T>`, `Rc<T>`, and `Arc<T>`
/// additionally represent whatever type `T` represents in the component model.
/// Note that types such as `record`, `variant`, `enum`, and `flags` are
/// generated by the embedder at compile time. These macros derive
/// implementation of this trait for custom types to map to custom types in the
/// component model. Note that for `record`, `variant`, `enum`, and `flags`
/// those types are often generated by the
/// [`bindgen!`](crate::component::bindgen) macro from WIT definitions.
///
/// Types that implement [`ComponentType`] are used for `Params` and `Return`
/// in [`TypedFunc`] and [`Func::typed`].
///
/// The contents of this trait are hidden as it's intended to be an
/// implementation detail of Wasmtime. The contents of this trait are not
/// covered by Wasmtime's stability guarantees.
//
// Note that this is an `unsafe` trait as `TypedFunc`'s safety heavily relies on
// the correctness of the implementations of this trait. Some ways in which this
// trait must be correct to be safe are:
//
// * The `Lower` associated type must be a `ValRaw` sequence. It doesn't have to
//   literally be `[ValRaw; N]` but when laid out in memory it must be adjacent
//   `ValRaw` values and have a multiple of the size of `ValRaw` and the same
//   alignment.
//
// * The `lower` function must initialize the bits within `Lower` that are going
//   to be read by the trampoline that's used to enter core wasm. A trampoline
//   is passed `*mut Lower` and will read the canonical abi arguments in
//   sequence, so all of the bits must be correctly initialized.
//
// * The `size` and `align` functions must be correct for this value stored in
//   the canonical ABI. The `Cursor<T>` iteration of these bytes rely on this
//   for correctness as they otherwise eschew bounds-checking.
//
// There are likely some other correctness issues which aren't documented as
// well, this isn't intended to be an exhaustive list. It suffices to say,
// though, that correctness bugs in this trait implementation are highly likely
// to lead to security bugs, which again leads to the `unsafe` in the trait.
//
// Also note that this trait specifically is not sealed because we have a proc
// macro that generates implementations of this trait for external types in a
// `#[derive]`-like fashion.
pub unsafe trait ComponentType {
    /// Representation of the "lowered" form of this component value.
    ///
    /// Lowerings lower into core wasm values which are represented by `ValRaw`.
    /// This `Lower` type must be a list of `ValRaw` as either a literal array
    /// or a struct where every field is a `ValRaw`. This must be `Copy` (as
    /// `ValRaw` is `Copy`) and support all byte patterns. This being correct is
    /// one reason why the trait is unsafe.
    #[doc(hidden)]
    type Lower: Copy;

    /// The information about this type's canonical ABI (size/align/etc).
    #[doc(hidden)]
    const ABI: CanonicalAbiInfo;

    /// Byte size of this type in a 32-bit linear memory, derived from `ABI`.
    #[doc(hidden)]
    const SIZE32: usize = Self::ABI.size32 as usize;
    /// Byte alignment of this type in a 32-bit linear memory, derived from
    /// `ABI`.
    #[doc(hidden)]
    const ALIGN32: u32 = Self::ABI.align32;

    /// Whether this Rust type is the unit type `()`; defaults to `false` and
    /// is presumably overridden by the `()` implementation — not visible here.
    #[doc(hidden)]
    const IS_RUST_UNIT_TYPE: bool = false;

    /// Returns the number of core wasm abi values that will be used to
    /// represent this type in its lowered form.
    ///
    /// This divides the size of `Self::Lower` by the size of `ValRaw`.
    #[doc(hidden)]
    fn flatten_count() -> usize {
        // These assertions enforce the unsafe contract above: `Lower` must be
        // layout-compatible with a contiguous array of `ValRaw`s.
        assert!(mem::size_of::<Self::Lower>() % mem::size_of::<ValRaw>() == 0);
        assert!(mem::align_of::<Self::Lower>() == mem::align_of::<ValRaw>());
        mem::size_of::<Self::Lower>() / mem::size_of::<ValRaw>()
    }

    /// Performs a type-check to see whether this component value type matches
    /// the interface type `ty` provided.
    #[doc(hidden)]
    fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()>;
}
507
/// Internal trait describing the ABI details of variant-shaped component
/// types (e.g. `variant`, `enum`, `option`, `result`).
#[doc(hidden)]
pub unsafe trait ComponentVariant: ComponentType {
    /// Canonical ABI info for each case's payload; `None` for cases with no
    /// payload.
    const CASES: &'static [Option<CanonicalAbiInfo>];
    /// Layout information computed statically from `CASES`.
    const INFO: VariantInfo = VariantInfo::new_static(Self::CASES);
    /// Byte offset of the payload within this variant in a 32-bit memory.
    const PAYLOAD_OFFSET32: usize = Self::INFO.payload_offset32 as usize;
}
514
/// Host types which can be passed to WebAssembly components.
///
/// This trait is implemented for all types that can be passed to components
/// either as parameters of component exports or returns of component imports.
/// This trait represents the ability to convert from the native host
/// representation to the canonical ABI.
///
/// Built-in types to Rust such as `Option<T>` implement this trait as
/// appropriate. For a mapping of component model to Rust types see
/// [`ComponentType`].
///
/// For user-defined types, for example `record` types mapped to Rust `struct`s,
/// this crate additionally has
/// [`#[derive(Lower)]`](macro@crate::component::Lower).
///
/// Note that like [`ComponentType`] the definition of this trait is intended to
/// be an internal implementation detail of Wasmtime at this time. It's
/// recommended to use the `#[derive(Lower)]` implementation instead.
pub unsafe trait Lower: ComponentType {
    /// Performs the "lower" function in the canonical ABI.
    ///
    /// This method will lower the current value into a component. The `lower`
    /// function performs a "flat" lowering into the `dst` specified which is
    /// allowed to be uninitialized entering this method but is guaranteed to be
    /// fully initialized if the method returns `Ok(())`.
    ///
    /// The `cx` context provided is the context within which this lowering is
    /// happening. This contains information such as canonical options specified
    /// (e.g. string encodings, memories, etc), the store itself, along with
    /// type information.
    ///
    /// The `ty` parameter is the destination type that is being lowered into.
    /// For example this is the component's "view" of the type that is being
    /// lowered. This is guaranteed to have passed a `typecheck` earlier.
    ///
    /// This will only be called if `typecheck` passes for `Op::Lower`.
    #[doc(hidden)]
    fn lower<T>(
        &self,
        cx: &mut LowerContext<'_, T>,
        ty: InterfaceType,
        dst: &mut MaybeUninit<Self::Lower>,
    ) -> Result<()>;

    /// Performs the "store" operation in the canonical ABI.
    ///
    /// This function will store `self` into the linear memory described by
    /// `cx` at the `offset` provided.
    ///
    /// It is expected that `offset` is a valid offset in memory for
    /// `Self::SIZE32` bytes. At this time that's not an unsafe contract as it's
    /// always re-checked on all stores, but this is something that will need to
    /// be improved in the future to remove extra bounds checks. For now this
    /// function will panic if there's a bug and `offset` isn't valid within
    /// memory.
    ///
    /// The `ty` type information passed here is the same as the type
    /// information passed to `lower` above, and is the component's own view of
    /// what the resulting type should be.
    ///
    /// This will only be called if `typecheck` passes for `Op::Lower`.
    #[doc(hidden)]
    fn store<T>(
        &self,
        cx: &mut LowerContext<'_, T>,
        ty: InterfaceType,
        offset: usize,
    ) -> Result<()>;

    /// Provided method to lower a list of `Self` into memory.
    ///
    /// Requires that `offset` has already been checked for alignment and
    /// validity in terms of being in-bounds, otherwise this may panic.
    ///
    /// This is primarily here to get overridden for implementations of integers
    /// which can avoid some extra fluff and use a pattern that's more easily
    /// optimizable by LLVM.
    #[doc(hidden)]
    fn store_list<T>(
        cx: &mut LowerContext<'_, T>,
        ty: InterfaceType,
        mut offset: usize,
        items: &[Self],
    ) -> Result<()>
    where
        Self: Sized,
    {
        // Elements are stored back-to-back, each occupying `SIZE32` bytes.
        for item in items {
            item.store(cx, ty, offset)?;
            offset += Self::SIZE32;
        }
        Ok(())
    }
}
609
/// Host types which can be created from the canonical ABI.
///
/// This is the mirror of the [`Lower`] trait where it represents the capability
/// of acquiring items from WebAssembly and passing them to the host.
///
/// Built-in types to Rust such as `Option<T>` implement this trait as
/// appropriate. For a mapping of component model to Rust types see
/// [`ComponentType`].
///
/// For user-defined types, for example `record` types mapped to Rust `struct`s,
/// this crate additionally has
/// [`#[derive(Lift)]`](macro@crate::component::Lift).
///
/// Note that like [`ComponentType`] the definition of this trait is intended to
/// be an internal implementation detail of Wasmtime at this time. It's
/// recommended to use the `#[derive(Lift)]` implementation instead.
pub unsafe trait Lift: Sized + ComponentType {
    /// Performs the "lift" operation in the canonical ABI.
    ///
    /// This function performs a "flat" lift operation from the `src` specified
    /// which is a sequence of core wasm values. The lifting operation will
    /// validate core wasm values and produce a `Self` on success.
    ///
    /// The `cx` provided contains contextual information such as the store
    /// that's being loaded from, canonical options, and type information.
    ///
    /// The `ty` parameter is the origin component's specification for what the
    /// type that is being lifted is. For example this is the record type or the
    /// resource type that is being lifted.
    ///
    /// Note that this has a default implementation but if `typecheck` passes
    /// for `Op::Lift` this needs to be overridden.
    #[doc(hidden)]
    fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self>;

    /// Performs the "load" operation in the canonical ABI.
    ///
    /// This will read the `bytes` provided, which are a sub-slice into the
    /// linear memory described by `cx`. The `bytes` array provided is
    /// guaranteed to be `Self::SIZE32` bytes large. All of memory is then also
    /// available through `cx` for bounds-checks and such as necessary for
    /// strings/lists.
    ///
    /// The `ty` argument is the type that's being loaded, as described by the
    /// original component.
    ///
    /// Note that this has a default implementation but if `typecheck` passes
    /// for `Op::Lift` this needs to be overridden.
    #[doc(hidden)]
    fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self>;

    /// Converts `list` into a `Vec<T>`, used in `Lift for Vec<T>`.
    ///
    /// This is primarily here to get overridden for implementations of integers
    /// which can avoid some extra fluff and use a pattern that's more easily
    /// optimizable by LLVM.
    #[doc(hidden)]
    fn load_list(cx: &mut LiftContext<'_>, list: &WasmList<Self>) -> Result<Vec<Self>>
    where
        Self: Sized,
    {
        (0..list.len)
            // `index < list.len`, so the element lookup itself can't fail;
            // the `unwrap` here is on that in-bounds guarantee, while any
            // canonical-ABI error is propagated through `collect`.
            .map(|index| list.get_from_store(cx, index).unwrap())
            .collect()
    }
}
676
// Macro to help generate "forwarding implementations" of `ComponentType` to
// another type, used for wrappers in Rust like `&T`, `Box<T>`, etc. Note that
// these wrappers only implement lowering because lifting native Rust types
// cannot be done.
//
// Each `(generics) $a => $b` entry produces `impl ComponentType for $a` that
// delegates `Lower`, `ABI`, and `typecheck` to `$b`'s implementation.
macro_rules! forward_type_impls {
    ($(($($generics:tt)*) $a:ty => $b:ty,)*) => ($(
        unsafe impl <$($generics)*> ComponentType for $a {
            type Lower = <$b as ComponentType>::Lower;

            const ABI: CanonicalAbiInfo = <$b as ComponentType>::ABI;

            #[inline]
            fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()> {
                <$b as ComponentType>::typecheck(ty, types)
            }
        }
    )*)
}
695
// Pointer-like wrappers and owned containers represent the same component
// model type as their pointee/element type.
forward_type_impls! {
    (T: ComponentType + ?Sized) &'_ T => T,
    (T: ComponentType + ?Sized) Box<T> => T,
    (T: ComponentType + ?Sized) alloc::rc::Rc<T> => T,
    (T: ComponentType + ?Sized) alloc::sync::Arc<T> => T,
    () String => str,
    (T: ComponentType) Vec<T> => [T],
}
704
// Companion to `forward_type_impls!`: generates `Lower` implementations that
// delegate both `lower` and `store` from the wrapper type `$a` to the
// underlying type `$b`.
macro_rules! forward_lowers {
    ($(($($generics:tt)*) $a:ty => $b:ty,)*) => ($(
        unsafe impl <$($generics)*> Lower for $a {
            fn lower<U>(
                &self,
                cx: &mut LowerContext<'_, U>,
                ty: InterfaceType,
                dst: &mut MaybeUninit<Self::Lower>,
            ) -> Result<()> {
                <$b as Lower>::lower(self, cx, ty, dst)
            }

            fn store<U>(
                &self,
                cx: &mut LowerContext<'_, U>,
                ty: InterfaceType,
                offset: usize,
            ) -> Result<()> {
                <$b as Lower>::store(self, cx, ty, offset)
            }
        }
    )*)
}
728
729forward_lowers! {
730    (T: Lower + ?Sized) &'_ T => T,
731    (T: Lower + ?Sized) Box<T> => T,
732    (T: Lower + ?Sized) alloc::rc::Rc<T> => T,
733    (T: Lower + ?Sized) alloc::sync::Arc<T> => T,
734    () String => str,
735    (T: Lower) Vec<T> => [T],
736}
737
// Macro generating `Lift` for owned string-ish containers: first lift a
// `WasmStr` view of guest memory, then decode/validate it into a host `str`
// with `to_str_from_memory`, and finally convert into the destination
// container via `Into`.
macro_rules! forward_string_lifts {
    ($($a:ty,)*) => ($(
        unsafe impl Lift for $a {
            #[inline]
            fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
                // Decode from guest memory and copy into the host container.
                Ok(<WasmStr as Lift>::lift(cx, ty, src)?.to_str_from_memory(cx.memory())?.into())
            }

            #[inline]
            fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
                Ok(<WasmStr as Lift>::load(cx, ty, bytes)?.to_str_from_memory(cx.memory())?.into())
            }
        }
    )*)
}

forward_string_lifts! {
    Box<str>,
    alloc::rc::Rc<str>,
    alloc::sync::Arc<str>,
    String,
}
760
// Macro generating `Lift` for owned list containers: lift the raw `WasmList`
// view of guest memory, eagerly copy the elements out via `T::load_list`
// (which integer types override with a faster bulk path), and convert the
// resulting `Vec` into the destination container.
macro_rules! forward_list_lifts {
    ($($a:ty,)*) => ($(
        unsafe impl <T: Lift> Lift for $a {
            fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
                let list = <WasmList::<T> as Lift>::lift(cx, ty, src)?;
                Ok(T::load_list(cx, &list)?.into())
            }

            fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
                let list = <WasmList::<T> as Lift>::load(cx, ty, bytes)?;
                Ok(T::load_list(cx, &list)?.into())
            }
        }
    )*)
}

forward_list_lifts! {
    Box<[T]>,
    alloc::rc::Rc<[T]>,
    alloc::sync::Arc<[T]>,
    Vec<T>,
}
783
// Macro to help generate `ComponentType` implementations for primitive types
// such as integers, char, bool, etc.
//
// Arguments, per line of the invocation below:
//
// * `$primitive` - the Rust integer type being implemented
// * `$ty` - the corresponding `InterfaceType` variant
// * `$field`/`$get` - the `ValRaw` constructor/accessor used for the flat
//   representation (integers narrower than 32 bits are widened with an `as`
//   cast into a 32-bit field)
// * `$abi` - the `CanonicalAbiInfo` scalar describing size/alignment
macro_rules! integers {
    ($($primitive:ident = $ty:ident in $field:ident/$get:ident with abi:$abi:ident,)*) => ($(
        unsafe impl ComponentType for $primitive {
            type Lower = ValRaw;

            const ABI: CanonicalAbiInfo = CanonicalAbiInfo::$abi;

            fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
                match ty {
                    InterfaceType::$ty => Ok(()),
                    other => bail!("expected `{}` found `{}`", desc(&InterfaceType::$ty), desc(other))
                }
            }
        }

        unsafe impl Lower for $primitive {
            #[inline]
            #[allow(trivial_numeric_casts)]
            fn lower<T>(
                &self,
                _cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                dst: &mut MaybeUninit<Self::Lower>,
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                // Widen into the `ValRaw` field for the flat representation.
                dst.write(ValRaw::$field(*self as $field));
                Ok(())
            }

            #[inline]
            fn store<T>(
                &self,
                cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                offset: usize,
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                debug_assert!(offset % Self::SIZE32 == 0);
                // In-memory representation is always little-endian.
                *cx.get(offset) = self.to_le_bytes();
                Ok(())
            }

            fn store_list<T>(
                cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                offset: usize,
                items: &[Self],
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));

                // Double-check that the CM alignment is at least the host's
                // alignment for this type which should be true for all
                // platforms.
                assert!((Self::ALIGN32 as usize) >= mem::align_of::<Self>());

                // Slice `cx`'s memory to the window that we'll be modifying.
                // This should all have already been verified in terms of
                // alignment and sizing meaning that these assertions here are
                // not truly necessary but are instead double-checks.
                //
                // Note that we're casting a `[u8]` slice to `[Self]` with
                // `align_to_mut` which is not safe in general but is safe in
                // our specific case as all `u8` patterns are valid `Self`
                // patterns since `Self` is an integral type.
                let dst = &mut cx.as_slice_mut()[offset..][..items.len() * Self::SIZE32];
                let (before, middle, end) = unsafe { dst.align_to_mut::<Self>() };
                assert!(before.is_empty() && end.is_empty());
                assert_eq!(middle.len(), items.len());

                // And with all that out of the way perform the copying loop.
                // This is not a `copy_from_slice` because endianness needs to
                // be handled here, but LLVM should pretty easily transform this
                // into a memcpy on little-endian platforms.
                for (dst, src) in middle.iter_mut().zip(items) {
                    *dst = src.to_le();
                }
                Ok(())
            }
        }

        unsafe impl Lift for $primitive {
            #[inline]
            #[allow(trivial_numeric_casts, clippy::cast_possible_truncation)]
            fn lift(_cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                // Narrow back down from the widened `ValRaw` field.
                Ok(src.$get() as $primitive)
            }

            #[inline]
            fn load(_cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                debug_assert!((bytes.as_ptr() as usize) % Self::SIZE32 == 0);
                Ok($primitive::from_le_bytes(bytes.try_into().unwrap()))
            }

            // Bulk override of the default element-by-element `load_list`:
            // view guest memory directly as little-endian integers and
            // convert while copying.
            fn load_list(cx: &mut LiftContext<'_>, list: &WasmList<Self>) -> Result<Vec<Self>> {
                Ok(
                    list._as_le_slice(cx.memory())
                        .iter()
                        .map(|i| Self::from_le(*i))
                        .collect(),
                )
            }
        }
    )*)
}

integers! {
    i8 = S8 in i32/get_i32 with abi:SCALAR1,
    u8 = U8 in u32/get_u32 with abi:SCALAR1,
    i16 = S16 in i32/get_i32 with abi:SCALAR2,
    u16 = U16 in u32/get_u32 with abi:SCALAR2,
    i32 = S32 in i32/get_i32 with abi:SCALAR4,
    u32 = U32 in u32/get_u32 with abi:SCALAR4,
    i64 = S64 in i64/get_i64 with abi:SCALAR8,
    u64 = U64 in u64/get_u64 with abi:SCALAR8,
}
903
// Macro generating `ComponentType`/`Lower`/`Lift` for the float primitives.
//
// Arguments, per line of the invocation below:
//
// * `$float`/`$get_float` - the Rust float type plus the `ValRaw`
//   constructor/accessor for its raw bit pattern
// * `$ty` - the corresponding `InterfaceType` variant
// * `$abi` - the `CanonicalAbiInfo` scalar describing size/alignment
//
// Floats are transferred by bit pattern (`to_bits`/`from_bits`) rather than
// by value so NaN payloads survive the round trip unchanged.
macro_rules! floats {
    ($($float:ident/$get_float:ident = $ty:ident with abi:$abi:ident)*) => ($(const _: () = {
        unsafe impl ComponentType for $float {
            type Lower = ValRaw;

            const ABI: CanonicalAbiInfo = CanonicalAbiInfo::$abi;

            fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
                match ty {
                    InterfaceType::$ty => Ok(()),
                    other => bail!("expected `{}` found `{}`", desc(&InterfaceType::$ty), desc(other))
                }
            }
        }

        unsafe impl Lower for $float {
            #[inline]
            fn lower<T>(
                &self,
                _cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                dst: &mut MaybeUninit<Self::Lower>,
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                dst.write(ValRaw::$float(self.to_bits()));
                Ok(())
            }

            #[inline]
            fn store<T>(
                &self,
                cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                offset: usize,
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                debug_assert!(offset % Self::SIZE32 == 0);
                let ptr = cx.get(offset);
                *ptr = self.to_bits().to_le_bytes();
                Ok(())
            }

            fn store_list<T>(
                cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                offset: usize,
                items: &[Self],
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));

                // Double-check that the CM alignment is at least the host's
                // alignment for this type which should be true for all
                // platforms.
                assert!((Self::ALIGN32 as usize) >= mem::align_of::<Self>());

                // Slice `cx`'s memory to the window that we'll be modifying.
                // This should all have already been verified in terms of
                // alignment and sizing meaning that these assertions here are
                // not truly necessary but are instead double-checks.
                let dst = &mut cx.as_slice_mut()[offset..][..items.len() * Self::SIZE32];
                assert!(dst.as_ptr().cast::<Self>().is_aligned());

                // And with all that out of the way perform the copying loop.
                // This is not a `copy_from_slice` because endianness needs to
                // be handled here, but LLVM should pretty easily transform this
                // into a memcpy on little-endian platforms.
                // TODO use `as_chunks` when https://github.com/rust-lang/rust/issues/74985
                // is stabilized
                for (dst, src) in iter::zip(dst.chunks_exact_mut(Self::SIZE32), items) {
                    let dst: &mut [u8; Self::SIZE32] = dst.try_into().unwrap();
                    *dst = src.to_le_bytes();
                }
                Ok(())
            }
        }

        unsafe impl Lift for $float {
            #[inline]
            fn lift(_cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                Ok($float::from_bits(src.$get_float()))
            }

            #[inline]
            fn load(_cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                debug_assert!((bytes.as_ptr() as usize) % Self::SIZE32 == 0);
                Ok($float::from_le_bytes(bytes.try_into().unwrap()))
            }

            // Bulk override of the default element-by-element `load_list`,
            // decoding little-endian bit patterns straight out of guest
            // memory.
            fn load_list(cx: &mut LiftContext<'_>, list: &WasmList<Self>) -> Result<Vec<Self>> where Self: Sized {
                // See comments in `WasmList::get` for the panicking indexing
                let byte_size = list.len * mem::size_of::<Self>();
                let bytes = &cx.memory()[list.ptr..][..byte_size];

                // The canonical ABI requires that everything is aligned to its
                // own size, so this should be an aligned array.
                assert!(bytes.as_ptr().cast::<Self>().is_aligned());

                // Copy the resulting slice to a new Vec, handling endianness
                // in the process
                // TODO use `as_chunks` when https://github.com/rust-lang/rust/issues/74985
                // is stabilized
                Ok(
                    bytes
                        .chunks_exact(Self::SIZE32)
                        .map(|i| $float::from_le_bytes(i.try_into().unwrap()))
                        .collect()
                )
            }
        }
    };)*)
}

floats! {
    f32/get_f32 = Float32 with abi:SCALAR4
    f64/get_f64 = Float64 with abi:SCALAR8
}
1022
1023unsafe impl ComponentType for bool {
1024    type Lower = ValRaw;
1025
1026    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::SCALAR1;
1027
1028    fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
1029        match ty {
1030            InterfaceType::Bool => Ok(()),
1031            other => bail!("expected `bool` found `{}`", desc(other)),
1032        }
1033    }
1034}
1035
1036unsafe impl Lower for bool {
1037    fn lower<T>(
1038        &self,
1039        _cx: &mut LowerContext<'_, T>,
1040        ty: InterfaceType,
1041        dst: &mut MaybeUninit<Self::Lower>,
1042    ) -> Result<()> {
1043        debug_assert!(matches!(ty, InterfaceType::Bool));
1044        dst.write(ValRaw::i32(*self as i32));
1045        Ok(())
1046    }
1047
1048    fn store<T>(
1049        &self,
1050        cx: &mut LowerContext<'_, T>,
1051        ty: InterfaceType,
1052        offset: usize,
1053    ) -> Result<()> {
1054        debug_assert!(matches!(ty, InterfaceType::Bool));
1055        debug_assert!(offset % Self::SIZE32 == 0);
1056        cx.get::<1>(offset)[0] = *self as u8;
1057        Ok(())
1058    }
1059}
1060
1061unsafe impl Lift for bool {
1062    #[inline]
1063    fn lift(_cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
1064        debug_assert!(matches!(ty, InterfaceType::Bool));
1065        match src.get_i32() {
1066            0 => Ok(false),
1067            _ => Ok(true),
1068        }
1069    }
1070
1071    #[inline]
1072    fn load(_cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
1073        debug_assert!(matches!(ty, InterfaceType::Bool));
1074        match bytes[0] {
1075            0 => Ok(false),
1076            _ => Ok(true),
1077        }
1078    }
1079}
1080
1081unsafe impl ComponentType for char {
1082    type Lower = ValRaw;
1083
1084    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::SCALAR4;
1085
1086    fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
1087        match ty {
1088            InterfaceType::Char => Ok(()),
1089            other => bail!("expected `char` found `{}`", desc(other)),
1090        }
1091    }
1092}
1093
1094unsafe impl Lower for char {
1095    #[inline]
1096    fn lower<T>(
1097        &self,
1098        _cx: &mut LowerContext<'_, T>,
1099        ty: InterfaceType,
1100        dst: &mut MaybeUninit<Self::Lower>,
1101    ) -> Result<()> {
1102        debug_assert!(matches!(ty, InterfaceType::Char));
1103        dst.write(ValRaw::u32(u32::from(*self)));
1104        Ok(())
1105    }
1106
1107    #[inline]
1108    fn store<T>(
1109        &self,
1110        cx: &mut LowerContext<'_, T>,
1111        ty: InterfaceType,
1112        offset: usize,
1113    ) -> Result<()> {
1114        debug_assert!(matches!(ty, InterfaceType::Char));
1115        debug_assert!(offset % Self::SIZE32 == 0);
1116        *cx.get::<4>(offset) = u32::from(*self).to_le_bytes();
1117        Ok(())
1118    }
1119}
1120
1121unsafe impl Lift for char {
1122    #[inline]
1123    fn lift(_cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
1124        debug_assert!(matches!(ty, InterfaceType::Char));
1125        Ok(char::try_from(src.get_u32())?)
1126    }
1127
1128    #[inline]
1129    fn load(_cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
1130        debug_assert!(matches!(ty, InterfaceType::Char));
1131        debug_assert!((bytes.as_ptr() as usize) % Self::SIZE32 == 0);
1132        let bits = u32::from_le_bytes(bytes.try_into().unwrap());
1133        Ok(char::try_from(bits)?)
1134    }
1135}
1136
// FIXME(#4311): these probably need different constants for memory64
//
// High bit or'd into a string length by `lower_string` (and tested by
// `WasmStr::new`) to indicate that a latin1+utf16 ("compact") string fell
// back to the utf-16 representation.
const UTF16_TAG: usize = 1 << 31;
// Maximum byte length of a string copied into a 32-bit linear memory;
// lengths must fit in 31 bits so the tag bit above stays available.
const MAX_STRING_BYTE_LENGTH: usize = (1 << 31) - 1;
1140
1141// Note that this is similar to `ComponentType for WasmStr` except it can only
1142// be used for lowering, not lifting.
1143unsafe impl ComponentType for str {
1144    type Lower = [ValRaw; 2];
1145
1146    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::POINTER_PAIR;
1147
1148    fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
1149        match ty {
1150            InterfaceType::String => Ok(()),
1151            other => bail!("expected `string` found `{}`", desc(other)),
1152        }
1153    }
1154}
1155
1156unsafe impl Lower for str {
1157    fn lower<T>(
1158        &self,
1159        cx: &mut LowerContext<'_, T>,
1160        ty: InterfaceType,
1161        dst: &mut MaybeUninit<[ValRaw; 2]>,
1162    ) -> Result<()> {
1163        debug_assert!(matches!(ty, InterfaceType::String));
1164        let (ptr, len) = lower_string(cx, self)?;
1165        // See "WRITEPTR64" above for why this is always storing a 64-bit
1166        // integer.
1167        map_maybe_uninit!(dst[0]).write(ValRaw::i64(ptr as i64));
1168        map_maybe_uninit!(dst[1]).write(ValRaw::i64(len as i64));
1169        Ok(())
1170    }
1171
1172    fn store<T>(
1173        &self,
1174        cx: &mut LowerContext<'_, T>,
1175        ty: InterfaceType,
1176        offset: usize,
1177    ) -> Result<()> {
1178        debug_assert!(matches!(ty, InterfaceType::String));
1179        debug_assert!(offset % (Self::ALIGN32 as usize) == 0);
1180        let (ptr, len) = lower_string(cx, self)?;
1181        // FIXME(#4311): needs memory64 handling
1182        *cx.get(offset + 0) = u32::try_from(ptr).unwrap().to_le_bytes();
1183        *cx.get(offset + 4) = u32::try_from(len).unwrap().to_le_bytes();
1184        Ok(())
1185    }
1186}
1187
/// Copies `string` from the host into the guest's linear memory using the
/// guest's configured string encoding, returning the `(ptr, len)` pair to
/// hand to wasm.
///
/// The returned `len` is in units of the destination encoding (bytes for
/// utf-8, 16-bit code units for utf-16) and, for the latin1+utf16 encoding,
/// may additionally have `UTF16_TAG` or'd in to signal utf-16.
fn lower_string<T>(cx: &mut LowerContext<'_, T>, string: &str) -> Result<(usize, usize)> {
    // Note that in general the wasm module can't assume anything about what the
    // host strings are encoded as. Additionally hosts are allowed to have
    // differently-encoded strings at runtime. Finally when copying a string
    // into wasm it's somewhat strict in the sense that the various patterns of
    // allocation and such are already dictated for us.
    //
    // In general what this means is that when copying a string from the host
    // into the destination we need to follow one of the cases of copying into
    // WebAssembly. It doesn't particularly matter which case as long as it ends
    // up in the right encoding. For example a destination encoding of
    // latin1+utf16 has a number of ways to get copied into and we do something
    // here that isn't the default "utf8 to latin1+utf16" since we have access
    // to simd-accelerated helpers in the `encoding_rs` crate. This is ok though
    // because we can fake that the host string was already stored in latin1
    // format and follow that copy pattern instead.
    match cx.options.string_encoding() {
        // This corresponds to `store_string_copy` in the canonical ABI where
        // the host's representation is utf-8 and the wasm module wants utf-8 so
        // a copy is all that's needed (and the `realloc` can be precise for the
        // initial memory allocation).
        StringEncoding::Utf8 => {
            if string.len() > MAX_STRING_BYTE_LENGTH {
                bail!(
                    "string length of {} too large to copy into wasm",
                    string.len()
                );
            }
            let ptr = cx.realloc(0, 0, 1, string.len())?;
            cx.as_slice_mut()[ptr..][..string.len()].copy_from_slice(string.as_bytes());
            Ok((ptr, string.len()))
        }

        // This corresponds to `store_utf8_to_utf16` in the canonical ABI. Here
        // an over-large allocation is performed and then shrunk afterwards if
        // necessary.
        StringEncoding::Utf16 => {
            // Worst case: every utf-8 byte becomes its own 16-bit code unit.
            let size = string.len() * 2;
            if size > MAX_STRING_BYTE_LENGTH {
                bail!(
                    "string length of {} too large to copy into wasm",
                    string.len()
                );
            }
            let mut ptr = cx.realloc(0, 0, 2, size)?;
            let mut copied = 0;
            let bytes = &mut cx.as_slice_mut()[ptr..][..size];
            for (u, bytes) in string.encode_utf16().zip(bytes.chunks_mut(2)) {
                let u_bytes = u.to_le_bytes();
                bytes[0] = u_bytes[0];
                bytes[1] = u_bytes[1];
                copied += 1;
            }
            // Shrink the allocation if fewer code units were written than the
            // worst-case estimate above.
            if (copied * 2) < size {
                ptr = cx.realloc(ptr, size, 2, copied * 2)?;
            }
            Ok((ptr, copied))
        }

        StringEncoding::CompactUtf16 => {
            // This corresponds to `store_string_to_latin1_or_utf16`
            //
            // Optimistically encode one latin1 byte per character; fall back
            // to utf-16 (below) on the first character that doesn't fit.
            let bytes = string.as_bytes();
            let mut iter = string.char_indices();
            let mut ptr = cx.realloc(0, 0, 2, bytes.len())?;
            let mut dst = &mut cx.as_slice_mut()[ptr..][..bytes.len()];
            let mut result = 0;
            while let Some((i, ch)) = iter.next() {
                // Test if this `char` fits into the latin1 encoding.
                if let Ok(byte) = u8::try_from(u32::from(ch)) {
                    dst[result] = byte;
                    result += 1;
                    continue;
                }

                // .. if utf16 is forced to be used then the allocation is
                // bumped up to the maximum size.
                let worst_case = bytes
                    .len()
                    .checked_mul(2)
                    .ok_or_else(|| anyhow!("byte length overflow"))?;
                if worst_case > MAX_STRING_BYTE_LENGTH {
                    bail!("byte length too large");
                }
                ptr = cx.realloc(ptr, bytes.len(), 2, worst_case)?;
                dst = &mut cx.as_slice_mut()[ptr..][..worst_case];

                // Previously encoded latin1 bytes are inflated to their 16-bit
                // size for utf16
                for i in (0..result).rev() {
                    dst[2 * i] = dst[i];
                    dst[2 * i + 1] = 0;
                }

                // and then the remainder of the string is encoded.
                for (u, bytes) in string[i..]
                    .encode_utf16()
                    .zip(dst[2 * result..].chunks_mut(2))
                {
                    let u_bytes = u.to_le_bytes();
                    bytes[0] = u_bytes[0];
                    bytes[1] = u_bytes[1];
                    result += 1;
                }
                // Shrink the worst-case allocation to the actual utf-16 size.
                if worst_case > 2 * result {
                    ptr = cx.realloc(ptr, worst_case, 2, 2 * result)?;
                }
                // Tag the length so the lifting side knows this is utf-16.
                return Ok((ptr, result | UTF16_TAG));
            }
            // Entirely latin1: shrink the allocation if the utf-8 input had
            // any multi-byte characters.
            if result < bytes.len() {
                ptr = cx.realloc(ptr, bytes.len(), 2, result)?;
            }
            Ok((ptr, result))
        }
    }
}
1303
/// Representation of a string located in linear memory in a WebAssembly
/// instance.
///
/// This type can be used in place of `String` and `str` for string-taking APIs
/// in some situations. The purpose of this type is to represent a range of
/// validated bytes within a component but does not actually copy the bytes. The
/// primary method, [`WasmStr::to_str`], attempts to return a reference to the
/// string directly located in the component's memory, avoiding a copy into the
/// host if possible.
///
/// The downside of this type, however, is that accessing a string requires a
/// [`Store`](crate::Store) pointer (via [`StoreContext`]). Bindings generated
/// by [`bindgen!`](crate::component::bindgen), for example, do not have access
/// to [`StoreContext`] and thus can't use this type.
///
/// This is intended for more advanced use cases such as defining functions
/// directly in a [`Linker`](crate::component::Linker). It's expected that in
/// the future [`bindgen!`](crate::component::bindgen) will also have a way to
/// use this type.
///
/// This type is used with [`TypedFunc`], for example, when WebAssembly returns
/// a string. This type cannot be used to give a string to WebAssembly, instead
/// `&str` should be used for that (since it's coming from the host).
///
/// Note that this type represents an in-bounds string in linear memory, but it
/// does not represent a valid string (e.g. valid utf-8). Validation happens
/// when [`WasmStr::to_str`] is called.
///
/// Also note that this type does not implement [`Lower`], it only implements
/// [`Lift`].
pub struct WasmStr {
    // Byte offset in the component's linear memory where the string starts
    // (validated as in-bounds by `WasmStr::new`).
    ptr: usize,
    // Length in the canonical ABI's units: bytes for utf-8, code units for
    // utf-16, possibly tagged with `UTF16_TAG` for latin1+utf16.
    len: usize,
    // Copy of the lift-time options (memory, string encoding) used later to
    // locate and decode the bytes.
    options: Options,
}
1339
impl WasmStr {
    // Validates that the `(ptr, len)` pair lifted from wasm describes an
    // in-bounds region of the component's linear memory, interpreting `len`
    // per the configured string encoding (utf-16 lengths are code-unit
    // counts, so the byte length is doubled; the latin1+utf16 encoding
    // consults `UTF16_TAG` to pick the interpretation).
    fn new(ptr: usize, len: usize, cx: &mut LiftContext<'_>) -> Result<WasmStr> {
        let byte_len = match cx.options.string_encoding() {
            StringEncoding::Utf8 => Some(len),
            StringEncoding::Utf16 => len.checked_mul(2),
            StringEncoding::CompactUtf16 => {
                if len & UTF16_TAG == 0 {
                    Some(len)
                } else {
                    (len ^ UTF16_TAG).checked_mul(2)
                }
            }
        };
        // `None` from either checked operation means overflow, which is
        // treated the same as an out-of-bounds pointer/length pair.
        match byte_len.and_then(|len| ptr.checked_add(len)) {
            Some(n) if n <= cx.memory().len() => {}
            _ => bail!("string pointer/length out of bounds of memory"),
        }
        Ok(WasmStr {
            ptr,
            len,
            options: *cx.options,
        })
    }

    /// Returns the underlying string that this cursor points to.
    ///
    /// Note that this will internally decode the string from the wasm's
    /// encoding to utf-8 and additionally perform validation.
    ///
    /// The `store` provided must be the store where this string lives to
    /// access the correct memory.
    ///
    /// # Errors
    ///
    /// Returns an error if the string wasn't encoded correctly (e.g. invalid
    /// utf-8).
    ///
    /// # Panics
    ///
    /// Panics if this string is not owned by `store`.
    //
    // TODO: should add accessors for specifically utf-8 and utf-16 that perhaps
    // in an opt-in basis don't do validation. Additionally there should be some
    // method that returns `[u16]` after validating to avoid the utf16-to-utf8
    // transcode.
    pub fn to_str<'a, T: 'static>(
        &self,
        store: impl Into<StoreContext<'a, T>>,
    ) -> Result<Cow<'a, str>> {
        let store = store.into().0;
        let memory = self.options.memory(store);
        self.to_str_from_memory(memory)
    }

    // Decodes this string from the provided view of linear memory,
    // dispatching on the encoding the string was lifted with.
    fn to_str_from_memory<'a>(&self, memory: &'a [u8]) -> Result<Cow<'a, str>> {
        match self.options.string_encoding() {
            StringEncoding::Utf8 => self.decode_utf8(memory),
            StringEncoding::Utf16 => self.decode_utf16(memory, self.len),
            StringEncoding::CompactUtf16 => {
                // The tag bit distinguishes latin1 (unset) from utf-16 (set).
                if self.len & UTF16_TAG == 0 {
                    self.decode_latin1(memory)
                } else {
                    self.decode_utf16(memory, self.len ^ UTF16_TAG)
                }
            }
        }
    }

    // Borrowing decode: validates utf-8 in place, so `Cow::Borrowed` on
    // success with no copy.
    fn decode_utf8<'a>(&self, memory: &'a [u8]) -> Result<Cow<'a, str>> {
        // Note that bounds-checking already happen in construction of `WasmStr`
        // so this is never expected to panic. This could theoretically be
        // unchecked indexing if we're feeling wild enough.
        Ok(str::from_utf8(&memory[self.ptr..][..self.len])?.into())
    }

    // Transcoding decode: utf-16 code units are collected into a fresh host
    // `String`, erroring on unpaired surrogates.
    fn decode_utf16<'a>(&self, memory: &'a [u8], len: usize) -> Result<Cow<'a, str>> {
        // See notes in `decode_utf8` for why this is panicking indexing.
        let memory = &memory[self.ptr..][..len * 2];
        Ok(core::char::decode_utf16(
            memory
                .chunks(2)
                .map(|chunk| u16::from_le_bytes(chunk.try_into().unwrap())),
        )
        .collect::<Result<String, _>>()?
        .into())
    }

    // Latin1 decode via `encoding_rs`; borrows when the bytes are all ASCII,
    // otherwise allocates (per `decode_latin1`'s `Cow` return).
    fn decode_latin1<'a>(&self, memory: &'a [u8]) -> Result<Cow<'a, str>> {
        // See notes in `decode_utf8` for why this is panicking indexing.
        Ok(encoding_rs::mem::decode_latin1(
            &memory[self.ptr..][..self.len],
        ))
    }
}
1434
1435// Note that this is similar to `ComponentType for str` except it can only be
1436// used for lifting, not lowering.
1437unsafe impl ComponentType for WasmStr {
1438    type Lower = <str as ComponentType>::Lower;
1439
1440    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::POINTER_PAIR;
1441
1442    fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
1443        match ty {
1444            InterfaceType::String => Ok(()),
1445            other => bail!("expected `string` found `{}`", desc(other)),
1446        }
1447    }
1448}
1449
1450unsafe impl Lift for WasmStr {
1451    #[inline]
1452    fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
1453        debug_assert!(matches!(ty, InterfaceType::String));
1454        // FIXME(#4311): needs memory64 treatment
1455        let ptr = src[0].get_u32();
1456        let len = src[1].get_u32();
1457        let (ptr, len) = (usize::try_from(ptr)?, usize::try_from(len)?);
1458        WasmStr::new(ptr, len, cx)
1459    }
1460
1461    #[inline]
1462    fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
1463        debug_assert!(matches!(ty, InterfaceType::String));
1464        debug_assert!((bytes.as_ptr() as usize) % (Self::ALIGN32 as usize) == 0);
1465        // FIXME(#4311): needs memory64 treatment
1466        let ptr = u32::from_le_bytes(bytes[..4].try_into().unwrap());
1467        let len = u32::from_le_bytes(bytes[4..].try_into().unwrap());
1468        let (ptr, len) = (usize::try_from(ptr)?, usize::try_from(len)?);
1469        WasmStr::new(ptr, len, cx)
1470    }
1471}
1472
1473unsafe impl<T> ComponentType for [T]
1474where
1475    T: ComponentType,
1476{
1477    type Lower = [ValRaw; 2];
1478
1479    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::POINTER_PAIR;
1480
1481    fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()> {
1482        match ty {
1483            InterfaceType::List(t) => T::typecheck(&types.types[*t].element, types),
1484            other => bail!("expected `list` found `{}`", desc(other)),
1485        }
1486    }
1487}
1488
1489unsafe impl<T> Lower for [T]
1490where
1491    T: Lower,
1492{
1493    fn lower<U>(
1494        &self,
1495        cx: &mut LowerContext<'_, U>,
1496        ty: InterfaceType,
1497        dst: &mut MaybeUninit<[ValRaw; 2]>,
1498    ) -> Result<()> {
1499        let elem = match ty {
1500            InterfaceType::List(i) => cx.types[i].element,
1501            _ => bad_type_info(),
1502        };
1503        let (ptr, len) = lower_list(cx, elem, self)?;
1504        // See "WRITEPTR64" above for why this is always storing a 64-bit
1505        // integer.
1506        map_maybe_uninit!(dst[0]).write(ValRaw::i64(ptr as i64));
1507        map_maybe_uninit!(dst[1]).write(ValRaw::i64(len as i64));
1508        Ok(())
1509    }
1510
1511    fn store<U>(
1512        &self,
1513        cx: &mut LowerContext<'_, U>,
1514        ty: InterfaceType,
1515        offset: usize,
1516    ) -> Result<()> {
1517        let elem = match ty {
1518            InterfaceType::List(i) => cx.types[i].element,
1519            _ => bad_type_info(),
1520        };
1521        debug_assert!(offset % (Self::ALIGN32 as usize) == 0);
1522        let (ptr, len) = lower_list(cx, elem, self)?;
1523        *cx.get(offset + 0) = u32::try_from(ptr).unwrap().to_le_bytes();
1524        *cx.get(offset + 4) = u32::try_from(len).unwrap().to_le_bytes();
1525        Ok(())
1526    }
1527}
1528
1529// FIXME: this is not a memcpy for `T` where `T` is something like `u8`.
1530//
1531// Some attempts to fix this have proved not fruitful. In isolation an attempt
1532// was made where:
1533//
1534// * `MemoryMut` stored a `*mut [u8]` as its "last view" of memory to avoid
1535//   reloading the base pointer constantly. This view is reset on `realloc`.
1536// * The bounds-checks in `MemoryMut::get` were removed (replaced with unsafe
1537//   indexing)
1538//
1539// Even then though this didn't correctly vectorized for `Vec<u8>`. It's not
1540// entirely clear why but it appeared that it's related to reloading the base
1541// pointer to memory (I guess from `MemoryMut` itself?). Overall I'm not really
1542// clear on what's happening there, but this is surely going to be a performance
1543// bottleneck in the future.
1544fn lower_list<T, U>(
1545    cx: &mut LowerContext<'_, U>,
1546    ty: InterfaceType,
1547    list: &[T],
1548) -> Result<(usize, usize)>
1549where
1550    T: Lower,
1551{
1552    let elem_size = T::SIZE32;
1553    let size = list
1554        .len()
1555        .checked_mul(elem_size)
1556        .ok_or_else(|| anyhow!("size overflow copying a list"))?;
1557    let ptr = cx.realloc(0, 0, T::ALIGN32, size)?;
1558    T::store_list(cx, ty, ptr, list)?;
1559    Ok((ptr, list.len()))
1560}
1561
1562/// Representation of a list of values that are owned by a WebAssembly instance.
1563///
1564/// For some more commentary about the rationale for this type see the
1565/// documentation of [`WasmStr`]. In summary this type can avoid a copy when
1566/// passing data to the host in some situations but is additionally more
1567/// cumbersome to use by requiring a [`Store`](crate::Store) to be provided.
1568///
1569/// This type is used whenever a `(list T)` is returned from a [`TypedFunc`],
1570/// for example. This type represents a list of values that are stored in linear
1571/// memory which are waiting to be read.
1572///
1573/// Note that this type represents only a valid range of bytes for the list
1574/// itself, it does not represent validity of the elements themselves and that's
1575/// performed when they're iterated.
1576///
1577/// Note that this type does not implement the [`Lower`] trait, only [`Lift`].
pub struct WasmList<T> {
    // Byte offset within the instance's linear memory where the list's
    // elements begin. Validated to be in-bounds and aligned at construction.
    ptr: usize,
    // Number of elements (not bytes) in the list.
    len: usize,
    // Canonical-ABI options (memory, string encoding, etc.) captured from the
    // lifting context that produced this list.
    options: Options,
    // Component-model type of each element, used when lifting elements.
    elem: InterfaceType,
    // NB: it would probably be more efficient to store a non-atomic index-style
    // reference to something inside a `StoreOpaque`, but that's not easily
    // available at this time, so it's left as a future exercise.
    types: Arc<ComponentTypes>,
    // Raw pointer back to the originating component instance; only
    // dereferenced after verifying the caller's store matches `options`.
    instance: SendSyncPtr<ComponentInstance>,
    _marker: marker::PhantomData<T>,
}
1590
1591impl<T: Lift> WasmList<T> {
1592    fn new(
1593        ptr: usize,
1594        len: usize,
1595        cx: &mut LiftContext<'_>,
1596        elem: InterfaceType,
1597    ) -> Result<WasmList<T>> {
1598        match len
1599            .checked_mul(T::SIZE32)
1600            .and_then(|len| ptr.checked_add(len))
1601        {
1602            Some(n) if n <= cx.memory().len() => {}
1603            _ => bail!("list pointer/length out of bounds of memory"),
1604        }
1605        if ptr % usize::try_from(T::ALIGN32)? != 0 {
1606            bail!("list pointer is not aligned")
1607        }
1608        Ok(WasmList {
1609            ptr,
1610            len,
1611            options: *cx.options,
1612            elem,
1613            types: cx.types.clone(),
1614            instance: SendSyncPtr::new(NonNull::new(cx.instance_ptr()).unwrap()),
1615            _marker: marker::PhantomData,
1616        })
1617    }
1618
1619    /// Returns the item length of this vector
1620    #[inline]
1621    pub fn len(&self) -> usize {
1622        self.len
1623    }
1624
1625    /// Gets the `n`th element of this list.
1626    ///
1627    /// Returns `None` if `index` is out of bounds. Returns `Some(Err(..))` if
1628    /// the value couldn't be decoded (it was invalid). Returns `Some(Ok(..))`
1629    /// if the value is valid.
1630    ///
1631    /// # Panics
1632    ///
1633    /// This function will panic if the string did not originally come from the
1634    /// `store` specified.
1635    //
1636    // TODO: given that interface values are intended to be consumed in one go
1637    // should we even expose a random access iteration API? In theory all
1638    // consumers should be validating through the iterator.
1639    pub fn get(&self, mut store: impl AsContextMut, index: usize) -> Option<Result<T>> {
1640        let store = store.as_context_mut().0;
1641        self.options.store_id().assert_belongs_to(store.id());
1642        // This should be safe because the unsafety lies in the `self.instance`
1643        // pointer passed in has previously been validated by the lifting
1644        // context this was originally created within and with the check above
1645        // this is guaranteed to be the same store. This means that this should
1646        // be carrying over the original assertion from the original creation of
1647        // the lifting context that created this type.
1648        let mut cx =
1649            unsafe { LiftContext::new(store, &self.options, &self.types, self.instance.as_ptr()) };
1650        self.get_from_store(&mut cx, index)
1651    }
1652
1653    fn get_from_store(&self, cx: &mut LiftContext<'_>, index: usize) -> Option<Result<T>> {
1654        if index >= self.len {
1655            return None;
1656        }
1657        // Note that this is using panicking indexing and this is expected to
1658        // never fail. The bounds-checking here happened during the construction
1659        // of the `WasmList` itself which means these should always be in-bounds
1660        // (and wasm memory can only grow). This could theoretically be
1661        // unchecked indexing if we're confident enough and it's actually a perf
1662        // issue one day.
1663        let bytes = &cx.memory()[self.ptr + index * T::SIZE32..][..T::SIZE32];
1664        Some(T::load(cx, self.elem, bytes))
1665    }
1666
1667    /// Returns an iterator over the elements of this list.
1668    ///
1669    /// Each item of the list may fail to decode and is represented through the
1670    /// `Result` value of the iterator.
1671    pub fn iter<'a, U: 'static>(
1672        &'a self,
1673        store: impl Into<StoreContextMut<'a, U>>,
1674    ) -> impl ExactSizeIterator<Item = Result<T>> + 'a {
1675        let store = store.into().0;
1676        self.options.store_id().assert_belongs_to(store.id());
1677        // See comments about unsafety in the `get` method.
1678        let mut cx =
1679            unsafe { LiftContext::new(store, &self.options, &self.types, self.instance.as_ptr()) };
1680        (0..self.len).map(move |i| self.get_from_store(&mut cx, i).unwrap())
1681    }
1682}
1683
// Generates `WasmList<$i>` accessors which expose the list's contents as a
// raw slice borrowed directly from guest linear memory, avoiding a
// per-element copy. Only instantiated for primitive integer types whose
// canonical ABI layout matches the host layout (modulo endianness).
macro_rules! raw_wasm_list_accessors {
    ($($i:ident)*) => ($(
        impl WasmList<$i> {
            /// Get access to the raw underlying memory for this list.
            ///
            /// This method will return a direct slice into the original wasm
            /// module's linear memory where the data for this slice is stored.
            /// This allows the embedder to have efficient access to the
            /// underlying memory if needed and avoid copies and such if
            /// desired.
            ///
            /// Note that multi-byte integers are stored in little-endian format
            /// so portable processing of this slice must be aware of the host's
            /// byte-endianness. The `from_le` constructors in the Rust standard
            /// library should be suitable for converting from little-endian.
            ///
            /// # Panics
            ///
            /// Panics if the `store` provided is not the one from which this
            /// slice originated.
            pub fn as_le_slice<'a, T: 'static>(&self, store: impl Into<StoreContext<'a, T>>) -> &'a [$i] {
                let memory = self.options.memory(store.into().0);
                self._as_le_slice(memory)
            }

            fn _as_le_slice<'a>(&self, all_of_memory: &'a [u8]) -> &'a [$i] {
                // See comments in `WasmList::get` for the panicking indexing
                let byte_size = self.len * mem::size_of::<$i>();
                let bytes = &all_of_memory[self.ptr..][..byte_size];

                // The canonical ABI requires that everything is aligned to its
                // own size, so this should be an aligned array. Furthermore the
                // alignment of primitive integers for hosts should be smaller
                // than or equal to the size of the primitive itself, meaning
                // that a wasm canonical-abi-aligned list is also aligned for
                // the host. That should mean that the head/tail slices here are
                // empty.
                //
                // Also note that the `unsafe` here is needed since the type
                // we're aligning to isn't guaranteed to be valid, but in our
                // case it's just integers and bytes so this should be safe.
                unsafe {
                    let (head, body, tail) = bytes.align_to::<$i>();
                    assert!(head.is_empty() && tail.is_empty());
                    body
                }
            }
        }
    )*)
}
1734
// Instantiate the raw-slice accessors above for all primitive integer types.
raw_wasm_list_accessors! {
    i8 i16 i32 i64
    u8 u16 u32 u64
}
1739
1740// Note that this is similar to `ComponentType for str` except it can only be
1741// used for lifting, not lowering.
unsafe impl<T: ComponentType> ComponentType for WasmList<T> {
    // Same flat representation as a host-side slice of `T`.
    type Lower = <[T] as ComponentType>::Lower;

    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::POINTER_PAIR;

    fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()> {
        // Delegate to `[T]` since both map onto a component-model `list<T>`.
        <[T] as ComponentType>::typecheck(ty, types)
    }
}
1751
1752unsafe impl<T: Lift> Lift for WasmList<T> {
1753    fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
1754        let elem = match ty {
1755            InterfaceType::List(i) => cx.types[i].element,
1756            _ => bad_type_info(),
1757        };
1758        // FIXME(#4311): needs memory64 treatment
1759        let ptr = src[0].get_u32();
1760        let len = src[1].get_u32();
1761        let (ptr, len) = (usize::try_from(ptr)?, usize::try_from(len)?);
1762        WasmList::new(ptr, len, cx, elem)
1763    }
1764
1765    fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
1766        let elem = match ty {
1767            InterfaceType::List(i) => cx.types[i].element,
1768            _ => bad_type_info(),
1769        };
1770        debug_assert!((bytes.as_ptr() as usize) % (Self::ALIGN32 as usize) == 0);
1771        // FIXME(#4311): needs memory64 treatment
1772        let ptr = u32::from_le_bytes(bytes[..4].try_into().unwrap());
1773        let len = u32::from_le_bytes(bytes[4..].try_into().unwrap());
1774        let (ptr, len) = (usize::try_from(ptr)?, usize::try_from(len)?);
1775        WasmList::new(ptr, len, cx, elem)
1776    }
1777}
1778
1779/// Verify that the given wasm type is a tuple with the expected fields in the right order.
1780fn typecheck_tuple(
1781    ty: &InterfaceType,
1782    types: &InstanceType<'_>,
1783    expected: &[fn(&InterfaceType, &InstanceType<'_>) -> Result<()>],
1784) -> Result<()> {
1785    match ty {
1786        InterfaceType::Tuple(t) => {
1787            let tuple = &types.types[*t];
1788            if tuple.types.len() != expected.len() {
1789                bail!(
1790                    "expected {}-tuple, found {}-tuple",
1791                    expected.len(),
1792                    tuple.types.len()
1793                );
1794            }
1795            for (ty, check) in tuple.types.iter().zip(expected) {
1796                check(ty, types)?;
1797            }
1798            Ok(())
1799        }
1800        other => bail!("expected `tuple` found `{}`", desc(other)),
1801    }
1802}
1803
1804/// Verify that the given wasm type is a record with the expected fields in the right order and with the right
1805/// names.
1806pub fn typecheck_record(
1807    ty: &InterfaceType,
1808    types: &InstanceType<'_>,
1809    expected: &[(&str, fn(&InterfaceType, &InstanceType<'_>) -> Result<()>)],
1810) -> Result<()> {
1811    match ty {
1812        InterfaceType::Record(index) => {
1813            let fields = &types.types[*index].fields;
1814
1815            if fields.len() != expected.len() {
1816                bail!(
1817                    "expected record of {} fields, found {} fields",
1818                    expected.len(),
1819                    fields.len()
1820                );
1821            }
1822
1823            for (field, &(name, check)) in fields.iter().zip(expected) {
1824                check(&field.ty, types)
1825                    .with_context(|| format!("type mismatch for field {name}"))?;
1826
1827                if field.name != name {
1828                    bail!("expected record field named {}, found {}", name, field.name);
1829                }
1830            }
1831
1832            Ok(())
1833        }
1834        other => bail!("expected `record` found `{}`", desc(other)),
1835    }
1836}
1837
1838/// Verify that the given wasm type is a variant with the expected cases in the right order and with the right
1839/// names.
1840pub fn typecheck_variant(
1841    ty: &InterfaceType,
1842    types: &InstanceType<'_>,
1843    expected: &[(
1844        &str,
1845        Option<fn(&InterfaceType, &InstanceType<'_>) -> Result<()>>,
1846    )],
1847) -> Result<()> {
1848    match ty {
1849        InterfaceType::Variant(index) => {
1850            let cases = &types.types[*index].cases;
1851
1852            if cases.len() != expected.len() {
1853                bail!(
1854                    "expected variant of {} cases, found {} cases",
1855                    expected.len(),
1856                    cases.len()
1857                );
1858            }
1859
1860            for ((case_name, case_ty), &(name, check)) in cases.iter().zip(expected) {
1861                if *case_name != name {
1862                    bail!("expected variant case named {name}, found {case_name}");
1863                }
1864
1865                match (check, case_ty) {
1866                    (Some(check), Some(ty)) => check(ty, types)
1867                        .with_context(|| format!("type mismatch for case {name}"))?,
1868                    (None, None) => {}
1869                    (Some(_), None) => {
1870                        bail!("case `{name}` has no type but one was expected")
1871                    }
1872                    (None, Some(_)) => {
1873                        bail!("case `{name}` has a type but none was expected")
1874                    }
1875                }
1876            }
1877
1878            Ok(())
1879        }
1880        other => bail!("expected `variant` found `{}`", desc(other)),
1881    }
1882}
1883
1884/// Verify that the given wasm type is a enum with the expected cases in the right order and with the right
1885/// names.
1886pub fn typecheck_enum(
1887    ty: &InterfaceType,
1888    types: &InstanceType<'_>,
1889    expected: &[&str],
1890) -> Result<()> {
1891    match ty {
1892        InterfaceType::Enum(index) => {
1893            let names = &types.types[*index].names;
1894
1895            if names.len() != expected.len() {
1896                bail!(
1897                    "expected enum of {} names, found {} names",
1898                    expected.len(),
1899                    names.len()
1900                );
1901            }
1902
1903            for (name, expected) in names.iter().zip(expected) {
1904                if name != expected {
1905                    bail!("expected enum case named {}, found {}", expected, name);
1906                }
1907            }
1908
1909            Ok(())
1910        }
1911        other => bail!("expected `enum` found `{}`", desc(other)),
1912    }
1913}
1914
1915/// Verify that the given wasm type is a flags type with the expected flags in the right order and with the right
1916/// names.
1917pub fn typecheck_flags(
1918    ty: &InterfaceType,
1919    types: &InstanceType<'_>,
1920    expected: &[&str],
1921) -> Result<()> {
1922    match ty {
1923        InterfaceType::Flags(index) => {
1924            let names = &types.types[*index].names;
1925
1926            if names.len() != expected.len() {
1927                bail!(
1928                    "expected flags type with {} names, found {} names",
1929                    expected.len(),
1930                    names.len()
1931                );
1932            }
1933
1934            for (name, expected) in names.iter().zip(expected) {
1935                if name != expected {
1936                    bail!("expected flag named {}, found {}", expected, name);
1937                }
1938            }
1939
1940            Ok(())
1941        }
1942        other => bail!("expected `flags` found `{}`", desc(other)),
1943    }
1944}
1945
1946/// Format the specified bitflags using the specified names for debugging
/// Format the specified bitflags using the specified names for debugging
pub fn format_flags(bits: &[u32], names: &[&str], f: &mut fmt::Formatter) -> fmt::Result {
    f.write_str("(")?;
    let mut first = true;
    for (index, name) in names.iter().enumerate() {
        // Flag `index` lives in bit `index % 32` of word `index / 32`.
        if (bits[index / 32] >> (index % 32)) & 1 == 0 {
            continue;
        }
        if !first {
            f.write_str("|")?;
        }
        first = false;
        f.write_str(name)?;
    }
    f.write_str(")")
}
1963
1964unsafe impl<T> ComponentType for Option<T>
1965where
1966    T: ComponentType,
1967{
1968    type Lower = TupleLower<<u32 as ComponentType>::Lower, T::Lower>;
1969
1970    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::variant_static(&[None, Some(T::ABI)]);
1971
1972    fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()> {
1973        match ty {
1974            InterfaceType::Option(t) => T::typecheck(&types.types[*t].ty, types),
1975            other => bail!("expected `option` found `{}`", desc(other)),
1976        }
1977    }
1978}
1979
unsafe impl<T> ComponentVariant for Option<T>
where
    T: ComponentType,
{
    // Case 0 is `none` (no payload) and case 1 is `some` with a `T` payload.
    const CASES: &'static [Option<CanonicalAbiInfo>] = &[None, Some(T::ABI)];
}
1986
unsafe impl<T> Lower for Option<T>
where
    T: Lower,
{
    fn lower<U>(
        &self,
        cx: &mut LowerContext<'_, U>,
        ty: InterfaceType,
        dst: &mut MaybeUninit<Self::Lower>,
    ) -> Result<()> {
        let payload = match ty {
            InterfaceType::Option(ty) => cx.types[ty].ty,
            _ => bad_type_info(),
        };
        // The first flat slot holds the discriminant: 0 for `none`, 1 for
        // `some`, followed by the (possibly zeroed) payload.
        match self {
            None => {
                map_maybe_uninit!(dst.A1).write(ValRaw::i32(0));
                // Note that this is unsafe as we're writing an arbitrary
                // bit-pattern to an arbitrary type, but part of the unsafe
                // contract of the `ComponentType` trait is that we can assign
                // any bit-pattern. By writing all zeros here we're ensuring
                // that the core wasm arguments this translates to will all be
                // zeros (as the canonical ABI requires).
                unsafe {
                    map_maybe_uninit!(dst.A2).as_mut_ptr().write_bytes(0u8, 1);
                }
            }
            Some(val) => {
                map_maybe_uninit!(dst.A1).write(ValRaw::i32(1));
                val.lower(cx, payload, map_maybe_uninit!(dst.A2))?;
            }
        }
        Ok(())
    }

    fn store<U>(
        &self,
        cx: &mut LowerContext<'_, U>,
        ty: InterfaceType,
        offset: usize,
    ) -> Result<()> {
        debug_assert!(offset % (Self::ALIGN32 as usize) == 0);
        let payload = match ty {
            InterfaceType::Option(ty) => cx.types[ty].ty,
            _ => bad_type_info(),
        };
        // In-memory form: one discriminant byte at `offset`, then for `some`
        // the payload stored at the variant's aligned payload offset. `none`
        // writes no payload bytes.
        match self {
            None => {
                cx.get::<1>(offset)[0] = 0;
            }
            Some(val) => {
                cx.get::<1>(offset)[0] = 1;
                val.store(cx, payload, offset + (Self::INFO.payload_offset32 as usize))?;
            }
        }
        Ok(())
    }
}
2045
2046unsafe impl<T> Lift for Option<T>
2047where
2048    T: Lift,
2049{
2050    fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
2051        let payload = match ty {
2052            InterfaceType::Option(ty) => cx.types[ty].ty,
2053            _ => bad_type_info(),
2054        };
2055        Ok(match src.A1.get_i32() {
2056            0 => None,
2057            1 => Some(T::lift(cx, payload, &src.A2)?),
2058            _ => bail!("invalid option discriminant"),
2059        })
2060    }
2061
2062    fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
2063        debug_assert!((bytes.as_ptr() as usize) % (Self::ALIGN32 as usize) == 0);
2064        let payload_ty = match ty {
2065            InterfaceType::Option(ty) => cx.types[ty].ty,
2066            _ => bad_type_info(),
2067        };
2068        let discrim = bytes[0];
2069        let payload = &bytes[Self::INFO.payload_offset32 as usize..];
2070        match discrim {
2071            0 => Ok(None),
2072            1 => Ok(Some(T::load(cx, payload_ty, payload)?)),
2073            _ => bail!("invalid option discriminant"),
2074        }
2075    }
2076}
2077
/// Flat (stack) representation of a lowered `result<T, E>`: a discriminant
/// followed by a union large enough for either payload.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct ResultLower<T: Copy, E: Copy> {
    // 0 for `Ok`, 1 for `Err`.
    tag: ValRaw,
    payload: ResultLowerPayload<T, E>,
}
2084
// Union of the two possible payloads; which field is active is determined by
// the `tag` in `ResultLower`. `repr(C)` ensures both fields start at offset 0
// as the canonical ABI's "join" of payload representations requires.
#[derive(Clone, Copy)]
#[repr(C)]
union ResultLowerPayload<T: Copy, E: Copy> {
    ok: T,
    err: E,
}
2091
2092unsafe impl<T, E> ComponentType for Result<T, E>
2093where
2094    T: ComponentType,
2095    E: ComponentType,
2096{
2097    type Lower = ResultLower<T::Lower, E::Lower>;
2098
2099    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::variant_static(&[Some(T::ABI), Some(E::ABI)]);
2100
2101    fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()> {
2102        match ty {
2103            InterfaceType::Result(r) => {
2104                let result = &types.types[*r];
2105                match &result.ok {
2106                    Some(ty) => T::typecheck(ty, types)?,
2107                    None if T::IS_RUST_UNIT_TYPE => {}
2108                    None => bail!("expected no `ok` type"),
2109                }
2110                match &result.err {
2111                    Some(ty) => E::typecheck(ty, types)?,
2112                    None if E::IS_RUST_UNIT_TYPE => {}
2113                    None => bail!("expected no `err` type"),
2114                }
2115                Ok(())
2116            }
2117            other => bail!("expected `result` found `{}`", desc(other)),
2118        }
2119    }
2120}
2121
/// Lowers the payload of a variant into the storage for the entire payload,
/// handling writing zeros at the end of the representation if this payload is
/// smaller than the entire flat representation.
///
/// * `payload` - the flat storage space for the entire payload of the variant
/// * `typed_payload` - projection from the payload storage space to the
///   individual storage space for this variant.
/// * `lower` - lowering operation used to initialize the `typed_payload` return
///   value.
///
/// For more information on this see the comments in the `Lower for Result`
/// implementation below.
///
/// # Safety
///
/// NOTE(review): callers must uphold the `ComponentType` flat-representation
/// contract — `P` and `T` are storage viewable as slices of `ValRaw` (as
/// `storage_as_slice` assumes), with `T`'s flat representation no larger than
/// `P`'s.
pub unsafe fn lower_payload<P, T>(
    payload: &mut MaybeUninit<P>,
    typed_payload: impl FnOnce(&mut MaybeUninit<P>) -> &mut MaybeUninit<T>,
    lower: impl FnOnce(&mut MaybeUninit<T>) -> Result<()>,
) -> Result<()> {
    // Lower this variant's payload into its own (prefix) portion of the
    // storage.
    let typed = typed_payload(payload);
    lower(typed)?;

    // Zero-fill whatever portion of the full payload the typed payload didn't
    // cover so no uninitialized host memory leaks into the guest.
    let typed_len = storage_as_slice(typed).len();
    let payload = storage_as_slice_mut(payload);
    for slot in payload[typed_len..].iter_mut() {
        *slot = ValRaw::u64(0);
    }
    Ok(())
}
2149
unsafe impl<T, E> ComponentVariant for Result<T, E>
where
    T: ComponentType,
    E: ComponentType,
{
    // Case 0 is `ok` with a `T` payload and case 1 is `err` with an `E`
    // payload.
    const CASES: &'static [Option<CanonicalAbiInfo>] = &[Some(T::ABI), Some(E::ABI)];
}
2157
unsafe impl<T, E> Lower for Result<T, E>
where
    T: Lower,
    E: Lower,
{
    fn lower<U>(
        &self,
        cx: &mut LowerContext<'_, U>,
        ty: InterfaceType,
        dst: &mut MaybeUninit<Self::Lower>,
    ) -> Result<()> {
        let (ok, err) = match ty {
            InterfaceType::Result(ty) => {
                let ty = &cx.types[ty];
                (ty.ok, ty.err)
            }
            _ => bad_type_info(),
        };

        // This implementation of `Lower::lower`, if you're reading these from
        // the top of this file, is the first location that the "join" logic of
        // the component model's canonical ABI encountered. The rough problem is
        // that let's say we have a component model type of the form:
        //
        //      (result u64 (error (tuple f32 u16)))
        //
        // The flat representation of this is actually pretty tricky. Currently
        // it is:
        //
        //      i32 i64 i32
        //
        // The first `i32` is the discriminant for the `result`, and the payload
        // is represented by `i64 i32`. The "ok" variant will only use the `i64`
        // and the "err" variant will use both `i64` and `i32`.
        //
        // In the "ok" variant the first issue is encountered. The size of one
        // variant may not match the size of the other variants. All variants
        // start at the "front" but when lowering a type we need to be sure to
        // initialize the later variants (lest we leak random host memory into
        // the guest module). Due to how the `Lower` type is represented as a
        // `union` of all the variants what ends up happening here is that
        // internally within the `lower_payload` after the typed payload is
        // lowered the remaining bits of the payload that weren't initialized
        // are all set to zero. This will guarantee that we'll write to all the
        // slots for each variant.
        //
        // The "err" variant encounters the second issue, however, which is that
        // the flat representation for each type may differ between payloads. In
        // the "ok" arm an `i64` is written, but the `lower` implementation for
        // the "err" arm will write an `f32` and then an `i32`. For this
        // implementation of `lower` to be valid the `f32` needs to get inflated
        // to an `i64` with zero-padding in the upper bits. What may be
        // surprising, however, is that none of this is handled in this file.
        // This implementation looks like it's blindly deferring to `E::lower`
        // and hoping it does the right thing.
        //
        // In reality, however, the correctness of variant lowering relies on
        // two subtle details of the `ValRaw` implementation in Wasmtime:
        //
        // 1. First the `ValRaw` value always contains little-endian values.
        //    This means that if a `u32` is written, a `u64` is read, and then
        //    the `u64` has its upper bits truncated the original value will
        //    always be retained. This is primarily here for big-endian
        //    platforms where if it weren't little endian then the opposite
        //    would occur and the wrong value would be read.
        //
        // 2. Second, and perhaps even more subtly, the `ValRaw` constructors
        //    for 32-bit types actually always initialize 64-bits of the
        //    `ValRaw`. In the component model flat ABI only 32 and 64-bit types
        //    are used so 64-bits is big enough to contain everything. This
        //    means that when a `ValRaw` is written into the destination it will
        //    always, whether it's needed or not, be "ready" to get extended up
        //    to 64-bits.
        //
        // Put together these two subtle guarantees means that all `Lower`
        // implementations can be written "naturally" as one might naively
        // expect. Variants will, on each arm, zero out remaining fields and all
        // writes to the flat representation will automatically be 64-bit writes
        // meaning that if the value is read as a 64-bit value, which isn't
        // known at the time of the write, it'll still be correct.
        //
        // Note below that a `None` payload type corresponds to a payload-less
        // `ok`/`err` case (Rust `()`), in which case only the zero-fill inside
        // `lower_payload` happens.
        match self {
            Ok(e) => {
                map_maybe_uninit!(dst.tag).write(ValRaw::i32(0));
                unsafe {
                    lower_payload(
                        map_maybe_uninit!(dst.payload),
                        |payload| map_maybe_uninit!(payload.ok),
                        |dst| match ok {
                            Some(ok) => e.lower(cx, ok, dst),
                            None => Ok(()),
                        },
                    )
                }
            }
            Err(e) => {
                map_maybe_uninit!(dst.tag).write(ValRaw::i32(1));
                unsafe {
                    lower_payload(
                        map_maybe_uninit!(dst.payload),
                        |payload| map_maybe_uninit!(payload.err),
                        |dst| match err {
                            Some(err) => e.lower(cx, err, dst),
                            None => Ok(()),
                        },
                    )
                }
            }
        }
    }

    fn store<U>(
        &self,
        cx: &mut LowerContext<'_, U>,
        ty: InterfaceType,
        offset: usize,
    ) -> Result<()> {
        let (ok, err) = match ty {
            InterfaceType::Result(ty) => {
                let ty = &cx.types[ty];
                (ty.ok, ty.err)
            }
            _ => bad_type_info(),
        };
        debug_assert!(offset % (Self::ALIGN32 as usize) == 0);
        // In-memory form: one discriminant byte at `offset` followed by the
        // payload (if this case has one) at the variant's aligned payload
        // offset. A `None` payload type means the case is payload-less so
        // nothing further is stored.
        let payload_offset = Self::INFO.payload_offset32 as usize;
        match self {
            Ok(e) => {
                cx.get::<1>(offset)[0] = 0;
                if let Some(ok) = ok {
                    e.store(cx, ok, offset + payload_offset)?;
                }
            }
            Err(e) => {
                cx.get::<1>(offset)[0] = 1;
                if let Some(err) = err {
                    e.store(cx, err, offset + payload_offset)?;
                }
            }
        }
        Ok(())
    }
}
2300
unsafe impl<T, E> Lift for Result<T, E>
where
    T: Lift,
    E: Lift,
{
    #[inline]
    fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
        // Extract the payload types for each arm of the `result` from the
        // runtime type information; anything else is a caller bug.
        let (ok, err) = match ty {
            InterfaceType::Result(ty) => {
                let ty = &cx.types[ty];
                (ty.ok, ty.err)
            }
            _ => bad_type_info(),
        };
        // Note that this implementation specifically isn't trying to actually
        // reinterpret or alter the bits of `lower` depending on which variant
        // we're lifting. This ends up all working out because the value is
        // stored in little-endian format.
        //
        // When stored in little-endian format the `{T,E}::Lower`, when each
        // individual `ValRaw` is read, means that if an i64 value, extended
        // from an i32 value, was stored then when the i32 value is read it'll
        // automatically ignore the upper bits.
        //
        // This "trick" allows us to seamlessly pass through the `Self::Lower`
        // representation into the lifting/lowering without trying to handle
        // "join"ed types as per the canonical ABI. It just so happens that i64
        // bits will naturally be reinterpreted as f64. Additionally if the
        // joined type is i64 but only the lower bits are read that's ok and we
        // don't need to validate the upper bits.
        //
        // This is largely enabled by WebAssembly/component-model#35 where no
        // validation needs to be performed for ignored bits and bytes here.
        Ok(match src.tag.get_i32() {
            // SAFETY: the tag selects which union field of the payload is
            // active, and per the note above reading through the selected
            // field is valid regardless of which variant was originally
            // written.
            0 => Ok(unsafe { lift_option(cx, ok, &src.payload.ok)? }),
            1 => Err(unsafe { lift_option(cx, err, &src.payload.err)? }),
            _ => bail!("invalid expected discriminant"),
        })
    }

    #[inline]
    fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
        debug_assert!((bytes.as_ptr() as usize) % (Self::ALIGN32 as usize) == 0);
        // The in-memory layout is a one-byte discriminant followed by the
        // payload at the (aligned) payload offset computed from type info.
        let discrim = bytes[0];
        let payload = &bytes[Self::INFO.payload_offset32 as usize..];
        let (ok, err) = match ty {
            InterfaceType::Result(ty) => {
                let ty = &cx.types[ty];
                (ty.ok, ty.err)
            }
            _ => bad_type_info(),
        };
        // Only the first `SIZE32` bytes of the payload region belong to the
        // selected arm; trailing bytes (padding up to the max arm size) are
        // intentionally ignored.
        match discrim {
            0 => Ok(Ok(load_option(cx, ok, &payload[..T::SIZE32])?)),
            1 => Ok(Err(load_option(cx, err, &payload[..E::SIZE32])?)),
            _ => bail!("invalid expected discriminant"),
        }
    }
}
2360
2361fn lift_option<T>(cx: &mut LiftContext<'_>, ty: Option<InterfaceType>, src: &T::Lower) -> Result<T>
2362where
2363    T: Lift,
2364{
2365    match ty {
2366        Some(ty) => T::lift(cx, ty, src),
2367        None => Ok(empty_lift()),
2368    }
2369}
2370
2371fn load_option<T>(cx: &mut LiftContext<'_>, ty: Option<InterfaceType>, bytes: &[u8]) -> Result<T>
2372where
2373    T: Lift,
2374{
2375    match ty {
2376        Some(ty) => T::load(cx, ty, bytes),
2377        None => Ok(empty_lift()),
2378    }
2379}
2380
/// Produces a value of type `T` "out of thin air" for use when a `result`
/// or `option` payload type is absent.
fn empty_lift<T>() -> T
where
    T: Lift,
{
    // Guard the conjuring below: this is only legitimate for unit-like,
    // zero-sized types.
    assert!(T::IS_RUST_UNIT_TYPE);
    assert_eq!(mem::size_of::<T>(), 0);
    // SAFETY: `T` is zero-sized (asserted just above), and a zero-sized type
    // has no bytes requiring initialization, so an uninitialized value is a
    // valid value of `T`.
    unsafe { MaybeUninit::uninit().assume_init() }
}
2389
/// Helper structure to define `Lower` for tuples below.
///
/// Uses default type parameters to have fields be zero-sized and not present
/// in memory for smaller tuple values.
#[allow(non_snake_case)]
#[doc(hidden)]
#[derive(Clone, Copy)]
#[repr(C)]
pub struct TupleLower<
    T1 = (),
    T2 = (),
    T3 = (),
    T4 = (),
    T5 = (),
    T6 = (),
    T7 = (),
    T8 = (),
    T9 = (),
    T10 = (),
    T11 = (),
    T12 = (),
    T13 = (),
    T14 = (),
    T15 = (),
    T16 = (),
    T17 = (),
> {
    // NB: these names match the names in `for_each_function_signature!`
    A1: T1,
    A2: T2,
    A3: T3,
    A4: T4,
    A5: T5,
    A6: T6,
    A7: T7,
    A8: T8,
    A9: T9,
    A10: T10,
    A11: T11,
    A12: T12,
    A13: T13,
    A14: T14,
    A15: T15,
    A16: T16,
    A17: T17,
    // Zero-sized field whose element type carries `ValRaw`'s alignment,
    // ensuring the struct is suitably aligned even when all other fields are
    // the zero-sized `()` default (e.g. for the empty tuple).
    _align_tuple_lower0_correctly: [ValRaw; 0],
}
2437
// Implements `ComponentType`, `Lower`, `Lift`, and `ComponentNamedList` for a
// tuple type of arity `$n` whose element types are `$($t)*`.
macro_rules! impl_component_ty_for_tuples {
    ($n:tt $($t:ident)*) => {
        #[allow(non_snake_case)]
        unsafe impl<$($t,)*> ComponentType for ($($t,)*)
            where $($t: ComponentType),*
        {
            // The flat representation of a tuple is the concatenation of each
            // element's flat representation, modeled by `TupleLower` where
            // unused trailing fields default to the zero-sized `()`.
            type Lower = TupleLower<$($t::Lower),*>;

            const ABI: CanonicalAbiInfo = CanonicalAbiInfo::record_static(&[
                $($t::ABI),*
            ]);

            // Only the 0-tuple counts as a "Rust unit type": the repetition
            // below runs once per element, flipping the flag to `false` for
            // any nonempty tuple.
            const IS_RUST_UNIT_TYPE: bool = {
                let mut _is_unit = true;
                $(
                    let _anything_to_bind_the_macro_variable = $t::IS_RUST_UNIT_TYPE;
                    _is_unit = false;
                )*
                _is_unit
            };

            fn typecheck(
                ty: &InterfaceType,
                types: &InstanceType<'_>,
            ) -> Result<()> {
                typecheck_tuple(ty, types, &[$($t::typecheck),*])
            }
        }

        #[allow(non_snake_case)]
        unsafe impl<$($t,)*> Lower for ($($t,)*)
            where $($t: Lower),*
        {
            fn lower<U>(
                &self,
                cx: &mut LowerContext<'_, U>,
                ty: InterfaceType,
                _dst: &mut MaybeUninit<Self::Lower>,
            ) -> Result<()> {
                let types = match ty {
                    InterfaceType::Tuple(t) => &cx.types[t].types,
                    _ => bad_type_info(),
                };
                let ($($t,)*) = self;
                // Each element lowers into its corresponding `TupleLower`
                // field, consuming one interface type per element; a count
                // mismatch with the runtime type info is a bug.
                let mut _types = types.iter();
                $(
                    let ty = *_types.next().unwrap_or_else(bad_type_info);
                    $t.lower(cx, ty, map_maybe_uninit!(_dst.$t))?;
                )*
                Ok(())
            }

            fn store<U>(
                &self,
                cx: &mut LowerContext<'_, U>,
                ty: InterfaceType,
                mut _offset: usize,
            ) -> Result<()> {
                debug_assert!(_offset % (Self::ALIGN32 as usize) == 0);
                let types = match ty {
                    InterfaceType::Tuple(t) => &cx.types[t].types,
                    _ => bad_type_info(),
                };
                let ($($t,)*) = self;
                // Elements are stored sequentially; `next_field32_size`
                // aligns the running offset for each field and advances it
                // past that field.
                let mut _types = types.iter();
                $(
                    let ty = *_types.next().unwrap_or_else(bad_type_info);
                    $t.store(cx, ty, $t::ABI.next_field32_size(&mut _offset))?;
                )*
                Ok(())
            }
        }

        #[allow(non_snake_case)]
        unsafe impl<$($t,)*> Lift for ($($t,)*)
            where $($t: Lift),*
        {
            #[inline]
            fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, _src: &Self::Lower) -> Result<Self> {
                let types = match ty {
                    InterfaceType::Tuple(t) => &cx.types[t].types,
                    _ => bad_type_info(),
                };
                // Lift each element out of its `TupleLower` field in order,
                // pairing it with the next interface type.
                let mut _types = types.iter();
                Ok(($(
                    $t::lift(
                        cx,
                        *_types.next().unwrap_or_else(bad_type_info),
                        &_src.$t,
                    )?,
                )*))
            }

            #[inline]
            fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
                debug_assert!((bytes.as_ptr() as usize) % (Self::ALIGN32 as usize) == 0);
                let types = match ty {
                    InterfaceType::Tuple(t) => &cx.types[t].types,
                    _ => bad_type_info(),
                };
                // Mirror of `store` above: walk the byte buffer field by
                // field, letting `next_field32_size` compute each aligned
                // field offset.
                let mut _types = types.iter();
                let mut _offset = 0;
                $(
                    let ty = *_types.next().unwrap_or_else(bad_type_info);
                    let $t = $t::load(cx, ty, &bytes[$t::ABI.next_field32_size(&mut _offset)..][..$t::SIZE32])?;
                )*
                Ok(($($t,)*))
            }
        }

        #[allow(non_snake_case)]
        unsafe impl<$($t,)*> ComponentNamedList for ($($t,)*)
            where $($t: ComponentType),*
        {}
    };
}
2554
// Instantiate the tuple impls above for every arity supported by
// `for_each_function_signature!`.
for_each_function_signature!(impl_component_ty_for_tuples);
2556
2557pub fn desc(ty: &InterfaceType) -> &'static str {
2558    match ty {
2559        InterfaceType::U8 => "u8",
2560        InterfaceType::S8 => "s8",
2561        InterfaceType::U16 => "u16",
2562        InterfaceType::S16 => "s16",
2563        InterfaceType::U32 => "u32",
2564        InterfaceType::S32 => "s32",
2565        InterfaceType::U64 => "u64",
2566        InterfaceType::S64 => "s64",
2567        InterfaceType::Float32 => "f32",
2568        InterfaceType::Float64 => "f64",
2569        InterfaceType::Bool => "bool",
2570        InterfaceType::Char => "char",
2571        InterfaceType::String => "string",
2572        InterfaceType::List(_) => "list",
2573        InterfaceType::Tuple(_) => "tuple",
2574        InterfaceType::Option(_) => "option",
2575        InterfaceType::Result(_) => "result",
2576
2577        InterfaceType::Record(_) => "record",
2578        InterfaceType::Variant(_) => "variant",
2579        InterfaceType::Flags(_) => "flags",
2580        InterfaceType::Enum(_) => "enum",
2581        InterfaceType::Own(_) => "owned resource",
2582        InterfaceType::Borrow(_) => "borrowed resource",
2583        InterfaceType::Future(_) => "future",
2584        InterfaceType::Stream(_) => "stream",
2585        InterfaceType::ErrorContext(_) => "error-context",
2586    }
2587}
2588
/// Panics to signal that the static Rust type and the runtime
/// `InterfaceType` information disagree.
///
/// NOTE(review): this path appears intended to be unreachable once type
/// checking has succeeded — hence `#[cold]` — but that invariant is upheld
/// by callers, not enforced here.
#[cold]
#[doc(hidden)]
pub fn bad_type_info<T>() -> T {
    // NB: should consider something like `unreachable_unchecked` here if this
    // becomes a performance bottleneck at some point, but that also comes with
    // a tradeoff of propagating a lot of unsafety, so it may not be worth it.
    panic!("bad type information detected");
}