// wasmtime/runtime/component/func/typed.rs
1use crate::component::func::{Func, LiftContext, LowerContext, Options};
2use crate::component::matching::InstanceType;
3use crate::component::storage::{storage_as_slice, storage_as_slice_mut};
4use crate::prelude::*;
5use crate::runtime::vm::component::ComponentInstance;
6use crate::runtime::vm::SendSyncPtr;
7use crate::{AsContextMut, StoreContext, StoreContextMut, ValRaw};
8use alloc::borrow::Cow;
9use alloc::sync::Arc;
10use core::fmt;
11use core::marker;
12use core::mem::{self, MaybeUninit};
13use core::ptr::NonNull;
14use core::str;
15use wasmtime_environ::component::{
16 CanonicalAbiInfo, ComponentTypes, InterfaceType, StringEncoding, VariantInfo, MAX_FLAT_PARAMS,
17 MAX_FLAT_RESULTS,
18};
19
20#[cfg(feature = "component-model-async")]
21use crate::component::concurrent::Promise;
22
/// A statically-typed version of [`Func`] which takes `Params` as input and
/// returns `Return`.
///
/// This is an efficient way to invoke a WebAssembly component where if the
/// inputs and output are statically known this can eschew the vast majority of
/// machinery and checks when calling WebAssembly. This is the most optimized
/// way to call a WebAssembly component.
///
/// Note that like [`Func`] this is a pointer within a [`Store`](crate::Store)
/// and usage will panic if used with the wrong store.
///
/// This type is primarily created with the [`Func::typed`] API.
///
/// See [`ComponentType`] for more information about supported types.
pub struct TypedFunc<Params, Return> {
    // The untyped function this wrapper refers to; all calls bottom out in it.
    func: Func,

    // The definition of this field is somewhat subtle and may be surprising.
    // Naively one might expect something like
    //
    //      _marker: marker::PhantomData<fn(Params) -> Return>,
    //
    // Since this is a function pointer after all. The problem with this
    // definition though is that it imposes the wrong variance on `Params` from
    // what we want. Abstractly a `fn(Params)` is able to store `Params` within
    // it meaning you can only give it `Params` that live longer than the
    // function pointer.
    //
    // With a component model function, however, we're always copying data from
    // the host into the guest, so we are never storing pointers to `Params`
    // into the guest outside the duration of a `call`, meaning we can actually
    // accept values in `TypedFunc::call` which live for a shorter duration
    // than the `Params` argument on the struct.
    //
    // This all means that we don't use a phantom function pointer, but instead
    // feign phantom storage here to get the variance desired.
    _marker: marker::PhantomData<(Params, Return)>,
}
61
62impl<Params, Return> Copy for TypedFunc<Params, Return> {}
63
64impl<Params, Return> Clone for TypedFunc<Params, Return> {
65 fn clone(&self) -> TypedFunc<Params, Return> {
66 *self
67 }
68}
69
impl<Params, Return> TypedFunc<Params, Return>
where
    Params: ComponentNamedList + Lower,
    Return: ComponentNamedList + Lift,
{
    /// Creates a new [`TypedFunc`] from the provided component [`Func`],
    /// unsafely asserting that the underlying function takes `Params` as
    /// input and returns `Return`.
    ///
    /// # Unsafety
    ///
    /// This is an unsafe function because it does not verify that the [`Func`]
    /// provided actually implements this signature. It's up to the caller to
    /// have performed some other sort of check to ensure that the signature is
    /// correct.
    pub unsafe fn new_unchecked(func: Func) -> TypedFunc<Params, Return> {
        TypedFunc {
            _marker: marker::PhantomData,
            func,
        }
    }

    /// Returns the underlying un-typed [`Func`] that this [`TypedFunc`]
    /// references.
    pub fn func(&self) -> &Func {
        &self.func
    }

    /// Calls the underlying WebAssembly component function using the provided
    /// `params` as input.
    ///
    /// This method is used to enter into a component. Execution happens within
    /// the `store` provided. The `params` are copied into WebAssembly memory
    /// as appropriate and a core wasm function is invoked.
    ///
    /// # Post-return
    ///
    /// In the component model each function can have a "post return" specified
    /// which allows cleaning up the arguments returned to the host. For example
    /// if WebAssembly returns a string to the host then it might be a uniquely
    /// allocated string which, after the host finishes processing it, needs to
    /// be deallocated in the wasm instance's own linear memory to prevent
    /// memory leaks in wasm itself. The `post-return` canonical abi option is
    /// used to configured this.
    ///
    /// To accommodate this feature of the component model after invoking a
    /// function via [`TypedFunc::call`] you must next invoke
    /// [`TypedFunc::post_return`]. Note that the return value of the function
    /// should be processed between these two function calls. The return value
    /// continues to be usable from an embedder's perspective after
    /// `post_return` is called, but after `post_return` is invoked it may no
    /// longer retain the same value that the wasm module originally returned.
    ///
    /// Also note that [`TypedFunc::post_return`] must be invoked irrespective
    /// of whether the canonical ABI option `post-return` was configured or not.
    /// This means that embedders must unconditionally call
    /// [`TypedFunc::post_return`] when a function returns. If this function
    /// call returns an error, however, then [`TypedFunc::post_return`] is not
    /// required.
    ///
    /// # Errors
    ///
    /// This function can return an error for a number of reasons:
    ///
    /// * If the wasm itself traps during execution.
    /// * If the wasm traps while copying arguments into memory.
    /// * If the wasm provides bad allocation pointers when copying arguments
    ///   into memory.
    /// * If the wasm returns a value which violates the canonical ABI.
    /// * If this function's instances cannot be entered, for example if the
    ///   instance is currently calling a host function.
    /// * If a previous function call occurred and the corresponding
    ///   `post_return` hasn't been invoked yet.
    ///
    /// In general there are many ways that things could go wrong when copying
    /// types in and out of a wasm module with the canonical ABI, and certain
    /// error conditions are specific to certain types. For example a
    /// WebAssembly module can't return an invalid `char`. When allocating space
    /// for this host to copy a string into the returned pointer must be
    /// in-bounds in memory.
    ///
    /// If an error happens then the error should contain detailed enough
    /// information to understand which part of the canonical ABI went wrong
    /// and what to inspect.
    ///
    /// # Panics
    ///
    /// Panics if this is called on a function in an asynchronous store. This
    /// only works with functions defined within a synchronous store. Also
    /// panics if `store` does not own this function.
    pub fn call(&self, store: impl AsContextMut, params: Params) -> Result<Return> {
        assert!(
            !store.as_context().async_support(),
            "must use `call_async` when async support is enabled on the config"
        );
        self.call_impl(store, params)
    }

    /// Exactly like [`Self::call`], except for use on asynchronous stores.
    ///
    /// # Panics
    ///
    /// Panics if this is called on a function in a synchronous store. This
    /// only works with functions defined within an asynchronous store. Also
    /// panics if `store` does not own this function.
    #[cfg(feature = "async")]
    pub async fn call_async<T>(
        &self,
        mut store: impl AsContextMut<Data = T>,
        params: Params,
    ) -> Result<Return>
    where
        T: Send,
        Params: Send + Sync,
        Return: Send + Sync,
    {
        let mut store = store.as_context_mut();
        assert!(
            store.0.async_support(),
            "cannot use `call_async` when async support is not enabled on the config"
        );
        // Run the synchronous call path on a fiber so that async yields can
        // happen while the wasm executes. The outer `?` propagates fiber-level
        // errors; the inner `Result` is the call's own result.
        store
            .on_fiber(|store| self.call_impl(store, params))
            .await?
    }

    /// Start concurrent call to this function.
    ///
    /// Unlike [`Self::call`] and [`Self::call_async`] (both of which require
    /// exclusive access to the store until the completion of the call), calls
    /// made using this method may run concurrently with other calls to the same
    /// instance.
    #[cfg(feature = "component-model-async")]
    pub async fn call_concurrent<T: Send>(
        self,
        mut store: impl AsContextMut<Data = T>,
        params: Params,
    ) -> Result<Promise<Return>>
    where
        Params: Send + Sync + 'static,
        Return: Send + Sync + 'static,
    {
        let store = store.as_context_mut();
        assert!(
            store.0.async_support(),
            "cannot use `call_concurrent` when async support is not enabled on the config"
        );
        // NOTE(review): concurrent calls are not implemented yet; `params` is
        // deliberately discarded until the `todo!` below is filled in.
        _ = params;
        todo!()
    }

    fn call_impl(&self, mut store: impl AsContextMut, params: Params) -> Result<Return> {
        let store = &mut store.as_context_mut();
        // Note that this is in theory simpler than it might read at this time.
        // Here we're doing a runtime dispatch on the `flatten_count` for the
        // params/results to see whether they're inbounds. This creates 4 cases
        // to handle. In reality this is a highly optimizable branch where LLVM
        // will easily figure out that only one branch here is taken.
        //
        // Otherwise this current construction is done to ensure that the stack
        // space reserved for the params/results is always of the appropriate
        // size (as the params/results needed differ depending on the "flatten"
        // count)
        if Params::flatten_count() <= MAX_FLAT_PARAMS {
            if Return::flatten_count() <= MAX_FLAT_RESULTS {
                self.func.call_raw(
                    store,
                    &params,
                    Self::lower_stack_args,
                    Self::lift_stack_result,
                )
            } else {
                self.func.call_raw(
                    store,
                    &params,
                    Self::lower_stack_args,
                    Self::lift_heap_result,
                )
            }
        } else {
            if Return::flatten_count() <= MAX_FLAT_RESULTS {
                self.func.call_raw(
                    store,
                    &params,
                    Self::lower_heap_args,
                    Self::lift_stack_result,
                )
            } else {
                self.func.call_raw(
                    store,
                    &params,
                    Self::lower_heap_args,
                    Self::lift_heap_result,
                )
            }
        }
    }

    /// Lower parameters directly onto the stack specified by the `dst`
    /// location.
    ///
    /// This is only valid to call when the "flatten count" is small enough, or
    /// when the canonical ABI says arguments go through the stack rather than
    /// the heap.
    fn lower_stack_args<T>(
        cx: &mut LowerContext<'_, T>,
        params: &Params,
        ty: InterfaceType,
        dst: &mut MaybeUninit<Params::Lower>,
    ) -> Result<()> {
        assert!(Params::flatten_count() <= MAX_FLAT_PARAMS);
        params.lower(cx, ty, dst)?;
        Ok(())
    }

    /// Lower parameters onto a heap-allocated location.
    ///
    /// This is used when the stack space to be used for the arguments is above
    /// the `MAX_FLAT_PARAMS` threshold. Here the wasm's `realloc` function is
    /// invoked to allocate space and then parameters are stored at that heap
    /// pointer location.
    fn lower_heap_args<T>(
        cx: &mut LowerContext<'_, T>,
        params: &Params,
        ty: InterfaceType,
        dst: &mut MaybeUninit<ValRaw>,
    ) -> Result<()> {
        assert!(Params::flatten_count() > MAX_FLAT_PARAMS);

        // Memory must exist via validation if the arguments are stored on the
        // heap, so we can create a `MemoryMut` at this point. Afterwards
        // `realloc` is used to allocate space for all the arguments and then
        // they're all stored in linear memory.
        //
        // Note that `realloc` will bake in a check that the returned pointer is
        // in-bounds.
        let ptr = cx.realloc(0, 0, Params::ALIGN32, Params::SIZE32)?;
        params.store(cx, ty, ptr)?;

        // Note that the pointer here is stored as a 64-bit integer. This allows
        // this to work with either 32 or 64-bit memories. For a 32-bit memory
        // it'll just ignore the upper 32 zero bits, and for 64-bit memories
        // this'll have the full 64-bits. Note that for 32-bit memories the call
        // to `realloc` above guarantees that the `ptr` is in-bounds meaning
        // that we will know that the zero-extended upper bits of `ptr` are
        // guaranteed to be zero.
        //
        // This comment about 64-bit integers is also referred to below with
        // "WRITEPTR64".
        dst.write(ValRaw::i64(ptr as i64));

        Ok(())
    }

    /// Lift the result of a function directly from the stack result.
    ///
    /// This is only used when the result fits in the maximum number of stack
    /// slots.
    fn lift_stack_result(
        cx: &mut LiftContext<'_>,
        ty: InterfaceType,
        dst: &Return::Lower,
    ) -> Result<Return> {
        assert!(Return::flatten_count() <= MAX_FLAT_RESULTS);
        Return::lift(cx, ty, dst)
    }

    /// Lift the result of a function where the result is stored indirectly on
    /// the heap.
    fn lift_heap_result(
        cx: &mut LiftContext<'_>,
        ty: InterfaceType,
        dst: &ValRaw,
    ) -> Result<Return> {
        assert!(Return::flatten_count() > MAX_FLAT_RESULTS);
        // FIXME(#4311): needs to read an i64 for memory64
        let ptr = usize::try_from(dst.get_u32())?;
        // The canonical ABI requires the return area to be aligned for the
        // returned type; reject a misaligned pointer from the guest.
        if ptr % usize::try_from(Return::ALIGN32)? != 0 {
            bail!("return pointer not aligned");
        }

        // Bounds-check the `SIZE32`-byte window before loading from it.
        let bytes = cx
            .memory()
            .get(ptr..)
            .and_then(|b| b.get(..Return::SIZE32))
            .ok_or_else(|| anyhow::anyhow!("pointer out of bounds of memory"))?;
        Return::load(cx, ty, bytes)
    }

    /// See [`Func::post_return`]
    pub fn post_return(&self, store: impl AsContextMut) -> Result<()> {
        self.func.post_return(store)
    }

    /// See [`Func::post_return_async`]
    #[cfg(feature = "async")]
    pub async fn post_return_async<T: Send>(
        &self,
        store: impl AsContextMut<Data = T>,
    ) -> Result<()> {
        self.func.post_return_async(store).await
    }
}
373
/// A trait representing a static list of named types that can be passed to or
/// returned from a [`TypedFunc`].
///
/// This trait is implemented for a number of tuple types and is not expected
/// to be implemented externally. The contents of this trait are hidden as it's
/// intended to be an implementation detail of Wasmtime. The contents of this
/// trait are not covered by Wasmtime's stability guarantees.
///
/// For more information about this trait see [`Func::typed`] and
/// [`TypedFunc`].
//
// Note that this is an `unsafe` trait, and the unsafety means that
// implementations of this trait must be correct or otherwise [`TypedFunc`]
// would not be memory safe. The main reason this is `unsafe` is the
// `typecheck` function which must operate correctly relative to the `AsTuple`
// interpretation of the implementor.
pub unsafe trait ComponentNamedList: ComponentType {}
391
/// A trait representing types which can be passed to and read from components
/// with the canonical ABI.
///
/// This trait is implemented for Rust types which can be communicated to
/// components. The [`Func::typed`] and [`TypedFunc`] Rust items are the main
/// consumers of this trait.
///
/// Supported Rust types include:
///
/// | Component Model Type              | Rust Type                            |
/// |-----------------------------------|--------------------------------------|
/// | `{s,u}{8,16,32,64}`               | `{i,u}{8,16,32,64}`                  |
/// | `f{32,64}`                        | `f{32,64}`                           |
/// | `bool`                            | `bool`                               |
/// | `char`                            | `char`                               |
/// | `tuple<A, B>`                     | `(A, B)`                             |
/// | `option<T>`                       | `Option<T>`                          |
/// | `result`                          | `Result<(), ()>`                     |
/// | `result<T>`                       | `Result<T, ()>`                      |
/// | `result<_, E>`                    | `Result<(), E>`                      |
/// | `result<T, E>`                    | `Result<T, E>`                       |
/// | `string`                          | `String`, `&str`, or [`WasmStr`]     |
/// | `list<T>`                         | `Vec<T>`, `&[T]`, or [`WasmList`]    |
/// | `own<T>`, `borrow<T>`             | [`Resource<T>`] or [`ResourceAny`]   |
/// | `record`                          | [`#[derive(ComponentType)]`][d-cm]   |
/// | `variant`                         | [`#[derive(ComponentType)]`][d-cm]   |
/// | `enum`                            | [`#[derive(ComponentType)]`][d-cm]   |
/// | `flags`                           | [`flags!`][f-m]                      |
///
/// [`Resource<T>`]: crate::component::Resource
/// [`ResourceAny`]: crate::component::ResourceAny
/// [d-cm]: macro@crate::component::ComponentType
/// [f-m]: crate::component::flags
///
/// Rust standard library pointers such as `&T`, `Box<T>`, `Rc<T>`, and `Arc<T>`
/// additionally represent whatever type `T` represents in the component model.
/// Note that types such as `record`, `variant`, `enum`, and `flags` are
/// generated by the embedder at compile time. These macros derive
/// implementation of this trait for custom types to map to custom types in the
/// component model. Note that for `record`, `variant`, `enum`, and `flags`
/// those types are often generated by the
/// [`bindgen!`](crate::component::bindgen) macro from WIT definitions.
///
/// Types that implement [`ComponentType`] are used for `Params` and `Return`
/// in [`TypedFunc`] and [`Func::typed`].
///
/// The contents of this trait are hidden as it's intended to be an
/// implementation detail of Wasmtime. The contents of this trait are not
/// covered by Wasmtime's stability guarantees.
//
// Note that this is an `unsafe` trait as `TypedFunc`'s safety heavily relies on
// the correctness of the implementations of this trait. Some ways in which this
// trait must be correct to be safe are:
//
// * The `Lower` associated type must be a `ValRaw` sequence. It doesn't have to
//   literally be `[ValRaw; N]` but when laid out in memory it must be adjacent
//   `ValRaw` values and have a multiple of the size of `ValRaw` and the same
//   alignment.
//
// * The `lower` function must initialize the bits within `Lower` that are going
//   to be read by the trampoline that's used to enter core wasm. A trampoline
//   is passed `*mut Lower` and will read the canonical abi arguments in
//   sequence, so all of the bits must be correctly initialized.
//
// * The `size` and `align` functions must be correct for this value stored in
//   the canonical ABI. The `Cursor<T>` iteration of these bytes rely on this
//   for correctness as they otherwise eschew bounds-checking.
//
// There are likely some other correctness issues which aren't documented as
// well, this isn't intended to be an exhaustive list. It suffices to say,
// though, that correctness bugs in this trait implementation are highly likely
// to lead to security bugs, which again leads to the `unsafe` in the trait.
//
// Also note that this trait specifically is not sealed because we have a proc
// macro that generates implementations of this trait for external types in a
// `#[derive]`-like fashion.
pub unsafe trait ComponentType {
    /// Representation of the "lowered" form of this component value.
    ///
    /// Lowerings lower into core wasm values which are represented by `ValRaw`.
    /// This `Lower` type must be a list of `ValRaw` as either a literal array
    /// or a struct where every field is a `ValRaw`. This must be `Copy` (as
    /// `ValRaw` is `Copy`) and support all byte patterns. This being correct is
    /// one reason why the trait is unsafe.
    #[doc(hidden)]
    type Lower: Copy;

    /// The information about this type's canonical ABI (size/align/etc).
    #[doc(hidden)]
    const ABI: CanonicalAbiInfo;

    /// Canonical-ABI size of this type in a 32-bit memory, derived from `ABI`.
    #[doc(hidden)]
    const SIZE32: usize = Self::ABI.size32 as usize;
    /// Canonical-ABI alignment of this type in a 32-bit memory, derived from
    /// `ABI`.
    #[doc(hidden)]
    const ALIGN32: u32 = Self::ABI.align32;

    /// Defaults to `false`; overridden by implementations for Rust unit-like
    /// types (presumably used to optimize those cases — confirmed by the
    /// implementations elsewhere in this file).
    #[doc(hidden)]
    const IS_RUST_UNIT_TYPE: bool = false;

    /// Returns the number of core wasm abi values will be used to represent
    /// this type in its lowered form.
    ///
    /// This divides the size of `Self::Lower` by the size of `ValRaw`.
    #[doc(hidden)]
    fn flatten_count() -> usize {
        // These two properties are part of the unsafe contract of the trait
        // (see the comment block above); double-check them here.
        assert!(mem::size_of::<Self::Lower>() % mem::size_of::<ValRaw>() == 0);
        assert!(mem::align_of::<Self::Lower>() == mem::align_of::<ValRaw>());
        mem::size_of::<Self::Lower>() / mem::size_of::<ValRaw>()
    }

    /// Performs a type-check to see whether this component value type matches
    /// the interface type `ty` provided.
    #[doc(hidden)]
    fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()>;
}
507
/// Internal trait for component-model `variant`-shaped types, capturing the
/// static ABI information of each case.
#[doc(hidden)]
pub unsafe trait ComponentVariant: ComponentType {
    /// ABI info for each case's payload; `None` for payload-less cases.
    const CASES: &'static [Option<CanonicalAbiInfo>];
    /// Variant-level layout info computed statically from `CASES`.
    const INFO: VariantInfo = VariantInfo::new_static(Self::CASES);
    /// Byte offset of the payload within the variant's 32-bit representation.
    const PAYLOAD_OFFSET32: usize = Self::INFO.payload_offset32 as usize;
}
514
/// Host types which can be passed to WebAssembly components.
///
/// This trait is implemented for all types that can be passed to components
/// either as parameters of component exports or returns of component imports.
/// This trait represents the ability to convert from the native host
/// representation to the canonical ABI.
///
/// Built-in types to Rust such as `Option<T>` implement this trait as
/// appropriate. For a mapping of component model to Rust types see
/// [`ComponentType`].
///
/// For user-defined types, for example `record` types mapped to Rust `struct`s,
/// this crate additionally has
/// [`#[derive(Lower)]`](macro@crate::component::Lower).
///
/// Note that like [`ComponentType`] the definition of this trait is intended to
/// be an internal implementation detail of Wasmtime at this time. It's
/// recommended to use the `#[derive(Lower)]` implementation instead.
pub unsafe trait Lower: ComponentType {
    /// Performs the "lower" function in the canonical ABI.
    ///
    /// This method will lower the current value into a component. The `lower`
    /// function performs a "flat" lowering into the `dst` specified which is
    /// allowed to be uninitialized entering this method but is guaranteed to be
    /// fully initialized if the method returns `Ok(())`.
    ///
    /// The `cx` context provided is the context within which this lowering is
    /// happening. This contains information such as canonical options specified
    /// (e.g. string encodings, memories, etc), the store itself, along with
    /// type information.
    ///
    /// The `ty` parameter is the destination type that is being lowered into.
    /// For example this is the component's "view" of the type that is being
    /// lowered. This is guaranteed to have passed a `typecheck` earlier.
    ///
    /// This will only be called if `typecheck` passes for `Op::Lower`.
    #[doc(hidden)]
    fn lower<T>(
        &self,
        cx: &mut LowerContext<'_, T>,
        ty: InterfaceType,
        dst: &mut MaybeUninit<Self::Lower>,
    ) -> Result<()>;

    /// Performs the "store" operation in the canonical ABI.
    ///
    /// This function will store `self` into the linear memory described by
    /// `cx` at the `offset` provided.
    ///
    /// It is expected that `offset` is a valid offset in memory for
    /// `Self::SIZE32` bytes. At this time that's not an unsafe contract as it's
    /// always re-checked on all stores, but this is something that will need to
    /// be improved in the future to remove extra bounds checks. For now this
    /// function will panic if there's a bug and `offset` isn't valid within
    /// memory.
    ///
    /// The `ty` type information passed here is the same as the type
    /// information passed to `lower` above, and is the component's own view of
    /// what the resulting type should be.
    ///
    /// This will only be called if `typecheck` passes for `Op::Lower`.
    #[doc(hidden)]
    fn store<T>(
        &self,
        cx: &mut LowerContext<'_, T>,
        ty: InterfaceType,
        offset: usize,
    ) -> Result<()>;

    /// Provided method to lower a list of `Self` into memory.
    ///
    /// Requires that `offset` has already been checked for alignment and
    /// validity in terms of being in-bounds, otherwise this may panic.
    ///
    /// This is primarily here to get overridden for implementations of integers
    /// which can avoid some extra fluff and use a pattern that's more easily
    /// optimizable by LLVM.
    #[doc(hidden)]
    fn store_list<T>(
        cx: &mut LowerContext<'_, T>,
        ty: InterfaceType,
        mut offset: usize,
        items: &[Self],
    ) -> Result<()>
    where
        Self: Sized,
    {
        // Default: store each element contiguously, advancing by the
        // canonical-ABI size of one element per iteration.
        for item in items {
            item.store(cx, ty, offset)?;
            offset += Self::SIZE32;
        }
        Ok(())
    }
}
609
/// Host types which can be created from the canonical ABI.
///
/// This is the mirror of the [`Lower`] trait where it represents the capability
/// of acquiring items from WebAssembly and passing them to the host.
///
/// Built-in types to Rust such as `Option<T>` implement this trait as
/// appropriate. For a mapping of component model to Rust types see
/// [`ComponentType`].
///
/// For user-defined types, for example `record` types mapped to Rust `struct`s,
/// this crate additionally has
/// [`#[derive(Lift)]`](macro@crate::component::Lift).
///
/// Note that like [`ComponentType`] the definition of this trait is intended to
/// be an internal implementation detail of Wasmtime at this time. It's
/// recommended to use the `#[derive(Lift)]` implementation instead.
pub unsafe trait Lift: Sized + ComponentType {
    /// Performs the "lift" operation in the canonical ABI.
    ///
    /// This function performs a "flat" lift operation from the `src` specified
    /// which is a sequence of core wasm values. The lifting operation will
    /// validate core wasm values and produce a `Self` on success.
    ///
    /// The `cx` provided contains contextual information such as the store
    /// that's being loaded from, canonical options, and type information.
    ///
    /// The `ty` parameter is the origin component's specification for what the
    /// type that is being lifted is. For example this is the record type or the
    /// resource type that is being lifted.
    ///
    /// Note that this has a default implementation but if `typecheck` passes
    /// for `Op::Lift` this needs to be overridden.
    #[doc(hidden)]
    fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self>;

    /// Performs the "load" operation in the canonical ABI.
    ///
    /// This will read the `bytes` provided, which are a sub-slice into the
    /// linear memory described by `cx`. The `bytes` array provided is
    /// guaranteed to be `Self::SIZE32` bytes large. All of memory is then also
    /// available through `cx` for bounds-checks and such as necessary for
    /// strings/lists.
    ///
    /// The `ty` argument is the type that's being loaded, as described by the
    /// original component.
    ///
    /// Note that this has a default implementation but if `typecheck` passes
    /// for `Op::Lift` this needs to be overridden.
    #[doc(hidden)]
    fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self>;

    /// Converts `list` into a `Vec<T>`, used in `Lift for Vec<T>`.
    ///
    /// This is primarily here to get overridden for implementations of integers
    /// which can avoid some extra fluff and use a pattern that's more easily
    /// optimizable by LLVM.
    #[doc(hidden)]
    fn load_list(cx: &mut LiftContext<'_>, list: &WasmList<Self>) -> Result<Vec<Self>>
    where
        Self: Sized,
    {
        (0..list.len)
            // Every `index` here is within `0..list.len`, so `get_from_store`
            // is expected to return `Some`; `unwrap` signals a bug otherwise.
            .map(|index| list.get_from_store(cx, index).unwrap())
            .collect()
    }
}
676
// Macro to help generate "forwarding implementations" of `ComponentType` to
// another type, used for wrappers in Rust like `&T`, `Box<T>`, etc. Note that
// these wrappers only implement lowering because lifting native Rust types
// cannot be done.
macro_rules! forward_type_impls {
    ($(($($generics:tt)*) $a:ty => $b:ty,)*) => ($(
        unsafe impl <$($generics)*> ComponentType for $a {
            // Forward the lowered representation and ABI info verbatim to the
            // wrapped type `$b`.
            type Lower = <$b as ComponentType>::Lower;

            const ABI: CanonicalAbiInfo = <$b as ComponentType>::ABI;

            #[inline]
            fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()> {
                <$b as ComponentType>::typecheck(ty, types)
            }
        }
    )*)
}
695
696forward_type_impls! {
697 (T: ComponentType + ?Sized) &'_ T => T,
698 (T: ComponentType + ?Sized) Box<T> => T,
699 (T: ComponentType + ?Sized) alloc::rc::Rc<T> => T,
700 (T: ComponentType + ?Sized) alloc::sync::Arc<T> => T,
701 () String => str,
702 (T: ComponentType) Vec<T> => [T],
703}
704
// Macro generating forwarding implementations of `Lower`: each wrapper type
// `$a` delegates both flat lowering and memory stores to the wrapped type `$b`.
macro_rules! forward_lowers {
    ($(($($generics:tt)*) $a:ty => $b:ty,)*) => ($(
        unsafe impl <$($generics)*> Lower for $a {
            fn lower<U>(
                &self,
                cx: &mut LowerContext<'_, U>,
                ty: InterfaceType,
                dst: &mut MaybeUninit<Self::Lower>,
            ) -> Result<()> {
                <$b as Lower>::lower(self, cx, ty, dst)
            }

            fn store<U>(
                &self,
                cx: &mut LowerContext<'_, U>,
                ty: InterfaceType,
                offset: usize,
            ) -> Result<()> {
                <$b as Lower>::store(self, cx, ty, offset)
            }
        }
    )*)
}
728
729forward_lowers! {
730 (T: Lower + ?Sized) &'_ T => T,
731 (T: Lower + ?Sized) Box<T> => T,
732 (T: Lower + ?Sized) alloc::rc::Rc<T> => T,
733 (T: Lower + ?Sized) alloc::sync::Arc<T> => T,
734 () String => str,
735 (T: Lower) Vec<T> => [T],
736}
737
// Macro generating `Lift` implementations for owned string types: lift/load a
// `WasmStr` view first, then copy it out of guest memory into the owned type.
macro_rules! forward_string_lifts {
    ($($a:ty,)*) => ($(
        unsafe impl Lift for $a {
            #[inline]
            fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
                Ok(<WasmStr as Lift>::lift(cx, ty, src)?.to_str_from_memory(cx.memory())?.into())
            }

            #[inline]
            fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
                Ok(<WasmStr as Lift>::load(cx, ty, bytes)?.to_str_from_memory(cx.memory())?.into())
            }
        }
    )*)
}
753
754forward_string_lifts! {
755 Box<str>,
756 alloc::rc::Rc<str>,
757 alloc::sync::Arc<str>,
758 String,
759}
760
// Macro generating `Lift` implementations for owned list containers: lift/load
// a `WasmList<T>` view first, then materialize it via `T::load_list` and
// convert into the owning container.
macro_rules! forward_list_lifts {
    ($($a:ty,)*) => ($(
        unsafe impl <T: Lift> Lift for $a {
            fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
                let list = <WasmList::<T> as Lift>::lift(cx, ty, src)?;
                Ok(T::load_list(cx, &list)?.into())
            }

            fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
                let list = <WasmList::<T> as Lift>::load(cx, ty, bytes)?;
                Ok(T::load_list(cx, &list)?.into())
            }
        }
    )*)
}
776
777forward_list_lifts! {
778 Box<[T]>,
779 alloc::rc::Rc<[T]>,
780 alloc::sync::Arc<[T]>,
781 Vec<T>,
782}
783
784// Macro to help generate `ComponentType` implementations for primitive types
785// such as integers, char, bool, etc.
macro_rules! integers {
    ($($primitive:ident = $ty:ident in $field:ident/$get:ident with abi:$abi:ident,)*) => ($(
        unsafe impl ComponentType for $primitive {
            // All integers lower to exactly one core wasm value.
            type Lower = ValRaw;

            const ABI: CanonicalAbiInfo = CanonicalAbiInfo::$abi;

            fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
                match ty {
                    InterfaceType::$ty => Ok(()),
                    other => bail!("expected `{}` found `{}`", desc(&InterfaceType::$ty), desc(other))
                }
            }
        }

        unsafe impl Lower for $primitive {
            #[inline]
            #[allow(trivial_numeric_casts)]
            fn lower<T>(
                &self,
                _cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                dst: &mut MaybeUninit<Self::Lower>,
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                // 8/16-bit types are widened into the 32-bit `ValRaw` field
                // here; the cast is trivial for the 32/64-bit types.
                dst.write(ValRaw::$field(*self as $field));
                Ok(())
            }

            #[inline]
            fn store<T>(
                &self,
                cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                offset: usize,
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                debug_assert!(offset % Self::SIZE32 == 0);
                // Guest memory is always little-endian per the canonical ABI.
                *cx.get(offset) = self.to_le_bytes();
                Ok(())
            }

            fn store_list<T>(
                cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                offset: usize,
                items: &[Self],
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));

                // Double-check that the CM alignment is at least the host's
                // alignment for this type which should be true for all
                // platforms.
                assert!((Self::ALIGN32 as usize) >= mem::align_of::<Self>());

                // Slice `cx`'s memory to the window that we'll be modifying.
                // This should all have already been verified in terms of
                // alignment and sizing meaning that these assertions here are
                // not truly necessary but are instead double-checks.
                //
                // Note that we're casting a `[u8]` slice to `[Self]` with
                // `align_to_mut` which is not safe in general but is safe in
                // our specific case as all `u8` patterns are valid `Self`
                // patterns since `Self` is an integral type.
                let dst = &mut cx.as_slice_mut()[offset..][..items.len() * Self::SIZE32];
                let (before, middle, end) = unsafe { dst.align_to_mut::<Self>() };
                assert!(before.is_empty() && end.is_empty());
                assert_eq!(middle.len(), items.len());

                // And with all that out of the way perform the copying loop.
                // This is not a `copy_from_slice` because endianness needs to
                // be handled here, but LLVM should pretty easily transform this
                // into a memcpy on little-endian platforms.
                for (dst, src) in middle.iter_mut().zip(items) {
                    *dst = src.to_le();
                }
                Ok(())
            }
        }

        unsafe impl Lift for $primitive {
            #[inline]
            #[allow(trivial_numeric_casts, clippy::cast_possible_truncation)]
            fn lift(_cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                // Narrowing cast back down for the 8/16-bit types which were
                // widened by `lower` above.
                Ok(src.$get() as $primitive)
            }

            #[inline]
            fn load(_cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                debug_assert!((bytes.as_ptr() as usize) % Self::SIZE32 == 0);
                Ok($primitive::from_le_bytes(bytes.try_into().unwrap()))
            }

            // Bulk-decode an entire list by viewing guest memory as a slice of
            // little-endian integers and byte-swapping as necessary.
            fn load_list(cx: &mut LiftContext<'_>, list: &WasmList<Self>) -> Result<Vec<Self>> {
                Ok(
                    list._as_le_slice(cx.memory())
                        .iter()
                        .map(|i| Self::from_le(*i))
                        .collect(),
                )
            }
        }
    )*)
}
892
// host primitive = component-model type, stored in this `ValRaw` field and
// read with this getter, with the listed canonical ABI size/alignment.
integers! {
    i8 = S8 in i32/get_i32 with abi:SCALAR1,
    u8 = U8 in u32/get_u32 with abi:SCALAR1,
    i16 = S16 in i32/get_i32 with abi:SCALAR2,
    u16 = U16 in u32/get_u32 with abi:SCALAR2,
    i32 = S32 in i32/get_i32 with abi:SCALAR4,
    u32 = U32 in u32/get_u32 with abi:SCALAR4,
    i64 = S64 in i64/get_i64 with abi:SCALAR8,
    u64 = U64 in u64/get_u64 with abi:SCALAR8,
}
903
// Macro to generate `ComponentType`/`Lower`/`Lift` for the float primitives.
// Floats are transferred via their raw bit pattern (`to_bits`/`from_bits`) so
// the exact bits, including any NaN payload, pass through unchanged.
macro_rules! floats {
    ($($float:ident/$get_float:ident = $ty:ident with abi:$abi:ident)*) => ($(const _: () = {
        unsafe impl ComponentType for $float {
            type Lower = ValRaw;

            const ABI: CanonicalAbiInfo = CanonicalAbiInfo::$abi;

            fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
                match ty {
                    InterfaceType::$ty => Ok(()),
                    other => bail!("expected `{}` found `{}`", desc(&InterfaceType::$ty), desc(other))
                }
            }
        }

        unsafe impl Lower for $float {
            #[inline]
            fn lower<T>(
                &self,
                _cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                dst: &mut MaybeUninit<Self::Lower>,
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                // Store the raw bit pattern, not the numeric value.
                dst.write(ValRaw::$float(self.to_bits()));
                Ok(())
            }

            #[inline]
            fn store<T>(
                &self,
                cx: &mut LowerContext<'_, T>,
                ty: InterfaceType,
                offset: usize,
            ) -> Result<()> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                debug_assert!(offset % Self::SIZE32 == 0);
                let ptr = cx.get(offset);
                *ptr = self.to_bits().to_le_bytes();
                Ok(())
            }
        }

        unsafe impl Lift for $float {
            #[inline]
            fn lift(_cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                Ok($float::from_bits(src.$get_float()))
            }

            #[inline]
            fn load(_cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
                debug_assert!(matches!(ty, InterfaceType::$ty));
                debug_assert!((bytes.as_ptr() as usize) % Self::SIZE32 == 0);
                Ok($float::from_le_bytes(bytes.try_into().unwrap()))
            }
        }
    };)*)
}
963
// host float / `ValRaw` getter = component-model type with ABI info.
floats! {
    f32/get_f32 = Float32 with abi:SCALAR4
    f64/get_f64 = Float64 with abi:SCALAR8
}
968
969unsafe impl ComponentType for bool {
970 type Lower = ValRaw;
971
972 const ABI: CanonicalAbiInfo = CanonicalAbiInfo::SCALAR1;
973
974 fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
975 match ty {
976 InterfaceType::Bool => Ok(()),
977 other => bail!("expected `bool` found `{}`", desc(other)),
978 }
979 }
980}
981
982unsafe impl Lower for bool {
983 fn lower<T>(
984 &self,
985 _cx: &mut LowerContext<'_, T>,
986 ty: InterfaceType,
987 dst: &mut MaybeUninit<Self::Lower>,
988 ) -> Result<()> {
989 debug_assert!(matches!(ty, InterfaceType::Bool));
990 dst.write(ValRaw::i32(*self as i32));
991 Ok(())
992 }
993
994 fn store<T>(
995 &self,
996 cx: &mut LowerContext<'_, T>,
997 ty: InterfaceType,
998 offset: usize,
999 ) -> Result<()> {
1000 debug_assert!(matches!(ty, InterfaceType::Bool));
1001 debug_assert!(offset % Self::SIZE32 == 0);
1002 cx.get::<1>(offset)[0] = *self as u8;
1003 Ok(())
1004 }
1005}
1006
1007unsafe impl Lift for bool {
1008 #[inline]
1009 fn lift(_cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
1010 debug_assert!(matches!(ty, InterfaceType::Bool));
1011 match src.get_i32() {
1012 0 => Ok(false),
1013 _ => Ok(true),
1014 }
1015 }
1016
1017 #[inline]
1018 fn load(_cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
1019 debug_assert!(matches!(ty, InterfaceType::Bool));
1020 match bytes[0] {
1021 0 => Ok(false),
1022 _ => Ok(true),
1023 }
1024 }
1025}
1026
1027unsafe impl ComponentType for char {
1028 type Lower = ValRaw;
1029
1030 const ABI: CanonicalAbiInfo = CanonicalAbiInfo::SCALAR4;
1031
1032 fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
1033 match ty {
1034 InterfaceType::Char => Ok(()),
1035 other => bail!("expected `char` found `{}`", desc(other)),
1036 }
1037 }
1038}
1039
1040unsafe impl Lower for char {
1041 #[inline]
1042 fn lower<T>(
1043 &self,
1044 _cx: &mut LowerContext<'_, T>,
1045 ty: InterfaceType,
1046 dst: &mut MaybeUninit<Self::Lower>,
1047 ) -> Result<()> {
1048 debug_assert!(matches!(ty, InterfaceType::Char));
1049 dst.write(ValRaw::u32(u32::from(*self)));
1050 Ok(())
1051 }
1052
1053 #[inline]
1054 fn store<T>(
1055 &self,
1056 cx: &mut LowerContext<'_, T>,
1057 ty: InterfaceType,
1058 offset: usize,
1059 ) -> Result<()> {
1060 debug_assert!(matches!(ty, InterfaceType::Char));
1061 debug_assert!(offset % Self::SIZE32 == 0);
1062 *cx.get::<4>(offset) = u32::from(*self).to_le_bytes();
1063 Ok(())
1064 }
1065}
1066
1067unsafe impl Lift for char {
1068 #[inline]
1069 fn lift(_cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
1070 debug_assert!(matches!(ty, InterfaceType::Char));
1071 Ok(char::try_from(src.get_u32())?)
1072 }
1073
1074 #[inline]
1075 fn load(_cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
1076 debug_assert!(matches!(ty, InterfaceType::Char));
1077 debug_assert!((bytes.as_ptr() as usize) % Self::SIZE32 == 0);
1078 let bits = u32::from_le_bytes(bytes.try_into().unwrap());
1079 Ok(char::try_from(bits)?)
1080 }
1081}
1082
// FIXME(#4311): these probably need different constants for memory64
//
// Tag bit OR'd into a returned length by the compact (latin1+utf16) encoding
// to indicate the string was actually stored as utf16.
const UTF16_TAG: usize = 1 << 31;
// Maximum byte length of a string copied into wasm; lengths must fit in 31
// bits so that the tag bit above remains available.
const MAX_STRING_BYTE_LENGTH: usize = (1 << 31) - 1;
1086
1087// Note that this is similar to `ComponentType for WasmStr` except it can only
1088// be used for lowering, not lifting.
1089unsafe impl ComponentType for str {
1090 type Lower = [ValRaw; 2];
1091
1092 const ABI: CanonicalAbiInfo = CanonicalAbiInfo::POINTER_PAIR;
1093
1094 fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
1095 match ty {
1096 InterfaceType::String => Ok(()),
1097 other => bail!("expected `string` found `{}`", desc(other)),
1098 }
1099 }
1100}
1101
1102unsafe impl Lower for str {
1103 fn lower<T>(
1104 &self,
1105 cx: &mut LowerContext<'_, T>,
1106 ty: InterfaceType,
1107 dst: &mut MaybeUninit<[ValRaw; 2]>,
1108 ) -> Result<()> {
1109 debug_assert!(matches!(ty, InterfaceType::String));
1110 let (ptr, len) = lower_string(cx, self)?;
1111 // See "WRITEPTR64" above for why this is always storing a 64-bit
1112 // integer.
1113 map_maybe_uninit!(dst[0]).write(ValRaw::i64(ptr as i64));
1114 map_maybe_uninit!(dst[1]).write(ValRaw::i64(len as i64));
1115 Ok(())
1116 }
1117
1118 fn store<T>(
1119 &self,
1120 cx: &mut LowerContext<'_, T>,
1121 ty: InterfaceType,
1122 offset: usize,
1123 ) -> Result<()> {
1124 debug_assert!(matches!(ty, InterfaceType::String));
1125 debug_assert!(offset % (Self::ALIGN32 as usize) == 0);
1126 let (ptr, len) = lower_string(cx, self)?;
1127 // FIXME(#4311): needs memory64 handling
1128 *cx.get(offset + 0) = u32::try_from(ptr).unwrap().to_le_bytes();
1129 *cx.get(offset + 4) = u32::try_from(len).unwrap().to_le_bytes();
1130 Ok(())
1131 }
1132}
1133
/// Copies `string` from the host into the guest's linear memory using the
/// guest's configured string encoding, returning the `(ptr, len)` pair that
/// describes the resulting allocation.
///
/// The returned `len` counts code units: bytes for utf8, 16-bit units for
/// utf16, and for the compact encoding either latin1 bytes or utf16 units
/// with `UTF16_TAG` OR'd in to distinguish the two.
fn lower_string<T>(cx: &mut LowerContext<'_, T>, string: &str) -> Result<(usize, usize)> {
    // Note that in general the wasm module can't assume anything about what the
    // host strings are encoded as. Additionally hosts are allowed to have
    // differently-encoded strings at runtime. Finally when copying a string
    // into wasm it's somewhat strict in the sense that the various patterns of
    // allocation and such are already dictated for us.
    //
    // In general what this means is that when copying a string from the host
    // into the destination we need to follow one of the cases of copying into
    // WebAssembly. It doesn't particularly matter which case as long as it ends
    // up in the right encoding. For example a destination encoding of
    // latin1+utf16 has a number of ways to get copied into and we do something
    // here that isn't the default "utf8 to latin1+utf16" since we have access
    // to simd-accelerated helpers in the `encoding_rs` crate. This is ok though
    // because we can fake that the host string was already stored in latin1
    // format and follow that copy pattern instead.
    match cx.options.string_encoding() {
        // This corresponds to `store_string_copy` in the canonical ABI where
        // the host's representation is utf-8 and the wasm module wants utf-8 so
        // a copy is all that's needed (and the `realloc` can be precise for the
        // initial memory allocation).
        StringEncoding::Utf8 => {
            if string.len() > MAX_STRING_BYTE_LENGTH {
                bail!(
                    "string length of {} too large to copy into wasm",
                    string.len()
                );
            }
            let ptr = cx.realloc(0, 0, 1, string.len())?;
            cx.as_slice_mut()[ptr..][..string.len()].copy_from_slice(string.as_bytes());
            Ok((ptr, string.len()))
        }

        // This corresponds to `store_utf8_to_utf16` in the canonical ABI. Here
        // an over-large allocation is performed and then shrunk afterwards if
        // necessary.
        StringEncoding::Utf16 => {
            // Worst case: every utf-8 byte becomes one 16-bit unit.
            let size = string.len() * 2;
            if size > MAX_STRING_BYTE_LENGTH {
                bail!(
                    "string length of {} too large to copy into wasm",
                    string.len()
                );
            }
            let mut ptr = cx.realloc(0, 0, 2, size)?;
            let mut copied = 0;
            let bytes = &mut cx.as_slice_mut()[ptr..][..size];
            for (u, bytes) in string.encode_utf16().zip(bytes.chunks_mut(2)) {
                let u_bytes = u.to_le_bytes();
                bytes[0] = u_bytes[0];
                bytes[1] = u_bytes[1];
                copied += 1;
            }
            // Shrink the allocation if fewer 16-bit units were needed.
            if (copied * 2) < size {
                ptr = cx.realloc(ptr, size, 2, copied * 2)?;
            }
            Ok((ptr, copied))
        }

        StringEncoding::CompactUtf16 => {
            // This corresponds to `store_string_to_latin1_or_utf16`
            let bytes = string.as_bytes();
            let mut iter = string.char_indices();
            let mut ptr = cx.realloc(0, 0, 2, bytes.len())?;
            let mut dst = &mut cx.as_slice_mut()[ptr..][..bytes.len()];
            // Number of code units written so far (latin1 bytes while in the
            // optimistic latin1 mode below).
            let mut result = 0;
            while let Some((i, ch)) = iter.next() {
                // Test if this `char` fits into the latin1 encoding.
                if let Ok(byte) = u8::try_from(u32::from(ch)) {
                    dst[result] = byte;
                    result += 1;
                    continue;
                }

                // .. if utf16 is forced to be used then the allocation is
                // bumped up to the maximum size.
                let worst_case = bytes
                    .len()
                    .checked_mul(2)
                    .ok_or_else(|| anyhow!("byte length overflow"))?;
                if worst_case > MAX_STRING_BYTE_LENGTH {
                    bail!("byte length too large");
                }
                ptr = cx.realloc(ptr, bytes.len(), 2, worst_case)?;
                dst = &mut cx.as_slice_mut()[ptr..][..worst_case];

                // Previously encoded latin1 bytes are inflated to their 16-bit
                // size for utf16
                for i in (0..result).rev() {
                    dst[2 * i] = dst[i];
                    dst[2 * i + 1] = 0;
                }

                // and then the remainder of the string is encoded.
                for (u, bytes) in string[i..]
                    .encode_utf16()
                    .zip(dst[2 * result..].chunks_mut(2))
                {
                    let u_bytes = u.to_le_bytes();
                    bytes[0] = u_bytes[0];
                    bytes[1] = u_bytes[1];
                    result += 1;
                }
                // Shrink to the actual utf16 size and tag the returned length
                // so the guest knows this is the utf16 representation.
                if worst_case > 2 * result {
                    ptr = cx.realloc(ptr, worst_case, 2, 2 * result)?;
                }
                return Ok((ptr, result | UTF16_TAG));
            }
            // Whole string fit in latin1; shrink the allocation if shorter
            // than the original utf-8 byte length.
            if result < bytes.len() {
                ptr = cx.realloc(ptr, bytes.len(), 2, result)?;
            }
            Ok((ptr, result))
        }
    }
}
1249
1250/// Representation of a string located in linear memory in a WebAssembly
1251/// instance.
1252///
1253/// This type can be used in place of `String` and `str` for string-taking APIs
1254/// in some situations. The purpose of this type is to represent a range of
1255/// validated bytes within a component but does not actually copy the bytes. The
1256/// primary method, [`WasmStr::to_str`], attempts to return a reference to the
1257/// string directly located in the component's memory, avoiding a copy into the
1258/// host if possible.
1259///
1260/// The downside of this type, however, is that accessing a string requires a
1261/// [`Store`](crate::Store) pointer (via [`StoreContext`]). Bindings generated
1262/// by [`bindgen!`](crate::component::bindgen), for example, do not have access
1263/// to [`StoreContext`] and thus can't use this type.
1264///
1265/// This is intended for more advanced use cases such as defining functions
1266/// directly in a [`Linker`](crate::component::Linker). It's expected that in
1267/// the future [`bindgen!`](crate::component::bindgen) will also have a way to
1268/// use this type.
1269///
1270/// This type is used with [`TypedFunc`], for example, when WebAssembly returns
1271/// a string. This type cannot be used to give a string to WebAssembly, instead
1272/// `&str` should be used for that (since it's coming from the host).
1273///
1274/// Note that this type represents an in-bounds string in linear memory, but it
1275/// does not represent a valid string (e.g. valid utf-8). Validation happens
1276/// when [`WasmStr::to_str`] is called.
1277///
1278/// Also note that this type does not implement [`Lower`], it only implements
1279/// [`Lift`].
pub struct WasmStr {
    // Byte offset of the string within the component's linear memory.
    ptr: usize,
    // Length as provided by the guest: a byte count for utf8/latin1, a count
    // of 16-bit units for utf16, possibly tagged with `UTF16_TAG` for the
    // compact encoding.
    len: usize,
    // Canonical ABI options captured at lift time, used later to locate the
    // memory and determine the string encoding.
    options: Options,
}
1285
impl WasmStr {
    // Validates that the `ptr`/`len` pair describes an in-bounds region of the
    // lifting context's memory and captures the options needed to decode the
    // string later.
    fn new(ptr: usize, len: usize, cx: &mut LiftContext<'_>) -> Result<WasmStr> {
        // Convert the guest-provided length (which may count 16-bit units
        // and/or carry the `UTF16_TAG` bit) to a byte length; `None` on
        // arithmetic overflow.
        let byte_len = match cx.options.string_encoding() {
            StringEncoding::Utf8 => Some(len),
            StringEncoding::Utf16 => len.checked_mul(2),
            StringEncoding::CompactUtf16 => {
                if len & UTF16_TAG == 0 {
                    Some(len)
                } else {
                    (len ^ UTF16_TAG).checked_mul(2)
                }
            }
        };
        // Bounds check: `ptr + byte_len` must not overflow and must lie
        // within the current length of linear memory.
        match byte_len.and_then(|len| ptr.checked_add(len)) {
            Some(n) if n <= cx.memory().len() => {}
            _ => bail!("string pointer/length out of bounds of memory"),
        }
        Ok(WasmStr {
            ptr,
            len,
            options: *cx.options,
        })
    }

    /// Returns the underlying string that this cursor points to.
    ///
    /// Note that this will internally decode the string from the wasm's
    /// encoding to utf-8 and additionally perform validation.
    ///
    /// The `store` provided must be the store where this string lives to
    /// access the correct memory.
    ///
    /// # Errors
    ///
    /// Returns an error if the string wasn't encoded correctly (e.g. invalid
    /// utf-8).
    ///
    /// # Panics
    ///
    /// Panics if this string is not owned by `store`.
    //
    // TODO: should add accessors for specifically utf-8 and utf-16 that perhaps
    // in an opt-in basis don't do validation. Additionally there should be some
    // method that returns `[u16]` after validating to avoid the utf16-to-utf8
    // transcode.
    pub fn to_str<'a, T: 'a>(&self, store: impl Into<StoreContext<'a, T>>) -> Result<Cow<'a, str>> {
        let store = store.into().0;
        let memory = self.options.memory(store);
        self.to_str_from_memory(memory)
    }

    // Dispatch to the decoder matching the encoding this string was lifted
    // with, stripping the compact encoding's tag bit when present.
    fn to_str_from_memory<'a>(&self, memory: &'a [u8]) -> Result<Cow<'a, str>> {
        match self.options.string_encoding() {
            StringEncoding::Utf8 => self.decode_utf8(memory),
            StringEncoding::Utf16 => self.decode_utf16(memory, self.len),
            StringEncoding::CompactUtf16 => {
                if self.len & UTF16_TAG == 0 {
                    self.decode_latin1(memory)
                } else {
                    self.decode_utf16(memory, self.len ^ UTF16_TAG)
                }
            }
        }
    }

    // Borrowing decode: validates utf-8 in place, returning `Cow::Borrowed`.
    fn decode_utf8<'a>(&self, memory: &'a [u8]) -> Result<Cow<'a, str>> {
        // Note that bounds-checking already happen in construction of `WasmStr`
        // so this is never expected to panic. This could theoretically be
        // unchecked indexing if we're feeling wild enough.
        Ok(str::from_utf8(&memory[self.ptr..][..self.len])?.into())
    }

    // Transcoding decode: utf-16 must be converted, so this always allocates
    // a host `String`. `len` here is a count of 16-bit units.
    fn decode_utf16<'a>(&self, memory: &'a [u8], len: usize) -> Result<Cow<'a, str>> {
        // See notes in `decode_utf8` for why this is panicking indexing.
        let memory = &memory[self.ptr..][..len * 2];
        Ok(core::char::decode_utf16(
            memory
                .chunks(2)
                .map(|chunk| u16::from_le_bytes(chunk.try_into().unwrap())),
        )
        .collect::<Result<String, _>>()?
        .into())
    }

    // Latin1 decode via `encoding_rs`; borrows when the bytes are also valid
    // ascii/utf-8, otherwise allocates.
    fn decode_latin1<'a>(&self, memory: &'a [u8]) -> Result<Cow<'a, str>> {
        // See notes in `decode_utf8` for why this is panicking indexing.
        Ok(encoding_rs::mem::decode_latin1(
            &memory[self.ptr..][..self.len],
        ))
    }
}
1377
1378// Note that this is similar to `ComponentType for str` except it can only be
1379// used for lifting, not lowering.
1380unsafe impl ComponentType for WasmStr {
1381 type Lower = <str as ComponentType>::Lower;
1382
1383 const ABI: CanonicalAbiInfo = CanonicalAbiInfo::POINTER_PAIR;
1384
1385 fn typecheck(ty: &InterfaceType, _types: &InstanceType<'_>) -> Result<()> {
1386 match ty {
1387 InterfaceType::String => Ok(()),
1388 other => bail!("expected `string` found `{}`", desc(other)),
1389 }
1390 }
1391}
1392
1393unsafe impl Lift for WasmStr {
1394 #[inline]
1395 fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
1396 debug_assert!(matches!(ty, InterfaceType::String));
1397 // FIXME(#4311): needs memory64 treatment
1398 let ptr = src[0].get_u32();
1399 let len = src[1].get_u32();
1400 let (ptr, len) = (usize::try_from(ptr)?, usize::try_from(len)?);
1401 WasmStr::new(ptr, len, cx)
1402 }
1403
1404 #[inline]
1405 fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
1406 debug_assert!(matches!(ty, InterfaceType::String));
1407 debug_assert!((bytes.as_ptr() as usize) % (Self::ALIGN32 as usize) == 0);
1408 // FIXME(#4311): needs memory64 treatment
1409 let ptr = u32::from_le_bytes(bytes[..4].try_into().unwrap());
1410 let len = u32::from_le_bytes(bytes[4..].try_into().unwrap());
1411 let (ptr, len) = (usize::try_from(ptr)?, usize::try_from(len)?);
1412 WasmStr::new(ptr, len, cx)
1413 }
1414}
1415
1416unsafe impl<T> ComponentType for [T]
1417where
1418 T: ComponentType,
1419{
1420 type Lower = [ValRaw; 2];
1421
1422 const ABI: CanonicalAbiInfo = CanonicalAbiInfo::POINTER_PAIR;
1423
1424 fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()> {
1425 match ty {
1426 InterfaceType::List(t) => T::typecheck(&types.types[*t].element, types),
1427 other => bail!("expected `list` found `{}`", desc(other)),
1428 }
1429 }
1430}
1431
1432unsafe impl<T> Lower for [T]
1433where
1434 T: Lower,
1435{
1436 fn lower<U>(
1437 &self,
1438 cx: &mut LowerContext<'_, U>,
1439 ty: InterfaceType,
1440 dst: &mut MaybeUninit<[ValRaw; 2]>,
1441 ) -> Result<()> {
1442 let elem = match ty {
1443 InterfaceType::List(i) => cx.types[i].element,
1444 _ => bad_type_info(),
1445 };
1446 let (ptr, len) = lower_list(cx, elem, self)?;
1447 // See "WRITEPTR64" above for why this is always storing a 64-bit
1448 // integer.
1449 map_maybe_uninit!(dst[0]).write(ValRaw::i64(ptr as i64));
1450 map_maybe_uninit!(dst[1]).write(ValRaw::i64(len as i64));
1451 Ok(())
1452 }
1453
1454 fn store<U>(
1455 &self,
1456 cx: &mut LowerContext<'_, U>,
1457 ty: InterfaceType,
1458 offset: usize,
1459 ) -> Result<()> {
1460 let elem = match ty {
1461 InterfaceType::List(i) => cx.types[i].element,
1462 _ => bad_type_info(),
1463 };
1464 debug_assert!(offset % (Self::ALIGN32 as usize) == 0);
1465 let (ptr, len) = lower_list(cx, elem, self)?;
1466 *cx.get(offset + 0) = u32::try_from(ptr).unwrap().to_le_bytes();
1467 *cx.get(offset + 4) = u32::try_from(len).unwrap().to_le_bytes();
1468 Ok(())
1469 }
1470}
1471
1472// FIXME: this is not a memcpy for `T` where `T` is something like `u8`.
1473//
1474// Some attempts to fix this have proved not fruitful. In isolation an attempt
1475// was made where:
1476//
1477// * `MemoryMut` stored a `*mut [u8]` as its "last view" of memory to avoid
1478// reloading the base pointer constantly. This view is reset on `realloc`.
1479// * The bounds-checks in `MemoryMut::get` were removed (replaced with unsafe
1480// indexing)
1481//
1482// Even then though this didn't correctly vectorized for `Vec<u8>`. It's not
1483// entirely clear why but it appeared that it's related to reloading the base
1484// pointer to memory (I guess from `MemoryMut` itself?). Overall I'm not really
1485// clear on what's happening there, but this is surely going to be a performance
1486// bottleneck in the future.
1487fn lower_list<T, U>(
1488 cx: &mut LowerContext<'_, U>,
1489 ty: InterfaceType,
1490 list: &[T],
1491) -> Result<(usize, usize)>
1492where
1493 T: Lower,
1494{
1495 let elem_size = T::SIZE32;
1496 let size = list
1497 .len()
1498 .checked_mul(elem_size)
1499 .ok_or_else(|| anyhow!("size overflow copying a list"))?;
1500 let ptr = cx.realloc(0, 0, T::ALIGN32, size)?;
1501 T::store_list(cx, ty, ptr, list)?;
1502 Ok((ptr, list.len()))
1503}
1504
1505/// Representation of a list of values that are owned by a WebAssembly instance.
1506///
1507/// For some more commentary about the rationale for this type see the
1508/// documentation of [`WasmStr`]. In summary this type can avoid a copy when
1509/// passing data to the host in some situations but is additionally more
1510/// cumbersome to use by requiring a [`Store`](crate::Store) to be provided.
1511///
1512/// This type is used whenever a `(list T)` is returned from a [`TypedFunc`],
1513/// for example. This type represents a list of values that are stored in linear
1514/// memory which are waiting to be read.
1515///
1516/// Note that this type represents only a valid range of bytes for the list
1517/// itself, it does not represent validity of the elements themselves and that's
1518/// performed when they're iterated.
1519///
1520/// Note that this type does not implement the [`Lower`] trait, only [`Lift`].
pub struct WasmList<T> {
    // Byte offset of the first element within the component's linear memory.
    ptr: usize,
    // Number of elements (not bytes) in the list.
    len: usize,
    // Canonical ABI options captured at lift time (memory, encoding, store
    // id), used when elements are later decoded.
    options: Options,
    // Interface type of each element, needed to call `T::load` later.
    elem: InterfaceType,
    // NB: it would probably be more efficient to store a non-atomic index-style
    // reference to something inside a `StoreOpaque`, but that's not easily
    // available at this time, so it's left as a future exercise.
    types: Arc<ComponentTypes>,
    // Raw pointer back to the instance this list was lifted from; validity is
    // re-established by the store checks in `get`/`iter`.
    instance: SendSyncPtr<ComponentInstance>,
    // Ties the element type `T` to this list without storing any `T`s.
    _marker: marker::PhantomData<T>,
}
1533
1534impl<T: Lift> WasmList<T> {
1535 fn new(
1536 ptr: usize,
1537 len: usize,
1538 cx: &mut LiftContext<'_>,
1539 elem: InterfaceType,
1540 ) -> Result<WasmList<T>> {
1541 match len
1542 .checked_mul(T::SIZE32)
1543 .and_then(|len| ptr.checked_add(len))
1544 {
1545 Some(n) if n <= cx.memory().len() => {}
1546 _ => bail!("list pointer/length out of bounds of memory"),
1547 }
1548 if ptr % usize::try_from(T::ALIGN32)? != 0 {
1549 bail!("list pointer is not aligned")
1550 }
1551 Ok(WasmList {
1552 ptr,
1553 len,
1554 options: *cx.options,
1555 elem,
1556 types: cx.types.clone(),
1557 instance: SendSyncPtr::new(NonNull::new(cx.instance_ptr()).unwrap()),
1558 _marker: marker::PhantomData,
1559 })
1560 }
1561
1562 /// Returns the item length of this vector
1563 #[inline]
1564 pub fn len(&self) -> usize {
1565 self.len
1566 }
1567
1568 /// Gets the `n`th element of this list.
1569 ///
1570 /// Returns `None` if `index` is out of bounds. Returns `Some(Err(..))` if
1571 /// the value couldn't be decoded (it was invalid). Returns `Some(Ok(..))`
1572 /// if the value is valid.
1573 ///
1574 /// # Panics
1575 ///
1576 /// This function will panic if the string did not originally come from the
1577 /// `store` specified.
1578 //
1579 // TODO: given that interface values are intended to be consumed in one go
1580 // should we even expose a random access iteration API? In theory all
1581 // consumers should be validating through the iterator.
1582 pub fn get(&self, mut store: impl AsContextMut, index: usize) -> Option<Result<T>> {
1583 let store = store.as_context_mut().0;
1584 self.options.store_id().assert_belongs_to(store.id());
1585 // This should be safe because the unsafety lies in the `self.instance`
1586 // pointer passed in has previously been validated by the lifting
1587 // context this was originally created within and with the check above
1588 // this is guaranteed to be the same store. This means that this should
1589 // be carrying over the original assertion from the original creation of
1590 // the lifting context that created this type.
1591 let mut cx =
1592 unsafe { LiftContext::new(store, &self.options, &self.types, self.instance.as_ptr()) };
1593 self.get_from_store(&mut cx, index)
1594 }
1595
1596 fn get_from_store(&self, cx: &mut LiftContext<'_>, index: usize) -> Option<Result<T>> {
1597 if index >= self.len {
1598 return None;
1599 }
1600 // Note that this is using panicking indexing and this is expected to
1601 // never fail. The bounds-checking here happened during the construction
1602 // of the `WasmList` itself which means these should always be in-bounds
1603 // (and wasm memory can only grow). This could theoretically be
1604 // unchecked indexing if we're confident enough and it's actually a perf
1605 // issue one day.
1606 let bytes = &cx.memory()[self.ptr + index * T::SIZE32..][..T::SIZE32];
1607 Some(T::load(cx, self.elem, bytes))
1608 }
1609
1610 /// Returns an iterator over the elements of this list.
1611 ///
1612 /// Each item of the list may fail to decode and is represented through the
1613 /// `Result` value of the iterator.
1614 pub fn iter<'a, U: 'a>(
1615 &'a self,
1616 store: impl Into<StoreContextMut<'a, U>>,
1617 ) -> impl ExactSizeIterator<Item = Result<T>> + 'a {
1618 let store = store.into().0;
1619 self.options.store_id().assert_belongs_to(store.id());
1620 // See comments about unsafety in the `get` method.
1621 let mut cx =
1622 unsafe { LiftContext::new(store, &self.options, &self.types, self.instance.as_ptr()) };
1623 (0..self.len).map(move |i| self.get_from_store(&mut cx, i).unwrap())
1624 }
1625}
1626
// Generates inherent accessors on `WasmList<$i>` for each primitive integer
// type, granting zero-copy access to the raw little-endian element data in
// linear memory.
macro_rules! raw_wasm_list_accessors {
    ($($i:ident)*) => ($(
        impl WasmList<$i> {
            /// Get access to the raw underlying memory for this list.
            ///
            /// This method will return a direct slice into the original wasm
            /// module's linear memory where the data for this slice is stored.
            /// This allows the embedder to have efficient access to the
            /// underlying memory if needed and avoid copies and such if
            /// desired.
            ///
            /// Note that multi-byte integers are stored in little-endian format
            /// so portable processing of this slice must be aware of the host's
            /// byte-endianness. The `from_le` constructors in the Rust standard
            /// library should be suitable for converting from little-endian.
            ///
            /// # Panics
            ///
            /// Panics if the `store` provided is not the one from which this
            /// slice originated.
            pub fn as_le_slice<'a, T: 'a>(&self, store: impl Into<StoreContext<'a, T>>) -> &'a [$i] {
                let memory = self.options.memory(store.into().0);
                self._as_le_slice(memory)
            }

            // Internal variant taking the whole memory slice directly; also
            // used by the `load_list` fast path in the `integers!` macro.
            fn _as_le_slice<'a>(&self, all_of_memory: &'a [u8]) -> &'a [$i] {
                // See comments in `WasmList::get` for the panicking indexing
                let byte_size = self.len * mem::size_of::<$i>();
                let bytes = &all_of_memory[self.ptr..][..byte_size];

                // The canonical ABI requires that everything is aligned to its
                // own size, so this should be an aligned array. Furthermore the
                // alignment of primitive integers for hosts should be smaller
                // than or equal to the size of the primitive itself, meaning
                // that a wasm canonical-abi-aligned list is also aligned for
                // the host. That should mean that the head/tail slices here are
                // empty.
                //
                // Also note that the `unsafe` here is needed since the type
                // we're aligning to isn't guaranteed to be valid, but in our
                // case it's just integers and bytes so this should be safe.
                unsafe {
                    let (head, body, tail) = bytes.align_to::<$i>();
                    assert!(head.is_empty() && tail.is_empty());
                    body
                }
            }
        }
    )*)
}
1677
// Raw little-endian slice access is only sound for plain integer elements.
raw_wasm_list_accessors! {
    i8 i16 i32 i64
    u8 u16 u32 u64
}
1682
1683// Note that this is similar to `ComponentType for str` except it can only be
1684// used for lifting, not lowering.
unsafe impl<T: ComponentType> ComponentType for WasmList<T> {
    // Share the flat representation with host slices: a (pointer, length)
    // pair of core wasm values.
    type Lower = <[T] as ComponentType>::Lower;

    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::POINTER_PAIR;

    // Type-checking is identical to host slices, so delegate to `[T]`.
    fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()> {
        <[T] as ComponentType>::typecheck(ty, types)
    }
}
1694
1695unsafe impl<T: Lift> Lift for WasmList<T> {
1696 fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
1697 let elem = match ty {
1698 InterfaceType::List(i) => cx.types[i].element,
1699 _ => bad_type_info(),
1700 };
1701 // FIXME(#4311): needs memory64 treatment
1702 let ptr = src[0].get_u32();
1703 let len = src[1].get_u32();
1704 let (ptr, len) = (usize::try_from(ptr)?, usize::try_from(len)?);
1705 WasmList::new(ptr, len, cx, elem)
1706 }
1707
1708 fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
1709 let elem = match ty {
1710 InterfaceType::List(i) => cx.types[i].element,
1711 _ => bad_type_info(),
1712 };
1713 debug_assert!((bytes.as_ptr() as usize) % (Self::ALIGN32 as usize) == 0);
1714 // FIXME(#4311): needs memory64 treatment
1715 let ptr = u32::from_le_bytes(bytes[..4].try_into().unwrap());
1716 let len = u32::from_le_bytes(bytes[4..].try_into().unwrap());
1717 let (ptr, len) = (usize::try_from(ptr)?, usize::try_from(len)?);
1718 WasmList::new(ptr, len, cx, elem)
1719 }
1720}
1721
1722/// Verify that the given wasm type is a tuple with the expected fields in the right order.
1723fn typecheck_tuple(
1724 ty: &InterfaceType,
1725 types: &InstanceType<'_>,
1726 expected: &[fn(&InterfaceType, &InstanceType<'_>) -> Result<()>],
1727) -> Result<()> {
1728 match ty {
1729 InterfaceType::Tuple(t) => {
1730 let tuple = &types.types[*t];
1731 if tuple.types.len() != expected.len() {
1732 bail!(
1733 "expected {}-tuple, found {}-tuple",
1734 expected.len(),
1735 tuple.types.len()
1736 );
1737 }
1738 for (ty, check) in tuple.types.iter().zip(expected) {
1739 check(ty, types)?;
1740 }
1741 Ok(())
1742 }
1743 other => bail!("expected `tuple` found `{}`", desc(other)),
1744 }
1745}
1746
1747/// Verify that the given wasm type is a record with the expected fields in the right order and with the right
1748/// names.
1749pub fn typecheck_record(
1750 ty: &InterfaceType,
1751 types: &InstanceType<'_>,
1752 expected: &[(&str, fn(&InterfaceType, &InstanceType<'_>) -> Result<()>)],
1753) -> Result<()> {
1754 match ty {
1755 InterfaceType::Record(index) => {
1756 let fields = &types.types[*index].fields;
1757
1758 if fields.len() != expected.len() {
1759 bail!(
1760 "expected record of {} fields, found {} fields",
1761 expected.len(),
1762 fields.len()
1763 );
1764 }
1765
1766 for (field, &(name, check)) in fields.iter().zip(expected) {
1767 check(&field.ty, types)
1768 .with_context(|| format!("type mismatch for field {name}"))?;
1769
1770 if field.name != name {
1771 bail!("expected record field named {}, found {}", name, field.name);
1772 }
1773 }
1774
1775 Ok(())
1776 }
1777 other => bail!("expected `record` found `{}`", desc(other)),
1778 }
1779}
1780
1781/// Verify that the given wasm type is a variant with the expected cases in the right order and with the right
1782/// names.
1783pub fn typecheck_variant(
1784 ty: &InterfaceType,
1785 types: &InstanceType<'_>,
1786 expected: &[(
1787 &str,
1788 Option<fn(&InterfaceType, &InstanceType<'_>) -> Result<()>>,
1789 )],
1790) -> Result<()> {
1791 match ty {
1792 InterfaceType::Variant(index) => {
1793 let cases = &types.types[*index].cases;
1794
1795 if cases.len() != expected.len() {
1796 bail!(
1797 "expected variant of {} cases, found {} cases",
1798 expected.len(),
1799 cases.len()
1800 );
1801 }
1802
1803 for ((case_name, case_ty), &(name, check)) in cases.iter().zip(expected) {
1804 if *case_name != name {
1805 bail!("expected variant case named {name}, found {case_name}");
1806 }
1807
1808 match (check, case_ty) {
1809 (Some(check), Some(ty)) => check(ty, types)
1810 .with_context(|| format!("type mismatch for case {name}"))?,
1811 (None, None) => {}
1812 (Some(_), None) => {
1813 bail!("case `{name}` has no type but one was expected")
1814 }
1815 (None, Some(_)) => {
1816 bail!("case `{name}` has a type but none was expected")
1817 }
1818 }
1819 }
1820
1821 Ok(())
1822 }
1823 other => bail!("expected `variant` found `{}`", desc(other)),
1824 }
1825}
1826
1827/// Verify that the given wasm type is a enum with the expected cases in the right order and with the right
1828/// names.
1829pub fn typecheck_enum(
1830 ty: &InterfaceType,
1831 types: &InstanceType<'_>,
1832 expected: &[&str],
1833) -> Result<()> {
1834 match ty {
1835 InterfaceType::Enum(index) => {
1836 let names = &types.types[*index].names;
1837
1838 if names.len() != expected.len() {
1839 bail!(
1840 "expected enum of {} names, found {} names",
1841 expected.len(),
1842 names.len()
1843 );
1844 }
1845
1846 for (name, expected) in names.iter().zip(expected) {
1847 if name != expected {
1848 bail!("expected enum case named {}, found {}", expected, name);
1849 }
1850 }
1851
1852 Ok(())
1853 }
1854 other => bail!("expected `enum` found `{}`", desc(other)),
1855 }
1856}
1857
1858/// Verify that the given wasm type is a flags type with the expected flags in the right order and with the right
1859/// names.
1860pub fn typecheck_flags(
1861 ty: &InterfaceType,
1862 types: &InstanceType<'_>,
1863 expected: &[&str],
1864) -> Result<()> {
1865 match ty {
1866 InterfaceType::Flags(index) => {
1867 let names = &types.types[*index].names;
1868
1869 if names.len() != expected.len() {
1870 bail!(
1871 "expected flags type with {} names, found {} names",
1872 expected.len(),
1873 names.len()
1874 );
1875 }
1876
1877 for (name, expected) in names.iter().zip(expected) {
1878 if name != expected {
1879 bail!("expected flag named {}, found {}", expected, name);
1880 }
1881 }
1882
1883 Ok(())
1884 }
1885 other => bail!("expected `flags` found `{}`", desc(other)),
1886 }
1887}
1888
/// Format the specified bitflags using the specified names for debugging
pub fn format_flags(bits: &[u32], names: &[&str], f: &mut fmt::Formatter) -> fmt::Result {
    // Renders set flags as `(a|b|c)`. Flag `i` lives at bit `i % 32` of
    // word `i / 32`. The separator is empty before the first flag written
    // and `|` thereafter, matching the original accumulator-flag logic.
    f.write_str("(")?;
    let mut sep = "";
    for (i, name) in names.iter().enumerate() {
        if (bits[i / 32] & (1 << (i % 32))) != 0 {
            f.write_str(sep)?;
            f.write_str(name)?;
            sep = "|";
        }
    }
    f.write_str(")")
}
1906
unsafe impl<T> ComponentType for Option<T>
where
    T: ComponentType,
{
    // Lowers like a two-case variant: a 32-bit discriminant (`A1`) followed
    // by the `Some` payload (`A2`).
    type Lower = TupleLower2<<u32 as ComponentType>::Lower, T::Lower>;

    // Case 0 (`none`) carries no payload, case 1 (`some`) carries `T`.
    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::variant_static(&[None, Some(T::ABI)]);

    fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()> {
        match ty {
            InterfaceType::Option(t) => T::typecheck(&types.types[*t].ty, types),
            other => bail!("expected `option` found `{}`", desc(other)),
        }
    }
}
1922
unsafe impl<T> ComponentVariant for Option<T>
where
    T: ComponentType,
{
    // Case 0 is `none` (no payload), case 1 is `some(T)`.
    const CASES: &'static [Option<CanonicalAbiInfo>] = &[None, Some(T::ABI)];
}
1929
unsafe impl<T> Lower for Option<T>
where
    T: Lower,
{
    // Lowers to the flat (stack) representation: `A1` holds the
    // discriminant and `A2` the payload storage.
    fn lower<U>(
        &self,
        cx: &mut LowerContext<'_, U>,
        ty: InterfaceType,
        dst: &mut MaybeUninit<Self::Lower>,
    ) -> Result<()> {
        let payload = match ty {
            InterfaceType::Option(ty) => cx.types[ty].ty,
            _ => bad_type_info(),
        };
        match self {
            None => {
                map_maybe_uninit!(dst.A1).write(ValRaw::i32(0));
                // Note that this is unsafe as we're writing an arbitrary
                // bit-pattern to an arbitrary type, but part of the unsafe
                // contract of the `ComponentType` trait is that we can assign
                // any bit-pattern. By writing all zeros here we're ensuring
                // that the core wasm arguments this translates to will all be
                // zeros (as the canonical ABI requires).
                unsafe {
                    map_maybe_uninit!(dst.A2).as_mut_ptr().write_bytes(0u8, 1);
                }
            }
            Some(val) => {
                map_maybe_uninit!(dst.A1).write(ValRaw::i32(1));
                val.lower(cx, payload, map_maybe_uninit!(dst.A2))?;
            }
        }
        Ok(())
    }

    // Stores into linear memory: a discriminant byte at `offset` followed by
    // the payload at the variant's payload offset.
    fn store<U>(
        &self,
        cx: &mut LowerContext<'_, U>,
        ty: InterfaceType,
        offset: usize,
    ) -> Result<()> {
        debug_assert!(offset % (Self::ALIGN32 as usize) == 0);
        let payload = match ty {
            InterfaceType::Option(ty) => cx.types[ty].ty,
            _ => bad_type_info(),
        };
        match self {
            None => {
                // Only the discriminant byte is written for `none`; the
                // payload bytes in memory are left as-is.
                cx.get::<1>(offset)[0] = 0;
            }
            Some(val) => {
                cx.get::<1>(offset)[0] = 1;
                val.store(cx, payload, offset + (Self::INFO.payload_offset32 as usize))?;
            }
        }
        Ok(())
    }
}
1988
unsafe impl<T> Lift for Option<T>
where
    T: Lift,
{
    fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
        let payload = match ty {
            InterfaceType::Option(ty) => cx.types[ty].ty,
            _ => bad_type_info(),
        };
        // `A1` is the discriminant; `A2` is only consulted for `some`.
        Ok(match src.A1.get_i32() {
            0 => None,
            1 => Some(T::lift(cx, payload, &src.A2)?),
            _ => bail!("invalid option discriminant"),
        })
    }

    fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
        debug_assert!((bytes.as_ptr() as usize) % (Self::ALIGN32 as usize) == 0);
        let payload_ty = match ty {
            InterfaceType::Option(ty) => cx.types[ty].ty,
            _ => bad_type_info(),
        };
        // The discriminant is the first byte; the payload starts at this
        // type's payload offset.
        let discrim = bytes[0];
        let payload = &bytes[Self::INFO.payload_offset32 as usize..];
        match discrim {
            0 => Ok(None),
            1 => Ok(Some(T::load(cx, payload_ty, payload)?)),
            _ => bail!("invalid option discriminant"),
        }
    }
}
2020
// Flat (stack) representation of a lowered `result<T, E>`: a 32-bit
// discriminant followed by a union of the two possible payloads.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct ResultLower<T: Copy, E: Copy> {
    tag: ValRaw,
    payload: ResultLowerPayload<T, E>,
}
2027
// Payload storage shared between the `ok` and `err` cases; only the field
// selected by `ResultLower::tag` is meaningful at any given time.
#[derive(Clone, Copy)]
#[repr(C)]
union ResultLowerPayload<T: Copy, E: Copy> {
    ok: T,
    err: E,
}
2034
unsafe impl<T, E> ComponentType for Result<T, E>
where
    T: ComponentType,
    E: ComponentType,
{
    type Lower = ResultLower<T::Lower, E::Lower>;

    const ABI: CanonicalAbiInfo = CanonicalAbiInfo::variant_static(&[Some(T::ABI), Some(E::ABI)]);

    fn typecheck(ty: &InterfaceType, types: &InstanceType<'_>) -> Result<()> {
        match ty {
            InterfaceType::Result(r) => {
                let result = &types.types[*r];
                // The component-level `result` type is authoritative here:
                // if it declares no `ok` payload then the Rust-level `T`
                // must be a unit type, otherwise `T` must typecheck against
                // the declared payload type. Ditto for `err`/`E` below.
                match &result.ok {
                    Some(ty) => T::typecheck(ty, types)?,
                    None if T::IS_RUST_UNIT_TYPE => {}
                    None => bail!("expected no `ok` type"),
                }
                match &result.err {
                    Some(ty) => E::typecheck(ty, types)?,
                    None if E::IS_RUST_UNIT_TYPE => {}
                    None => bail!("expected no `err` type"),
                }
                Ok(())
            }
            other => bail!("expected `result` found `{}`", desc(other)),
        }
    }
}
2064
2065/// Lowers the payload of a variant into the storage for the entire payload,
2066/// handling writing zeros at the end of the representation if this payload is
2067/// smaller than the entire flat representation.
2068///
2069/// * `payload` - the flat storage space for the entire payload of the variant
2070/// * `typed_payload` - projection from the payload storage space to the
2071/// individual storage space for this variant.
2072/// * `lower` - lowering operation used to initialize the `typed_payload` return
2073/// value.
2074///
2075/// For more information on this se the comments in the `Lower for Result`
2076/// implementation below.
/// # Safety
///
/// Callers must ensure that `P` and `T` are flat `Lower` representations
/// consisting entirely of `ValRaw`-compatible storage (as all
/// `ComponentType::Lower` types are) so that the `storage_as_slice{,_mut}`
/// views below are valid, and that `typed_payload` projects into the
/// storage of `payload` itself.
pub unsafe fn lower_payload<P, T>(
    payload: &mut MaybeUninit<P>,
    typed_payload: impl FnOnce(&mut MaybeUninit<P>) -> &mut MaybeUninit<T>,
    lower: impl FnOnce(&mut MaybeUninit<T>) -> Result<()>,
) -> Result<()> {
    let typed = typed_payload(payload);
    lower(typed)?;

    // Zero-fill whatever tail of the full payload wasn't written by the
    // typed lowering above so that no uninitialized host memory is passed
    // along to wasm.
    let typed_len = storage_as_slice(typed).len();
    let payload = storage_as_slice_mut(payload);
    for slot in payload[typed_len..].iter_mut() {
        *slot = ValRaw::u64(0);
    }
    Ok(())
}
2092
unsafe impl<T, E> ComponentVariant for Result<T, E>
where
    T: ComponentType,
    E: ComponentType,
{
    // Case 0 is `ok(T)`, case 1 is `err(E)`.
    const CASES: &'static [Option<CanonicalAbiInfo>] = &[Some(T::ABI), Some(E::ABI)];
}
2100
unsafe impl<T, E> Lower for Result<T, E>
where
    T: Lower,
    E: Lower,
{
    fn lower<U>(
        &self,
        cx: &mut LowerContext<'_, U>,
        ty: InterfaceType,
        dst: &mut MaybeUninit<Self::Lower>,
    ) -> Result<()> {
        let (ok, err) = match ty {
            InterfaceType::Result(ty) => {
                let ty = &cx.types[ty];
                (ty.ok, ty.err)
            }
            _ => bad_type_info(),
        };

        // This implementation of `Lower::lower`, if you're reading these from
        // the top of this file, is the first location that the "join" logic of
        // the component model's canonical ABI encountered. The rough problem is
        // that let's say we have a component model type of the form:
        //
        //      (result u64 (error (tuple f32 u16)))
        //
        // The flat representation of this is actually pretty tricky. Currently
        // it is:
        //
        //      i32 i64 i32
        //
        // The first `i32` is the discriminant for the `result`, and the payload
        // is represented by `i64 i32`. The "ok" variant will only use the `i64`
        // and the "err" variant will use both `i64` and `i32`.
        //
        // In the "ok" variant the first issue is encountered. The size of one
        // variant may not match the size of the other variants. All variants
        // start at the "front" but when lowering a type we need to be sure to
        // initialize the later variants (lest we leak random host memory into
        // the guest module). Due to how the `Lower` type is represented as a
        // `union` of all the variants what ends up happening here is that
        // internally within the `lower_payload` after the typed payload is
        // lowered the remaining bits of the payload that weren't initialized
        // are all set to zero. This will guarantee that we'll write to all the
        // slots for each variant.
        //
        // The "err" variant encounters the second issue, however, which is that
        // the flat representation for each type may differ between payloads. In
        // the "ok" arm an `i64` is written, but the `lower` implementation for
        // the "err" arm will write an `f32` and then an `i32`. For this
        // implementation of `lower` to be valid the `f32` needs to get inflated
        // to an `i64` with zero-padding in the upper bits. What may be
        // surprising, however, is that none of this is handled in this file.
        // This implementation looks like it's blindly deferring to `E::lower`
        // and hoping it does the right thing.
        //
        // In reality, however, the correctness of variant lowering relies on
        // two subtle details of the `ValRaw` implementation in Wasmtime:
        //
        // 1. First the `ValRaw` value always contains little-endian values.
        //    This means that if a `u32` is written, a `u64` is read, and then
        //    the `u64` has its upper bits truncated the original value will
        //    always be retained. This is primarily here for big-endian
        //    platforms where if it weren't little endian then the opposite
        //    would occur and the wrong value would be read.
        //
        // 2. Second, and perhaps even more subtly, the `ValRaw` constructors
        //    for 32-bit types actually always initialize 64-bits of the
        //    `ValRaw`. In the component model flat ABI only 32 and 64-bit types
        //    are used so 64-bits is big enough to contain everything. This
        //    means that when a `ValRaw` is written into the destination it will
        //    always, whether it's needed or not, be "ready" to get extended up
        //    to 64-bits.
        //
        // Put together these two subtle guarantees means that all `Lower`
        // implementations can be written "naturally" as one might naively
        // expect. Variants will, on each arm, zero out remaining fields and all
        // writes to the flat representation will automatically be 64-bit writes
        // meaning that if the value is read as a 64-bit value, which isn't
        // known at the time of the write, it'll still be correct.
        match self {
            Ok(e) => {
                map_maybe_uninit!(dst.tag).write(ValRaw::i32(0));
                unsafe {
                    lower_payload(
                        map_maybe_uninit!(dst.payload),
                        |payload| map_maybe_uninit!(payload.ok),
                        |dst| match ok {
                            Some(ok) => e.lower(cx, ok, dst),
                            None => Ok(()),
                        },
                    )
                }
            }
            Err(e) => {
                map_maybe_uninit!(dst.tag).write(ValRaw::i32(1));
                unsafe {
                    lower_payload(
                        map_maybe_uninit!(dst.payload),
                        |payload| map_maybe_uninit!(payload.err),
                        |dst| match err {
                            Some(err) => e.lower(cx, err, dst),
                            None => Ok(()),
                        },
                    )
                }
            }
        }
    }

    // Stores into linear memory: a discriminant byte at `offset` followed by
    // the selected payload (if any) at the variant's payload offset.
    fn store<U>(
        &self,
        cx: &mut LowerContext<'_, U>,
        ty: InterfaceType,
        offset: usize,
    ) -> Result<()> {
        let (ok, err) = match ty {
            InterfaceType::Result(ty) => {
                let ty = &cx.types[ty];
                (ty.ok, ty.err)
            }
            _ => bad_type_info(),
        };
        debug_assert!(offset % (Self::ALIGN32 as usize) == 0);
        let payload_offset = Self::INFO.payload_offset32 as usize;
        match self {
            Ok(e) => {
                cx.get::<1>(offset)[0] = 0;
                // A `None` payload type means this case carries no value, so
                // only the discriminant is written.
                if let Some(ok) = ok {
                    e.store(cx, ok, offset + payload_offset)?;
                }
            }
            Err(e) => {
                cx.get::<1>(offset)[0] = 1;
                if let Some(err) = err {
                    e.store(cx, err, offset + payload_offset)?;
                }
            }
        }
        Ok(())
    }
}
2243
2244unsafe impl<T, E> Lift for Result<T, E>
2245where
2246 T: Lift,
2247 E: Lift,
2248{
2249 #[inline]
2250 fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, src: &Self::Lower) -> Result<Self> {
2251 let (ok, err) = match ty {
2252 InterfaceType::Result(ty) => {
2253 let ty = &cx.types[ty];
2254 (ty.ok, ty.err)
2255 }
2256 _ => bad_type_info(),
2257 };
2258 // Note that this implementation specifically isn't trying to actually
2259 // reinterpret or alter the bits of `lower` depending on which variant
2260 // we're lifting. This ends up all working out because the value is
2261 // stored in little-endian format.
2262 //
2263 // When stored in little-endian format the `{T,E}::Lower`, when each
2264 // individual `ValRaw` is read, means that if an i64 value, extended
2265 // from an i32 value, was stored then when the i32 value is read it'll
2266 // automatically ignore the upper bits.
2267 //
2268 // This "trick" allows us to seamlessly pass through the `Self::Lower`
2269 // representation into the lifting/lowering without trying to handle
2270 // "join"ed types as per the canonical ABI. It just so happens that i64
2271 // bits will naturally be reinterpreted as f64. Additionally if the
2272 // joined type is i64 but only the lower bits are read that's ok and we
2273 // don't need to validate the upper bits.
2274 //
2275 // This is largely enabled by WebAssembly/component-model#35 where no
2276 // validation needs to be performed for ignored bits and bytes here.
2277 Ok(match src.tag.get_i32() {
2278 0 => Ok(unsafe { lift_option(cx, ok, &src.payload.ok)? }),
2279 1 => Err(unsafe { lift_option(cx, err, &src.payload.err)? }),
2280 _ => bail!("invalid expected discriminant"),
2281 })
2282 }
2283
2284 #[inline]
2285 fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
2286 debug_assert!((bytes.as_ptr() as usize) % (Self::ALIGN32 as usize) == 0);
2287 let discrim = bytes[0];
2288 let payload = &bytes[Self::INFO.payload_offset32 as usize..];
2289 let (ok, err) = match ty {
2290 InterfaceType::Result(ty) => {
2291 let ty = &cx.types[ty];
2292 (ty.ok, ty.err)
2293 }
2294 _ => bad_type_info(),
2295 };
2296 match discrim {
2297 0 => Ok(Ok(load_option(cx, ok, &payload[..T::SIZE32])?)),
2298 1 => Ok(Err(load_option(cx, err, &payload[..E::SIZE32])?)),
2299 _ => bail!("invalid expected discriminant"),
2300 }
2301 }
2302}
2303
2304fn lift_option<T>(cx: &mut LiftContext<'_>, ty: Option<InterfaceType>, src: &T::Lower) -> Result<T>
2305where
2306 T: Lift,
2307{
2308 match ty {
2309 Some(ty) => T::lift(cx, ty, src),
2310 None => Ok(empty_lift()),
2311 }
2312}
2313
2314fn load_option<T>(cx: &mut LiftContext<'_>, ty: Option<InterfaceType>, bytes: &[u8]) -> Result<T>
2315where
2316 T: Lift,
2317{
2318 match ty {
2319 Some(ty) => T::load(cx, ty, bytes),
2320 None => Ok(empty_lift()),
2321 }
2322}
2323
// Produces a value of a unit-like type "out of thin air" for variant cases
// that carry no payload.
fn empty_lift<T>() -> T
where
    T: Lift,
{
    // Guard against misuse: only zero-sized unit-like types are valid here.
    assert!(T::IS_RUST_UNIT_TYPE);
    assert_eq!(mem::size_of::<T>(), 0);
    // SAFETY: `T` is zero-sized (asserted above), so the value has no bytes
    // that could hold an invalid bit-pattern and `assume_init` is sound.
    unsafe { MaybeUninit::uninit().assume_init() }
}
2332
// Implements `ComponentType`/`Lower`/`Lift`/`ComponentNamedList` for a Rust
// tuple of arity `$n`; one expansion per arity (driven by
// `for_each_function_signature!` below). A dedicated `TupleLower$n` struct
// serves as the flat lowered representation.
macro_rules! impl_component_ty_for_tuples {
    ($n:tt $($t:ident)*) => {paste::paste!{
        // Flat lowered storage for the tuple: one field per element's
        // `Lower` representation, plus a zero-length `ValRaw` array so the
        // 0-tuple still gets `ValRaw`'s alignment (per the field's name).
        #[allow(non_snake_case)]
        #[doc(hidden)]
        #[derive(Clone, Copy)]
        #[repr(C)]
        pub struct [<TupleLower$n>]<$($t),*> {
            $($t: $t,)*
            _align_tuple_lower0_correctly: [ValRaw; 0],
        }

        #[allow(non_snake_case)]
        unsafe impl<$($t,)*> ComponentType for ($($t,)*)
        where $($t: ComponentType),*
        {
            type Lower = [<TupleLower$n>]<$($t::Lower),*>;

            const ABI: CanonicalAbiInfo = CanonicalAbiInfo::record_static(&[
                $($t::ABI),*
            ]);

            // Only the 0-tuple `()` counts as a unit type: any repetition of
            // the macro arguments below forces this to `false`.
            const IS_RUST_UNIT_TYPE: bool = {
                let mut _is_unit = true;
                $(
                    let _anything_to_bind_the_macro_variable = $t::IS_RUST_UNIT_TYPE;
                    _is_unit = false;
                )*
                _is_unit
            };

            fn typecheck(
                ty: &InterfaceType,
                types: &InstanceType<'_>,
            ) -> Result<()> {
                typecheck_tuple(ty, types, &[$($t::typecheck),*])
            }
        }

        #[allow(non_snake_case)]
        unsafe impl<$($t,)*> Lower for ($($t,)*)
        where $($t: Lower),*
        {
            fn lower<U>(
                &self,
                cx: &mut LowerContext<'_, U>,
                ty: InterfaceType,
                _dst: &mut MaybeUninit<Self::Lower>,
            ) -> Result<()> {
                let types = match ty {
                    InterfaceType::Tuple(t) => &cx.types[t].types,
                    _ => bad_type_info(),
                };
                let ($($t,)*) = self;
                // Lower each element in order into its corresponding field
                // of the `TupleLower$n` storage.
                let mut _types = types.iter();
                $(
                    let ty = *_types.next().unwrap_or_else(bad_type_info);
                    $t.lower(cx, ty, map_maybe_uninit!(_dst.$t))?;
                )*
                Ok(())
            }

            fn store<U>(
                &self,
                cx: &mut LowerContext<'_, U>,
                ty: InterfaceType,
                mut _offset: usize,
            ) -> Result<()> {
                debug_assert!(_offset % (Self::ALIGN32 as usize) == 0);
                let types = match ty {
                    InterfaceType::Tuple(t) => &cx.types[t].types,
                    _ => bad_type_info(),
                };
                let ($($t,)*) = self;
                let mut _types = types.iter();
                $(
                    let ty = *_types.next().unwrap_or_else(bad_type_info);
                    // `next_field32_size` advances `_offset` past padding to
                    // this field's aligned position and yields that position.
                    $t.store(cx, ty, $t::ABI.next_field32_size(&mut _offset))?;
                )*
                Ok(())
            }
        }

        #[allow(non_snake_case)]
        unsafe impl<$($t,)*> Lift for ($($t,)*)
        where $($t: Lift),*
        {
            #[inline]
            fn lift(cx: &mut LiftContext<'_>, ty: InterfaceType, _src: &Self::Lower) -> Result<Self> {
                let types = match ty {
                    InterfaceType::Tuple(t) => &cx.types[t].types,
                    _ => bad_type_info(),
                };
                let mut _types = types.iter();
                Ok(($(
                    $t::lift(
                        cx,
                        *_types.next().unwrap_or_else(bad_type_info),
                        &_src.$t,
                    )?,
                )*))
            }

            #[inline]
            fn load(cx: &mut LiftContext<'_>, ty: InterfaceType, bytes: &[u8]) -> Result<Self> {
                debug_assert!((bytes.as_ptr() as usize) % (Self::ALIGN32 as usize) == 0);
                let types = match ty {
                    InterfaceType::Tuple(t) => &cx.types[t].types,
                    _ => bad_type_info(),
                };
                let mut _types = types.iter();
                let mut _offset = 0;
                $(
                    let ty = *_types.next().unwrap_or_else(bad_type_info);
                    let $t = $t::load(cx, ty, &bytes[$t::ABI.next_field32_size(&mut _offset)..][..$t::SIZE32])?;
                )*
                Ok(($($t,)*))
            }
        }

        #[allow(non_snake_case)]
        unsafe impl<$($t,)*> ComponentNamedList for ($($t,)*)
        where $($t: ComponentType),*
        {}
    }};
}
2458
// Instantiate the tuple implementations above for every arity that
// `for_each_function_signature!` supports.
for_each_function_signature!(impl_component_ty_for_tuples);
2460
/// Returns a short human-readable name for an interface type, used by the
/// `expected ... found ...` typecheck error messages in this file.
pub fn desc(ty: &InterfaceType) -> &'static str {
    match ty {
        InterfaceType::U8 => "u8",
        InterfaceType::S8 => "s8",
        InterfaceType::U16 => "u16",
        InterfaceType::S16 => "s16",
        InterfaceType::U32 => "u32",
        InterfaceType::S32 => "s32",
        InterfaceType::U64 => "u64",
        InterfaceType::S64 => "s64",
        InterfaceType::Float32 => "f32",
        InterfaceType::Float64 => "f64",
        InterfaceType::Bool => "bool",
        InterfaceType::Char => "char",
        InterfaceType::String => "string",
        InterfaceType::List(_) => "list",
        InterfaceType::Tuple(_) => "tuple",
        InterfaceType::Option(_) => "option",
        InterfaceType::Result(_) => "result",

        InterfaceType::Record(_) => "record",
        InterfaceType::Variant(_) => "variant",
        InterfaceType::Flags(_) => "flags",
        InterfaceType::Enum(_) => "enum",
        InterfaceType::Own(_) => "owned resource",
        InterfaceType::Borrow(_) => "borrowed resource",
        InterfaceType::Future(_) => "future",
        InterfaceType::Stream(_) => "stream",
        InterfaceType::ErrorContext(_) => "error-context",
    }
}
2492
/// Called from `_ =>` match arms throughout this file when the runtime
/// `InterfaceType` disagrees with the statically-expected variant; this
/// indicates an internal invariant violation, hence the panic.
#[cold]
#[doc(hidden)]
pub fn bad_type_info<T>() -> T {
    // NB: should consider something like `unreachable_unchecked` here if this
    // becomes a performance bottleneck at some point, but that also comes with
    // a tradeoff of propagating a lot of unsafety, so it may not be worth it.
    panic!("bad type information detected");
}