wasmtime_environ/fact/
trampoline.rs

1//! Low-level compilation of a fused adapter function.
2//!
3//! This module is tasked with the top-level `compile` function which creates a
4//! single WebAssembly function which will perform the steps of the fused
5//! adapter for an `AdapterData` provided. This is the "meat" of compilation
6//! where the validation of the canonical ABI or similar all happens to
7//! translate arguments from one module to another.
8//!
9//! ## Traps and their ordering
10//!
11//! Currently this compiler is pretty "loose" about the ordering of precisely
12//! what trap happens where. The main reason for this is that to core wasm all
13//! traps are the same and for fused adapters if a trap happens no intermediate
14//! side effects are visible (as designed by the canonical ABI itself). For this
15//! it's important to note that some of the precise choices of control flow here
16//! can be somewhat arbitrary, an intentional decision.
17
18use crate::component::{
19    CanonicalAbiInfo, ComponentTypesBuilder, FixedEncoding as FE, FlatType, InterfaceType,
20    StringEncoding, Transcode, TypeComponentLocalErrorContextTableIndex, TypeEnumIndex,
21    TypeFlagsIndex, TypeFutureTableIndex, TypeListIndex, TypeOptionIndex, TypeRecordIndex,
22    TypeResourceTableIndex, TypeResultIndex, TypeStreamTableIndex, TypeTupleIndex,
23    TypeVariantIndex, VariantInfo, FLAG_MAY_ENTER, FLAG_MAY_LEAVE, MAX_FLAT_PARAMS,
24};
25use crate::fact::signature::Signature;
26use crate::fact::transcode::Transcoder;
27use crate::fact::traps::Trap;
28use crate::fact::{
29    AdapterData, Body, Context, Function, FunctionId, Helper, HelperLocation, HelperType, Module,
30    Options,
31};
32use crate::prelude::*;
33use crate::{FuncIndex, GlobalIndex};
34use std::collections::HashMap;
35use std::mem;
36use std::ops::Range;
37use wasm_encoder::{BlockType, Encode, Instruction, Instruction::*, MemArg, ValType};
38use wasmtime_component_util::{DiscriminantSize, FlagsSize};
39
// NOTE(review): both constants below are the top bit of a 32-bit value (2^31).
// Their usages live elsewhere in this module; the names suggest they bound
// string byte lengths and tag "latin1+utf16" strings as UTF-16 respectively —
// confirm against the canonical ABI's string-encoding rules.
const MAX_STRING_BYTE_LENGTH: u32 = 1 << 31;
const UTF16_TAG: u32 = 1 << 31;

/// This value is arbitrarily chosen and should be fine to change at any time,
/// it just seemed like a halfway reasonable starting point.
const INITIAL_FUEL: usize = 1_000;
46
/// State used while emitting a single adapter (or helper) function into a
/// `Module`, including the encoded code so far, local allocation, trap
/// metadata, and the fuel heuristic.
struct Compiler<'a, 'b> {
    /// Type information for the component being compiled.
    types: &'a ComponentTypesBuilder,

    /// The output module that the generated function is being added to.
    module: &'b mut Module<'a>,

    /// The identifier of the function whose body is being generated; the
    /// finished code is attached to this function.
    result: FunctionId,

    /// The encoded WebAssembly function body so far, not including locals.
    code: Vec<u8>,

    /// Total number of locals generated so far.
    nlocals: u32,

    /// Locals partitioned by type which are not currently in use.
    free_locals: HashMap<ValType, Vec<u32>>,

    /// Metadata about all `unreachable` trap instructions in this function and
    /// what the trap represents. The offset within `self.code` is recorded as
    /// well.
    traps: Vec<(usize, Trap)>,

    /// A heuristic which is intended to limit the size of a generated function
    /// to a certain maximum to avoid generating arbitrarily large functions.
    ///
    /// This fuel counter is decremented each time `translate` is called and
    /// when fuel is entirely consumed further translations, if necessary, will
    /// be done through calls to other functions in the module. This is intended
    /// to be a heuristic to split up the main function into theoretically
    /// reusable portions.
    fuel: usize,

    /// Indicates whether an "enter call" should be emitted in the generated
    /// function with a call to `Resource{Enter,Exit}Call` at the beginning and
    /// end of the function for tracking of information related to borrowed
    /// resources.
    emit_resource_call: bool,
}
82
/// Compiles the fused adapter function described by `adapter` into `module`.
///
/// The shape of the generated code depends on whether each side of the
/// adapter uses the sync or async ABI: the sync->sync case is compiled as a
/// single core function while any async combination additionally compiles
/// `async-start`/`async-return` helper functions and defers scheduling to
/// host built-ins.
pub(super) fn compile(module: &mut Module<'_>, adapter: &AdapterData) {
    // Shared setup: computes both signatures, registers the output function,
    // and builds a `Compiler` whose locals are the lowered parameters.
    fn compiler<'a, 'b>(
        module: &'b mut Module<'a>,
        adapter: &AdapterData,
    ) -> (Compiler<'a, 'b>, Signature, Signature) {
        let lower_sig = module.types.signature(&adapter.lower, Context::Lower);
        let lift_sig = module.types.signature(&adapter.lift, Context::Lift);
        let ty = module
            .core_types
            .function(&lower_sig.params, &lower_sig.results);
        let result = module
            .funcs
            .push(Function::new(Some(adapter.name.clone()), ty));

        // If this type signature contains any borrowed resources then invocations
        // of enter/exit call for resource-related metadata tracking must be used.
        // It shouldn't matter whether the lower/lift signature is used here as both
        // should return the same answer.
        let emit_resource_call = module.types.contains_borrow_resource(&adapter.lower);
        assert_eq!(
            emit_resource_call,
            module.types.contains_borrow_resource(&adapter.lift)
        );

        (
            Compiler::new(
                module,
                result,
                lower_sig.params.len() as u32,
                emit_resource_call,
            ),
            lower_sig,
            lift_sig,
        )
    }

    // This closure compiles a function to be exported to the host which the
    // host can use to lift the parameters from the caller and lower them to
    // the callee.
    //
    // This allows the host to delay copying the parameters until the callee
    // signals readiness by clearing its backpressure flag.
    let async_start_adapter = |module: &mut Module| {
        let sig = module
            .types
            .async_start_signature(&adapter.lower, &adapter.lift);
        let ty = module.core_types.function(&sig.params, &sig.results);
        let result = module.funcs.push(Function::new(
            Some(format!("[async-start]{}", adapter.name)),
            ty,
        ));

        Compiler::new(module, result, sig.params.len() as u32, false)
            .compile_async_start_adapter(adapter, &sig);

        result
    };

    // This closure compiles a function to be exported by the adapter module and
    // called by the host to lift the results from the callee and lower them to
    // the caller.
    //
    // Given that async-lifted exports return their results via the
    // `task.return` intrinsic, the host will need to copy the results from
    // callee to caller when that intrinsic is called rather than when the
    // callee task fully completes (which may happen much later).
    let async_return_adapter = |module: &mut Module| {
        let sig = module
            .types
            .async_return_signature(&adapter.lower, &adapter.lift);
        let ty = module.core_types.function(&sig.params, &sig.results);
        let result = module.funcs.push(Function::new(
            Some(format!("[async-return]{}", adapter.name)),
            ty,
        ));

        Compiler::new(module, result, sig.params.len() as u32, false)
            .compile_async_return_adapter(adapter, &sig);

        result
    };

    // Dispatch on (lower is async, lift is async).
    match (adapter.lower.options.async_, adapter.lift.options.async_) {
        (false, false) => {
            // We can adapt sync->sync case with only minimal use of intrinsics,
            // e.g. resource enter and exit calls as needed.
            let (compiler, lower_sig, lift_sig) = compiler(module, adapter);
            compiler.compile_sync_to_sync_adapter(adapter, &lower_sig, &lift_sig)
        }
        (true, true) => {
            // In the async->async case, we must compile a couple of helper functions:
            //
            // - `async-start`: copies the parameters from the caller to the callee
            // - `async-return`: copies the result from the callee to the caller
            //
            // Unlike synchronous calls, the above operations are asynchronous
            // and subject to backpressure.  If the callee is not yet ready to
            // handle a new call, the `async-start` function will not be called
            // immediately.  Instead, control will return to the caller,
            // allowing it to do other work while waiting for this call to make
            // progress.  Once the callee indicates it is ready, `async-start`
            // will be called, and sometime later (possibly after various task
            // switch events), when the callee has produced a result, it will
            // call `async-return` via the `task.return` intrinsic, at which
            // point a `STATUS_RETURNED` event will be delivered to the caller.
            let start = async_start_adapter(module);
            let return_ = async_return_adapter(module);
            let (compiler, _, lift_sig) = compiler(module, adapter);
            compiler.compile_async_to_async_adapter(
                adapter,
                start,
                return_,
                i32::try_from(lift_sig.params.len()).unwrap(),
            );
        }
        (false, true) => {
            // Like the async->async case above, for the sync->async case we
            // also need `async-start` and `async-return` helper functions to
            // allow the callee to asynchronously "pull" the parameters and
            // "push" the results when it is ready.
            //
            // However, since the caller is using the synchronous ABI, the
            // parameters may have been passed via the stack rather than linear
            // memory.  In that case, we pass them to the host to store in a
            // task-local location temporarily in the case of backpressure.
            // Similarly, the host will also temporarily store the results that
            // the callee provides to `async-return` until it is ready to resume
            // the caller.
            let start = async_start_adapter(module);
            let return_ = async_return_adapter(module);
            let (compiler, lower_sig, lift_sig) = compiler(module, adapter);
            compiler.compile_sync_to_async_adapter(
                adapter,
                start,
                return_,
                i32::try_from(lift_sig.params.len()).unwrap(),
                &lower_sig,
            );
        }
        (true, false) => {
            // As with the async->async and sync->async cases above, for the
            // async->sync case we use `async-start` and `async-return` helper
            // functions.  Here, those functions allow the host to enforce
            // backpressure in the case where the callee instance already has
            // another synchronous call in progress, in which case we can't
            // start a new one until the current one (and any others already
            // waiting in line behind it) has completed.
            //
            // In the case of backpressure, we'll return control to the caller
            // immediately so it can do other work.  Later, once the callee is
            // ready, the host will call the `async-start` function to retrieve
            // the parameters and pass them to the callee.  At that point, the
            // callee may block on a host call, at which point the host will
            // suspend the fiber it is running on and allow the caller (or any
            // other ready instance) to run concurrently with the blocked
            // callee.  Once the callee finally returns, the host will call the
            // `async-return` function to write the result to the caller's
            // linear memory and deliver a `STATUS_RETURNED` event to the
            // caller.
            let lift_sig = module.types.signature(&adapter.lift, Context::Lift);
            let start = async_start_adapter(module);
            let return_ = async_return_adapter(module);
            let (compiler, ..) = compiler(module, adapter);
            compiler.compile_async_to_sync_adapter(
                adapter,
                start,
                return_,
                i32::try_from(lift_sig.params.len()).unwrap(),
                i32::try_from(lift_sig.results.len()).unwrap(),
            );
        }
    }
}
255
/// Compiles a helper function as specified by the `Helper` configuration.
///
/// This function is invoked when the translation process runs out of fuel for
/// some prior function which enqueues a helper to get translated later. This
/// translation function will perform one type translation as specified by
/// `Helper` which can either be in the stack or memory for each side.
pub(super) fn compile_helper(module: &mut Module<'_>, result: FunctionId, helper: Helper) {
    // Running count of function parameters (locals); the source's locals come
    // first, followed by the destination pointer (if any).
    let mut nlocals = 0;
    let src_flat;
    let src = match helper.src.loc {
        // If the source is on the stack then it's specified in the parameters
        // to the function, so this creates the flattened representation and
        // then lists those as the locals with appropriate types for the source
        // values.
        HelperLocation::Stack => {
            src_flat = module
                .types
                .flatten_types(&helper.src.opts, usize::MAX, [helper.src.ty])
                .unwrap()
                .iter()
                .enumerate()
                .map(|(i, ty)| (i as u32, *ty))
                .collect::<Vec<_>>();
            nlocals += src_flat.len() as u32;
            Source::Stack(Stack {
                locals: &src_flat,
                opts: &helper.src.opts,
            })
        }
        // If the source is in memory then that's just propagated here as the
        // first local is the pointer to the source.
        HelperLocation::Memory => {
            nlocals += 1;
            Source::Memory(Memory {
                opts: &helper.src.opts,
                addr: TempLocal::new(0, helper.src.opts.ptr()),
                offset: 0,
            })
        }
    };
    let dst_flat;
    let dst = match helper.dst.loc {
        // This is the same as the stack-based source although `Destination` is
        // configured slightly differently.
        HelperLocation::Stack => {
            dst_flat = module
                .types
                .flatten_types(&helper.dst.opts, usize::MAX, [helper.dst.ty])
                .unwrap();
            Destination::Stack(&dst_flat, &helper.dst.opts)
        }
        // This is the same as a memory-based source but note that the address
        // of the destination is passed as the final parameter to the function.
        HelperLocation::Memory => {
            nlocals += 1;
            Destination::Memory(Memory {
                opts: &helper.dst.opts,
                // `nlocals - 1` indexes the just-counted trailing parameter,
                // which follows any source locals above.
                addr: TempLocal::new(nlocals - 1, helper.dst.opts.ptr()),
                offset: 0,
            })
        }
    };
    let mut compiler = Compiler {
        types: module.types,
        module,
        code: Vec::new(),
        nlocals,
        free_locals: HashMap::new(),
        traps: Vec::new(),
        result,
        fuel: INITIAL_FUEL,
        // This is a helper function and only the top-level function is
        // responsible for emitting these intrinsic calls.
        emit_resource_call: false,
    };
    compiler.translate(&helper.src.ty, &src, &helper.dst.ty, &dst);
    compiler.finish();
}
334
/// Possible ways that an interface value is represented in the core wasm
/// canonical ABI.
enum Source<'a> {
    /// This value is stored on the "stack" in wasm locals.
    ///
    /// This could mean that it's inline from the parameters to the function or
    /// that after a function call the results were stored in locals and the
    /// locals are the inline results.
    Stack(Stack<'a>),

    /// This value is stored in linear memory described by the `Memory`
    /// structure.
    Memory(Memory<'a>),
}
349
/// Same as `Source` but for where values are translated into.
enum Destination<'a> {
    /// This value is destined for the WebAssembly stack which means that
    /// results are simply pushed as we go along.
    ///
    /// The types listed are the types that are expected to be on the stack at
    /// the end of translation, paired with the lifting/lowering options of the
    /// destination.
    Stack(&'a [ValType], &'a Options),

    /// This value is to be placed in linear memory described by `Memory`.
    Memory(Memory<'a>),
}
362
/// Representation of a component value which lives in core wasm locals.
struct Stack<'a> {
    /// The locals that comprise a particular value.
    ///
    /// The length of this list represents the flattened list of types that make
    /// up the component value. Each entry is a `(local_index, type)` pair: the
    /// index of the local being accessed as well as the type of the local
    /// itself.
    locals: &'a [(u32, ValType)],
    /// The lifting/lowering options for where this stack of values comes from
    opts: &'a Options,
}
373
/// Representation of where a value is going to be stored in linear memory.
struct Memory<'a> {
    /// The lifting/lowering options with memory configuration
    opts: &'a Options,
    /// The index of the local that contains the base address of where the
    /// storage is happening. Held as a `TempLocal` so it can be released back
    /// to the free pool when no longer needed.
    addr: TempLocal,
    /// A "static" offset that will be baked into wasm instructions for where
    /// memory loads/stores happen.
    offset: u32,
}
385
386impl<'a, 'b> Compiler<'a, 'b> {
387    fn new(
388        module: &'b mut Module<'a>,
389        result: FunctionId,
390        nlocals: u32,
391        emit_resource_call: bool,
392    ) -> Self {
393        Self {
394            types: module.types,
395            module,
396            result,
397            code: Vec::new(),
398            nlocals,
399            free_locals: HashMap::new(),
400            traps: Vec::new(),
401            fuel: INITIAL_FUEL,
402            emit_resource_call,
403        }
404    }
405
    /// Compile an adapter function supporting an async-lowered import to an
    /// async-lifted export.
    ///
    /// This uses a pair of `async-enter` and `async-exit` built-in functions to
    /// set up and start a subtask, respectively.  `async-enter` accepts `start`
    /// and `return_` functions which copy the parameters and results,
    /// respectively; the host will call the former when the callee has cleared
    /// its backpressure flag and the latter when the callee has called
    /// `task.return`.
    fn compile_async_to_async_adapter(
        mut self,
        adapter: &AdapterData,
        start: FunctionId,
        return_: FunctionId,
        param_count: i32,
    ) {
        let enter = self.module.import_async_enter_call();
        let exit = self
            .module
            .import_async_exit_call(adapter.lift.options.callback, None);

        // Flush any code encoded so far so the raw `Body::RefFunc` entries
        // below are appended in the correct order relative to the
        // instructions emitted afterwards.
        self.flush_code();
        self.module.funcs[self.result]
            .body
            .push(Body::RefFunc(start));
        self.module.funcs[self.result]
            .body
            .push(Body::RefFunc(return_));
        // Remaining `async-enter` arguments: the caller's instance index and
        // the index of the lifted function's result type.
        self.instruction(I32Const(
            i32::try_from(adapter.lower.instance.as_u32()).unwrap(),
        ));
        self.instruction(I32Const(
            i32::try_from(self.types[adapter.lift.ty].results.as_u32()).unwrap(),
        ));
        // Async-lowered imports pass params and receive results via linear
        // memory, and those pointers are in the first and second params to
        // this adapter.  We pass them on to the host so it can store them in
        // the subtask for later use.
        self.instruction(LocalGet(0));
        self.instruction(LocalGet(1));
        self.instruction(Call(enter.as_u32()));

        // TODO: As an optimization, consider checking the backpressure flag on
        // the callee instance and, if it's unset _and_ the callee uses a
        // callback, translate the params and call the callee function directly
        // here (and make sure `exit` knows _not_ to call it in that case).

        // We export this function so we can pass a funcref to the host.
        //
        // TODO: Use a declarative element segment instead of exporting this.
        self.module.exports.push((
            adapter.callee.as_u32(),
            format!("[adapter-callee]{}", adapter.name),
        ));

        self.instruction(I32Const(
            i32::try_from(adapter.lower.instance.as_u32()).unwrap(),
        ));
        self.instruction(RefFunc(adapter.callee.as_u32()));
        self.instruction(I32Const(
            i32::try_from(adapter.lift.instance.as_u32()).unwrap(),
        ));
        self.instruction(I32Const(param_count));
        // The result count for an async callee is either one (if there's a
        // callback) or zero (if there's no callback).  We conservatively use
        // one here to ensure the host provides room for the result, if any.
        self.instruction(I32Const(1));
        self.instruction(I32Const(super::EXIT_FLAG_ASYNC_CALLEE));
        self.instruction(Call(exit.as_u32()));

        self.finish()
    }
478
    /// Compile an adapter function supporting a sync-lowered import to an
    /// async-lifted export.
    ///
    /// This uses a pair of `sync-enter` and `sync-exit` built-in functions to
    /// set up and start a subtask, respectively.  `sync-enter` accepts `start`
    /// and `return_` functions which copy the parameters and results,
    /// respectively; the host will call the former when the callee has cleared
    /// its backpressure flag and the latter when the callee has called
    /// `task.return`.
    fn compile_sync_to_async_adapter(
        mut self,
        adapter: &AdapterData,
        start: FunctionId,
        return_: FunctionId,
        lift_param_count: i32,
        lower_sig: &Signature,
    ) {
        let enter = self
            .module
            .import_sync_enter_call(&adapter.name, &lower_sig.params);
        let exit = self.module.import_sync_exit_call(
            &adapter.name,
            adapter.lift.options.callback,
            &lower_sig.results,
        );

        // Flush any code encoded so far so the raw `Body::RefFunc` entries
        // below are appended in the correct order relative to the
        // instructions emitted afterwards.
        self.flush_code();
        self.module.funcs[self.result]
            .body
            .push(Body::RefFunc(start));
        self.module.funcs[self.result]
            .body
            .push(Body::RefFunc(return_));
        // Remaining `sync-enter` arguments: the caller's instance index and
        // the index of the lifted function's result type.
        self.instruction(I32Const(
            i32::try_from(adapter.lower.instance.as_u32()).unwrap(),
        ));
        self.instruction(I32Const(
            i32::try_from(self.types[adapter.lift.ty].results.as_u32()).unwrap(),
        ));
        // The number of flattened core-wasm result values of the lowered
        // (sync) signature, or `i32::MAX` as a sentinel when the results
        // don't flatten.
        self.instruction(I32Const(
            i32::try_from(
                self.types
                    .flatten_types(
                        &adapter.lower.options,
                        usize::MAX,
                        self.types[self.types[adapter.lower.ty].results]
                            .types
                            .iter()
                            .copied(),
                    )
                    .map(|v| v.len())
                    .unwrap_or(usize::try_from(i32::MAX).unwrap()),
            )
            .unwrap(),
        ));

        // Forward all of this adapter's core parameters to `sync-enter` so
        // the host can stash them (e.g. in the case of backpressure).
        for index in 0..lower_sig.params.len() {
            self.instruction(LocalGet(u32::try_from(index).unwrap()));
        }

        self.instruction(Call(enter.as_u32()));

        // TODO: As an optimization, consider checking the backpressure flag on
        // the callee instance and, if it's unset _and_ the callee uses a
        // callback, translate the params and call the callee function directly
        // here (and make sure `exit` knows _not_ to call it in that case).

        // We export this function so we can pass a funcref to the host.
        //
        // TODO: Use a declarative element segment instead of exporting this.
        self.module.exports.push((
            adapter.callee.as_u32(),
            format!("[adapter-callee]{}", adapter.name),
        ));

        self.instruction(I32Const(
            i32::try_from(adapter.lower.instance.as_u32()).unwrap(),
        ));
        self.instruction(RefFunc(adapter.callee.as_u32()));
        self.instruction(I32Const(
            i32::try_from(adapter.lift.instance.as_u32()).unwrap(),
        ));
        self.instruction(I32Const(lift_param_count));
        self.instruction(Call(exit.as_u32()));

        self.finish()
    }
566
    /// Compile an adapter function supporting an async-lowered import to a
    /// sync-lifted export.
    ///
    /// This uses a pair of `async-enter` and `async-exit` built-in functions to
    /// set up and start a subtask, respectively.  `async-enter` accepts `start`
    /// and `return_` functions which copy the parameters and results,
    /// respectively; the host will call the former when the callee has cleared
    /// its backpressure flag and the latter when the callee has returned its
    /// result(s).
    fn compile_async_to_sync_adapter(
        mut self,
        adapter: &AdapterData,
        start: FunctionId,
        return_: FunctionId,
        param_count: i32,
        result_count: i32,
    ) {
        let enter = self.module.import_async_enter_call();
        // No callback here (the callee is sync-lifted); the callee's
        // `post-return` function, if any, is run on exit instead.
        let exit = self
            .module
            .import_async_exit_call(None, adapter.lift.post_return);

        // Flush any code encoded so far so the raw `Body::RefFunc` entries
        // below are appended in the correct order relative to the
        // instructions emitted afterwards.
        self.flush_code();
        self.module.funcs[self.result]
            .body
            .push(Body::RefFunc(start));
        self.module.funcs[self.result]
            .body
            .push(Body::RefFunc(return_));
        self.instruction(I32Const(
            i32::try_from(adapter.lower.instance.as_u32()).unwrap(),
        ));
        self.instruction(I32Const(
            i32::try_from(self.types[adapter.lift.ty].results.as_u32()).unwrap(),
        ));
        // As in the async->async case, the first two params to this adapter
        // are the caller's params/results pointers in linear memory; forward
        // them to the host.
        self.instruction(LocalGet(0));
        self.instruction(LocalGet(1));
        self.instruction(Call(enter.as_u32()));

        // We export this function so we can pass a funcref to the host.
        //
        // TODO: Use a declarative element segment instead of exporting this.
        self.module.exports.push((
            adapter.callee.as_u32(),
            format!("[adapter-callee]{}", adapter.name),
        ));

        self.instruction(I32Const(
            i32::try_from(adapter.lower.instance.as_u32()).unwrap(),
        ));
        self.instruction(RefFunc(adapter.callee.as_u32()));
        self.instruction(I32Const(
            i32::try_from(adapter.lift.instance.as_u32()).unwrap(),
        ));
        self.instruction(I32Const(param_count));
        self.instruction(I32Const(result_count));
        // Zero exit flags: contrast `EXIT_FLAG_ASYNC_CALLEE` used in the
        // async->async case, since this callee is synchronous.
        self.instruction(I32Const(0));
        self.instruction(Call(exit.as_u32()));

        self.finish()
    }
628
629    /// Compiles a function to be exported to the host which host to lift the
630    /// parameters from the caller and lower them to the callee.
631    ///
632    /// This allows the host to delay copying the parameters until the callee
633    /// signals readiness by clearing its backpressure flag.
634    fn compile_async_start_adapter(mut self, adapter: &AdapterData, sig: &Signature) {
635        let param_locals = sig
636            .params
637            .iter()
638            .enumerate()
639            .map(|(i, ty)| (i as u32, *ty))
640            .collect::<Vec<_>>();
641
642        self.set_flag(adapter.lift.flags, FLAG_MAY_LEAVE, false);
643        self.translate_params(adapter, &param_locals);
644        self.set_flag(adapter.lift.flags, FLAG_MAY_LEAVE, true);
645
646        self.finish();
647    }
648
649    /// Compiles a function to be exported by the adapter module and called by
650    /// the host to lift the results from the callee and lower them to the
651    /// caller.
652    ///
653    /// Given that async-lifted exports return their results via the
654    /// `task.return` intrinsic, the host will need to copy the results from
655    /// callee to caller when that intrinsic is called rather than when the
656    /// callee task fully completes (which may happen much later).
657    fn compile_async_return_adapter(mut self, adapter: &AdapterData, sig: &Signature) {
658        let param_locals = sig
659            .params
660            .iter()
661            .enumerate()
662            .map(|(i, ty)| (i as u32, *ty))
663            .collect::<Vec<_>>();
664
665        self.set_flag(adapter.lower.flags, FLAG_MAY_LEAVE, false);
666        // Note that we pass `param_locals` as _both_ the `param_locals` and
667        // `result_locals` parameters to `translate_results`.  That's because
668        // the _parameters_ to `task.return` are actually the _results_ that the
669        // caller is waiting for.
670        //
671        // Additionally, the host will append a return
672        // pointer to the end of that list before calling this adapter's
673        // `async-return` function if the results exceed `MAX_FLAT_RESULTS` or
674        // the import is lowered async, in which case `translate_results` will
675        // use that pointer to store the results.
676        self.translate_results(adapter, &param_locals, &param_locals);
677        self.set_flag(adapter.lower.flags, FLAG_MAY_LEAVE, true);
678
679        self.finish()
680    }
681
    /// Compile an adapter function supporting a sync-lowered import to a
    /// sync-lifted export.
    ///
    /// Unlike calls involving async-lowered imports or async-lifted exports,
    /// this adapter need not involve host built-ins except possibly for
    /// resource bookkeeping.
    ///
    /// * `adapter` - describes the lower/lift halves being fused, including
    ///   instance flags, canonical options, and the callee function index.
    /// * `lower_sig` - core wasm signature of the caller-facing (lowered)
    ///   side; its params are the locals of this generated function.
    /// * `lift_sig` - core wasm signature of the callee-facing (lifted)
    ///   side; its results are captured into temporary locals after the call.
    fn compile_sync_to_sync_adapter(
        mut self,
        adapter: &AdapterData,
        lower_sig: &Signature,
        lift_sig: &Signature,
    ) {
        // Check the instance flags required for this trampoline.
        //
        // This inserts the initial check required by `canon_lower` that the
        // caller instance can be left and additionally checks the
        // flags on the callee if necessary whether it can be entered.
        self.trap_if_not_flag(adapter.lower.flags, FLAG_MAY_LEAVE, Trap::CannotLeave);
        if adapter.called_as_export {
            // Prevent reentrance into the callee instance for the duration of
            // this call; the flag is restored near the end of this function.
            self.trap_if_not_flag(adapter.lift.flags, FLAG_MAY_ENTER, Trap::CannotEnter);
            self.set_flag(adapter.lift.flags, FLAG_MAY_ENTER, false);
        } else if self.module.debug {
            self.assert_not_flag(
                adapter.lift.flags,
                FLAG_MAY_ENTER,
                "may_enter should be unset",
            );
        }

        // Resource bookkeeping: notify the host that a cross-instance call is
        // starting (paired with the exit call at the end of this function).
        if self.emit_resource_call {
            let enter = self.module.import_resource_enter_call();
            self.instruction(Call(enter.as_u32()));
        }

        // Perform the translation of arguments. Note that `FLAG_MAY_LEAVE` is
        // cleared around this invocation for the callee as per the
        // `canon_lift` definition in the spec. Additionally note that the
        // precise ordering of traps here is not required since internal state
        // is not visible to either instance and a trap will "lock down" both
        // instances to no longer be visible. This means that we're free to
        // reorder lifts/lowers and flags and such as is necessary and
        // convenient here.
        //
        // TODO: if translation doesn't actually call any functions in either
        // instance then there's no need to set/clear the flag here and that can
        // be optimized away.
        self.set_flag(adapter.lift.flags, FLAG_MAY_LEAVE, false);
        let param_locals = lower_sig
            .params
            .iter()
            .enumerate()
            .map(|(i, ty)| (i as u32, *ty))
            .collect::<Vec<_>>();
        self.translate_params(adapter, &param_locals);
        self.set_flag(adapter.lift.flags, FLAG_MAY_LEAVE, true);

        // With all the arguments on the stack the actual target function is
        // now invoked. The core wasm results of the function are then placed
        // into locals for result translation afterwards.
        //
        // Results are popped off the operand stack in reverse order, hence the
        // `rev` here and the `reverse` afterwards to restore signature order.
        self.instruction(Call(adapter.callee.as_u32()));
        let mut result_locals = Vec::with_capacity(lift_sig.results.len());
        let mut temps = Vec::new();
        for ty in lift_sig.results.iter().rev() {
            let local = self.local_set_new_tmp(*ty);
            result_locals.push((local.idx, *ty));
            temps.push(local);
        }
        result_locals.reverse();

        // Like above during the translation of results the caller cannot be
        // left (as we might invoke things like `realloc`). Again the precise
        // order of everything doesn't matter since intermediate states cannot
        // be witnessed, hence the setting of flags here to encapsulate both
        // liftings and lowerings.
        //
        // TODO: like above the management of the `MAY_LEAVE` flag can probably
        // be elided here for "simple" results.
        self.set_flag(adapter.lower.flags, FLAG_MAY_LEAVE, false);
        self.translate_results(adapter, &param_locals, &result_locals);
        self.set_flag(adapter.lower.flags, FLAG_MAY_LEAVE, true);

        // And finally post-return state is handled here once all results/etc
        // are all translated. The raw core results are replayed as arguments
        // to the callee's `post-return` function when one is configured.
        if let Some(func) = adapter.lift.post_return {
            for (result, _) in result_locals.iter() {
                self.instruction(LocalGet(*result));
            }
            self.instruction(Call(func.as_u32()));
        }
        // Re-allow entering the callee now that this call has fully completed.
        if adapter.called_as_export {
            self.set_flag(adapter.lift.flags, FLAG_MAY_ENTER, true);
        }

        // Release the temporary locals used to hold the callee's results.
        for tmp in temps {
            self.free_temp_local(tmp);
        }

        if self.emit_resource_call {
            let exit = self.module.import_resource_exit_call();
            self.instruction(Call(exit.as_u32()));
        }

        self.finish()
    }
786
    /// Translates all parameters of the adapted call from the caller's
    /// (lowered) representation into the callee's (lifted) representation.
    ///
    /// `param_locals` are the core wasm locals holding the caller's flat
    /// parameters, or a single pointer into the caller's linear memory when
    /// flattening overflowed `MAX_FLAT_PARAMS`. Translated values are left on
    /// the wasm operand stack ready for the call to the callee; when the
    /// callee takes its parameters indirectly a freshly `malloc`'d pointer is
    /// pushed instead.
    fn translate_params(&mut self, adapter: &AdapterData, param_locals: &[(u32, ValType)]) {
        let src_tys = self.types[adapter.lower.ty].params;
        let src_tys = self.types[src_tys]
            .types
            .iter()
            .copied()
            .collect::<Vec<_>>();
        let dst_tys = self.types[adapter.lift.ty].params;
        let dst_tys = self.types[dst_tys]
            .types
            .iter()
            .copied()
            .collect::<Vec<_>>();
        let lift_opts = &adapter.lift.options;
        let lower_opts = &adapter.lower.options;

        // TODO: handle subtyping
        assert_eq!(src_tys.len(), dst_tys.len());

        // An async-lowered import never has a flat on-stack representation of
        // its parameters, so the memory-based source path is forced below.
        let src_flat = if adapter.lower.options.async_ {
            None
        } else {
            self.types
                .flatten_types(lower_opts, MAX_FLAT_PARAMS, src_tys.iter().copied())
        };
        let dst_flat =
            self.types
                .flatten_types(lift_opts, MAX_FLAT_PARAMS, dst_tys.iter().copied());

        let src = if let Some(flat) = &src_flat {
            Source::Stack(Stack {
                locals: &param_locals[..flat.len()],
                opts: lower_opts,
            })
        } else {
            // If there are too many parameters then that means the parameters
            // are actually a tuple stored in linear memory addressed by the
            // first parameter local.
            let (addr, ty) = param_locals[0];
            assert_eq!(ty, lower_opts.ptr());
            // The tuple's alignment is the maximum alignment of its fields.
            let align = src_tys
                .iter()
                .map(|t| self.types.align(lower_opts, t))
                .max()
                .unwrap_or(1);
            Source::Memory(self.memory_operand(lower_opts, TempLocal::new(addr, ty), align))
        };

        let dst = if let Some(flat) = &dst_flat {
            Destination::Stack(flat, lift_opts)
        } else {
            // Compute the size/alignment of the parameter tuple in the
            // callee's memory model (32- vs 64-bit pointers).
            let abi = CanonicalAbiInfo::record(dst_tys.iter().map(|t| self.types.canonical_abi(t)));
            let (size, align) = if lift_opts.memory64 {
                (abi.size64, abi.align64)
            } else {
                (abi.size32, abi.align32)
            };

            // If there are too many parameters then space is allocated in the
            // destination module for the parameters via its `realloc` function.
            let size = MallocSize::Const(size);
            Destination::Memory(self.malloc(lift_opts, size, align))
        };

        // Parameters are modeled as a record: translate each field pairwise
        // from the source representation to the destination representation.
        let srcs = src
            .record_field_srcs(self.types, src_tys.iter().copied())
            .zip(src_tys.iter());
        let dsts = dst
            .record_field_dsts(self.types, dst_tys.iter().copied())
            .zip(dst_tys.iter());
        for ((src, src_ty), (dst, dst_ty)) in srcs.zip(dsts) {
            self.translate(&src_ty, &src, &dst_ty, &dst);
        }

        // If the destination was linear memory instead of the stack then the
        // actual parameter that we're passing is the address of the values
        // stored, so ensure that's happening in the wasm body here.
        if let Destination::Memory(mem) = dst {
            self.instruction(LocalGet(mem.addr.idx));
            self.free_temp_local(mem.addr);
        }
    }
869
    /// Translates the results of the adapted call from the callee's (lifted)
    /// representation back into the caller's (lowered) representation.
    ///
    /// * `param_locals` - the caller's original parameter locals; when the
    ///   results don't flatten on the lowered side the last parameter local is
    ///   the caller-provided return pointer.
    /// * `result_locals` - locals holding the callee's raw core results; when
    ///   the results don't flatten on the lifted side the first of these is a
    ///   pointer into the callee's linear memory.
    fn translate_results(
        &mut self,
        adapter: &AdapterData,
        param_locals: &[(u32, ValType)],
        result_locals: &[(u32, ValType)],
    ) {
        let src_tys = self.types[adapter.lift.ty].results;
        let src_tys = self.types[src_tys]
            .types
            .iter()
            .copied()
            .collect::<Vec<_>>();
        let dst_tys = self.types[adapter.lower.ty].results;
        let dst_tys = self.types[dst_tys]
            .types
            .iter()
            .copied()
            .collect::<Vec<_>>();
        let lift_opts = &adapter.lift.options;
        let lower_opts = &adapter.lower.options;

        let src_flat = self
            .types
            .flatten_lifting_types(lift_opts, src_tys.iter().copied());
        let dst_flat = self
            .types
            .flatten_lowering_types(lower_opts, dst_tys.iter().copied());

        let src = if src_flat.is_some() {
            Source::Stack(Stack {
                locals: result_locals,
                opts: lift_opts,
            })
        } else {
            // The original results to read from in this case come from the
            // return value of the function itself. The imported function will
            // return a linear memory address at which the values can be read
            // from.
            let align = src_tys
                .iter()
                .map(|t| self.types.align(lift_opts, t))
                .max()
                .unwrap_or(1);
            // Async calls carry one extra core result alongside the return
            // pointer; either way the pointer is the first result local.
            assert_eq!(
                result_locals.len(),
                if lower_opts.async_ || lift_opts.async_ {
                    2
                } else {
                    1
                }
            );
            let (addr, ty) = result_locals[0];
            assert_eq!(ty, lift_opts.ptr());
            Source::Memory(self.memory_operand(lift_opts, TempLocal::new(addr, ty), align))
        };

        let dst = if let Some(flat) = &dst_flat {
            Destination::Stack(flat, lower_opts)
        } else {
            // This is slightly different than `translate_params` where the
            // return pointer was provided by the caller of this function
            // meaning the last parameter local is a pointer into linear memory.
            let align = dst_tys
                .iter()
                .map(|t| self.types.align(lower_opts, t))
                .max()
                .unwrap_or(1);
            let (addr, ty) = *param_locals.last().expect("no retptr");
            assert_eq!(ty, lower_opts.ptr());
            Destination::Memory(self.memory_operand(lower_opts, TempLocal::new(addr, ty), align))
        };

        // Results are modeled as a record: translate each field pairwise from
        // the source representation to the destination representation.
        let srcs = src
            .record_field_srcs(self.types, src_tys.iter().copied())
            .zip(src_tys.iter());
        let dsts = dst
            .record_field_dsts(self.types, dst_tys.iter().copied())
            .zip(dst_tys.iter());
        for ((src, src_ty), (dst, dst_ty)) in srcs.zip(dsts) {
            self.translate(&src_ty, &src, &dst_ty, &dst);
        }
    }
952
    /// Translates a single value of type `src_ty` read from `src` into a
    /// value of type `dst_ty` written to `dst`.
    ///
    /// Translation happens either inline in the current function or, when
    /// this function's remaining `fuel` is exhausted, by emitting a call to
    /// an outlined helper function shared across adapters (see the cost
    /// discussion below for why).
    fn translate(
        &mut self,
        src_ty: &InterfaceType,
        src: &Source<'_>,
        dst_ty: &InterfaceType,
        dst: &Destination,
    ) {
        // Debug-mode sanity checks that memory operands meet the alignment
        // the canonical ABI requires for these types.
        if let Source::Memory(mem) = src {
            self.assert_aligned(src_ty, mem);
        }
        if let Destination::Memory(mem) = dst {
            self.assert_aligned(dst_ty, mem);
        }

        // Calculate a cost heuristic for what the translation of this specific
        // layer of the type is going to incur. The purpose of this cost is that
        // we'll deduct it from `self.fuel` and if no fuel is remaining then
        // translation is outlined into a separate function rather than being
        // translated into this function.
        //
        // The general goal is to avoid creating an exponentially sized function
        // for a linearly sized input (the type section). By outlining helper
        // functions there will ideally be a constant set of helper functions
        // per type (to accommodate in-memory or on-stack transfers as well as
        // src/dst options) which means that each function is at most a certain
        // size and we have a linear number of functions which should guarantee
        // an overall linear size of the output.
        //
        // To implement this the current heuristic is that each layer of
        // translating a type has a cost associated with it and this cost is
        // accounted for in `self.fuel`. Some conversions are considered free as
        // they generate basically as much code as the `call` to the translation
        // function while other are considered proportionally expensive to the
        // size of the type. The hope is that some upper layers are of a type's
        // translation are all inlined into one function but bottom layers end
        // up getting outlined to separate functions. Theoretically, again this
        // is built on hopes and dreams, the outlining can be shared amongst
        // tightly-intertwined type hierarchies which will reduce the size of
        // the output module due to the helpers being used.
        //
        // This heuristic of how to split functions has changed a few times in
        // the past and this isn't necessarily guaranteed to be the final
        // iteration.
        let cost = match src_ty {
            // These types are all quite simple to load/store and equate to
            // basically the same cost of the `call` instruction to call an
            // out-of-line translation function, so give them 0 cost.
            InterfaceType::Bool
            | InterfaceType::U8
            | InterfaceType::S8
            | InterfaceType::U16
            | InterfaceType::S16
            | InterfaceType::U32
            | InterfaceType::S32
            | InterfaceType::U64
            | InterfaceType::S64
            | InterfaceType::Float32
            | InterfaceType::Float64 => 0,

            // This has a small amount of validation associated with it, so
            // give it a cost of 1.
            InterfaceType::Char => 1,

            // This has a fair bit of code behind it depending on the
            // strings/encodings in play, so arbitrarily assign it this cost.
            InterfaceType::String => 40,

            // Iteration of a loop is along the lines of the cost of a string
            // so give it the same cost
            InterfaceType::List(_) => 40,

            InterfaceType::Flags(i) => {
                let count = self.module.types[*i].names.len();
                match FlagsSize::from_count(count) {
                    FlagsSize::Size0 => 0,
                    FlagsSize::Size1 | FlagsSize::Size2 => 1,
                    FlagsSize::Size4Plus(n) => n.into(),
                }
            }

            // Aggregates cost proportionally to their number of fields/cases.
            InterfaceType::Record(i) => self.types[*i].fields.len(),
            InterfaceType::Tuple(i) => self.types[*i].types.len(),
            InterfaceType::Variant(i) => self.types[*i].cases.len(),
            InterfaceType::Enum(i) => self.types[*i].names.len(),

            // 2 cases to consider for each of these variants.
            InterfaceType::Option(_) | InterfaceType::Result(_) => 2,

            // TODO(#6696) - something nonzero, is 1 right?
            InterfaceType::Own(_)
            | InterfaceType::Borrow(_)
            | InterfaceType::Future(_)
            | InterfaceType::Stream(_)
            | InterfaceType::ErrorContext(_) => 1,
        };

        match self.fuel.checked_sub(cost) {
            // This function has enough fuel to perform the layer of translation
            // necessary for this type, so the fuel is updated in-place and
            // translation continues. Note that the recursion here is bounded by
            // the static recursion limit for all interface types as imposed
            // during the translation phase.
            Some(n) => {
                self.fuel = n;
                match src_ty {
                    InterfaceType::Bool => self.translate_bool(src, dst_ty, dst),
                    InterfaceType::U8 => self.translate_u8(src, dst_ty, dst),
                    InterfaceType::S8 => self.translate_s8(src, dst_ty, dst),
                    InterfaceType::U16 => self.translate_u16(src, dst_ty, dst),
                    InterfaceType::S16 => self.translate_s16(src, dst_ty, dst),
                    InterfaceType::U32 => self.translate_u32(src, dst_ty, dst),
                    InterfaceType::S32 => self.translate_s32(src, dst_ty, dst),
                    InterfaceType::U64 => self.translate_u64(src, dst_ty, dst),
                    InterfaceType::S64 => self.translate_s64(src, dst_ty, dst),
                    InterfaceType::Float32 => self.translate_f32(src, dst_ty, dst),
                    InterfaceType::Float64 => self.translate_f64(src, dst_ty, dst),
                    InterfaceType::Char => self.translate_char(src, dst_ty, dst),
                    InterfaceType::String => self.translate_string(src, dst_ty, dst),
                    InterfaceType::List(t) => self.translate_list(*t, src, dst_ty, dst),
                    InterfaceType::Record(t) => self.translate_record(*t, src, dst_ty, dst),
                    InterfaceType::Flags(f) => self.translate_flags(*f, src, dst_ty, dst),
                    InterfaceType::Tuple(t) => self.translate_tuple(*t, src, dst_ty, dst),
                    InterfaceType::Variant(v) => self.translate_variant(*v, src, dst_ty, dst),
                    InterfaceType::Enum(t) => self.translate_enum(*t, src, dst_ty, dst),
                    InterfaceType::Option(t) => self.translate_option(*t, src, dst_ty, dst),
                    InterfaceType::Result(t) => self.translate_result(*t, src, dst_ty, dst),
                    InterfaceType::Own(t) => self.translate_own(*t, src, dst_ty, dst),
                    InterfaceType::Borrow(t) => self.translate_borrow(*t, src, dst_ty, dst),
                    InterfaceType::Future(t) => self.translate_future(*t, src, dst_ty, dst),
                    InterfaceType::Stream(t) => self.translate_stream(*t, src, dst_ty, dst),
                    InterfaceType::ErrorContext(t) => {
                        self.translate_error_context(*t, src, dst_ty, dst)
                    }
                }
            }

            // This function does not have enough fuel left to perform this
            // layer of translation so the translation is deferred to a helper
            // function. The actual translation here is then done by marshalling
            // the src/dst into the function we're calling and then processing
            // the results.
            None => {
                let src_loc = match src {
                    // If the source is on the stack then `stack_get` is used to
                    // convert everything to the appropriate flat representation
                    // for the source type.
                    Source::Stack(stack) => {
                        for (i, ty) in stack
                            .opts
                            .flat_types(src_ty, self.types)
                            .unwrap()
                            .iter()
                            .enumerate()
                        {
                            let stack = stack.slice(i..i + 1);
                            self.stack_get(&stack, (*ty).into());
                        }
                        HelperLocation::Stack
                    }
                    // If the source is in memory then the pointer is passed
                    // through, but note that the offset must be factored in
                    // here since the translation function will start from
                    // offset 0.
                    Source::Memory(mem) => {
                        self.push_mem_addr(mem);
                        HelperLocation::Memory
                    }
                };
                let dst_loc = match dst {
                    Destination::Stack(..) => HelperLocation::Stack,
                    Destination::Memory(mem) => {
                        self.push_mem_addr(mem);
                        HelperLocation::Memory
                    }
                };
                // Generate a `FunctionId` corresponding to the `Helper`
                // configuration that is necessary here. This will ideally be a
                // "cache hit" and use a preexisting helper which represents
                // outlining what would otherwise be duplicate code within a
                // function to one function.
                let helper = self.module.translate_helper(Helper {
                    src: HelperType {
                        ty: *src_ty,
                        opts: *src.opts(),
                        loc: src_loc,
                    },
                    dst: HelperType {
                        ty: *dst_ty,
                        opts: *dst.opts(),
                        loc: dst_loc,
                    },
                });
                // Emit a `call` instruction which will get "relocated" to a
                // function index once translation has completely finished.
                self.flush_code();
                self.module.funcs[self.result].body.push(Body::Call(helper));

                // If the destination of the translation was on the stack then
                // the types on the stack need to be optionally converted to
                // different types (e.g. if the result here is part of a variant
                // somewhere else).
                //
                // This translation happens inline here by popping the results
                // into new locals and then using those locals to do a
                // `stack_set`.
                if let Destination::Stack(tys, opts) = dst {
                    let flat = self
                        .types
                        .flatten_types(opts, usize::MAX, [*dst_ty])
                        .unwrap();
                    assert_eq!(flat.len(), tys.len());
                    let locals = flat
                        .iter()
                        .rev()
                        .map(|ty| self.local_set_new_tmp(*ty))
                        .collect::<Vec<_>>();
                    for (ty, local) in tys.iter().zip(locals.into_iter().rev()) {
                        self.instruction(LocalGet(local.idx));
                        self.stack_set(std::slice::from_ref(ty), local.ty);
                        self.free_temp_local(local);
                    }
                }
            }
        }
    }
1178
1179    fn push_mem_addr(&mut self, mem: &Memory<'_>) {
1180        self.instruction(LocalGet(mem.addr.idx));
1181        if mem.offset != 0 {
1182            self.ptr_uconst(mem.opts, mem.offset);
1183            self.ptr_add(mem.opts);
1184        }
1185    }
1186
1187    fn translate_bool(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
1188        // TODO: subtyping
1189        assert!(matches!(dst_ty, InterfaceType::Bool));
1190        self.push_dst_addr(dst);
1191
1192        // Booleans are canonicalized to 0 or 1 as they pass through the
1193        // component boundary, so use a `select` instruction to do so.
1194        self.instruction(I32Const(1));
1195        self.instruction(I32Const(0));
1196        match src {
1197            Source::Memory(mem) => self.i32_load8u(mem),
1198            Source::Stack(stack) => self.stack_get(stack, ValType::I32),
1199        }
1200        self.instruction(Select);
1201
1202        match dst {
1203            Destination::Memory(mem) => self.i32_store8(mem),
1204            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
1205        }
1206    }
1207
1208    fn translate_u8(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
1209        // TODO: subtyping
1210        assert!(matches!(dst_ty, InterfaceType::U8));
1211        self.convert_u8_mask(src, dst, 0xff);
1212    }
1213
1214    fn convert_u8_mask(&mut self, src: &Source<'_>, dst: &Destination<'_>, mask: u8) {
1215        self.push_dst_addr(dst);
1216        let mut needs_mask = true;
1217        match src {
1218            Source::Memory(mem) => {
1219                self.i32_load8u(mem);
1220                needs_mask = mask != 0xff;
1221            }
1222            Source::Stack(stack) => {
1223                self.stack_get(stack, ValType::I32);
1224            }
1225        }
1226        if needs_mask {
1227            self.instruction(I32Const(i32::from(mask)));
1228            self.instruction(I32And);
1229        }
1230        match dst {
1231            Destination::Memory(mem) => self.i32_store8(mem),
1232            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
1233        }
1234    }
1235
1236    fn translate_s8(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
1237        // TODO: subtyping
1238        assert!(matches!(dst_ty, InterfaceType::S8));
1239        self.push_dst_addr(dst);
1240        match src {
1241            Source::Memory(mem) => self.i32_load8s(mem),
1242            Source::Stack(stack) => {
1243                self.stack_get(stack, ValType::I32);
1244                self.instruction(I32Extend8S);
1245            }
1246        }
1247        match dst {
1248            Destination::Memory(mem) => self.i32_store8(mem),
1249            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
1250        }
1251    }
1252
1253    fn translate_u16(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
1254        // TODO: subtyping
1255        assert!(matches!(dst_ty, InterfaceType::U16));
1256        self.convert_u16_mask(src, dst, 0xffff);
1257    }
1258
1259    fn convert_u16_mask(&mut self, src: &Source<'_>, dst: &Destination<'_>, mask: u16) {
1260        self.push_dst_addr(dst);
1261        let mut needs_mask = true;
1262        match src {
1263            Source::Memory(mem) => {
1264                self.i32_load16u(mem);
1265                needs_mask = mask != 0xffff;
1266            }
1267            Source::Stack(stack) => {
1268                self.stack_get(stack, ValType::I32);
1269            }
1270        }
1271        if needs_mask {
1272            self.instruction(I32Const(i32::from(mask)));
1273            self.instruction(I32And);
1274        }
1275        match dst {
1276            Destination::Memory(mem) => self.i32_store16(mem),
1277            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
1278        }
1279    }
1280
1281    fn translate_s16(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
1282        // TODO: subtyping
1283        assert!(matches!(dst_ty, InterfaceType::S16));
1284        self.push_dst_addr(dst);
1285        match src {
1286            Source::Memory(mem) => self.i32_load16s(mem),
1287            Source::Stack(stack) => {
1288                self.stack_get(stack, ValType::I32);
1289                self.instruction(I32Extend16S);
1290            }
1291        }
1292        match dst {
1293            Destination::Memory(mem) => self.i32_store16(mem),
1294            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
1295        }
1296    }
1297
1298    fn translate_u32(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
1299        // TODO: subtyping
1300        assert!(matches!(dst_ty, InterfaceType::U32));
1301        self.convert_u32_mask(src, dst, 0xffffffff)
1302    }
1303
1304    fn convert_u32_mask(&mut self, src: &Source<'_>, dst: &Destination<'_>, mask: u32) {
1305        self.push_dst_addr(dst);
1306        match src {
1307            Source::Memory(mem) => self.i32_load(mem),
1308            Source::Stack(stack) => self.stack_get(stack, ValType::I32),
1309        }
1310        if mask != 0xffffffff {
1311            self.instruction(I32Const(mask as i32));
1312            self.instruction(I32And);
1313        }
1314        match dst {
1315            Destination::Memory(mem) => self.i32_store(mem),
1316            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
1317        }
1318    }
1319
1320    fn translate_s32(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
1321        // TODO: subtyping
1322        assert!(matches!(dst_ty, InterfaceType::S32));
1323        self.push_dst_addr(dst);
1324        match src {
1325            Source::Memory(mem) => self.i32_load(mem),
1326            Source::Stack(stack) => self.stack_get(stack, ValType::I32),
1327        }
1328        match dst {
1329            Destination::Memory(mem) => self.i32_store(mem),
1330            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
1331        }
1332    }
1333
1334    fn translate_u64(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
1335        // TODO: subtyping
1336        assert!(matches!(dst_ty, InterfaceType::U64));
1337        self.push_dst_addr(dst);
1338        match src {
1339            Source::Memory(mem) => self.i64_load(mem),
1340            Source::Stack(stack) => self.stack_get(stack, ValType::I64),
1341        }
1342        match dst {
1343            Destination::Memory(mem) => self.i64_store(mem),
1344            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I64),
1345        }
1346    }
1347
1348    fn translate_s64(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
1349        // TODO: subtyping
1350        assert!(matches!(dst_ty, InterfaceType::S64));
1351        self.push_dst_addr(dst);
1352        match src {
1353            Source::Memory(mem) => self.i64_load(mem),
1354            Source::Stack(stack) => self.stack_get(stack, ValType::I64),
1355        }
1356        match dst {
1357            Destination::Memory(mem) => self.i64_store(mem),
1358            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I64),
1359        }
1360    }
1361
1362    fn translate_f32(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
1363        // TODO: subtyping
1364        assert!(matches!(dst_ty, InterfaceType::Float32));
1365        self.push_dst_addr(dst);
1366        match src {
1367            Source::Memory(mem) => self.f32_load(mem),
1368            Source::Stack(stack) => self.stack_get(stack, ValType::F32),
1369        }
1370        match dst {
1371            Destination::Memory(mem) => self.f32_store(mem),
1372            Destination::Stack(stack, _) => self.stack_set(stack, ValType::F32),
1373        }
1374    }
1375
1376    fn translate_f64(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
1377        // TODO: subtyping
1378        assert!(matches!(dst_ty, InterfaceType::Float64));
1379        self.push_dst_addr(dst);
1380        match src {
1381            Source::Memory(mem) => self.f64_load(mem),
1382            Source::Stack(stack) => self.stack_get(stack, ValType::F64),
1383        }
1384        match dst {
1385            Destination::Memory(mem) => self.f64_store(mem),
1386            Destination::Stack(stack, _) => self.stack_set(stack, ValType::F64),
1387        }
1388    }
1389
    /// Translates a `char` from `src` into `dst`, trapping with
    /// `Trap::InvalidChar` when the 32-bit value is not a valid Unicode
    /// scalar value.
    fn translate_char(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        assert!(matches!(dst_ty, InterfaceType::Char));
        // Stash the raw code point in a temporary local since the validation
        // sequence below reads it more than once.
        match src {
            Source::Memory(mem) => self.i32_load(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::I32),
        }
        let local = self.local_set_new_tmp(ValType::I32);

        // This sequence is copied from the output of LLVM for:
        //
        //      pub extern "C" fn foo(x: u32) -> char {
        //          char::try_from(x)
        //              .unwrap_or_else(|_| std::arch::wasm32::unreachable())
        //      }
        //
        // Apparently this does what's required by the canonical ABI:
        //
        //    def i32_to_char(opts, i):
        //      trap_if(i >= 0x110000)
        //      trap_if(0xD800 <= i <= 0xDFFF)
        //      return chr(i)
        //
        // ... but I don't know how it works other than "well I trust LLVM"
        //
        // NOTE(review): the XOR with 0xd800 appears to fold the surrogate
        // range and the >= 0x110000 range into a single unsigned comparison,
        // with the second `i32.ne` check covering a residual boundary case —
        // verify against the LLVM output before relying on this reading.
        self.instruction(Block(BlockType::Empty));
        self.instruction(Block(BlockType::Empty));
        self.instruction(LocalGet(local.idx));
        self.instruction(I32Const(0xd800));
        self.instruction(I32Xor);
        self.instruction(I32Const(-0x110000));
        self.instruction(I32Add);
        self.instruction(I32Const(-0x10f800));
        self.instruction(I32LtU);
        self.instruction(BrIf(0));
        self.instruction(LocalGet(local.idx));
        self.instruction(I32Const(0x110000));
        self.instruction(I32Ne);
        self.instruction(BrIf(1));
        self.instruction(End);
        self.trap(Trap::InvalidChar);
        self.instruction(End);

        // The validated code point is then stored into the destination.
        self.push_dst_addr(dst);
        self.instruction(LocalGet(local.idx));
        match dst {
            Destination::Memory(mem) => {
                self.i32_store(mem);
            }
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
        }

        self.free_temp_local(local);
    }
1442
    fn translate_string(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        assert!(matches!(dst_ty, InterfaceType::String));
        let src_opts = src.opts();
        let dst_opts = dst.opts();

        // Load the pointer/length of this string into temporary locals. These
        // will be referenced a good deal so this just makes it easier to deal
        // with them consistently below rather than trying to reload from memory
        // for example.
        match src {
            Source::Stack(s) => {
                assert_eq!(s.locals.len(), 2);
                self.stack_get(&s.slice(0..1), src_opts.ptr());
                self.stack_get(&s.slice(1..2), src_opts.ptr());
            }
            Source::Memory(mem) => {
                self.ptr_load(mem);
                self.ptr_load(&mem.bump(src_opts.ptr_size().into()));
            }
        }
        // Note the pop order here: the length was pushed second (on top of the
        // operand stack) so it is set into its local first.
        let src_len = self.local_set_new_tmp(src_opts.ptr());
        let src_ptr = self.local_set_new_tmp(src_opts.ptr());
        let src_str = WasmString {
            ptr: src_ptr,
            len: src_len,
            opts: src_opts,
        };

        // Dispatch on the source/destination encoding pair to the appropriate
        // transcoding helper, each of which returns the destination string's
        // freshly-allocated pointer/length locals.
        let dst_str = match src_opts.string_encoding {
            StringEncoding::Utf8 => match dst_opts.string_encoding {
                StringEncoding::Utf8 => self.string_copy(&src_str, FE::Utf8, dst_opts, FE::Utf8),
                StringEncoding::Utf16 => self.string_utf8_to_utf16(&src_str, dst_opts),
                StringEncoding::CompactUtf16 => {
                    self.string_to_compact(&src_str, FE::Utf8, dst_opts)
                }
            },

            StringEncoding::Utf16 => {
                self.verify_aligned(src_opts, src_str.ptr.idx, 2);
                match dst_opts.string_encoding {
                    StringEncoding::Utf8 => {
                        self.string_deflate_to_utf8(&src_str, FE::Utf16, dst_opts)
                    }
                    StringEncoding::Utf16 => {
                        self.string_copy(&src_str, FE::Utf16, dst_opts, FE::Utf16)
                    }
                    StringEncoding::CompactUtf16 => {
                        self.string_to_compact(&src_str, FE::Utf16, dst_opts)
                    }
                }
            }

            StringEncoding::CompactUtf16 => {
                self.verify_aligned(src_opts, src_str.ptr.idx, 2);

                // Test the tag bit to see whether this is a utf16 or a latin1
                // string at runtime...
                self.instruction(LocalGet(src_str.len.idx));
                self.ptr_uconst(src_opts, UTF16_TAG);
                self.ptr_and(src_opts);
                self.ptr_if(src_opts, BlockType::Empty);

                // In the utf16 block unset the upper bit from the length local
                // so further calculations have the right value. Afterwards the
                // string transcode proceeds assuming utf16.
                self.instruction(LocalGet(src_str.len.idx));
                self.ptr_uconst(src_opts, UTF16_TAG);
                self.ptr_xor(src_opts);
                self.instruction(LocalSet(src_str.len.idx));
                let s1 = match dst_opts.string_encoding {
                    StringEncoding::Utf8 => {
                        self.string_deflate_to_utf8(&src_str, FE::Utf16, dst_opts)
                    }
                    StringEncoding::Utf16 => {
                        self.string_copy(&src_str, FE::Utf16, dst_opts, FE::Utf16)
                    }
                    StringEncoding::CompactUtf16 => {
                        self.string_compact_utf16_to_compact(&src_str, dst_opts)
                    }
                };

                self.instruction(Else);

                // In the latin1 block the `src_len` local is already the number
                // of code units, so the string transcoding is all that needs to
                // happen.
                let s2 = match dst_opts.string_encoding {
                    StringEncoding::Utf16 => {
                        self.string_copy(&src_str, FE::Latin1, dst_opts, FE::Utf16)
                    }
                    StringEncoding::Utf8 => {
                        self.string_deflate_to_utf8(&src_str, FE::Latin1, dst_opts)
                    }
                    StringEncoding::CompactUtf16 => {
                        self.string_copy(&src_str, FE::Latin1, dst_opts, FE::Latin1)
                    }
                };
                // Copy the `s2` results into the `s1` locals so that `s1`
                // holds the resulting pointer/length of this transcode
                // regardless of which branch was taken at runtime.
                self.instruction(LocalGet(s2.ptr.idx));
                self.instruction(LocalSet(s1.ptr.idx));
                self.instruction(LocalGet(s2.len.idx));
                self.instruction(LocalSet(s1.len.idx));
                self.instruction(End);
                self.free_temp_local(s2.ptr);
                self.free_temp_local(s2.len);
                s1
            }
        };

        // Store the ptr/length in the desired destination
        match dst {
            Destination::Stack(s, _) => {
                self.instruction(LocalGet(dst_str.ptr.idx));
                self.stack_set(&s[..1], dst_opts.ptr());
                self.instruction(LocalGet(dst_str.len.idx));
                self.stack_set(&s[1..], dst_opts.ptr());
            }
            Destination::Memory(mem) => {
                self.instruction(LocalGet(mem.addr.idx));
                self.instruction(LocalGet(dst_str.ptr.idx));
                self.ptr_store(mem);
                self.instruction(LocalGet(mem.addr.idx));
                self.instruction(LocalGet(dst_str.len.idx));
                self.ptr_store(&mem.bump(dst_opts.ptr_size().into()));
            }
        }

        self.free_temp_local(src_str.ptr);
        self.free_temp_local(src_str.len);
        self.free_temp_local(dst_str.ptr);
        self.free_temp_local(dst_str.len);
    }
1576
    // Corresponding function for `store_string_copy` in the spec.
    //
    // This performs a transcoding of the string with a one-pass copy from
    // the `src` encoding to the `dst` encoding. This is only possible for
    // fixed encodings where the first allocation is guaranteed to be an
    // appropriate fit so it's not suitable for all encodings.
    //
    // Imported host transcoding functions here take the src/dst pointers as
    // well as the number of code units in the source (which always matches
    // the number of code units in the destination). There is no return
    // value from the transcode function since the encoding should always
    // work on the first pass.
    //
    // Returns the pointer/length locals of the freshly-allocated destination
    // string; callers are responsible for freeing those temporaries.
    fn string_copy<'c>(
        &mut self,
        src: &WasmString<'_>,
        src_enc: FE,
        dst_opts: &'c Options,
        dst_enc: FE,
    ) -> WasmString<'c> {
        // A one-pass copy only works when each source code unit fits in a
        // destination code unit.
        assert!(dst_enc.width() >= src_enc.width());
        self.validate_string_length(src, dst_enc);

        // Calculate the source byte length given the size of each code
        // unit. Note that this shouldn't overflow given
        // `validate_string_length` above.
        let mut src_byte_len_tmp = None;
        let src_byte_len = if src_enc.width() == 1 {
            src.len.idx
        } else {
            assert_eq!(src_enc.width(), 2);
            // byte length = code units * 2, via a shift-left by one
            self.instruction(LocalGet(src.len.idx));
            self.ptr_uconst(src.opts, 1);
            self.ptr_shl(src.opts);
            let tmp = self.local_set_new_tmp(src.opts.ptr());
            let ret = tmp.idx;
            src_byte_len_tmp = Some(tmp);
            ret
        };

        // Convert the source code units length to the destination byte
        // length type.
        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst_opts.ptr());
        let dst_len = self.local_tee_new_tmp(dst_opts.ptr());
        if dst_enc.width() > 1 {
            assert_eq!(dst_enc.width(), 2);
            self.ptr_uconst(dst_opts, 1);
            self.ptr_shl(dst_opts);
        }
        let dst_byte_len = self.local_set_new_tmp(dst_opts.ptr());

        // Allocate space in the destination using the calculated byte
        // length.
        let dst = {
            let dst_mem = self.malloc(
                dst_opts,
                MallocSize::Local(dst_byte_len.idx),
                dst_enc.width().into(),
            );
            WasmString {
                ptr: dst_mem.addr,
                len: dst_len,
                opts: dst_opts,
            }
        };

        // Validate that `src_len + src_ptr` and
        // `dst_mem.addr_local + dst_byte_len` are both in-bounds. This
        // is done by loading the last byte of the string and if that
        // doesn't trap then it's known valid.
        self.validate_string_inbounds(src, src_byte_len);
        self.validate_string_inbounds(&dst, dst_byte_len.idx);

        // If the validations pass then the host `transcode` intrinsic
        // is invoked. This will either raise a trap or otherwise succeed
        // in which case we're done.
        let op = if src_enc == dst_enc {
            Transcode::Copy(src_enc)
        } else {
            // The only size-preserving cross-encoding pair handled here is
            // latin1 -> utf16 (inflation by zero-extension).
            assert_eq!(src_enc, FE::Latin1);
            assert_eq!(dst_enc, FE::Utf16);
            Transcode::Latin1ToUtf16
        };
        let transcode = self.transcoder(src, &dst, op);
        self.instruction(LocalGet(src.ptr.idx));
        self.instruction(LocalGet(src.len.idx));
        self.instruction(LocalGet(dst.ptr.idx));
        self.instruction(Call(transcode.as_u32()));

        self.free_temp_local(dst_byte_len);
        if let Some(tmp) = src_byte_len_tmp {
            self.free_temp_local(tmp);
        }

        dst
    }
    // Corresponding function for `store_string_to_utf8` in the spec.
    //
    // This translation works by possibly performing a number of
    // reallocations. First a buffer of size input-code-units is used to try
    // to get the transcoding correct on the first try. If that fails the
    // maximum worst-case size is used and then that is resized down if it's
    // too large.
    //
    // The host transcoding function imported here will receive src ptr/len
    // and dst ptr/len and return how many code units were consumed on both
    // sides. The amount of code units consumed in the source dictates which
    // branches are taken in this conversion.
    //
    // Returns the pointer/length locals of the freshly-allocated destination
    // string; callers are responsible for freeing those temporaries.
    fn string_deflate_to_utf8<'c>(
        &mut self,
        src: &WasmString<'_>,
        src_enc: FE,
        dst_opts: &'c Options,
    ) -> WasmString<'c> {
        self.validate_string_length(src, src_enc);

        // Optimistically assume that the code unit length of the source is
        // all that's needed in the destination. Perform that allocation
        // here and proceed to transcoding below.
        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst_opts.ptr());
        let dst_len = self.local_tee_new_tmp(dst_opts.ptr());
        let dst_byte_len = self.local_set_new_tmp(dst_opts.ptr());

        let dst = {
            let dst_mem = self.malloc(dst_opts, MallocSize::Local(dst_byte_len.idx), 1);
            WasmString {
                ptr: dst_mem.addr,
                len: dst_len,
                opts: dst_opts,
            }
        };

        // Ensure buffers are all in-bounds
        let mut src_byte_len_tmp = None;
        let src_byte_len = match src_enc {
            // latin1 code units are one byte each
            FE::Latin1 => src.len.idx,
            // utf16 code units are two bytes each, hence the shift by one
            FE::Utf16 => {
                self.instruction(LocalGet(src.len.idx));
                self.ptr_uconst(src.opts, 1);
                self.ptr_shl(src.opts);
                let tmp = self.local_set_new_tmp(src.opts.ptr());
                let ret = tmp.idx;
                src_byte_len_tmp = Some(tmp);
                ret
            }
            FE::Utf8 => unreachable!(),
        };
        self.validate_string_inbounds(src, src_byte_len);
        self.validate_string_inbounds(&dst, dst_byte_len.idx);

        // Perform the initial transcode
        let op = match src_enc {
            FE::Latin1 => Transcode::Latin1ToUtf8,
            FE::Utf16 => Transcode::Utf16ToUtf8,
            FE::Utf8 => unreachable!(),
        };
        let transcode = self.transcoder(src, &dst, op);
        self.instruction(LocalGet(src.ptr.idx));
        self.instruction(LocalGet(src.len.idx));
        self.instruction(LocalGet(dst.ptr.idx));
        self.instruction(LocalGet(dst_byte_len.idx));
        self.instruction(Call(transcode.as_u32()));
        // The intrinsic returns (src units consumed, dst units written);
        // the second return value is on top of the stack and popped first.
        self.instruction(LocalSet(dst.len.idx));
        let src_len_tmp = self.local_set_new_tmp(src.opts.ptr());

        // Test if the source was entirely transcoded by comparing
        // `src_len_tmp`, the number of code units transcoded from the
        // source, with `src_len`, the original number of code units.
        self.instruction(LocalGet(src_len_tmp.idx));
        self.instruction(LocalGet(src.len.idx));
        self.ptr_ne(src.opts);
        self.instruction(If(BlockType::Empty));

        // Here a worst-case reallocation is performed to grow `dst_mem`.
        // In-line a check is also performed that the worst-case byte size
        // fits within the maximum size of strings.
        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
        self.ptr_uconst(dst.opts, 1); // align
        let factor = match src_enc {
            // A latin1 code unit inflates to at most 2 utf-8 bytes and a
            // utf16 code unit to at most 3.
            FE::Latin1 => 2,
            FE::Utf16 => 3,
            _ => unreachable!(),
        };
        self.validate_string_length_u8(src, factor);
        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst_opts.ptr());
        self.ptr_uconst(dst_opts, factor.into());
        self.ptr_mul(dst_opts);
        self.instruction(LocalTee(dst_byte_len.idx));
        self.instruction(Call(dst_opts.realloc.unwrap().as_u32()));
        self.instruction(LocalSet(dst.ptr.idx));

        // Verify that the destination is still in-bounds
        self.validate_string_inbounds(&dst, dst_byte_len.idx);

        // Perform another round of transcoding that should be guaranteed
        // to succeed. Note that all the parameters here are offset by the
        // results of the first transcoding to only perform the remaining
        // transcode on the final units.
        self.instruction(LocalGet(src.ptr.idx));
        self.instruction(LocalGet(src_len_tmp.idx));
        if let FE::Utf16 = src_enc {
            // scale consumed code units to bytes for the pointer offset
            self.ptr_uconst(src.opts, 1);
            self.ptr_shl(src.opts);
        }
        self.ptr_add(src.opts);
        self.instruction(LocalGet(src.len.idx));
        self.instruction(LocalGet(src_len_tmp.idx));
        self.ptr_sub(src.opts);
        self.instruction(LocalGet(dst.ptr.idx));
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_add(dst.opts);
        self.instruction(LocalGet(dst_byte_len.idx));
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_sub(dst.opts);
        self.instruction(Call(transcode.as_u32()));

        // Add the second result, the amount of destination units encoded,
        // to `dst_len` so it's an accurate reflection of the final size of
        // the destination buffer.
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_add(dst.opts);
        self.instruction(LocalSet(dst.len.idx));

        // In debug mode verify the first result consumed the entire string,
        // otherwise simply discard it.
        if self.module.debug {
            self.instruction(LocalGet(src.len.idx));
            self.instruction(LocalGet(src_len_tmp.idx));
            self.ptr_sub(src.opts);
            self.ptr_ne(src.opts);
            self.instruction(If(BlockType::Empty));
            self.trap(Trap::AssertFailed("should have finished encoding"));
            self.instruction(End);
        } else {
            self.instruction(Drop);
        }

        // Perform a downsizing if the worst-case size was too large
        self.instruction(LocalGet(dst.len.idx));
        self.instruction(LocalGet(dst_byte_len.idx));
        self.ptr_ne(dst.opts);
        self.instruction(If(BlockType::Empty));
        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
        self.ptr_uconst(dst.opts, 1); // align
        self.instruction(LocalGet(dst.len.idx)); // new_size
        self.instruction(Call(dst.opts.realloc.unwrap().as_u32()));
        self.instruction(LocalSet(dst.ptr.idx));
        self.instruction(End);

        // If the first transcode was enough then assert that the returned
        // amount of destination items written equals the byte size.
        if self.module.debug {
            self.instruction(Else);

            self.instruction(LocalGet(dst.len.idx));
            self.instruction(LocalGet(dst_byte_len.idx));
            self.ptr_ne(dst_opts);
            self.instruction(If(BlockType::Empty));
            self.trap(Trap::AssertFailed("should have finished encoding"));
            self.instruction(End);
        }

        self.instruction(End); // end of "first transcode not enough"

        self.free_temp_local(src_len_tmp);
        self.free_temp_local(dst_byte_len);
        if let Some(tmp) = src_byte_len_tmp {
            self.free_temp_local(tmp);
        }

        dst
    }
1850
    // Corresponds to the `store_utf8_to_utf16` function in the spec.
    //
    // When converting utf-8 to utf-16 a pessimistic allocation is
    // done which is twice the byte length of the utf-8 string.
    // The host then transcodes and returns how many code units were
    // actually used during the transcoding and if it's beneath the
    // pessimistic maximum then the buffer is reallocated down to
    // a smaller amount.
    //
    // The host-imported transcoding function takes the src/dst pointer as
    // well as the code unit size of both the source and destination. The
    // destination should always be big enough to hold the result of the
    // transcode and so the result of the host function is how many code
    // units were written to the destination.
    //
    // Returns the pointer/length locals of the freshly-allocated destination
    // string; callers are responsible for freeing those temporaries.
    fn string_utf8_to_utf16<'c>(
        &mut self,
        src: &WasmString<'_>,
        dst_opts: &'c Options,
    ) -> WasmString<'c> {
        self.validate_string_length(src, FE::Utf16);
        // Pessimistic destination: one utf-16 code unit (2 bytes) per source
        // byte, so the byte length is the source length shifted left by one.
        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst_opts.ptr());
        let dst_len = self.local_tee_new_tmp(dst_opts.ptr());
        self.ptr_uconst(dst_opts, 1);
        self.ptr_shl(dst_opts);
        let dst_byte_len = self.local_set_new_tmp(dst_opts.ptr());
        let dst = {
            let dst_mem = self.malloc(dst_opts, MallocSize::Local(dst_byte_len.idx), 2);
            WasmString {
                ptr: dst_mem.addr,
                len: dst_len,
                opts: dst_opts,
            }
        };

        self.validate_string_inbounds(src, src.len.idx);
        self.validate_string_inbounds(&dst, dst_byte_len.idx);

        let transcode = self.transcoder(src, &dst, Transcode::Utf8ToUtf16);
        self.instruction(LocalGet(src.ptr.idx));
        self.instruction(LocalGet(src.len.idx));
        self.instruction(LocalGet(dst.ptr.idx));
        self.instruction(Call(transcode.as_u32()));
        self.instruction(LocalSet(dst.len.idx));

        // If the number of code units returned by transcode is not
        // equal to the original number of code units then
        // the buffer must be shrunk.
        //
        // Note that the byte length of the final allocation we
        // want is twice the code unit length returned by the
        // transcoding function.
        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst.opts.ptr());
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_ne(dst_opts);
        self.instruction(If(BlockType::Empty));
        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
        self.ptr_uconst(dst.opts, 2); // align
        // new_size = final code unit count * 2 bytes
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_uconst(dst.opts, 1);
        self.ptr_shl(dst.opts);
        self.instruction(Call(dst.opts.realloc.unwrap().as_u32()));
        self.instruction(LocalSet(dst.ptr.idx));
        self.instruction(End); // end of shrink-to-fit

        self.free_temp_local(dst_byte_len);

        dst
    }
1920
    // Corresponds to `store_probably_utf16_to_latin1_or_utf16` in the spec.
    //
    // This will try to transcode the input utf16 string to utf16 in the
    // destination. If utf16 isn't needed though and latin1 could be used
    // then that's used instead and a reallocation to downsize occurs
    // afterwards.
    //
    // The host transcode function here will take the src/dst pointers as
    // well as src length. The destination byte length is twice the src code
    // unit length. The return value is the tagged length of the returned
    // string. If the upper bit is set then utf16 was used and the
    // conversion is done. If the upper bit is not set then latin1 was used
    // and a downsizing needs to happen.
    //
    // Returns the pointer/length locals of the freshly-allocated destination
    // string; callers are responsible for freeing those temporaries.
    fn string_compact_utf16_to_compact<'c>(
        &mut self,
        src: &WasmString<'_>,
        dst_opts: &'c Options,
    ) -> WasmString<'c> {
        self.validate_string_length(src, FE::Utf16);
        // Allocate assuming utf16: 2 bytes per source code unit.
        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst_opts.ptr());
        let dst_len = self.local_tee_new_tmp(dst_opts.ptr());
        self.ptr_uconst(dst_opts, 1);
        self.ptr_shl(dst_opts);
        let dst_byte_len = self.local_set_new_tmp(dst_opts.ptr());
        let dst = {
            let dst_mem = self.malloc(dst_opts, MallocSize::Local(dst_byte_len.idx), 2);
            WasmString {
                ptr: dst_mem.addr,
                len: dst_len,
                opts: dst_opts,
            }
        };

        // The source byte length (for the bounds check) equals the
        // destination byte length, just expressed in the source pointer type.
        self.convert_src_len_to_dst(dst_byte_len.idx, dst.opts.ptr(), src.opts.ptr());
        let src_byte_len = self.local_set_new_tmp(src.opts.ptr());

        self.validate_string_inbounds(src, src_byte_len.idx);
        self.validate_string_inbounds(&dst, dst_byte_len.idx);

        let transcode = self.transcoder(src, &dst, Transcode::Utf16ToCompactProbablyUtf16);
        self.instruction(LocalGet(src.ptr.idx));
        self.instruction(LocalGet(src.len.idx));
        self.instruction(LocalGet(dst.ptr.idx));
        self.instruction(Call(transcode.as_u32()));
        self.instruction(LocalSet(dst.len.idx));

        // Assert that the untagged code unit length is the same as the
        // source code unit length.
        if self.module.debug {
            self.instruction(LocalGet(dst.len.idx));
            self.ptr_uconst(dst.opts, !UTF16_TAG);
            self.ptr_and(dst.opts);
            self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst.opts.ptr());
            self.ptr_ne(dst.opts);
            self.instruction(If(BlockType::Empty));
            self.trap(Trap::AssertFailed("expected equal code units"));
            self.instruction(End);
        }

        // If the UTF16_TAG is set then utf16 was used and the destination
        // is already appropriately sized: branch out of the innermost
        // enclosing block (supplied by the caller) to skip the downsizing
        // below, otherwise fall through to resizing.
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_uconst(dst.opts, UTF16_TAG);
        self.ptr_and(dst.opts);
        self.ptr_br_if(dst.opts, 0);

        // Here `realloc` is used to downsize the string
        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
        self.ptr_uconst(dst.opts, 2); // align
        self.instruction(LocalGet(dst.len.idx)); // new_size
        self.instruction(Call(dst.opts.realloc.unwrap().as_u32()));
        self.instruction(LocalSet(dst.ptr.idx));

        self.free_temp_local(dst_byte_len);
        self.free_temp_local(src_byte_len);

        dst
    }
2001
2002    // Corresponds to `store_string_to_latin1_or_utf16` in the spec.
2003    //
2004    // This will attempt a first pass of transcoding to latin1 and on
2005    // failure a larger buffer is allocated for utf16 and then utf16 is
2006    // encoded in-place into the buffer. After either latin1 or utf16 the
2007    // buffer is then resized to fit the final string allocation.
2008    fn string_to_compact<'c>(
2009        &mut self,
2010        src: &WasmString<'_>,
2011        src_enc: FE,
2012        dst_opts: &'c Options,
2013    ) -> WasmString<'c> {
2014        self.validate_string_length(src, src_enc);
2015        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst_opts.ptr());
2016        let dst_len = self.local_tee_new_tmp(dst_opts.ptr());
2017        let dst_byte_len = self.local_set_new_tmp(dst_opts.ptr());
2018        let dst = {
2019            let dst_mem = self.malloc(dst_opts, MallocSize::Local(dst_byte_len.idx), 2);
2020            WasmString {
2021                ptr: dst_mem.addr,
2022                len: dst_len,
2023                opts: dst_opts,
2024            }
2025        };
2026
2027        self.validate_string_inbounds(src, src.len.idx);
2028        self.validate_string_inbounds(&dst, dst_byte_len.idx);
2029
2030        // Perform the initial latin1 transcode. This returns the number of
2031        // source code units consumed and the number of destination code
2032        // units (bytes) written.
2033        let (latin1, utf16) = match src_enc {
2034            FE::Utf8 => (Transcode::Utf8ToLatin1, Transcode::Utf8ToCompactUtf16),
2035            FE::Utf16 => (Transcode::Utf16ToLatin1, Transcode::Utf16ToCompactUtf16),
2036            FE::Latin1 => unreachable!(),
2037        };
2038        let transcode_latin1 = self.transcoder(src, &dst, latin1);
2039        let transcode_utf16 = self.transcoder(src, &dst, utf16);
2040        self.instruction(LocalGet(src.ptr.idx));
2041        self.instruction(LocalGet(src.len.idx));
2042        self.instruction(LocalGet(dst.ptr.idx));
2043        self.instruction(Call(transcode_latin1.as_u32()));
2044        self.instruction(LocalSet(dst.len.idx));
2045        let src_len_tmp = self.local_set_new_tmp(src.opts.ptr());
2046
2047        // If the source was entirely consumed then the transcode completed
2048        // and all that's necessary is to optionally shrink the buffer.
2049        self.instruction(LocalGet(src_len_tmp.idx));
2050        self.instruction(LocalGet(src.len.idx));
2051        self.ptr_eq(src.opts);
2052        self.instruction(If(BlockType::Empty)); // if latin1-or-utf16 block
2053
2054        // Test if the original byte length of the allocation is the same as
2055        // the number of written bytes, and if not then shrink the buffer
2056        // with a call to `realloc`.
2057        self.instruction(LocalGet(dst_byte_len.idx));
2058        self.instruction(LocalGet(dst.len.idx));
2059        self.ptr_ne(dst.opts);
2060        self.instruction(If(BlockType::Empty));
2061        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
2062        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
2063        self.ptr_uconst(dst.opts, 2); // align
2064        self.instruction(LocalGet(dst.len.idx)); // new_size
2065        self.instruction(Call(dst.opts.realloc.unwrap().as_u32()));
2066        self.instruction(LocalSet(dst.ptr.idx));
2067        self.instruction(End);
2068
2069        // In this block the latin1 encoding failed. The host transcode
2070        // returned how many units were consumed from the source and how
2071        // many bytes were written to the destination. Here the buffer is
2072        // inflated and sized and the second utf16 intrinsic is invoked to
2073        // perform the final inflation.
2074        self.instruction(Else); // else latin1-or-utf16 block
2075
2076        // For utf8 validate that the inflated size is still within bounds.
2077        if src_enc.width() == 1 {
2078            self.validate_string_length_u8(src, 2);
2079        }
2080
2081        // Reallocate the buffer with twice the source code units in byte
2082        // size.
2083        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
2084        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
2085        self.ptr_uconst(dst.opts, 2); // align
2086        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst.opts.ptr());
2087        self.ptr_uconst(dst.opts, 1);
2088        self.ptr_shl(dst.opts);
2089        self.instruction(LocalTee(dst_byte_len.idx));
2090        self.instruction(Call(dst.opts.realloc.unwrap().as_u32()));
2091        self.instruction(LocalSet(dst.ptr.idx));
2092
2093        // Call the host utf16 transcoding function. This will inflate the
2094        // prior latin1 bytes and then encode the rest of the source string
2095        // as utf16 into the remaining space in the destination buffer.
2096        self.instruction(LocalGet(src.ptr.idx));
2097        self.instruction(LocalGet(src_len_tmp.idx));
2098        if let FE::Utf16 = src_enc {
2099            self.ptr_uconst(src.opts, 1);
2100            self.ptr_shl(src.opts);
2101        }
2102        self.ptr_add(src.opts);
2103        self.instruction(LocalGet(src.len.idx));
2104        self.instruction(LocalGet(src_len_tmp.idx));
2105        self.ptr_sub(src.opts);
2106        self.instruction(LocalGet(dst.ptr.idx));
2107        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst.opts.ptr());
2108        self.instruction(LocalGet(dst.len.idx));
2109        self.instruction(Call(transcode_utf16.as_u32()));
2110        self.instruction(LocalSet(dst.len.idx));
2111
2112        // If the returned number of code units written to the destination
2113        // is not equal to the size of the allocation then the allocation is
2114        // resized down to the appropriate size.
2115        //
2116        // Note that the byte size desired is `2*dst_len` and the current
2117        // byte buffer size is `2*src_len` so the `2` factor isn't checked
2118        // here, just the lengths.
2119        self.instruction(LocalGet(dst.len.idx));
2120        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst.opts.ptr());
2121        self.ptr_ne(dst.opts);
2122        self.instruction(If(BlockType::Empty));
2123        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
2124        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
2125        self.ptr_uconst(dst.opts, 2); // align
2126        self.instruction(LocalGet(dst.len.idx));
2127        self.ptr_uconst(dst.opts, 1);
2128        self.ptr_shl(dst.opts);
2129        self.instruction(Call(dst.opts.realloc.unwrap().as_u32()));
2130        self.instruction(LocalSet(dst.ptr.idx));
2131        self.instruction(End);
2132
2133        // Tag the returned pointer as utf16
2134        self.instruction(LocalGet(dst.len.idx));
2135        self.ptr_uconst(dst.opts, UTF16_TAG);
2136        self.ptr_or(dst.opts);
2137        self.instruction(LocalSet(dst.len.idx));
2138
2139        self.instruction(End); // end latin1-or-utf16 block
2140
2141        self.free_temp_local(src_len_tmp);
2142        self.free_temp_local(dst_byte_len);
2143
2144        dst
2145    }
2146
2147    fn validate_string_length(&mut self, src: &WasmString<'_>, dst: FE) {
2148        self.validate_string_length_u8(src, dst.width())
2149    }
2150
2151    fn validate_string_length_u8(&mut self, s: &WasmString<'_>, dst: u8) {
2152        // Check to see if the source byte length is out of bounds in
2153        // which case a trap is generated.
2154        self.instruction(LocalGet(s.len.idx));
2155        let max = MAX_STRING_BYTE_LENGTH / u32::from(dst);
2156        self.ptr_uconst(s.opts, max);
2157        self.ptr_ge_u(s.opts);
2158        self.instruction(If(BlockType::Empty));
2159        self.trap(Trap::StringLengthTooBig);
2160        self.instruction(End);
2161    }
2162
2163    fn transcoder(
2164        &mut self,
2165        src: &WasmString<'_>,
2166        dst: &WasmString<'_>,
2167        op: Transcode,
2168    ) -> FuncIndex {
2169        self.module.import_transcoder(Transcoder {
2170            from_memory: src.opts.memory.unwrap(),
2171            from_memory64: src.opts.memory64,
2172            to_memory: dst.opts.memory.unwrap(),
2173            to_memory64: dst.opts.memory64,
2174            op,
2175        })
2176    }
2177
2178    fn validate_string_inbounds(&mut self, s: &WasmString<'_>, byte_len: u32) {
2179        self.validate_memory_inbounds(s.opts, s.ptr.idx, byte_len, Trap::StringLengthOverflow)
2180    }
2181
    /// Emits a bounds check verifying that the byte range
    /// `[ptr, ptr + byte_len)` lies entirely within the linear memory
    /// configured in `opts`, generating `trap` otherwise.
    ///
    /// `ptr_local` and `byte_len_local` are indices of locals, of `opts.ptr()`
    /// type, holding the base address and the byte length respectively.
    fn validate_memory_inbounds(
        &mut self,
        opts: &Options,
        ptr_local: u32,
        byte_len_local: u32,
        trap: Trap,
    ) {
        // Helper to widen 32-bit values to 64-bit since all arithmetic below
        // is performed at 64-bit width; a no-op for 64-bit memories.
        let extend_to_64 = |me: &mut Self| {
            if !opts.memory64 {
                me.instruction(I64ExtendI32U);
            }
        };

        // Two nested blocks: branching to depth 1 means "in bounds, skip the
        // trap" while falling out of (or branching to) depth 0 reaches the
        // trap emitted below.
        self.instruction(Block(BlockType::Empty));
        self.instruction(Block(BlockType::Empty));

        // Calculate the full byte size of memory with `memory.size`. Note that
        // arithmetic here is done always in 64-bits to accommodate 4G memories.
        // Additionally it's assumed that 64-bit memories never fill up
        // entirely.
        self.instruction(MemorySize(opts.memory.unwrap().as_u32()));
        extend_to_64(self);
        // Pages are 64 KiB, so shift left by 16 to get a byte count.
        self.instruction(I64Const(16));
        self.instruction(I64Shl);

        // Calculate the end address of the string. This is done by adding the
        // base pointer to the byte length. For 32-bit memories there's no need
        // to check for overflow since everything is extended to 64-bit, but for
        // 64-bit memories overflow is checked.
        self.instruction(LocalGet(ptr_local));
        extend_to_64(self);
        self.instruction(LocalGet(byte_len_local));
        extend_to_64(self);
        self.instruction(I64Add);
        if opts.memory64 {
            // If the 64-bit add wrapped then `end < ptr`; branch to depth 0
            // (exiting the inner block) to reach the trap in that case.
            let tmp = self.local_tee_new_tmp(ValType::I64);
            self.instruction(LocalGet(ptr_local));
            self.ptr_lt_u(opts);
            self.instruction(BrIf(0));
            self.instruction(LocalGet(tmp.idx));
            self.free_temp_local(tmp);
        }

        // If the byte size of memory is greater than the final address of the
        // string then the string is invalid. Note that if it's precisely equal
        // then that's ok.
        self.instruction(I64GeU);
        self.instruction(BrIf(1));

        self.instruction(End);
        self.trap(trap);
        self.instruction(End);
    }
2235
2236    fn translate_list(
2237        &mut self,
2238        src_ty: TypeListIndex,
2239        src: &Source<'_>,
2240        dst_ty: &InterfaceType,
2241        dst: &Destination,
2242    ) {
2243        let src_element_ty = &self.types[src_ty].element;
2244        let dst_element_ty = match dst_ty {
2245            InterfaceType::List(r) => &self.types[*r].element,
2246            _ => panic!("expected a list"),
2247        };
2248        let src_opts = src.opts();
2249        let dst_opts = dst.opts();
2250        let (src_size, src_align) = self.types.size_align(src_opts, src_element_ty);
2251        let (dst_size, dst_align) = self.types.size_align(dst_opts, dst_element_ty);
2252
2253        // Load the pointer/length of this list into temporary locals. These
2254        // will be referenced a good deal so this just makes it easier to deal
2255        // with them consistently below rather than trying to reload from memory
2256        // for example.
2257        match src {
2258            Source::Stack(s) => {
2259                assert_eq!(s.locals.len(), 2);
2260                self.stack_get(&s.slice(0..1), src_opts.ptr());
2261                self.stack_get(&s.slice(1..2), src_opts.ptr());
2262            }
2263            Source::Memory(mem) => {
2264                self.ptr_load(mem);
2265                self.ptr_load(&mem.bump(src_opts.ptr_size().into()));
2266            }
2267        }
2268        let src_len = self.local_set_new_tmp(src_opts.ptr());
2269        let src_ptr = self.local_set_new_tmp(src_opts.ptr());
2270
2271        // Create a `Memory` operand which will internally assert that the
2272        // `src_ptr` value is properly aligned.
2273        let src_mem = self.memory_operand(src_opts, src_ptr, src_align);
2274
2275        // Calculate the source/destination byte lengths into unique locals.
2276        let src_byte_len = self.calculate_list_byte_len(src_opts, src_len.idx, src_size);
2277        let dst_byte_len = if src_size == dst_size {
2278            self.convert_src_len_to_dst(src_byte_len.idx, src_opts.ptr(), dst_opts.ptr());
2279            self.local_set_new_tmp(dst_opts.ptr())
2280        } else if src_opts.ptr() == dst_opts.ptr() {
2281            self.calculate_list_byte_len(dst_opts, src_len.idx, dst_size)
2282        } else {
2283            self.convert_src_len_to_dst(src_byte_len.idx, src_opts.ptr(), dst_opts.ptr());
2284            let tmp = self.local_set_new_tmp(dst_opts.ptr());
2285            let ret = self.calculate_list_byte_len(dst_opts, tmp.idx, dst_size);
2286            self.free_temp_local(tmp);
2287            ret
2288        };
2289
2290        // Here `realloc` is invoked (in a `malloc`-like fashion) to allocate
2291        // space for the list in the destination memory. This will also
2292        // internally insert checks that the returned pointer is aligned
2293        // correctly for the destination.
2294        let dst_mem = self.malloc(dst_opts, MallocSize::Local(dst_byte_len.idx), dst_align);
2295
2296        // With all the pointers and byte lengths verity that both the source
2297        // and the destination buffers are in-bounds.
2298        self.validate_memory_inbounds(
2299            src_opts,
2300            src_mem.addr.idx,
2301            src_byte_len.idx,
2302            Trap::ListByteLengthOverflow,
2303        );
2304        self.validate_memory_inbounds(
2305            dst_opts,
2306            dst_mem.addr.idx,
2307            dst_byte_len.idx,
2308            Trap::ListByteLengthOverflow,
2309        );
2310
2311        self.free_temp_local(src_byte_len);
2312        self.free_temp_local(dst_byte_len);
2313
2314        // This is the main body of the loop to actually translate list types.
2315        // Note that if both element sizes are 0 then this won't actually do
2316        // anything so the loop is removed entirely.
2317        if src_size > 0 || dst_size > 0 {
2318            // This block encompasses the entire loop and is use to exit before even
2319            // entering the loop if the list size is zero.
2320            self.instruction(Block(BlockType::Empty));
2321
2322            // Set the `remaining` local and only continue if it's > 0
2323            self.instruction(LocalGet(src_len.idx));
2324            let remaining = self.local_tee_new_tmp(src_opts.ptr());
2325            self.ptr_eqz(src_opts);
2326            self.instruction(BrIf(0));
2327
2328            // Initialize the two destination pointers to their initial values
2329            self.instruction(LocalGet(src_mem.addr.idx));
2330            let cur_src_ptr = self.local_set_new_tmp(src_opts.ptr());
2331            self.instruction(LocalGet(dst_mem.addr.idx));
2332            let cur_dst_ptr = self.local_set_new_tmp(dst_opts.ptr());
2333
2334            self.instruction(Loop(BlockType::Empty));
2335
2336            // Translate the next element in the list
2337            let element_src = Source::Memory(Memory {
2338                opts: src_opts,
2339                offset: 0,
2340                addr: TempLocal::new(cur_src_ptr.idx, cur_src_ptr.ty),
2341            });
2342            let element_dst = Destination::Memory(Memory {
2343                opts: dst_opts,
2344                offset: 0,
2345                addr: TempLocal::new(cur_dst_ptr.idx, cur_dst_ptr.ty),
2346            });
2347            self.translate(src_element_ty, &element_src, dst_element_ty, &element_dst);
2348
2349            // Update the two loop pointers
2350            if src_size > 0 {
2351                self.instruction(LocalGet(cur_src_ptr.idx));
2352                self.ptr_uconst(src_opts, src_size);
2353                self.ptr_add(src_opts);
2354                self.instruction(LocalSet(cur_src_ptr.idx));
2355            }
2356            if dst_size > 0 {
2357                self.instruction(LocalGet(cur_dst_ptr.idx));
2358                self.ptr_uconst(dst_opts, dst_size);
2359                self.ptr_add(dst_opts);
2360                self.instruction(LocalSet(cur_dst_ptr.idx));
2361            }
2362
2363            // Update the remaining count, falling through to break out if it's zero
2364            // now.
2365            self.instruction(LocalGet(remaining.idx));
2366            self.ptr_iconst(src_opts, -1);
2367            self.ptr_add(src_opts);
2368            self.instruction(LocalTee(remaining.idx));
2369            self.ptr_br_if(src_opts, 0);
2370            self.instruction(End); // end of loop
2371            self.instruction(End); // end of block
2372
2373            self.free_temp_local(cur_dst_ptr);
2374            self.free_temp_local(cur_src_ptr);
2375            self.free_temp_local(remaining);
2376        }
2377
2378        // Store the ptr/length in the desired destination
2379        match dst {
2380            Destination::Stack(s, _) => {
2381                self.instruction(LocalGet(dst_mem.addr.idx));
2382                self.stack_set(&s[..1], dst_opts.ptr());
2383                self.convert_src_len_to_dst(src_len.idx, src_opts.ptr(), dst_opts.ptr());
2384                self.stack_set(&s[1..], dst_opts.ptr());
2385            }
2386            Destination::Memory(mem) => {
2387                self.instruction(LocalGet(mem.addr.idx));
2388                self.instruction(LocalGet(dst_mem.addr.idx));
2389                self.ptr_store(mem);
2390                self.instruction(LocalGet(mem.addr.idx));
2391                self.convert_src_len_to_dst(src_len.idx, src_opts.ptr(), dst_opts.ptr());
2392                self.ptr_store(&mem.bump(dst_opts.ptr_size().into()));
2393            }
2394        }
2395
2396        self.free_temp_local(src_len);
2397        self.free_temp_local(src_mem.addr);
2398        self.free_temp_local(dst_mem.addr);
2399    }
2400
    /// Computes the byte length of a list (`len * elt_size`) into a fresh
    /// temporary local of `opts.ptr()` type, trapping with
    /// `ListByteLengthOverflow` when the result does not fit in 32 bits.
    ///
    /// `len_local` is the index of a local, of `opts.ptr()` type, holding the
    /// element count.
    fn calculate_list_byte_len(
        &mut self,
        opts: &Options,
        len_local: u32,
        elt_size: u32,
    ) -> TempLocal {
        // Zero-size types are easy to handle here because the byte size of the
        // destination is always zero.
        if elt_size == 0 {
            self.ptr_uconst(opts, 0);
            return self.local_set_new_tmp(opts.ptr());
        }

        // For one-byte elements in the destination the check here can be a bit
        // more optimal than the general case below. In these situations if the
        // source pointer type is 32-bit then we're guaranteed to not overflow,
        // so the source length is simply casted to the destination's type.
        //
        // If the source is 64-bit then all that needs to be checked is to
        // ensure that it does not have the upper 32-bits set.
        if elt_size == 1 {
            if let ValType::I64 = opts.ptr() {
                self.instruction(LocalGet(len_local));
                self.instruction(I64Const(32));
                self.instruction(I64ShrU);
                self.instruction(I32WrapI64);
                // A nonzero upper half means the byte length exceeds 32 bits.
                self.instruction(If(BlockType::Empty));
                self.trap(Trap::ListByteLengthOverflow);
                self.instruction(End);
            }
            self.instruction(LocalGet(len_local));
            return self.local_set_new_tmp(opts.ptr());
        }

        // The main check implemented by this function is to verify that
        // `src_len_local` does not exceed the 32-bit range. Byte sizes for
        // lists must always fit in 32-bits to get transferred to 32-bit
        // memories.
        //
        // Two nested blocks: branching to depth 1 skips the trap below while
        // falling out of depth 0 reaches it.
        self.instruction(Block(BlockType::Empty));
        self.instruction(Block(BlockType::Empty));
        self.instruction(LocalGet(len_local));
        match opts.ptr() {
            // The source's list length is guaranteed to be less than 32-bits
            // so simply extend it up to a 64-bit type for the multiplication
            // below.
            ValType::I32 => self.instruction(I64ExtendI32U),

            // If the source is a 64-bit memory then if the item length doesn't
            // fit in 32-bits the byte length definitely won't, so generate a
            // branch to our overflow trap here if any of the upper 32-bits are set.
            ValType::I64 => {
                self.instruction(I64Const(32));
                self.instruction(I64ShrU);
                self.instruction(I32WrapI64);
                self.instruction(BrIf(0));
                self.instruction(LocalGet(len_local));
            }

            _ => unreachable!(),
        }

        // Next perform a 64-bit multiplication with the element byte size that
        // is itself guaranteed to fit in 32-bits. The result is then checked
        // to see if we overflowed the 32-bit space. The two input operands to
        // the multiplication are guaranteed to be 32-bits at most which means
        // that this multiplication shouldn't overflow.
        //
        // The result of the multiplication is saved into a local as well to
        // get the result afterwards.
        self.instruction(I64Const(elt_size.into()));
        self.instruction(I64Mul);
        let tmp = self.local_tee_new_tmp(ValType::I64);
        // Branch to success if the upper 32-bits are zero, otherwise
        // fall-through to the trap.
        self.instruction(I64Const(32));
        self.instruction(I64ShrU);
        self.instruction(I64Eqz);
        self.instruction(BrIf(1));
        self.instruction(End);
        self.trap(Trap::ListByteLengthOverflow);
        self.instruction(End);

        // If a fresh local was used to store the result of the multiplication
        // then convert it down to 32-bits which should be guaranteed to not
        // lose information at this point.
        if opts.ptr() == ValType::I64 {
            tmp
        } else {
            self.instruction(LocalGet(tmp.idx));
            self.instruction(I32WrapI64);
            self.free_temp_local(tmp);
            self.local_set_new_tmp(ValType::I32)
        }
    }
2495
2496    fn convert_src_len_to_dst(
2497        &mut self,
2498        src_len_local: u32,
2499        src_ptr_ty: ValType,
2500        dst_ptr_ty: ValType,
2501    ) {
2502        self.instruction(LocalGet(src_len_local));
2503        match (src_ptr_ty, dst_ptr_ty) {
2504            (ValType::I32, ValType::I64) => self.instruction(I64ExtendI32U),
2505            (ValType::I64, ValType::I32) => self.instruction(I32WrapI64),
2506            (src, dst) => assert_eq!(src, dst),
2507        }
2508    }
2509
2510    fn translate_record(
2511        &mut self,
2512        src_ty: TypeRecordIndex,
2513        src: &Source<'_>,
2514        dst_ty: &InterfaceType,
2515        dst: &Destination,
2516    ) {
2517        let src_ty = &self.types[src_ty];
2518        let dst_ty = match dst_ty {
2519            InterfaceType::Record(r) => &self.types[*r],
2520            _ => panic!("expected a record"),
2521        };
2522
2523        // TODO: subtyping
2524        assert_eq!(src_ty.fields.len(), dst_ty.fields.len());
2525
2526        // First a map is made of the source fields to where they're coming
2527        // from (e.g. which offset or which locals). This map is keyed by the
2528        // fields' names
2529        let mut src_fields = HashMap::new();
2530        for (i, src) in src
2531            .record_field_srcs(self.types, src_ty.fields.iter().map(|f| f.ty))
2532            .enumerate()
2533        {
2534            let field = &src_ty.fields[i];
2535            src_fields.insert(&field.name, (src, &field.ty));
2536        }
2537
2538        // .. and next translation is performed in the order of the destination
2539        // fields in case the destination is the stack to ensure that the stack
2540        // has the fields all in the right order.
2541        //
2542        // Note that the lookup in `src_fields` is an infallible lookup which
2543        // will panic if the field isn't found.
2544        //
2545        // TODO: should that lookup be fallible with subtyping?
2546        for (i, dst) in dst
2547            .record_field_dsts(self.types, dst_ty.fields.iter().map(|f| f.ty))
2548            .enumerate()
2549        {
2550            let field = &dst_ty.fields[i];
2551            let (src, src_ty) = &src_fields[&field.name];
2552            self.translate(src_ty, src, &field.ty, &dst);
2553        }
2554    }
2555
2556    fn translate_flags(
2557        &mut self,
2558        src_ty: TypeFlagsIndex,
2559        src: &Source<'_>,
2560        dst_ty: &InterfaceType,
2561        dst: &Destination,
2562    ) {
2563        let src_ty = &self.types[src_ty];
2564        let dst_ty = match dst_ty {
2565            InterfaceType::Flags(r) => &self.types[*r],
2566            _ => panic!("expected a record"),
2567        };
2568
2569        // TODO: subtyping
2570        //
2571        // Notably this implementation does not support reordering flags from
2572        // the source to the destination nor having more flags in the
2573        // destination. Currently this is a copy from source to destination
2574        // in-bulk. Otherwise reordering indices would have to have some sort of
2575        // fancy bit twiddling tricks or something like that.
2576        assert_eq!(src_ty.names, dst_ty.names);
2577        let cnt = src_ty.names.len();
2578        match FlagsSize::from_count(cnt) {
2579            FlagsSize::Size0 => {}
2580            FlagsSize::Size1 => {
2581                let mask = if cnt == 8 { 0xff } else { (1 << cnt) - 1 };
2582                self.convert_u8_mask(src, dst, mask);
2583            }
2584            FlagsSize::Size2 => {
2585                let mask = if cnt == 16 { 0xffff } else { (1 << cnt) - 1 };
2586                self.convert_u16_mask(src, dst, mask);
2587            }
2588            FlagsSize::Size4Plus(n) => {
2589                let srcs = src.record_field_srcs(self.types, (0..n).map(|_| InterfaceType::U32));
2590                let dsts = dst.record_field_dsts(self.types, (0..n).map(|_| InterfaceType::U32));
2591                let n = usize::from(n);
2592                for (i, (src, dst)) in srcs.zip(dsts).enumerate() {
2593                    let mask = if i == n - 1 && (cnt % 32 != 0) {
2594                        (1 << (cnt % 32)) - 1
2595                    } else {
2596                        0xffffffff
2597                    };
2598                    self.convert_u32_mask(&src, &dst, mask);
2599                }
2600            }
2601        }
2602    }
2603
2604    fn translate_tuple(
2605        &mut self,
2606        src_ty: TypeTupleIndex,
2607        src: &Source<'_>,
2608        dst_ty: &InterfaceType,
2609        dst: &Destination,
2610    ) {
2611        let src_ty = &self.types[src_ty];
2612        let dst_ty = match dst_ty {
2613            InterfaceType::Tuple(t) => &self.types[*t],
2614            _ => panic!("expected a tuple"),
2615        };
2616
2617        // TODO: subtyping
2618        assert_eq!(src_ty.types.len(), dst_ty.types.len());
2619
2620        let srcs = src
2621            .record_field_srcs(self.types, src_ty.types.iter().copied())
2622            .zip(src_ty.types.iter());
2623        let dsts = dst
2624            .record_field_dsts(self.types, dst_ty.types.iter().copied())
2625            .zip(dst_ty.types.iter());
2626        for ((src, src_ty), (dst, dst_ty)) in srcs.zip(dsts) {
2627            self.translate(src_ty, &src, dst_ty, &dst);
2628        }
2629    }
2630
2631    fn translate_variant(
2632        &mut self,
2633        src_ty: TypeVariantIndex,
2634        src: &Source<'_>,
2635        dst_ty: &InterfaceType,
2636        dst: &Destination,
2637    ) {
2638        let src_ty = &self.types[src_ty];
2639        let dst_ty = match dst_ty {
2640            InterfaceType::Variant(t) => &self.types[*t],
2641            _ => panic!("expected a variant"),
2642        };
2643
2644        let src_info = variant_info(self.types, src_ty.cases.iter().map(|(_, c)| c.as_ref()));
2645        let dst_info = variant_info(self.types, dst_ty.cases.iter().map(|(_, c)| c.as_ref()));
2646
2647        let iter = src_ty
2648            .cases
2649            .iter()
2650            .enumerate()
2651            .map(|(src_i, (src_case, src_case_ty))| {
2652                let dst_i = dst_ty
2653                    .cases
2654                    .iter()
2655                    .position(|(c, _)| c == src_case)
2656                    .unwrap();
2657                let dst_case_ty = &dst_ty.cases[dst_i];
2658                let src_i = u32::try_from(src_i).unwrap();
2659                let dst_i = u32::try_from(dst_i).unwrap();
2660                VariantCase {
2661                    src_i,
2662                    src_ty: src_case_ty.as_ref(),
2663                    dst_i,
2664                    dst_ty: dst_case_ty.as_ref(),
2665                }
2666            });
2667        self.convert_variant(src, &src_info, dst, &dst_info, iter);
2668    }
2669
2670    fn translate_enum(
2671        &mut self,
2672        src_ty: TypeEnumIndex,
2673        src: &Source<'_>,
2674        dst_ty: &InterfaceType,
2675        dst: &Destination,
2676    ) {
2677        let src_ty = &self.types[src_ty];
2678        let dst_ty = match dst_ty {
2679            InterfaceType::Enum(t) => &self.types[*t],
2680            _ => panic!("expected an option"),
2681        };
2682        let src_info = variant_info(self.types, src_ty.names.iter().map(|_| None));
2683        let dst_info = variant_info(self.types, dst_ty.names.iter().map(|_| None));
2684
2685        self.convert_variant(
2686            src,
2687            &src_info,
2688            dst,
2689            &dst_info,
2690            src_ty.names.iter().enumerate().map(|(src_i, src_name)| {
2691                let dst_i = dst_ty.names.iter().position(|n| n == src_name).unwrap();
2692                let src_i = u32::try_from(src_i).unwrap();
2693                let dst_i = u32::try_from(dst_i).unwrap();
2694                VariantCase {
2695                    src_i,
2696                    dst_i,
2697                    src_ty: None,
2698                    dst_ty: None,
2699                }
2700            }),
2701        );
2702    }
2703
2704    fn translate_option(
2705        &mut self,
2706        src_ty: TypeOptionIndex,
2707        src: &Source<'_>,
2708        dst_ty: &InterfaceType,
2709        dst: &Destination,
2710    ) {
2711        let src_ty = &self.types[src_ty].ty;
2712        let dst_ty = match dst_ty {
2713            InterfaceType::Option(t) => &self.types[*t].ty,
2714            _ => panic!("expected an option"),
2715        };
2716        let src_ty = Some(src_ty);
2717        let dst_ty = Some(dst_ty);
2718
2719        let src_info = variant_info(self.types, [None, src_ty]);
2720        let dst_info = variant_info(self.types, [None, dst_ty]);
2721
2722        self.convert_variant(
2723            src,
2724            &src_info,
2725            dst,
2726            &dst_info,
2727            [
2728                VariantCase {
2729                    src_i: 0,
2730                    dst_i: 0,
2731                    src_ty: None,
2732                    dst_ty: None,
2733                },
2734                VariantCase {
2735                    src_i: 1,
2736                    dst_i: 1,
2737                    src_ty,
2738                    dst_ty,
2739                },
2740            ]
2741            .into_iter(),
2742        );
2743    }
2744
2745    fn translate_result(
2746        &mut self,
2747        src_ty: TypeResultIndex,
2748        src: &Source<'_>,
2749        dst_ty: &InterfaceType,
2750        dst: &Destination,
2751    ) {
2752        let src_ty = &self.types[src_ty];
2753        let dst_ty = match dst_ty {
2754            InterfaceType::Result(t) => &self.types[*t],
2755            _ => panic!("expected a result"),
2756        };
2757
2758        let src_info = variant_info(self.types, [src_ty.ok.as_ref(), src_ty.err.as_ref()]);
2759        let dst_info = variant_info(self.types, [dst_ty.ok.as_ref(), dst_ty.err.as_ref()]);
2760
2761        self.convert_variant(
2762            src,
2763            &src_info,
2764            dst,
2765            &dst_info,
2766            [
2767                VariantCase {
2768                    src_i: 0,
2769                    dst_i: 0,
2770                    src_ty: src_ty.ok.as_ref(),
2771                    dst_ty: dst_ty.ok.as_ref(),
2772                },
2773                VariantCase {
2774                    src_i: 1,
2775                    dst_i: 1,
2776                    src_ty: src_ty.err.as_ref(),
2777                    dst_ty: dst_ty.err.as_ref(),
2778                },
2779            ]
2780            .into_iter(),
2781        );
2782    }
2783
2784    fn convert_variant<'c>(
2785        &mut self,
2786        src: &Source<'_>,
2787        src_info: &VariantInfo,
2788        dst: &Destination,
2789        dst_info: &VariantInfo,
2790        src_cases: impl ExactSizeIterator<Item = VariantCase<'c>>,
2791    ) {
2792        // The outermost block is special since it has the result type of the
2793        // translation here. That will depend on the `dst`.
2794        let outer_block_ty = match dst {
2795            Destination::Stack(dst_flat, _) => match dst_flat.len() {
2796                0 => BlockType::Empty,
2797                1 => BlockType::Result(dst_flat[0]),
2798                _ => {
2799                    let ty = self.module.core_types.function(&[], &dst_flat);
2800                    BlockType::FunctionType(ty)
2801                }
2802            },
2803            Destination::Memory(_) => BlockType::Empty,
2804        };
2805        self.instruction(Block(outer_block_ty));
2806
2807        // After the outermost block generate a new block for each of the
2808        // remaining cases.
2809        let src_cases_len = src_cases.len();
2810        for _ in 0..src_cases_len - 1 {
2811            self.instruction(Block(BlockType::Empty));
2812        }
2813
2814        // Generate a block for an invalid variant discriminant
2815        self.instruction(Block(BlockType::Empty));
2816
2817        // And generate one final block that we'll be jumping out of with the
2818        // `br_table`
2819        self.instruction(Block(BlockType::Empty));
2820
2821        // Load the discriminant
2822        match src {
2823            Source::Stack(s) => self.stack_get(&s.slice(0..1), ValType::I32),
2824            Source::Memory(mem) => match src_info.size {
2825                DiscriminantSize::Size1 => self.i32_load8u(mem),
2826                DiscriminantSize::Size2 => self.i32_load16u(mem),
2827                DiscriminantSize::Size4 => self.i32_load(mem),
2828            },
2829        }
2830
2831        // Generate the `br_table` for the discriminant. Each case has an
2832        // offset of 1 to skip the trapping block.
2833        let mut targets = Vec::new();
2834        for i in 0..src_cases_len {
2835            targets.push((i + 1) as u32);
2836        }
2837        self.instruction(BrTable(targets[..].into(), 0));
2838        self.instruction(End); // end the `br_table` block
2839
2840        self.trap(Trap::InvalidDiscriminant);
2841        self.instruction(End); // end the "invalid discriminant" block
2842
2843        // Translate each case individually within its own block. Note that the
2844        // iteration order here places the first case in the innermost block
2845        // and the last case in the outermost block. This matches the order
2846        // of the jump targets in the `br_table` instruction.
2847        let src_cases_len = u32::try_from(src_cases_len).unwrap();
2848        for case in src_cases {
2849            let VariantCase {
2850                src_i,
2851                src_ty,
2852                dst_i,
2853                dst_ty,
2854            } = case;
2855
2856            // Translate the discriminant here, noting that `dst_i` may be
2857            // different than `src_i`.
2858            self.push_dst_addr(dst);
2859            self.instruction(I32Const(dst_i as i32));
2860            match dst {
2861                Destination::Stack(stack, _) => self.stack_set(&stack[..1], ValType::I32),
2862                Destination::Memory(mem) => match dst_info.size {
2863                    DiscriminantSize::Size1 => self.i32_store8(mem),
2864                    DiscriminantSize::Size2 => self.i32_store16(mem),
2865                    DiscriminantSize::Size4 => self.i32_store(mem),
2866                },
2867            }
2868
2869            let src_payload = src.payload_src(self.types, src_info, src_ty);
2870            let dst_payload = dst.payload_dst(self.types, dst_info, dst_ty);
2871
2872            // Translate the payload of this case using the various types from
2873            // the dst/src.
2874            match (src_ty, dst_ty) {
2875                (Some(src_ty), Some(dst_ty)) => {
2876                    self.translate(src_ty, &src_payload, dst_ty, &dst_payload);
2877                }
2878                (None, None) => {}
2879                _ => unimplemented!(),
2880            }
2881
2882            // If the results of this translation were placed on the stack then
2883            // the stack values may need to be padded with more zeros due to
2884            // this particular case being possibly smaller than the entire
2885            // variant. That's handled here by pushing remaining zeros after
2886            // accounting for the discriminant pushed as well as the results of
2887            // this individual payload.
2888            if let Destination::Stack(payload_results, _) = dst_payload {
2889                if let Destination::Stack(dst_results, _) = dst {
2890                    let remaining = &dst_results[1..][payload_results.len()..];
2891                    for ty in remaining {
2892                        match ty {
2893                            ValType::I32 => self.instruction(I32Const(0)),
2894                            ValType::I64 => self.instruction(I64Const(0)),
2895                            ValType::F32 => self.instruction(F32Const(0.0)),
2896                            ValType::F64 => self.instruction(F64Const(0.0)),
2897                            _ => unreachable!(),
2898                        }
2899                    }
2900                }
2901            }
2902
2903            // Branch to the outermost block. Note that this isn't needed for
2904            // the outermost case since it simply falls through.
2905            if src_i != src_cases_len - 1 {
2906                self.instruction(Br(src_cases_len - src_i - 1));
2907            }
2908            self.instruction(End); // end this case's block
2909        }
2910    }
2911
2912    fn translate_future(
2913        &mut self,
2914        src_ty: TypeFutureTableIndex,
2915        src: &Source<'_>,
2916        dst_ty: &InterfaceType,
2917        dst: &Destination,
2918    ) {
2919        let dst_ty = match dst_ty {
2920            InterfaceType::Future(t) => *t,
2921            _ => panic!("expected a `Future`"),
2922        };
2923        let transfer = self.module.import_future_transfer();
2924        self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer);
2925    }
2926
2927    fn translate_stream(
2928        &mut self,
2929        src_ty: TypeStreamTableIndex,
2930        src: &Source<'_>,
2931        dst_ty: &InterfaceType,
2932        dst: &Destination,
2933    ) {
2934        let dst_ty = match dst_ty {
2935            InterfaceType::Stream(t) => *t,
2936            _ => panic!("expected a `Stream`"),
2937        };
2938        let transfer = self.module.import_stream_transfer();
2939        self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer);
2940    }
2941
2942    fn translate_error_context(
2943        &mut self,
2944        src_ty: TypeComponentLocalErrorContextTableIndex,
2945        src: &Source<'_>,
2946        dst_ty: &InterfaceType,
2947        dst: &Destination,
2948    ) {
2949        let dst_ty = match dst_ty {
2950            InterfaceType::ErrorContext(t) => *t,
2951            _ => panic!("expected an `ErrorContext`"),
2952        };
2953        let transfer = self.module.import_error_context_transfer();
2954        self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer);
2955    }
2956
2957    fn translate_own(
2958        &mut self,
2959        src_ty: TypeResourceTableIndex,
2960        src: &Source<'_>,
2961        dst_ty: &InterfaceType,
2962        dst: &Destination,
2963    ) {
2964        let dst_ty = match dst_ty {
2965            InterfaceType::Own(t) => *t,
2966            _ => panic!("expected an `Own`"),
2967        };
2968        let transfer = self.module.import_resource_transfer_own();
2969        self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer);
2970    }
2971
2972    fn translate_borrow(
2973        &mut self,
2974        src_ty: TypeResourceTableIndex,
2975        src: &Source<'_>,
2976        dst_ty: &InterfaceType,
2977        dst: &Destination,
2978    ) {
2979        let dst_ty = match dst_ty {
2980            InterfaceType::Borrow(t) => *t,
2981            _ => panic!("expected an `Borrow`"),
2982        };
2983
2984        let transfer = self.module.import_resource_transfer_borrow();
2985        self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer);
2986    }
2987
    /// Translates the index `src`, which resides in the table `src_ty`, into
    /// an index within `dst_ty` and is stored at `dst`.
    ///
    /// Actual translation of the index happens in a wasmtime libcall, which a
    /// cranelift-generated trampoline to satisfy this import will call. The
    /// `transfer` function is an imported function which takes the src, src_ty,
    /// and dst_ty, and returns the dst index.
    fn translate_handle(
        &mut self,
        src_ty: u32,
        src: &Source<'_>,
        dst_ty: u32,
        dst: &Destination,
        transfer: FuncIndex,
    ) {
        // If `dst` is memory, its base address must be on the stack before the
        // value to store (wasm store operand order).
        self.push_dst_addr(dst);
        // Push the three libcall arguments: handle index, src table, dst table.
        match src {
            Source::Memory(mem) => self.i32_load(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::I32),
        }
        self.instruction(I32Const(src_ty as i32));
        self.instruction(I32Const(dst_ty as i32));
        self.instruction(Call(transfer.as_u32()));
        // The libcall's i32 result is the translated index; store it at `dst`.
        match dst {
            Destination::Memory(mem) => self.i32_store(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
        }
    }
3016
    /// Emits code which traps with `trap` if the bit `flag_to_test` is *not*
    /// set in the instance-flags global `flags_global`.
    fn trap_if_not_flag(&mut self, flags_global: GlobalIndex, flag_to_test: i32, trap: Trap) {
        // Compute `(flags & flag_to_test) == 0` ...
        self.instruction(GlobalGet(flags_global.as_u32()));
        self.instruction(I32Const(flag_to_test));
        self.instruction(I32And);
        self.instruction(I32Eqz);
        // ... and trap when the flag was clear.
        self.instruction(If(BlockType::Empty));
        self.trap(trap);
        self.instruction(End);
    }
3026
    /// Emits a debug-style assertion that the bit `flag_to_test` is *not* set
    /// in `flags_global`, trapping with `AssertFailed(msg)` if it is.
    fn assert_not_flag(&mut self, flags_global: GlobalIndex, flag_to_test: i32, msg: &'static str) {
        self.instruction(GlobalGet(flags_global.as_u32()));
        self.instruction(I32Const(flag_to_test));
        self.instruction(I32And);
        // Nonzero means the flag is set, which is the failure case here.
        self.instruction(If(BlockType::Empty));
        self.trap(Trap::AssertFailed(msg));
        self.instruction(End);
    }
3035
    /// Emits code which sets (`value == true`) or clears (`value == false`)
    /// the bit `flag_to_set` in the instance-flags global `flags_global`.
    fn set_flag(&mut self, flags_global: GlobalIndex, flag_to_set: i32, value: bool) {
        self.instruction(GlobalGet(flags_global.as_u32()));
        if value {
            // flags |= flag_to_set
            self.instruction(I32Const(flag_to_set));
            self.instruction(I32Or);
        } else {
            // flags &= !flag_to_set
            self.instruction(I32Const(!flag_to_set));
            self.instruction(I32And);
        }
        self.instruction(GlobalSet(flags_global.as_u32()));
    }
3047
    /// Emits a runtime check that the pointer in `addr_local` is aligned to
    /// `align` bytes, trapping with `UnalignedPointer` otherwise.
    ///
    /// `align` must be a power of two.
    fn verify_aligned(&mut self, opts: &Options, addr_local: u32, align: u32) {
        // If the alignment is 1 then everything is trivially aligned and the
        // check can be omitted.
        if align == 1 {
            return;
        }
        self.instruction(LocalGet(addr_local));
        assert!(align.is_power_of_two());
        // `addr & (align - 1)` is nonzero iff the address is misaligned.
        self.ptr_uconst(opts, align - 1);
        self.ptr_and(opts);
        self.ptr_if(opts, BlockType::Empty);
        self.trap(Trap::UnalignedPointer);
        self.instruction(End);
    }
3062
    /// Debug-build-only assertion that `mem`'s effective address
    /// (`addr + offset`) is aligned for values of type `ty`.
    ///
    /// This is a no-op unless the module was built with `debug` enabled.
    fn assert_aligned(&mut self, ty: &InterfaceType, mem: &Memory) {
        if !self.module.debug {
            return;
        }
        let align = self.types.align(mem.opts, ty);
        // Alignment of 1 is trivially satisfied.
        if align == 1 {
            return;
        }
        assert!(align.is_power_of_two());
        // Compute `(addr + offset) & (align - 1)` and trap if nonzero.
        self.instruction(LocalGet(mem.addr.idx));
        self.ptr_uconst(mem.opts, mem.offset);
        self.ptr_add(mem.opts);
        self.ptr_uconst(mem.opts, align - 1);
        self.ptr_and(mem.opts);
        self.ptr_if(mem.opts, BlockType::Empty);
        self.trap(Trap::AssertFailed("pointer not aligned"));
        self.instruction(End);
    }
3081
    /// Emits a call to the configured `realloc` function to allocate `size`
    /// bytes with alignment `align`, returning a `Memory` operand for the
    /// freshly allocated (and alignment-checked) region.
    ///
    /// Panics if `opts` has no `realloc` configured.
    fn malloc<'c>(&mut self, opts: &'c Options, size: MallocSize, align: u32) -> Memory<'c> {
        let realloc = opts.realloc.unwrap();
        // Canonical-ABI realloc signature: (old_ptr=0, old_size=0, align, size)
        self.ptr_uconst(opts, 0);
        self.ptr_uconst(opts, 0);
        self.ptr_uconst(opts, align);
        match size {
            MallocSize::Const(size) => self.ptr_uconst(opts, size),
            MallocSize::Local(idx) => self.instruction(LocalGet(idx)),
        }
        self.instruction(Call(realloc.as_u32()));
        // Save the returned pointer into a temporary local and verify it.
        let addr = self.local_set_new_tmp(opts.ptr());
        self.memory_operand(opts, addr, align)
    }
3095
3096    fn memory_operand<'c>(&mut self, opts: &'c Options, addr: TempLocal, align: u32) -> Memory<'c> {
3097        let ret = Memory {
3098            addr,
3099            offset: 0,
3100            opts,
3101        };
3102        self.verify_aligned(opts, ret.addr.idx, align);
3103        ret
3104    }
3105
    /// Generates a new local in this function of the `ty` specified,
    /// initializing it with the top value on the current wasm stack.
    ///
    /// Uses `local.tee`, so the value also remains on the wasm stack.
    ///
    /// The returned `TempLocal` must be freed after it is finished with
    /// `free_temp_local`.
    fn local_tee_new_tmp(&mut self, ty: ValType) -> TempLocal {
        self.gen_temp_local(ty, LocalTee)
    }
3114
    /// Same as `local_tee_new_tmp` but initializes the local with `LocalSet`
    /// instead of `LocalTee` (the value is popped off the wasm stack).
    fn local_set_new_tmp(&mut self, ty: ValType) -> TempLocal {
        self.gen_temp_local(ty, LocalSet)
    }
3120
3121    fn gen_temp_local(&mut self, ty: ValType, insn: fn(u32) -> Instruction<'static>) -> TempLocal {
3122        // First check to see if any locals are available in this function which
3123        // were previously generated but are no longer in use.
3124        if let Some(idx) = self.free_locals.get_mut(&ty).and_then(|v| v.pop()) {
3125            self.instruction(insn(idx));
3126            return TempLocal {
3127                ty,
3128                idx,
3129                needs_free: true,
3130            };
3131        }
3132
3133        // Failing that generate a fresh new local.
3134        let locals = &mut self.module.funcs[self.result].locals;
3135        match locals.last_mut() {
3136            Some((cnt, prev_ty)) if ty == *prev_ty => *cnt += 1,
3137            _ => locals.push((1, ty)),
3138        }
3139        self.nlocals += 1;
3140        let idx = self.nlocals - 1;
3141        self.instruction(insn(idx));
3142        TempLocal {
3143            ty,
3144            idx,
3145            needs_free: true,
3146        }
3147    }
3148
3149    /// Used to release a `TempLocal` from a particular lexical scope to allow
3150    /// its possible reuse in later scopes.
3151    fn free_temp_local(&mut self, mut local: TempLocal) {
3152        assert!(local.needs_free);
3153        self.free_locals
3154            .entry(local.ty)
3155            .or_insert(Vec::new())
3156            .push(local.idx);
3157        local.needs_free = false;
3158    }
3159
    /// Encodes a single instruction into this function's in-progress `code`
    /// buffer.
    fn instruction(&mut self, instr: Instruction) {
        instr.encode(&mut self.code);
    }
3163
    /// Emits an `unreachable` instruction, recording `trap` at the current
    /// code offset so the trap reason can be reported later.
    fn trap(&mut self, trap: Trap) {
        self.traps.push((self.code.len(), trap));
        self.instruction(Unreachable);
    }
3168
3169    /// Flushes out the current `code` instructions (and `traps` if there are
3170    /// any) into the destination function.
3171    ///
3172    /// This is a noop if no instructions have been encoded yet.
3173    fn flush_code(&mut self) {
3174        if self.code.is_empty() {
3175            return;
3176        }
3177        self.module.funcs[self.result].body.push(Body::Raw(
3178            mem::take(&mut self.code),
3179            mem::take(&mut self.traps),
3180        ));
3181    }
3182
    /// Completes compilation of this function, consuming the compiler.
    fn finish(mut self) {
        // Append the final `end` instruction which all functions require, and
        // then empty out the temporary buffer in `Compiler`.
        self.instruction(End);
        self.flush_code();

        // Flag the function as "done" which helps with an assert later on in
        // emission that everything was eventually finished.
        self.module.funcs[self.result].filled_in = true;
    }
3193
    /// Fetches the value contained with the local specified by `stack` and
    /// converts it to `dst_ty`.
    ///
    /// This is only intended for use in primitive operations where `stack` is
    /// guaranteed to have only one local. The type of the local on the stack is
    /// then converted to `dst_ty` appropriately. Note that the types may be
    /// different due to the "flattening" of variant types.
    fn stack_get(&mut self, stack: &Stack<'_>, dst_ty: ValType) {
        assert_eq!(stack.locals.len(), 1);
        let (idx, src_ty) = stack.locals[0];
        self.instruction(LocalGet(idx));
        match (src_ty, dst_ty) {
            // Same type: the raw local value is already correct.
            (ValType::I32, ValType::I32)
            | (ValType::I64, ValType::I64)
            | (ValType::F32, ValType::F32)
            | (ValType::F64, ValType::F64) => {}

            // Narrowing/reinterpreting conversions out of a flattened "join"
            // representation; wraps assert (in debug mode) that no information
            // is lost in the discarded upper 32 bits.
            (ValType::I32, ValType::F32) => self.instruction(F32ReinterpretI32),
            (ValType::I64, ValType::I32) => {
                self.assert_i64_upper_bits_not_set(idx);
                self.instruction(I32WrapI64);
            }
            (ValType::I64, ValType::F64) => self.instruction(F64ReinterpretI64),
            (ValType::I64, ValType::F32) => {
                self.assert_i64_upper_bits_not_set(idx);
                self.instruction(I32WrapI64);
                self.instruction(F32ReinterpretI32);
            }

            // should not be possible given the `join` function for variants
            (ValType::I32, ValType::I64)
            | (ValType::I32, ValType::F64)
            | (ValType::F32, ValType::I32)
            | (ValType::F32, ValType::I64)
            | (ValType::F32, ValType::F64)
            | (ValType::F64, ValType::I32)
            | (ValType::F64, ValType::I64)
            | (ValType::F64, ValType::F32)

            // not used in the component model
            | (ValType::Ref(_), _)
            | (_, ValType::Ref(_))
            | (ValType::V128, _)
            | (_, ValType::V128) => {
                panic!("cannot get {dst_ty:?} from {src_ty:?} local");
            }
        }
    }
3242
    /// Debug-build-only check that the upper 32 bits of the i64 in `local` are
    /// zero, trapping with an assertion failure otherwise.
    ///
    /// Used before truncating an i64 "joined" representation down to 32 bits.
    fn assert_i64_upper_bits_not_set(&mut self, local: u32) {
        if !self.module.debug {
            return;
        }
        // `(local >> 32) as i32` is nonzero iff any upper bit is set.
        self.instruction(LocalGet(local));
        self.instruction(I64Const(32));
        self.instruction(I64ShrU);
        self.instruction(I32WrapI64);
        self.instruction(If(BlockType::Empty));
        self.trap(Trap::AssertFailed("upper bits are unexpectedly set"));
        self.instruction(End);
    }
3255
3256    /// Converts the top value on the WebAssembly stack which has type
3257    /// `src_ty` to `dst_tys[0]`.
3258    ///
3259    /// This is only intended for conversion of primitives where the `dst_tys`
3260    /// list is known to be of length 1.
3261    fn stack_set(&mut self, dst_tys: &[ValType], src_ty: ValType) {
3262        assert_eq!(dst_tys.len(), 1);
3263        let dst_ty = dst_tys[0];
3264        match (src_ty, dst_ty) {
3265            (ValType::I32, ValType::I32)
3266            | (ValType::I64, ValType::I64)
3267            | (ValType::F32, ValType::F32)
3268            | (ValType::F64, ValType::F64) => {}
3269
3270            (ValType::F32, ValType::I32) => self.instruction(I32ReinterpretF32),
3271            (ValType::I32, ValType::I64) => self.instruction(I64ExtendI32U),
3272            (ValType::F64, ValType::I64) => self.instruction(I64ReinterpretF64),
3273            (ValType::F32, ValType::I64) => {
3274                self.instruction(I32ReinterpretF32);
3275                self.instruction(I64ExtendI32U);
3276            }
3277
3278            // should not be possible given the `join` function for variants
3279            (ValType::I64, ValType::I32)
3280            | (ValType::F64, ValType::I32)
3281            | (ValType::I32, ValType::F32)
3282            | (ValType::I64, ValType::F32)
3283            | (ValType::F64, ValType::F32)
3284            | (ValType::I32, ValType::F64)
3285            | (ValType::I64, ValType::F64)
3286            | (ValType::F32, ValType::F64)
3287
3288            // not used in the component model
3289            | (ValType::Ref(_), _)
3290            | (_, ValType::Ref(_))
3291            | (ValType::V128, _)
3292            | (_, ValType::V128) => {
3293                panic!("cannot get {dst_ty:?} from {src_ty:?} local");
3294            }
3295        }
3296    }
3297
    /// Loads an unsigned 8-bit value from `mem`, zero-extended to i32.
    fn i32_load8u(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(I32Load8U(mem.memarg(0)));
    }
3302
    /// Loads a signed 8-bit value from `mem`, sign-extended to i32.
    fn i32_load8s(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(I32Load8S(mem.memarg(0)));
    }
3307
    /// Loads an unsigned 16-bit value from `mem`, zero-extended to i32.
    fn i32_load16u(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(I32Load16U(mem.memarg(1)));
    }
3312
    /// Loads a signed 16-bit value from `mem`, sign-extended to i32.
    fn i32_load16s(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(I32Load16S(mem.memarg(1)));
    }
3317
    /// Loads a 32-bit integer from `mem`.
    fn i32_load(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(I32Load(mem.memarg(2)));
    }
3322
    /// Loads a 64-bit integer from `mem`.
    fn i64_load(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(I64Load(mem.memarg(3)));
    }
3327
    /// Loads a pointer-sized integer from `mem` (i64 for memory64, i32
    /// otherwise).
    fn ptr_load(&mut self, mem: &Memory) {
        if mem.opts.memory64 {
            self.i64_load(mem);
        } else {
            self.i32_load(mem);
        }
    }
3335
3336    fn ptr_add(&mut self, opts: &Options) {
3337        if opts.memory64 {
3338            self.instruction(I64Add);
3339        } else {
3340            self.instruction(I32Add);
3341        }
3342    }
3343
3344    fn ptr_sub(&mut self, opts: &Options) {
3345        if opts.memory64 {
3346            self.instruction(I64Sub);
3347        } else {
3348            self.instruction(I32Sub);
3349        }
3350    }
3351
3352    fn ptr_mul(&mut self, opts: &Options) {
3353        if opts.memory64 {
3354            self.instruction(I64Mul);
3355        } else {
3356            self.instruction(I32Mul);
3357        }
3358    }
3359
3360    fn ptr_ge_u(&mut self, opts: &Options) {
3361        if opts.memory64 {
3362            self.instruction(I64GeU);
3363        } else {
3364            self.instruction(I32GeU);
3365        }
3366    }
3367
3368    fn ptr_lt_u(&mut self, opts: &Options) {
3369        if opts.memory64 {
3370            self.instruction(I64LtU);
3371        } else {
3372            self.instruction(I32LtU);
3373        }
3374    }
3375
3376    fn ptr_shl(&mut self, opts: &Options) {
3377        if opts.memory64 {
3378            self.instruction(I64Shl);
3379        } else {
3380            self.instruction(I32Shl);
3381        }
3382    }
3383
3384    fn ptr_eqz(&mut self, opts: &Options) {
3385        if opts.memory64 {
3386            self.instruction(I64Eqz);
3387        } else {
3388            self.instruction(I32Eqz);
3389        }
3390    }
3391
3392    fn ptr_uconst(&mut self, opts: &Options, val: u32) {
3393        if opts.memory64 {
3394            self.instruction(I64Const(val.into()));
3395        } else {
3396            self.instruction(I32Const(val as i32));
3397        }
3398    }
3399
3400    fn ptr_iconst(&mut self, opts: &Options, val: i32) {
3401        if opts.memory64 {
3402            self.instruction(I64Const(val.into()));
3403        } else {
3404            self.instruction(I32Const(val));
3405        }
3406    }
3407
3408    fn ptr_eq(&mut self, opts: &Options) {
3409        if opts.memory64 {
3410            self.instruction(I64Eq);
3411        } else {
3412            self.instruction(I32Eq);
3413        }
3414    }
3415
3416    fn ptr_ne(&mut self, opts: &Options) {
3417        if opts.memory64 {
3418            self.instruction(I64Ne);
3419        } else {
3420            self.instruction(I32Ne);
3421        }
3422    }
3423
3424    fn ptr_and(&mut self, opts: &Options) {
3425        if opts.memory64 {
3426            self.instruction(I64And);
3427        } else {
3428            self.instruction(I32And);
3429        }
3430    }
3431
3432    fn ptr_or(&mut self, opts: &Options) {
3433        if opts.memory64 {
3434            self.instruction(I64Or);
3435        } else {
3436            self.instruction(I32Or);
3437        }
3438    }
3439
3440    fn ptr_xor(&mut self, opts: &Options) {
3441        if opts.memory64 {
3442            self.instruction(I64Xor);
3443        } else {
3444            self.instruction(I32Xor);
3445        }
3446    }
3447
    /// Emits an `if` whose condition is a pointer-width value on the stack.
    ///
    /// Core wasm `if` only consumes an i32 condition, so for memory64 the i64
    /// is first compared against zero.
    fn ptr_if(&mut self, opts: &Options, ty: BlockType) {
        if opts.memory64 {
            self.instruction(I64Const(0));
            self.instruction(I64Ne);
        }
        self.instruction(If(ty));
    }
3455
    /// Emits a `br_if` to `depth` whose condition is a pointer-width value.
    ///
    /// Like `ptr_if`, an i64 condition is first reduced to i32 via `i64.ne 0`.
    fn ptr_br_if(&mut self, opts: &Options, depth: u32) {
        if opts.memory64 {
            self.instruction(I64Const(0));
            self.instruction(I64Ne);
        }
        self.instruction(BrIf(depth));
    }
3463
    /// Loads a 32-bit float from `mem`.
    fn f32_load(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(F32Load(mem.memarg(2)));
    }
3468
    /// Loads a 64-bit float from `mem`.
    fn f64_load(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(F64Load(mem.memarg(3)));
    }
3473
    /// Pushes the base address onto the wasm stack if `dst` is a memory
    /// destination (stores expect the address below the value); no-op for
    /// stack destinations.
    fn push_dst_addr(&mut self, dst: &Destination) {
        if let Destination::Memory(mem) = dst {
            self.instruction(LocalGet(mem.addr.idx));
        }
    }
3479
    /// Stores the low 8 bits of an i32; address and value are already on the
    /// wasm stack.
    fn i32_store8(&mut self, mem: &Memory) {
        self.instruction(I32Store8(mem.memarg(0)));
    }
3483
    /// Stores the low 16 bits of an i32; address and value are already on the
    /// wasm stack.
    fn i32_store16(&mut self, mem: &Memory) {
        self.instruction(I32Store16(mem.memarg(1)));
    }
3487
    /// Stores an i32; address and value are already on the wasm stack.
    fn i32_store(&mut self, mem: &Memory) {
        self.instruction(I32Store(mem.memarg(2)));
    }
3491
    /// Stores an i64; address and value are already on the wasm stack.
    fn i64_store(&mut self, mem: &Memory) {
        self.instruction(I64Store(mem.memarg(3)));
    }
3495
    /// Stores a pointer-sized integer (i64 for memory64, i32 otherwise);
    /// address and value are already on the wasm stack.
    fn ptr_store(&mut self, mem: &Memory) {
        if mem.opts.memory64 {
            self.i64_store(mem);
        } else {
            self.i32_store(mem);
        }
    }
3503
    /// Stores an f32; address and value are already on the wasm stack.
    fn f32_store(&mut self, mem: &Memory) {
        self.instruction(F32Store(mem.memarg(2)));
    }
3507
    /// Stores an f64; address and value are already on the wasm stack.
    fn f64_store(&mut self, mem: &Memory) {
        self.instruction(F64Store(mem.memarg(3)));
    }
3511}
3512
3513impl<'a> Source<'a> {
3514    /// Given this `Source` returns an iterator over the `Source` for each of
3515    /// the component `fields` specified.
3516    ///
3517    /// This will automatically slice stack-based locals to the appropriate
3518    /// width for each component type and additionally calculate the appropriate
3519    /// offset for each memory-based type.
3520    fn record_field_srcs<'b>(
3521        &'b self,
3522        types: &'b ComponentTypesBuilder,
3523        fields: impl IntoIterator<Item = InterfaceType> + 'b,
3524    ) -> impl Iterator<Item = Source<'a>> + 'b
3525    where
3526        'a: 'b,
3527    {
3528        let mut offset = 0;
3529        fields.into_iter().map(move |ty| match self {
3530            Source::Memory(mem) => {
3531                let mem = next_field_offset(&mut offset, types, &ty, mem);
3532                Source::Memory(mem)
3533            }
3534            Source::Stack(stack) => {
3535                let cnt = types.flat_types(&ty).unwrap().len() as u32;
3536                offset += cnt;
3537                Source::Stack(stack.slice((offset - cnt) as usize..offset as usize))
3538            }
3539        })
3540    }
3541
3542    /// Returns the corresponding discriminant source and payload source f
3543    fn payload_src(
3544        &self,
3545        types: &ComponentTypesBuilder,
3546        info: &VariantInfo,
3547        case: Option<&InterfaceType>,
3548    ) -> Source<'a> {
3549        match self {
3550            Source::Stack(s) => {
3551                let flat_len = match case {
3552                    Some(case) => types.flat_types(case).unwrap().len(),
3553                    None => 0,
3554                };
3555                Source::Stack(s.slice(1..s.locals.len()).slice(0..flat_len))
3556            }
3557            Source::Memory(mem) => {
3558                let mem = if mem.opts.memory64 {
3559                    mem.bump(info.payload_offset64)
3560                } else {
3561                    mem.bump(info.payload_offset32)
3562                };
3563                Source::Memory(mem)
3564            }
3565        }
3566    }
3567
3568    fn opts(&self) -> &'a Options {
3569        match self {
3570            Source::Stack(s) => s.opts,
3571            Source::Memory(mem) => mem.opts,
3572        }
3573    }
3574}
3575
3576impl<'a> Destination<'a> {
3577    /// Same as `Source::record_field_srcs` but for destinations.
3578    fn record_field_dsts<'b, I>(
3579        &'b self,
3580        types: &'b ComponentTypesBuilder,
3581        fields: I,
3582    ) -> impl Iterator<Item = Destination<'b>> + use<'b, I>
3583    where
3584        'a: 'b,
3585        I: IntoIterator<Item = InterfaceType> + 'b,
3586    {
3587        let mut offset = 0;
3588        fields.into_iter().map(move |ty| match self {
3589            Destination::Memory(mem) => {
3590                let mem = next_field_offset(&mut offset, types, &ty, mem);
3591                Destination::Memory(mem)
3592            }
3593            Destination::Stack(s, opts) => {
3594                let cnt = types.flat_types(&ty).unwrap().len() as u32;
3595                offset += cnt;
3596                Destination::Stack(&s[(offset - cnt) as usize..offset as usize], opts)
3597            }
3598        })
3599    }
3600
3601    /// Returns the corresponding discriminant source and payload source f
3602    fn payload_dst(
3603        &self,
3604        types: &ComponentTypesBuilder,
3605        info: &VariantInfo,
3606        case: Option<&InterfaceType>,
3607    ) -> Destination {
3608        match self {
3609            Destination::Stack(s, opts) => {
3610                let flat_len = match case {
3611                    Some(case) => types.flat_types(case).unwrap().len(),
3612                    None => 0,
3613                };
3614                Destination::Stack(&s[1..][..flat_len], opts)
3615            }
3616            Destination::Memory(mem) => {
3617                let mem = if mem.opts.memory64 {
3618                    mem.bump(info.payload_offset64)
3619                } else {
3620                    mem.bump(info.payload_offset32)
3621                };
3622                Destination::Memory(mem)
3623            }
3624        }
3625    }
3626
3627    fn opts(&self) -> &'a Options {
3628        match self {
3629            Destination::Stack(_, opts) => opts,
3630            Destination::Memory(mem) => mem.opts,
3631        }
3632    }
3633}
3634
3635fn next_field_offset<'a>(
3636    offset: &mut u32,
3637    types: &ComponentTypesBuilder,
3638    field: &InterfaceType,
3639    mem: &Memory<'a>,
3640) -> Memory<'a> {
3641    let abi = types.canonical_abi(field);
3642    let offset = if mem.opts.memory64 {
3643        abi.next_field64(offset)
3644    } else {
3645        abi.next_field32(offset)
3646    };
3647    mem.bump(offset)
3648}
3649
3650impl<'a> Memory<'a> {
3651    fn memarg(&self, align: u32) -> MemArg {
3652        MemArg {
3653            offset: u64::from(self.offset),
3654            align,
3655            memory_index: self.opts.memory.unwrap().as_u32(),
3656        }
3657    }
3658
3659    fn bump(&self, offset: u32) -> Memory<'a> {
3660        Memory {
3661            opts: self.opts,
3662            addr: TempLocal::new(self.addr.idx, self.addr.ty),
3663            offset: self.offset + offset,
3664        }
3665    }
3666}
3667
3668impl<'a> Stack<'a> {
3669    fn slice(&self, range: Range<usize>) -> Stack<'a> {
3670        Stack {
3671            locals: &self.locals[range],
3672            opts: self.opts,
3673        }
3674    }
3675}
3676
/// Pairing of a source variant case with the destination case it translates
/// into.
struct VariantCase<'a> {
    // Discriminant value of this case in the source variant.
    src_i: u32,
    // Payload type of the source case, if any.
    src_ty: Option<&'a InterfaceType>,
    // Discriminant value of the corresponding case in the destination variant
    // (may differ from `src_i`).
    dst_i: u32,
    // Payload type of the destination case, if any.
    dst_ty: Option<&'a InterfaceType>,
}
3683
3684fn variant_info<'a, I>(types: &ComponentTypesBuilder, cases: I) -> VariantInfo
3685where
3686    I: IntoIterator<Item = Option<&'a InterfaceType>>,
3687    I::IntoIter: ExactSizeIterator,
3688{
3689    VariantInfo::new(
3690        cases
3691            .into_iter()
3692            .map(|ty| ty.map(|ty| types.canonical_abi(ty))),
3693    )
3694    .0
3695}
3696
/// Size argument for `Compiler::malloc`: either known at compile time or held
/// in a local of the generated function.
enum MallocSize {
    // Statically-known byte size.
    Const(u32),
    // Index of a local containing the byte size.
    Local(u32),
}
3701
/// A string stored in linear memory: a pointer local and a length local plus
/// the `Options` describing the memory/encoding it lives in.
///
/// NOTE(review): the unit of `len` (bytes vs. code units) depends on the
/// string encoding in `opts` — confirm against the callers of this type.
struct WasmString<'a> {
    ptr: TempLocal,
    len: TempLocal,
    opts: &'a Options,
}
3707
/// A temporary local variable in the generated function.
struct TempLocal {
    // Index of the local within the function.
    idx: u32,
    // Core wasm type of the local.
    ty: ValType,
    // Whether this local must be released via `free_temp_local` before being
    // dropped; the `Drop` impl panics if this is still set.
    needs_free: bool,
}
3713
3714impl TempLocal {
3715    fn new(idx: u32, ty: ValType) -> TempLocal {
3716        TempLocal {
3717            idx,
3718            ty,
3719            needs_free: false,
3720        }
3721    }
3722}
3723
3724impl std::ops::Drop for TempLocal {
3725    fn drop(&mut self) {
3726        if self.needs_free {
3727            panic!("temporary local not free'd");
3728        }
3729    }
3730}
3731
/// Lowers a canonical-ABI flat type to the corresponding core wasm value
/// type; the mapping is one-to-one.
impl From<FlatType> for ValType {
    fn from(ty: FlatType) -> ValType {
        match ty {
            FlatType::I32 => ValType::I32,
            FlatType::I64 => ValType::I64,
            FlatType::F32 => ValType::F32,
            FlatType::F64 => ValType::F64,
        }
    }
}