wasmtime_environ/fact/
trampoline.rs

//! Low-level compilation of a fused adapter function.
2//!
3//! This module is tasked with the top-level `compile` function which creates a
4//! single WebAssembly function which will perform the steps of the fused
5//! adapter for an `AdapterData` provided. This is the "meat" of compilation
6//! where the validation of the canonical ABI or similar all happens to
7//! translate arguments from one module to another.
8//!
9//! ## Traps and their ordering
10//!
11//! Currently this compiler is pretty "loose" about the ordering of precisely
12//! what trap happens where. The main reason for this is that to core wasm all
13//! traps are the same and for fused adapters if a trap happens no intermediate
14//! side effects are visible (as designed by the canonical ABI itself). For this
15//! it's important to note that some of the precise choices of control flow here
16//! can be somewhat arbitrary, an intentional decision.
17
18use crate::component::{
19    CanonicalAbiInfo, ComponentTypesBuilder, FLAG_MAY_ENTER, FLAG_MAY_LEAVE, FixedEncoding as FE,
20    FlatType, InterfaceType, MAX_FLAT_ASYNC_PARAMS, MAX_FLAT_PARAMS, PREPARE_ASYNC_NO_RESULT,
21    PREPARE_ASYNC_WITH_RESULT, StringEncoding, Transcode, TypeComponentLocalErrorContextTableIndex,
22    TypeEnumIndex, TypeFlagsIndex, TypeFutureTableIndex, TypeListIndex, TypeOptionIndex,
23    TypeRecordIndex, TypeResourceTableIndex, TypeResultIndex, TypeStreamTableIndex, TypeTupleIndex,
24    TypeVariantIndex, VariantInfo,
25};
26use crate::fact::signature::Signature;
27use crate::fact::transcode::Transcoder;
28use crate::fact::traps::Trap;
29use crate::fact::{
30    AdapterData, Body, Context, Function, FunctionId, Helper, HelperLocation, HelperType, Module,
31    Options,
32};
33use crate::prelude::*;
34use crate::{FuncIndex, GlobalIndex};
35use cranelift_entity::Signed;
36use std::collections::HashMap;
37use std::mem;
38use std::ops::Range;
39use wasm_encoder::{BlockType, Encode, Instruction, Instruction::*, MemArg, ValType};
40use wasmtime_component_util::{DiscriminantSize, FlagsSize};
41
/// Upper bound on string byte lengths handled by generated adapters.
/// NOTE(review): enforcement happens outside this chunk — confirm at the use
/// sites that lengths at or above this value trap.
const MAX_STRING_BYTE_LENGTH: u32 = 1 << 31;
/// Tag bit combined with a string length to mark UTF-16 encoded data.
/// NOTE(review): usage is outside this chunk — confirm.
const UTF16_TAG: u32 = 1 << 31;

/// This value is arbitrarily chosen and should be fine to change at any time,
/// it just seemed like a halfway reasonable starting point.
const INITIAL_FUEL: usize = 1_000;
48
/// State used while emitting a single core-wasm function that implements a
/// fused adapter (or one of its helpers).
struct Compiler<'a, 'b> {
    /// Type information for the component this adapter belongs to.
    types: &'a ComponentTypesBuilder,

    /// The module that the generated function is being added to.
    module: &'b mut Module<'a>,

    /// Identifier of the function being generated; its body is stored into
    /// `module.funcs[result]` when compilation finishes.
    result: FunctionId,

    /// The encoded WebAssembly function body so far, not including locals.
    code: Vec<u8>,

    /// Total number of locals generated so far.
    nlocals: u32,

    /// Locals partitioned by type which are not currently in use.
    free_locals: HashMap<ValType, Vec<u32>>,

    /// Metadata about all `unreachable` trap instructions in this function and
    /// what the trap represents. The offset within `self.code` is recorded as
    /// well.
    traps: Vec<(usize, Trap)>,

    /// A heuristic which is intended to limit the size of a generated function
    /// to a certain maximum to avoid generating arbitrarily large functions.
    ///
    /// This fuel counter is decremented each time `translate` is called and
    /// when fuel is entirely consumed further translations, if necessary, will
    /// be done through calls to other functions in the module. This is intended
    /// to be a heuristic to split up the main function into theoretically
    /// reusable portions.
    fuel: usize,

    /// Indicates whether an "enter call" should be emitted in the generated
    /// function with a call to `Resource{Enter,Exit}Call` at the beginning and
    /// end of the function for tracking of information related to borrowed
    /// resources.
    emit_resource_call: bool,
}
84
/// Compiles the fused adapter function for `adapter` into `module`.
///
/// Dispatches on the sync/async combination of the lower (caller) and lift
/// (callee) options; whenever either side is async, extra `[async-start]` and
/// `[async-return]` helper functions are generated for the host to invoke.
pub(super) fn compile(module: &mut Module<'_>, adapter: &AdapterData) {
    // Shared setup: computes the lower/lift signatures, registers the adapter
    // function itself (typed with the *lower* signature, since that's what the
    // caller invokes), and determines whether resource enter/exit intrinsics
    // are needed.
    fn compiler<'a, 'b>(
        module: &'b mut Module<'a>,
        adapter: &AdapterData,
    ) -> (Compiler<'a, 'b>, Signature, Signature) {
        let lower_sig = module.types.signature(&adapter.lower, Context::Lower);
        let lift_sig = module.types.signature(&adapter.lift, Context::Lift);
        let ty = module
            .core_types
            .function(&lower_sig.params, &lower_sig.results);
        let result = module
            .funcs
            .push(Function::new(Some(adapter.name.clone()), ty));

        // If this type signature contains any borrowed resources then invocations
        // of enter/exit call for resource-related metadata tracking must be used.
        // It shouldn't matter whether the lower/lift signature is used here as both
        // should return the same answer.
        let emit_resource_call = module.types.contains_borrow_resource(&adapter.lower);
        assert_eq!(
            emit_resource_call,
            module.types.contains_borrow_resource(&adapter.lift)
        );

        (
            Compiler::new(
                module,
                result,
                lower_sig.params.len() as u32,
                emit_resource_call,
            ),
            lower_sig,
            lift_sig,
        )
    }

    // This closure compiles a function, exported to the host, which the host
    // uses to lift the parameters from the caller and lower them to the
    // callee.
    //
    // This allows the host to delay copying the parameters until the callee
    // signals readiness by clearing its backpressure flag.
    let async_start_adapter = |module: &mut Module| {
        let sig = module
            .types
            .async_start_signature(&adapter.lower, &adapter.lift);
        let ty = module.core_types.function(&sig.params, &sig.results);
        let result = module.funcs.push(Function::new(
            Some(format!("[async-start]{}", adapter.name)),
            ty,
        ));

        Compiler::new(module, result, sig.params.len() as u32, false)
            .compile_async_start_adapter(adapter, &sig);

        result
    };

    // This closure compiles a function to be exported by the adapter module and
    // called by the host to lift the results from the callee and lower them to
    // the caller.
    //
    // Given that async-lifted exports return their results via the
    // `task.return` intrinsic, the host will need to copy the results from
    // callee to caller when that intrinsic is called rather than when the
    // callee task fully completes (which may happen much later).
    let async_return_adapter = |module: &mut Module| {
        let sig = module
            .types
            .async_return_signature(&adapter.lower, &adapter.lift);
        let ty = module.core_types.function(&sig.params, &sig.results);
        let result = module.funcs.push(Function::new(
            Some(format!("[async-return]{}", adapter.name)),
            ty,
        ));

        Compiler::new(module, result, sig.params.len() as u32, false)
            .compile_async_return_adapter(adapter, &sig);

        result
    };

    match (adapter.lower.options.async_, adapter.lift.options.async_) {
        (false, false) => {
            // We can adapt sync->sync case with only minimal use of intrinsics,
            // e.g. resource enter and exit calls as needed.
            let (compiler, lower_sig, lift_sig) = compiler(module, adapter);
            compiler.compile_sync_to_sync_adapter(adapter, &lower_sig, &lift_sig)
        }
        (true, true) => {
            // In the async->async case, we must compile a couple of helper functions:
            //
            // - `async-start`: copies the parameters from the caller to the callee
            // - `async-return`: copies the result from the callee to the caller
            //
            // Unlike synchronous calls, the above operations are asynchronous
            // and subject to backpressure.  If the callee is not yet ready to
            // handle a new call, the `async-start` function will not be called
            // immediately.  Instead, control will return to the caller,
            // allowing it to do other work while waiting for this call to make
            // progress.  Once the callee indicates it is ready, `async-start`
            // will be called, and sometime later (possibly after various task
            // switch events), when the callee has produced a result, it will
            // call `async-return` via the `task.return` intrinsic, at which
            // point a `STATUS_RETURNED` event will be delivered to the caller.
            let start = async_start_adapter(module);
            let return_ = async_return_adapter(module);
            let (compiler, lower_sig, lift_sig) = compiler(module, adapter);
            compiler.compile_async_to_async_adapter(
                adapter,
                start,
                return_,
                i32::try_from(lift_sig.params.len()).unwrap(),
                &lower_sig,
            );
        }
        (false, true) => {
            // Like the async->async case above, for the sync->async case we
            // also need `async-start` and `async-return` helper functions to
            // allow the callee to asynchronously "pull" the parameters and
            // "push" the results when it is ready.
            //
            // However, since the caller is using the synchronous ABI, the
            // parameters may have been passed via the stack rather than linear
            // memory.  In that case, we pass them to the host to store in a
            // task-local location temporarily in the case of backpressure.
            // Similarly, the host will also temporarily store the results that
            // the callee provides to `async-return` until it is ready to resume
            // the caller.
            let start = async_start_adapter(module);
            let return_ = async_return_adapter(module);
            let (compiler, lower_sig, lift_sig) = compiler(module, adapter);
            compiler.compile_sync_to_async_adapter(
                adapter,
                start,
                return_,
                i32::try_from(lift_sig.params.len()).unwrap(),
                &lower_sig,
            );
        }
        (true, false) => {
            // As with the async->async and sync->async cases above, for the
            // async->sync case we use `async-start` and `async-return` helper
            // functions.  Here, those functions allow the host to enforce
            // backpressure in the case where the callee instance already has
            // another synchronous call in progress, in which case we can't
            // start a new one until the current one (and any others already
            // waiting in line behind it) has completed.
            //
            // In the case of backpressure, we'll return control to the caller
            // immediately so it can do other work.  Later, once the callee is
            // ready, the host will call the `async-start` function to retrieve
            // the parameters and pass them to the callee.  At that point, the
            // callee may block on a host call, at which point the host will
            // suspend the fiber it is running on and allow the caller (or any
            // other ready instance) to run concurrently with the blocked
            // callee.  Once the callee finally returns, the host will call the
            // `async-return` function to write the result to the caller's
            // linear memory and deliver a `STATUS_RETURNED` event to the
            // caller.
            //
            // Note: the lift signature is computed here, before `compiler`
            // registers the adapter, because only the param/result counts are
            // needed in this arm.
            let lift_sig = module.types.signature(&adapter.lift, Context::Lift);
            let start = async_start_adapter(module);
            let return_ = async_return_adapter(module);
            let (compiler, lower_sig, ..) = compiler(module, adapter);
            compiler.compile_async_to_sync_adapter(
                adapter,
                start,
                return_,
                i32::try_from(lift_sig.params.len()).unwrap(),
                i32::try_from(lift_sig.results.len()).unwrap(),
                &lower_sig,
            );
        }
    }
}
259
/// Compiles a helper function as specified by the `Helper` configuration.
///
/// This function is invoked when the translation process runs out of fuel for
/// some prior function which enqueues a helper to get translated later. This
/// translation function will perform one type translation as specified by
/// `Helper` which can either be in the stack or memory for each side.
pub(super) fn compile_helper(module: &mut Module<'_>, result: FunctionId, helper: Helper) {
    // Local index layout: stack-based source values occupy locals
    // `0..src_flat.len()`; a memory-based source pointer is local 0; a
    // memory-based destination pointer is always the last parameter.
    let mut nlocals = 0;
    let src_flat;
    let src = match helper.src.loc {
        // If the source is on the stack then it's specified in the parameters
        // to the function, so this creates the flattened representation and
        // then lists those as the locals with appropriate types for the source
        // values.
        HelperLocation::Stack => {
            src_flat = module
                .types
                .flatten_types(&helper.src.opts, usize::MAX, [helper.src.ty])
                .unwrap()
                .iter()
                .enumerate()
                .map(|(i, ty)| (i as u32, *ty))
                .collect::<Vec<_>>();
            nlocals += src_flat.len() as u32;
            Source::Stack(Stack {
                locals: &src_flat,
                opts: &helper.src.opts,
            })
        }
        // If the source is in memory then that's just propagated here as the
        // first local is the pointer to the source.
        HelperLocation::Memory => {
            nlocals += 1;
            Source::Memory(Memory {
                opts: &helper.src.opts,
                addr: TempLocal::new(0, helper.src.opts.ptr()),
                offset: 0,
            })
        }
    };
    let dst_flat;
    let dst = match helper.dst.loc {
        // This is the same as the stack-based source although `Destination` is
        // configured slightly differently. Note that no locals are consumed
        // here: stack results are pushed rather than read from parameters.
        HelperLocation::Stack => {
            dst_flat = module
                .types
                .flatten_types(&helper.dst.opts, usize::MAX, [helper.dst.ty])
                .unwrap();
            Destination::Stack(&dst_flat, &helper.dst.opts)
        }
        // This is the same as a memory-based source but note that the address
        // of the destination is passed as the final parameter to the function,
        // hence `nlocals - 1` after the increment.
        HelperLocation::Memory => {
            nlocals += 1;
            Destination::Memory(Memory {
                opts: &helper.dst.opts,
                addr: TempLocal::new(nlocals - 1, helper.dst.opts.ptr()),
                offset: 0,
            })
        }
    };
    let mut compiler = Compiler {
        types: module.types,
        module,
        code: Vec::new(),
        nlocals,
        free_locals: HashMap::new(),
        traps: Vec::new(),
        result,
        fuel: INITIAL_FUEL,
        // This is a helper function and only the top-level function is
        // responsible for emitting these intrinsic calls.
        emit_resource_call: false,
    };
    compiler.translate(&helper.src.ty, &src, &helper.dst.ty, &dst);
    compiler.finish();
}
338
/// Possible ways that an interface value is represented in the core wasm
/// canonical ABI.
enum Source<'a> {
    /// This value is stored on the "stack" in wasm locals.
    ///
    /// This could mean that it's inline from the parameters to the function or
    /// that after a function call the results were stored in locals and the
    /// locals are the inline results.
    Stack(Stack<'a>),

    /// This value is stored in linear memory described by the `Memory`
    /// structure.
    Memory(Memory<'a>),
}
353
/// Same as `Source` but for where values are translated into.
enum Destination<'a> {
    /// This value is destined for the WebAssembly stack which means that
    /// results are simply pushed as we go along.
    ///
    /// The types listed are the types that are expected to be on the stack at
    /// the end of translation, paired with the lowering options in effect.
    Stack(&'a [ValType], &'a Options),

    /// This value is to be placed in linear memory described by `Memory`.
    Memory(Memory<'a>),
}
366
/// A value (or set of values) residing in wasm locals.
struct Stack<'a> {
    /// The locals that comprise a particular value.
    ///
    /// The length of this list represents the flattened list of types that make
    /// up the component value. Each list entry has the index of the local being
    /// accessed as well as the type of the local itself.
    locals: &'a [(u32, ValType)],
    /// The lifting/lowering options for where this stack of values comes from.
    opts: &'a Options,
}
377
/// Representation of where a value is going to be stored in linear memory.
struct Memory<'a> {
    /// The lifting/lowering options with memory configuration.
    opts: &'a Options,
    /// The index of the local that contains the base address of where the
    /// storage is happening.
    addr: TempLocal,
    /// A "static" offset that will be baked into wasm instructions for where
    /// memory loads/stores happen.
    offset: u32,
}
389
390impl<'a, 'b> Compiler<'a, 'b> {
391    fn new(
392        module: &'b mut Module<'a>,
393        result: FunctionId,
394        nlocals: u32,
395        emit_resource_call: bool,
396    ) -> Self {
397        Self {
398            types: module.types,
399            module,
400            result,
401            code: Vec::new(),
402            nlocals,
403            free_locals: HashMap::new(),
404            traps: Vec::new(),
405            fuel: INITIAL_FUEL,
406            emit_resource_call,
407        }
408    }
409
    /// Compile an adapter function supporting an async-lowered import to an
    /// async-lifted export.
    ///
    /// This uses a pair of `async-prepare` and `async-start` built-in functions
    /// to set up and start a subtask, respectively.  `async-prepare` accepts
    /// `start` and `return_` functions which copy the parameters and results,
    /// respectively; the host will call the former when the callee has cleared
    /// its backpressure flag and the latter when the callee has called
    /// `task.return`.
    fn compile_async_to_async_adapter(
        mut self,
        adapter: &AdapterData,
        start: FunctionId,
        return_: FunctionId,
        param_count: i32,
        lower_sig: &Signature,
    ) {
        let start_call =
            self.module
                .import_async_start_call(&adapter.name, adapter.lift.options.callback, None);

        // Create the host-side task; `false` selects the async flavor of
        // `prepare_call`.
        self.call_prepare(adapter, start, return_, lower_sig, false);

        // TODO: As an optimization, consider checking the backpressure flag on
        // the callee instance and, if it's unset _and_ the callee uses a
        // callback, translate the params and call the callee function directly
        // here (and make sure `start_call` knows _not_ to call it in that case).

        // We export this function so we can pass a funcref to the host.
        //
        // TODO: Use a declarative element segment instead of exporting this.
        self.module.exports.push((
            adapter.callee.as_u32(),
            format!("[adapter-callee]{}", adapter.name),
        ));

        // Pass the callee funcref plus its param/result counts and flags to
        // the `async-start` builtin.
        self.instruction(RefFunc(adapter.callee.as_u32()));
        self.instruction(I32Const(param_count));
        // The result count for an async callee is either one (if there's a
        // callback) or zero (if there's no callback).  We conservatively use
        // one here to ensure the host provides room for the result, if any.
        self.instruction(I32Const(1));
        self.instruction(I32Const(super::START_FLAG_ASYNC_CALLEE));
        self.instruction(Call(start_call.as_u32()));

        self.finish()
    }
457
458    /// Invokes the `prepare_call` builtin with the provided parameters for this
459    /// adapter.
460    ///
461    /// This is part of a async lower and/or async lift adapter. This is not
462    /// used for a sync->sync function call. This is done to create the task on
463    /// the host side of the runtime and such. This will notably invoke a
464    /// Cranelift builtin which will spill all wasm-level parameters to the
465    /// stack to handle variadic signatures.
466    ///
467    /// Note that the `prepare_sync` parameter here configures the
468    /// `result_count_or_max_if_async` parameter to indicate whether this is a
469    /// sync or async prepare.
470    fn call_prepare(
471        &mut self,
472        adapter: &AdapterData,
473        start: FunctionId,
474        return_: FunctionId,
475        lower_sig: &Signature,
476        prepare_sync: bool,
477    ) {
478        let prepare = self.module.import_prepare_call(
479            &adapter.name,
480            &lower_sig.params,
481            adapter.lift.options.memory,
482        );
483
484        self.flush_code();
485        self.module.funcs[self.result]
486            .body
487            .push(Body::RefFunc(start));
488        self.module.funcs[self.result]
489            .body
490            .push(Body::RefFunc(return_));
491        self.instruction(I32Const(
492            i32::try_from(adapter.lower.instance.as_u32()).unwrap(),
493        ));
494        self.instruction(I32Const(
495            i32::try_from(adapter.lift.instance.as_u32()).unwrap(),
496        ));
497        self.instruction(I32Const(
498            i32::try_from(self.types[adapter.lift.ty].results.as_u32()).unwrap(),
499        ));
500        self.instruction(I32Const(i32::from(
501            adapter.lift.options.string_encoding as u8,
502        )));
503
504        // flag this as a preparation for either an async call or sync call,
505        // depending on `prepare_sync`
506        let result_types = &self.types[self.types[adapter.lower.ty].results].types;
507        if prepare_sync {
508            self.instruction(I32Const(
509                i32::try_from(
510                    self.types
511                        .flatten_types(
512                            &adapter.lower.options,
513                            usize::MAX,
514                            result_types.iter().copied(),
515                        )
516                        .map(|v| v.len())
517                        .unwrap_or(usize::try_from(i32::MAX).unwrap()),
518                )
519                .unwrap(),
520            ));
521        } else {
522            if result_types.len() > 0 {
523                self.instruction(I32Const(PREPARE_ASYNC_WITH_RESULT.signed()));
524            } else {
525                self.instruction(I32Const(PREPARE_ASYNC_NO_RESULT.signed()));
526            }
527        }
528
529        // forward all our own arguments on to the host stub
530        for index in 0..lower_sig.params.len() {
531            self.instruction(LocalGet(u32::try_from(index).unwrap()));
532        }
533        self.instruction(Call(prepare.as_u32()));
534    }
535
    /// Compile an adapter function supporting a sync-lowered import to an
    /// async-lifted export.
    ///
    /// This uses a pair of `sync-prepare` and `sync-start` built-in functions
    /// to set up and start a subtask, respectively.  `sync-prepare` accepts
    /// `start` and `return_` functions which copy the parameters and results,
    /// respectively; the host will call the former when the callee has cleared
    /// its backpressure flag and the latter when the callee has called
    /// `task.return`.
    fn compile_sync_to_async_adapter(
        mut self,
        adapter: &AdapterData,
        start: FunctionId,
        return_: FunctionId,
        lift_param_count: i32,
        lower_sig: &Signature,
    ) {
        let start_call = self.module.import_sync_start_call(
            &adapter.name,
            adapter.lift.options.callback,
            &lower_sig.results,
        );

        // Create the host-side task; `true` selects the sync flavor of
        // `prepare_call` since the caller here is synchronous.
        self.call_prepare(adapter, start, return_, lower_sig, true);

        // TODO: As an optimization, consider checking the backpressure flag on
        // the callee instance and, if it's unset _and_ the callee uses a
        // callback, translate the params and call the callee function directly
        // here (and make sure `start_call` knows _not_ to call it in that case).

        // We export this function so we can pass a funcref to the host.
        //
        // TODO: Use a declarative element segment instead of exporting this.
        self.module.exports.push((
            adapter.callee.as_u32(),
            format!("[adapter-callee]{}", adapter.name),
        ));

        self.instruction(RefFunc(adapter.callee.as_u32()));
        self.instruction(I32Const(lift_param_count));
        self.instruction(Call(start_call.as_u32()));

        self.finish()
    }
580
    /// Compile an adapter function supporting an async-lowered import to a
    /// sync-lifted export.
    ///
    /// This uses a pair of `async-prepare` and `async-start` built-in functions
    /// to set up and start a subtask, respectively.  `async-prepare` accepts
    /// `start` and `return_` functions which copy the parameters and results,
    /// respectively; the host will call the former when the callee has cleared
    /// its backpressure flag and the latter when the callee has returned its
    /// result(s).
    fn compile_async_to_sync_adapter(
        mut self,
        adapter: &AdapterData,
        start: FunctionId,
        return_: FunctionId,
        param_count: i32,
        result_count: i32,
        lower_sig: &Signature,
    ) {
        // A sync callee has no callback function; its `post_return`, if any,
        // is forwarded to the builtin instead.
        let start_call =
            self.module
                .import_async_start_call(&adapter.name, None, adapter.lift.post_return);

        // Create the host-side task; `false` selects the async flavor of
        // `prepare_call`.
        self.call_prepare(adapter, start, return_, lower_sig, false);

        // We export this function so we can pass a funcref to the host.
        //
        // TODO: Use a declarative element segment instead of exporting this.
        self.module.exports.push((
            adapter.callee.as_u32(),
            format!("[adapter-callee]{}", adapter.name),
        ));

        self.instruction(RefFunc(adapter.callee.as_u32()));
        self.instruction(I32Const(param_count));
        self.instruction(I32Const(result_count));
        // No start flags for a synchronous callee (contrast with
        // `START_FLAG_ASYNC_CALLEE` in the async->async case).
        self.instruction(I32Const(0));
        self.instruction(Call(start_call.as_u32()));

        self.finish()
    }
621
622    /// Compiles a function to be exported to the host which host to lift the
623    /// parameters from the caller and lower them to the callee.
624    ///
625    /// This allows the host to delay copying the parameters until the callee
626    /// signals readiness by clearing its backpressure flag.
627    fn compile_async_start_adapter(mut self, adapter: &AdapterData, sig: &Signature) {
628        let param_locals = sig
629            .params
630            .iter()
631            .enumerate()
632            .map(|(i, ty)| (i as u32, *ty))
633            .collect::<Vec<_>>();
634
635        self.set_flag(adapter.lift.flags, FLAG_MAY_LEAVE, false);
636        self.translate_params(adapter, &param_locals);
637        self.set_flag(adapter.lift.flags, FLAG_MAY_LEAVE, true);
638
639        self.finish();
640    }
641
642    /// Compiles a function to be exported by the adapter module and called by
643    /// the host to lift the results from the callee and lower them to the
644    /// caller.
645    ///
646    /// Given that async-lifted exports return their results via the
647    /// `task.return` intrinsic, the host will need to copy the results from
648    /// callee to caller when that intrinsic is called rather than when the
649    /// callee task fully completes (which may happen much later).
650    fn compile_async_return_adapter(mut self, adapter: &AdapterData, sig: &Signature) {
651        let param_locals = sig
652            .params
653            .iter()
654            .enumerate()
655            .map(|(i, ty)| (i as u32, *ty))
656            .collect::<Vec<_>>();
657
658        self.set_flag(adapter.lower.flags, FLAG_MAY_LEAVE, false);
659        // Note that we pass `param_locals` as _both_ the `param_locals` and
660        // `result_locals` parameters to `translate_results`.  That's because
661        // the _parameters_ to `task.return` are actually the _results_ that the
662        // caller is waiting for.
663        //
664        // Additionally, the host will append a return
665        // pointer to the end of that list before calling this adapter's
666        // `async-return` function if the results exceed `MAX_FLAT_RESULTS` or
667        // the import is lowered async, in which case `translate_results` will
668        // use that pointer to store the results.
669        self.translate_results(adapter, &param_locals, &param_locals);
670        self.set_flag(adapter.lower.flags, FLAG_MAY_LEAVE, true);
671
672        self.finish()
673    }
674
    /// Compile an adapter function supporting a sync-lowered import to a
    /// sync-lifted export.
    ///
    /// Unlike calls involving async-lowered imports or async-lifted exports,
    /// this adapter need not involve host built-ins except possibly for
    /// resource bookkeeping.
    ///
    /// The generated body: checks instance flags, translates the caller's
    /// arguments via `translate_params`, calls `adapter.callee`, translates
    /// the results back via `translate_results`, and finally invokes the
    /// callee's `post-return` function (if any) before restoring flags.
    fn compile_sync_to_sync_adapter(
        mut self,
        adapter: &AdapterData,
        lower_sig: &Signature,
        lift_sig: &Signature,
    ) {
        // Check the instance flags required for this trampoline.
        //
        // This inserts the initial check required by `canon_lower` that the
        // caller instance can be left and additionally checks the
        // flags on the callee if necessary whether it can be entered.
        self.trap_if_not_flag(adapter.lower.flags, FLAG_MAY_LEAVE, Trap::CannotLeave);
        if adapter.called_as_export {
            self.trap_if_not_flag(adapter.lift.flags, FLAG_MAY_ENTER, Trap::CannotEnter);
            self.set_flag(adapter.lift.flags, FLAG_MAY_ENTER, false);
        } else if self.module.debug {
            // In debug builds double-check that reentrance was already ruled
            // out by whoever entered the callee instance originally.
            self.assert_not_flag(
                adapter.lift.flags,
                FLAG_MAY_ENTER,
                "may_enter should be unset",
            );
        }

        // If resource-related intrinsics are in play then the imported
        // `resource_enter_call` host built-in is invoked to perform call-entry
        // bookkeeping (paired with `resource_exit_call` at the end below).
        if self.emit_resource_call {
            let enter = self.module.import_resource_enter_call();
            self.instruction(Call(enter.as_u32()));
        }

        // Perform the translation of arguments. Note that `FLAG_MAY_LEAVE` is
        // cleared around this invocation for the callee as per the
        // `canon_lift` definition in the spec. Additionally note that the
        // precise ordering of traps here is not required since internal state
        // is not visible to either instance and a trap will "lock down" both
        // instances to no longer be visible. This means that we're free to
        // reorder lifts/lowers and flags and such as is necessary and
        // convenient here.
        //
        // TODO: if translation doesn't actually call any functions in either
        // instance then there's no need to set/clear the flag here and that can
        // be optimized away.
        self.set_flag(adapter.lift.flags, FLAG_MAY_LEAVE, false);
        // Every core parameter of this adapter is already in a local, so build
        // the `(local index, type)` list that `translate_params` reads from.
        let param_locals = lower_sig
            .params
            .iter()
            .enumerate()
            .map(|(i, ty)| (i as u32, *ty))
            .collect::<Vec<_>>();
        self.translate_params(adapter, &param_locals);
        self.set_flag(adapter.lift.flags, FLAG_MAY_LEAVE, true);

        // With all the arguments on the stack the actual target function is
        // now invoked. The core wasm results of the function are then placed
        // into locals for result translation afterwards.
        //
        // Results are popped off the operand stack in reverse order, hence the
        // reversed iteration here followed by un-reversing the collected
        // locals afterwards.
        self.instruction(Call(adapter.callee.as_u32()));
        let mut result_locals = Vec::with_capacity(lift_sig.results.len());
        let mut temps = Vec::new();
        for ty in lift_sig.results.iter().rev() {
            let local = self.local_set_new_tmp(*ty);
            result_locals.push((local.idx, *ty));
            temps.push(local);
        }
        result_locals.reverse();

        // Like above during the translation of results the caller cannot be
        // left (as we might invoke things like `realloc`). Again the precise
        // order of everything doesn't matter since intermediate states cannot
        // be witnessed, hence the setting of flags here to encapsulate both
        // liftings and lowerings.
        //
        // TODO: like above the management of the `MAY_LEAVE` flag can probably
        // be elided here for "simple" results.
        self.set_flag(adapter.lower.flags, FLAG_MAY_LEAVE, false);
        self.translate_results(adapter, &param_locals, &result_locals);
        self.set_flag(adapter.lower.flags, FLAG_MAY_LEAVE, true);

        // And finally post-return state is handled here once all results/etc
        // are all translated.
        //
        // Note that `post-return` receives the same core results that the
        // callee itself returned, per the canonical ABI.
        if let Some(func) = adapter.lift.post_return {
            for (result, _) in result_locals.iter() {
                self.instruction(LocalGet(*result));
            }
            self.instruction(Call(func.as_u32()));
        }
        if adapter.called_as_export {
            self.set_flag(adapter.lift.flags, FLAG_MAY_ENTER, true);
        }

        for tmp in temps {
            self.free_temp_local(tmp);
        }

        if self.emit_resource_call {
            let exit = self.module.import_resource_exit_call();
            self.instruction(Call(exit.as_u32()));
        }

        self.finish()
    }
779
    /// Translates all parameters of this adapter from the caller's
    /// representation (the core locals listed in `param_locals`, described by
    /// `adapter.lower`) to the callee's representation described by
    /// `adapter.lift`.
    ///
    /// On exit the lifted parameters are left on the wasm operand stack when
    /// they fit flat, or a single pointer into the callee's memory is left on
    /// the stack when they were spilled via the callee's `realloc`.
    fn translate_params(&mut self, adapter: &AdapterData, param_locals: &[(u32, ValType)]) {
        let src_tys = self.types[adapter.lower.ty].params;
        let src_tys = self.types[src_tys]
            .types
            .iter()
            .copied()
            .collect::<Vec<_>>();
        let dst_tys = self.types[adapter.lift.ty].params;
        let dst_tys = self.types[dst_tys]
            .types
            .iter()
            .copied()
            .collect::<Vec<_>>();
        let lift_opts = &adapter.lift.options;
        let lower_opts = &adapter.lower.options;

        // TODO: handle subtyping
        assert_eq!(src_tys.len(), dst_tys.len());

        // Async lowered functions have a smaller limit on flat parameters, but
        // their destination, a lifted function, does not have a different limit
        // than sync functions.
        let max_flat_params = if adapter.lower.options.async_ {
            MAX_FLAT_ASYNC_PARAMS
        } else {
            MAX_FLAT_PARAMS
        };
        let src_flat =
            self.types
                .flatten_types(lower_opts, max_flat_params, src_tys.iter().copied());
        let dst_flat =
            self.types
                .flatten_types(lift_opts, MAX_FLAT_PARAMS, dst_tys.iter().copied());

        let src = if let Some(flat) = &src_flat {
            Source::Stack(Stack {
                locals: &param_locals[..flat.len()],
                opts: lower_opts,
            })
        } else {
            // If there are too many parameters then that means the parameters
            // are actually a tuple stored in linear memory addressed by the
            // first parameter local.
            let (addr, ty) = param_locals[0];
            assert_eq!(ty, lower_opts.ptr());
            let align = src_tys
                .iter()
                .map(|t| self.types.align(lower_opts, t))
                .max()
                .unwrap_or(1);
            Source::Memory(self.memory_operand(lower_opts, TempLocal::new(addr, ty), align))
        };

        let dst = if let Some(flat) = &dst_flat {
            Destination::Stack(flat, lift_opts)
        } else {
            // The spilled parameter record's size/align is computed against
            // whichever pointer width the *callee* uses.
            let abi = CanonicalAbiInfo::record(dst_tys.iter().map(|t| self.types.canonical_abi(t)));
            let (size, align) = if lift_opts.memory64 {
                (abi.size64, abi.align64)
            } else {
                (abi.size32, abi.align32)
            };

            // If there are too many parameters then space is allocated in the
            // destination module for the parameters via its `realloc` function.
            let size = MallocSize::Const(size);
            Destination::Memory(self.malloc(lift_opts, size, align))
        };

        // Translate each parameter in turn by pairing up per-field sources and
        // destinations, treating the parameter list like a record.
        let srcs = src
            .record_field_srcs(self.types, src_tys.iter().copied())
            .zip(src_tys.iter());
        let dsts = dst
            .record_field_dsts(self.types, dst_tys.iter().copied())
            .zip(dst_tys.iter());
        for ((src, src_ty), (dst, dst_ty)) in srcs.zip(dsts) {
            self.translate(&src_ty, &src, &dst_ty, &dst);
        }

        // If the destination was linear memory instead of the stack then the
        // actual parameter that we're passing is the address of the values
        // stored, so ensure that's happening in the wasm body here.
        if let Destination::Memory(mem) = dst {
            self.instruction(LocalGet(mem.addr.idx));
            self.free_temp_local(mem.addr);
        }
    }
867
    /// Translates the results returned by the callee (stored in
    /// `result_locals`, described by `adapter.lift`) into the caller's
    /// representation described by `adapter.lower`.
    ///
    /// `param_locals` is consulted for the caller-supplied return pointer when
    /// the lowered results don't fit flat and must be written to the caller's
    /// memory.
    fn translate_results(
        &mut self,
        adapter: &AdapterData,
        param_locals: &[(u32, ValType)],
        result_locals: &[(u32, ValType)],
    ) {
        let src_tys = self.types[adapter.lift.ty].results;
        let src_tys = self.types[src_tys]
            .types
            .iter()
            .copied()
            .collect::<Vec<_>>();
        let dst_tys = self.types[adapter.lower.ty].results;
        let dst_tys = self.types[dst_tys]
            .types
            .iter()
            .copied()
            .collect::<Vec<_>>();
        let lift_opts = &adapter.lift.options;
        let lower_opts = &adapter.lower.options;

        let src_flat = self
            .types
            .flatten_lifting_types(lift_opts, src_tys.iter().copied());
        let dst_flat = self
            .types
            .flatten_lowering_types(lower_opts, dst_tys.iter().copied());

        let src = if src_flat.is_some() {
            Source::Stack(Stack {
                locals: result_locals,
                opts: lift_opts,
            })
        } else {
            // The original results to read from in this case come from the
            // return value of the function itself. The imported function will
            // return a linear memory address at which the values can be read
            // from.
            let align = src_tys
                .iter()
                .map(|t| self.types.align(lift_opts, t))
                .max()
                .unwrap_or(1);
            // Async calls carry one extra core result alongside the return
            // pointer; either way the pointer itself is the first result.
            assert_eq!(
                result_locals.len(),
                if lower_opts.async_ || lift_opts.async_ {
                    2
                } else {
                    1
                }
            );
            let (addr, ty) = result_locals[0];
            assert_eq!(ty, lift_opts.ptr());
            Source::Memory(self.memory_operand(lift_opts, TempLocal::new(addr, ty), align))
        };

        let dst = if let Some(flat) = &dst_flat {
            Destination::Stack(flat, lower_opts)
        } else {
            // This is slightly different than `translate_params` where the
            // return pointer was provided by the caller of this function
            // meaning the last parameter local is a pointer into linear memory.
            let align = dst_tys
                .iter()
                .map(|t| self.types.align(lower_opts, t))
                .max()
                .unwrap_or(1);
            let (addr, ty) = *param_locals.last().expect("no retptr");
            assert_eq!(ty, lower_opts.ptr());
            Destination::Memory(self.memory_operand(lower_opts, TempLocal::new(addr, ty), align))
        };

        // Translate each result in turn by pairing up per-field sources and
        // destinations, treating the result list like a record.
        let srcs = src
            .record_field_srcs(self.types, src_tys.iter().copied())
            .zip(src_tys.iter());
        let dsts = dst
            .record_field_dsts(self.types, dst_tys.iter().copied())
            .zip(dst_tys.iter());
        for ((src, src_ty), (dst, dst_ty)) in srcs.zip(dsts) {
            self.translate(&src_ty, &src, &dst_ty, &dst);
        }
    }
950
    /// Translates one value of type `src_ty` located at `src` into a value of
    /// type `dst_ty` at `dst`.
    ///
    /// This is the central dispatch point of the adapter compiler: it either
    /// performs the translation inline (recursing through aggregate types) or,
    /// when this function's code-size "fuel" is exhausted, outlines the work
    /// into a shared helper function which is called instead.
    fn translate(
        &mut self,
        src_ty: &InterfaceType,
        src: &Source<'_>,
        dst_ty: &InterfaceType,
        dst: &Destination,
    ) {
        if let Source::Memory(mem) = src {
            self.assert_aligned(src_ty, mem);
        }
        if let Destination::Memory(mem) = dst {
            self.assert_aligned(dst_ty, mem);
        }

        // Calculate a cost heuristic for what the translation of this specific
        // layer of the type is going to incur. The purpose of this cost is that
        // we'll deduct it from `self.fuel` and if no fuel is remaining then
        // translation is outlined into a separate function rather than being
        // translated into this function.
        //
        // The general goal is to avoid creating an exponentially sized function
        // for a linearly sized input (the type section). By outlining helper
        // functions there will ideally be a constant set of helper functions
        // per type (to accommodate in-memory or on-stack transfers as well as
        // src/dst options) which means that each function is at most a certain
        // size and we have a linear number of functions which should guarantee
        // an overall linear size of the output.
        //
        // To implement this the current heuristic is that each layer of
        // translating a type has a cost associated with it and this cost is
        // accounted for in `self.fuel`. Some conversions are considered free as
        // they generate basically as much code as the `call` to the translation
        // function while other are considered proportionally expensive to the
        // size of the type. The hope is that some upper layers are of a type's
        // translation are all inlined into one function but bottom layers end
        // up getting outlined to separate functions. Theoretically, again this
        // is built on hopes and dreams, the outlining can be shared amongst
        // tightly-intertwined type hierarchies which will reduce the size of
        // the output module due to the helpers being used.
        //
        // This heuristic of how to split functions has changed a few times in
        // the past and this isn't necessarily guaranteed to be the final
        // iteration.
        let cost = match src_ty {
            // These types are all quite simple to load/store and equate to
            // basically the same cost of the `call` instruction to call an
            // out-of-line translation function, so give them 0 cost.
            InterfaceType::Bool
            | InterfaceType::U8
            | InterfaceType::S8
            | InterfaceType::U16
            | InterfaceType::S16
            | InterfaceType::U32
            | InterfaceType::S32
            | InterfaceType::U64
            | InterfaceType::S64
            | InterfaceType::Float32
            | InterfaceType::Float64 => 0,

            // This has a small amount of validation associated with it, so
            // give it a cost of 1.
            InterfaceType::Char => 1,

            // This has a fair bit of code behind it depending on the
            // strings/encodings in play, so arbitrarily assign it this cost.
            InterfaceType::String => 40,

            // Iteration of a loop is along the lines of the cost of a string
            // so give it the same cost
            InterfaceType::List(_) => 40,

            InterfaceType::Flags(i) => {
                let count = self.module.types[*i].names.len();
                match FlagsSize::from_count(count) {
                    FlagsSize::Size0 => 0,
                    FlagsSize::Size1 | FlagsSize::Size2 => 1,
                    FlagsSize::Size4Plus(n) => n.into(),
                }
            }

            // Aggregates cost proportionally to their number of
            // fields/cases/names.
            InterfaceType::Record(i) => self.types[*i].fields.len(),
            InterfaceType::Tuple(i) => self.types[*i].types.len(),
            InterfaceType::Variant(i) => self.types[*i].cases.len(),
            InterfaceType::Enum(i) => self.types[*i].names.len(),

            // 2 cases to consider for each of these variants.
            InterfaceType::Option(_) | InterfaceType::Result(_) => 2,

            // TODO(#6696) - something nonzero, is 1 right?
            InterfaceType::Own(_)
            | InterfaceType::Borrow(_)
            | InterfaceType::Future(_)
            | InterfaceType::Stream(_)
            | InterfaceType::ErrorContext(_) => 1,
        };

        match self.fuel.checked_sub(cost) {
            // This function has enough fuel to perform the layer of translation
            // necessary for this type, so the fuel is updated in-place and
            // translation continues. Note that the recursion here is bounded by
            // the static recursion limit for all interface types as imposed
            // during the translation phase.
            Some(n) => {
                self.fuel = n;
                match src_ty {
                    InterfaceType::Bool => self.translate_bool(src, dst_ty, dst),
                    InterfaceType::U8 => self.translate_u8(src, dst_ty, dst),
                    InterfaceType::S8 => self.translate_s8(src, dst_ty, dst),
                    InterfaceType::U16 => self.translate_u16(src, dst_ty, dst),
                    InterfaceType::S16 => self.translate_s16(src, dst_ty, dst),
                    InterfaceType::U32 => self.translate_u32(src, dst_ty, dst),
                    InterfaceType::S32 => self.translate_s32(src, dst_ty, dst),
                    InterfaceType::U64 => self.translate_u64(src, dst_ty, dst),
                    InterfaceType::S64 => self.translate_s64(src, dst_ty, dst),
                    InterfaceType::Float32 => self.translate_f32(src, dst_ty, dst),
                    InterfaceType::Float64 => self.translate_f64(src, dst_ty, dst),
                    InterfaceType::Char => self.translate_char(src, dst_ty, dst),
                    InterfaceType::String => self.translate_string(src, dst_ty, dst),
                    InterfaceType::List(t) => self.translate_list(*t, src, dst_ty, dst),
                    InterfaceType::Record(t) => self.translate_record(*t, src, dst_ty, dst),
                    InterfaceType::Flags(f) => self.translate_flags(*f, src, dst_ty, dst),
                    InterfaceType::Tuple(t) => self.translate_tuple(*t, src, dst_ty, dst),
                    InterfaceType::Variant(v) => self.translate_variant(*v, src, dst_ty, dst),
                    InterfaceType::Enum(t) => self.translate_enum(*t, src, dst_ty, dst),
                    InterfaceType::Option(t) => self.translate_option(*t, src, dst_ty, dst),
                    InterfaceType::Result(t) => self.translate_result(*t, src, dst_ty, dst),
                    InterfaceType::Own(t) => self.translate_own(*t, src, dst_ty, dst),
                    InterfaceType::Borrow(t) => self.translate_borrow(*t, src, dst_ty, dst),
                    InterfaceType::Future(t) => self.translate_future(*t, src, dst_ty, dst),
                    InterfaceType::Stream(t) => self.translate_stream(*t, src, dst_ty, dst),
                    InterfaceType::ErrorContext(t) => {
                        self.translate_error_context(*t, src, dst_ty, dst)
                    }
                }
            }

            // This function does not have enough fuel left to perform this
            // layer of translation so the translation is deferred to a helper
            // function. The actual translation here is then done by marshalling
            // the src/dst into the function we're calling and then processing
            // the results.
            None => {
                let src_loc = match src {
                    // If the source is on the stack then `stack_get` is used to
                    // convert everything to the appropriate flat representation
                    // for the source type.
                    Source::Stack(stack) => {
                        for (i, ty) in stack
                            .opts
                            .flat_types(src_ty, self.types)
                            .unwrap()
                            .iter()
                            .enumerate()
                        {
                            let stack = stack.slice(i..i + 1);
                            self.stack_get(&stack, (*ty).into());
                        }
                        HelperLocation::Stack
                    }
                    // If the source is in memory then the pointer is passed
                    // through, but note that the offset must be factored in
                    // here since the translation function will start from
                    // offset 0.
                    Source::Memory(mem) => {
                        self.push_mem_addr(mem);
                        HelperLocation::Memory
                    }
                };
                let dst_loc = match dst {
                    Destination::Stack(..) => HelperLocation::Stack,
                    Destination::Memory(mem) => {
                        self.push_mem_addr(mem);
                        HelperLocation::Memory
                    }
                };
                // Generate a `FunctionId` corresponding to the `Helper`
                // configuration that is necessary here. This will ideally be a
                // "cache hit" and use a preexisting helper which represents
                // outlining what would otherwise be duplicate code within a
                // function to one function.
                let helper = self.module.translate_helper(Helper {
                    src: HelperType {
                        ty: *src_ty,
                        opts: *src.opts(),
                        loc: src_loc,
                    },
                    dst: HelperType {
                        ty: *dst_ty,
                        opts: *dst.opts(),
                        loc: dst_loc,
                    },
                });
                // Emit a `call` instruction which will get "relocated" to a
                // function index once translation has completely finished.
                self.flush_code();
                self.module.funcs[self.result].body.push(Body::Call(helper));

                // If the destination of the translation was on the stack then
                // the types on the stack need to be optionally converted to
                // different types (e.g. if the result here is part of a variant
                // somewhere else).
                //
                // This translation happens inline here by popping the results
                // into new locals and then using those locals to do a
                // `stack_set`.
                if let Destination::Stack(tys, opts) = dst {
                    let flat = self
                        .types
                        .flatten_types(opts, usize::MAX, [*dst_ty])
                        .unwrap();
                    assert_eq!(flat.len(), tys.len());
                    let locals = flat
                        .iter()
                        .rev()
                        .map(|ty| self.local_set_new_tmp(*ty))
                        .collect::<Vec<_>>();
                    for (ty, local) in tys.iter().zip(locals.into_iter().rev()) {
                        self.instruction(LocalGet(local.idx));
                        self.stack_set(std::slice::from_ref(ty), local.ty);
                        self.free_temp_local(local);
                    }
                }
            }
        }
    }
1176
1177    fn push_mem_addr(&mut self, mem: &Memory<'_>) {
1178        self.instruction(LocalGet(mem.addr.idx));
1179        if mem.offset != 0 {
1180            self.ptr_uconst(mem.opts, mem.offset);
1181            self.ptr_add(mem.opts);
1182        }
1183    }
1184
    /// Translates a `bool` value from `src` into `dst`, canonicalizing it to
    /// either 0 or 1 in the process.
    fn translate_bool(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::Bool));
        self.push_dst_addr(dst);

        // Booleans are canonicalized to 0 or 1 as they pass through the
        // component boundary, so use a `select` instruction to do so.
        //
        // The operand stack here is `1`, `0`, then the loaded condition;
        // `select` picks 1 for any nonzero source and 0 otherwise.
        self.instruction(I32Const(1));
        self.instruction(I32Const(0));
        match src {
            Source::Memory(mem) => self.i32_load8u(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::I32),
        }
        self.instruction(Select);

        match dst {
            Destination::Memory(mem) => self.i32_store8(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
        }
    }
1205
    /// Translates a `u8` value from `src` into `dst`, canonicalizing it to
    /// its low 8 bits where necessary.
    fn translate_u8(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::U8));
        self.convert_u8_mask(src, dst, 0xff);
    }
1211
1212    fn convert_u8_mask(&mut self, src: &Source<'_>, dst: &Destination<'_>, mask: u8) {
1213        self.push_dst_addr(dst);
1214        let mut needs_mask = true;
1215        match src {
1216            Source::Memory(mem) => {
1217                self.i32_load8u(mem);
1218                needs_mask = mask != 0xff;
1219            }
1220            Source::Stack(stack) => {
1221                self.stack_get(stack, ValType::I32);
1222            }
1223        }
1224        if needs_mask {
1225            self.instruction(I32Const(i32::from(mask)));
1226            self.instruction(I32And);
1227        }
1228        match dst {
1229            Destination::Memory(mem) => self.i32_store8(mem),
1230            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
1231        }
1232    }
1233
    /// Translates an `s8` value from `src` into `dst`, sign-extending from 8
    /// bits (via a signed load from memory, or `i32.extend8_s` for stack
    /// values).
    fn translate_s8(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::S8));
        self.push_dst_addr(dst);
        match src {
            Source::Memory(mem) => self.i32_load8s(mem),
            Source::Stack(stack) => {
                self.stack_get(stack, ValType::I32);
                self.instruction(I32Extend8S);
            }
        }
        match dst {
            Destination::Memory(mem) => self.i32_store8(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
        }
    }
1250
    /// Translates a `u16` value from `src` into `dst`, canonicalizing it to
    /// its low 16 bits where necessary.
    fn translate_u16(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::U16));
        self.convert_u16_mask(src, dst, 0xffff);
    }
1256
1257    fn convert_u16_mask(&mut self, src: &Source<'_>, dst: &Destination<'_>, mask: u16) {
1258        self.push_dst_addr(dst);
1259        let mut needs_mask = true;
1260        match src {
1261            Source::Memory(mem) => {
1262                self.i32_load16u(mem);
1263                needs_mask = mask != 0xffff;
1264            }
1265            Source::Stack(stack) => {
1266                self.stack_get(stack, ValType::I32);
1267            }
1268        }
1269        if needs_mask {
1270            self.instruction(I32Const(i32::from(mask)));
1271            self.instruction(I32And);
1272        }
1273        match dst {
1274            Destination::Memory(mem) => self.i32_store16(mem),
1275            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
1276        }
1277    }
1278
    /// Translates an `s16` value from `src` into `dst`, sign-extending from
    /// 16 bits (via a signed load from memory, or `i32.extend16_s` for stack
    /// values).
    fn translate_s16(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::S16));
        self.push_dst_addr(dst);
        match src {
            Source::Memory(mem) => self.i32_load16s(mem),
            Source::Stack(stack) => {
                self.stack_get(stack, ValType::I32);
                self.instruction(I32Extend16S);
            }
        }
        match dst {
            Destination::Memory(mem) => self.i32_store16(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
        }
    }
1295
    /// Translates a `u32` value from `src` into `dst`.
    ///
    /// The all-ones mask passed here is a no-op for a full 32-bit value, so
    /// no `and` is actually emitted.
    fn translate_u32(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::U32));
        self.convert_u32_mask(src, dst, 0xffffffff)
    }
1301
1302    fn convert_u32_mask(&mut self, src: &Source<'_>, dst: &Destination<'_>, mask: u32) {
1303        self.push_dst_addr(dst);
1304        match src {
1305            Source::Memory(mem) => self.i32_load(mem),
1306            Source::Stack(stack) => self.stack_get(stack, ValType::I32),
1307        }
1308        if mask != 0xffffffff {
1309            self.instruction(I32Const(mask as i32));
1310            self.instruction(I32And);
1311        }
1312        match dst {
1313            Destination::Memory(mem) => self.i32_store(mem),
1314            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
1315        }
1316    }
1317
    /// Translates an `s32` value from `src` into `dst`; no canonicalization
    /// is needed since the value already occupies a full `i32`.
    fn translate_s32(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::S32));
        self.push_dst_addr(dst);
        match src {
            Source::Memory(mem) => self.i32_load(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::I32),
        }
        match dst {
            Destination::Memory(mem) => self.i32_store(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
        }
    }
1331
    /// Translates a `u64` value from `src` into `dst`; a straight 64-bit
    /// load/store with no conversion needed.
    fn translate_u64(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::U64));
        self.push_dst_addr(dst);
        match src {
            Source::Memory(mem) => self.i64_load(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::I64),
        }
        match dst {
            Destination::Memory(mem) => self.i64_store(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I64),
        }
    }
1345
    /// Translates an `s64` value from `src` into `dst`; a straight 64-bit
    /// load/store with no conversion needed.
    fn translate_s64(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::S64));
        self.push_dst_addr(dst);
        match src {
            Source::Memory(mem) => self.i64_load(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::I64),
        }
        match dst {
            Destination::Memory(mem) => self.i64_store(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I64),
        }
    }
1359
    /// Translates a `float32` value from `src` into `dst` with a direct
    /// load/store; no canonicalization of the bit pattern is performed here.
    fn translate_f32(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::Float32));
        self.push_dst_addr(dst);
        match src {
            Source::Memory(mem) => self.f32_load(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::F32),
        }
        match dst {
            Destination::Memory(mem) => self.f32_store(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::F32),
        }
    }
1373
    /// Translates a `float64` value from `src` into `dst` with a direct
    /// load/store; no canonicalization of the bit pattern is performed here.
    fn translate_f64(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::Float64));
        self.push_dst_addr(dst);
        match src {
            Source::Memory(mem) => self.f64_load(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::F64),
        }
        match dst {
            Destination::Memory(mem) => self.f64_store(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::F64),
        }
    }
1387
    /// Translates a `char` value from `src` into `dst`, trapping with
    /// `Trap::InvalidChar` if the 32-bit value is not a valid Unicode scalar
    /// value (i.e. it's a surrogate or exceeds 0x10FFFF).
    fn translate_char(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        assert!(matches!(dst_ty, InterfaceType::Char));
        // Load the raw 32-bit value into a temporary local since it's
        // referenced multiple times by the validation sequence below.
        match src {
            Source::Memory(mem) => self.i32_load(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::I32),
        }
        let local = self.local_set_new_tmp(ValType::I32);

        // This sequence is copied from the output of LLVM for:
        //
        //      pub extern "C" fn foo(x: u32) -> char {
        //          char::try_from(x)
        //              .unwrap_or_else(|_| std::arch::wasm32::unreachable())
        //      }
        //
        // Apparently this does what's required by the canonical ABI:
        //
        //    def i32_to_char(opts, i):
        //      trap_if(i >= 0x110000)
        //      trap_if(0xD800 <= i <= 0xDFFF)
        //      return chr(i)
        //
        // ... but I don't know how it works other than "well I trust LLVM"
        self.instruction(Block(BlockType::Empty));
        self.instruction(Block(BlockType::Empty));
        self.instruction(LocalGet(local.idx));
        self.instruction(I32Const(0xd800));
        self.instruction(I32Xor);
        self.instruction(I32Const(-0x110000));
        self.instruction(I32Add);
        self.instruction(I32Const(-0x10f800));
        self.instruction(I32LtU);
        self.instruction(BrIf(0));
        self.instruction(LocalGet(local.idx));
        self.instruction(I32Const(0x110000));
        self.instruction(I32Ne);
        self.instruction(BrIf(1));
        self.instruction(End);
        // Falling through to here means validation failed, so trap.
        self.trap(Trap::InvalidChar);
        self.instruction(End);

        // Validation passed; store the (unchanged) value into the
        // destination.
        self.push_dst_addr(dst);
        self.instruction(LocalGet(local.idx));
        match dst {
            Destination::Memory(mem) => {
                self.i32_store(mem);
            }
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
        }

        self.free_temp_local(local);
    }
1440
    /// Translates a component-model `string` from `src` into `dst`,
    /// transcoding between the source and destination string encodings as
    /// required.
    ///
    /// The source ptr/len pair is loaded into temporaries, a transcoding
    /// helper is selected from the encoding pair, and the resulting ptr/len
    /// is stored into the destination.
    fn translate_string(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        assert!(matches!(dst_ty, InterfaceType::String));
        let src_opts = src.opts();
        let dst_opts = dst.opts();

        // Load the pointer/length of this string into temporary locals. These
        // will be referenced a good deal so this just makes it easier to deal
        // with them consistently below rather than trying to reload from memory
        // for example.
        match src {
            Source::Stack(s) => {
                assert_eq!(s.locals.len(), 2);
                self.stack_get(&s.slice(0..1), src_opts.ptr());
                self.stack_get(&s.slice(1..2), src_opts.ptr());
            }
            Source::Memory(mem) => {
                // The pointer is stored first, the length one pointer-size
                // later.
                self.ptr_load(mem);
                self.ptr_load(&mem.bump(src_opts.ptr_size().into()));
            }
        }
        // The length was pushed last, so it's popped into a local first.
        let src_len = self.local_set_new_tmp(src_opts.ptr());
        let src_ptr = self.local_set_new_tmp(src_opts.ptr());
        let src_str = WasmString {
            ptr: src_ptr,
            len: src_len,
            opts: src_opts,
        };

        // Select a transcoding strategy from the (source, destination)
        // encoding pair. Each helper allocates in the destination and returns
        // the resulting string's ptr/len temporaries.
        let dst_str = match src_opts.string_encoding {
            StringEncoding::Utf8 => match dst_opts.string_encoding {
                StringEncoding::Utf8 => self.string_copy(&src_str, FE::Utf8, dst_opts, FE::Utf8),
                StringEncoding::Utf16 => self.string_utf8_to_utf16(&src_str, dst_opts),
                StringEncoding::CompactUtf16 => {
                    self.string_to_compact(&src_str, FE::Utf8, dst_opts)
                }
            },

            StringEncoding::Utf16 => {
                // utf16 source strings must be 2-byte-aligned.
                self.verify_aligned(src_opts, src_str.ptr.idx, 2);
                match dst_opts.string_encoding {
                    StringEncoding::Utf8 => {
                        self.string_deflate_to_utf8(&src_str, FE::Utf16, dst_opts)
                    }
                    StringEncoding::Utf16 => {
                        self.string_copy(&src_str, FE::Utf16, dst_opts, FE::Utf16)
                    }
                    StringEncoding::CompactUtf16 => {
                        self.string_to_compact(&src_str, FE::Utf16, dst_opts)
                    }
                }
            }

            StringEncoding::CompactUtf16 => {
                self.verify_aligned(src_opts, src_str.ptr.idx, 2);

                // Test the tag bit to see if this is a utf16 or a latin1 string
                // at runtime...
                self.instruction(LocalGet(src_str.len.idx));
                self.ptr_uconst(src_opts, UTF16_TAG);
                self.ptr_and(src_opts);
                self.ptr_if(src_opts, BlockType::Empty);

                // In the utf16 block unset the upper bit from the length local
                // so further calculations have the right value. Afterwards the
                // string transcode proceeds assuming utf16.
                self.instruction(LocalGet(src_str.len.idx));
                self.ptr_uconst(src_opts, UTF16_TAG);
                self.ptr_xor(src_opts);
                self.instruction(LocalSet(src_str.len.idx));
                let s1 = match dst_opts.string_encoding {
                    StringEncoding::Utf8 => {
                        self.string_deflate_to_utf8(&src_str, FE::Utf16, dst_opts)
                    }
                    StringEncoding::Utf16 => {
                        self.string_copy(&src_str, FE::Utf16, dst_opts, FE::Utf16)
                    }
                    StringEncoding::CompactUtf16 => {
                        self.string_compact_utf16_to_compact(&src_str, dst_opts)
                    }
                };

                self.instruction(Else);

                // In the latin1 block the `src_len` local is already the number
                // of code units, so the string transcoding is all that needs to
                // happen.
                let s2 = match dst_opts.string_encoding {
                    StringEncoding::Utf16 => {
                        self.string_copy(&src_str, FE::Latin1, dst_opts, FE::Utf16)
                    }
                    StringEncoding::Utf8 => {
                        self.string_deflate_to_utf8(&src_str, FE::Latin1, dst_opts)
                    }
                    StringEncoding::CompactUtf16 => {
                        self.string_copy(&src_str, FE::Latin1, dst_opts, FE::Latin1)
                    }
                };
                // Copy `s2`'s resulting pointer/length into `s1`'s locals so
                // that, regardless of which branch executed at runtime, `s1`
                // describes the transcoded string below.
                self.instruction(LocalGet(s2.ptr.idx));
                self.instruction(LocalSet(s1.ptr.idx));
                self.instruction(LocalGet(s2.len.idx));
                self.instruction(LocalSet(s1.len.idx));
                self.instruction(End);
                self.free_temp_local(s2.ptr);
                self.free_temp_local(s2.len);
                s1
            }
        };

        // Store the ptr/length in the desired destination
        match dst {
            Destination::Stack(s, _) => {
                self.instruction(LocalGet(dst_str.ptr.idx));
                self.stack_set(&s[..1], dst_opts.ptr());
                self.instruction(LocalGet(dst_str.len.idx));
                self.stack_set(&s[1..], dst_opts.ptr());
            }
            Destination::Memory(mem) => {
                self.instruction(LocalGet(mem.addr.idx));
                self.instruction(LocalGet(dst_str.ptr.idx));
                self.ptr_store(mem);
                self.instruction(LocalGet(mem.addr.idx));
                self.instruction(LocalGet(dst_str.len.idx));
                self.ptr_store(&mem.bump(dst_opts.ptr_size().into()));
            }
        }

        // All string-related temporaries are dead at this point.
        self.free_temp_local(src_str.ptr);
        self.free_temp_local(src_str.len);
        self.free_temp_local(dst_str.ptr);
        self.free_temp_local(dst_str.len);
    }
1574
    // Corresponding function for `store_string_copy` in the spec.
    //
    // This performs a transcoding of the string with a one-pass copy from
    // the `src` encoding to the `dst` encoding. This is only possible for
    // fixed encodings where the first allocation is guaranteed to be an
    // appropriate fit so it's not suitable for all encodings.
    //
    // Imported host transcoding functions here take the src/dst pointers as
    // well as the number of code units in the source (which always matches
    // the number of code units in the destination). There is no return
    // value from the transcode function since the encoding should always
    // work on the first pass.
    //
    // Returns the destination string's ptr/len temporaries; the caller is
    // responsible for eventually freeing them.
    fn string_copy<'c>(
        &mut self,
        src: &WasmString<'_>,
        src_enc: FE,
        dst_opts: &'c Options,
        dst_enc: FE,
    ) -> WasmString<'c> {
        // A one-pass copy is only valid when each source code unit fits in a
        // destination code unit.
        assert!(dst_enc.width() >= src_enc.width());
        self.validate_string_length(src, dst_enc);

        // Calculate the source byte length given the size of each code
        // unit. Note that this shouldn't overflow given
        // `validate_string_length` above.
        let mut src_byte_len_tmp = None;
        let src_byte_len = if src_enc.width() == 1 {
            // One byte per code unit: the length local already holds the
            // byte length, no temporary needed.
            src.len.idx
        } else {
            // Two bytes per code unit: byte length = code units << 1.
            assert_eq!(src_enc.width(), 2);
            self.instruction(LocalGet(src.len.idx));
            self.ptr_uconst(src.opts, 1);
            self.ptr_shl(src.opts);
            let tmp = self.local_set_new_tmp(src.opts.ptr());
            let ret = tmp.idx;
            src_byte_len_tmp = Some(tmp);
            ret
        };

        // Convert the source code units length to the destination byte
        // length type.
        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst_opts.ptr());
        let dst_len = self.local_tee_new_tmp(dst_opts.ptr());
        if dst_enc.width() > 1 {
            // Scale code units to bytes for 2-byte destination encodings.
            assert_eq!(dst_enc.width(), 2);
            self.ptr_uconst(dst_opts, 1);
            self.ptr_shl(dst_opts);
        }
        let dst_byte_len = self.local_set_new_tmp(dst_opts.ptr());

        // Allocate space in the destination using the calculated byte
        // length.
        let dst = {
            let dst_mem = self.malloc(
                dst_opts,
                MallocSize::Local(dst_byte_len.idx),
                dst_enc.width().into(),
            );
            WasmString {
                ptr: dst_mem.addr,
                len: dst_len,
                opts: dst_opts,
            }
        };

        // Validate that `src_len + src_ptr` and
        // `dst_mem.addr_local + dst_byte_len` are both in-bounds. This
        // is done by loading the last byte of the string and if that
        // doesn't trap then it's known valid.
        self.validate_string_inbounds(src, src_byte_len);
        self.validate_string_inbounds(&dst, dst_byte_len.idx);

        // If the validations pass then the host `transcode` intrinsic
        // is invoked. This will either raise a trap or otherwise succeed
        // in which case we're done.
        let op = if src_enc == dst_enc {
            Transcode::Copy(src_enc)
        } else {
            // The only cross-encoding one-pass copy supported is
            // latin1 -> utf16.
            assert_eq!(src_enc, FE::Latin1);
            assert_eq!(dst_enc, FE::Utf16);
            Transcode::Latin1ToUtf16
        };
        let transcode = self.transcoder(src, &dst, op);
        self.instruction(LocalGet(src.ptr.idx));
        self.instruction(LocalGet(src.len.idx));
        self.instruction(LocalGet(dst.ptr.idx));
        self.instruction(Call(transcode.as_u32()));

        // Byte-length temporaries are no longer needed; the returned `dst`
        // still owns its ptr/len locals.
        self.free_temp_local(dst_byte_len);
        if let Some(tmp) = src_byte_len_tmp {
            self.free_temp_local(tmp);
        }

        dst
    }
    // Corresponding function for `store_string_to_utf8` in the spec.
    //
    // This translation works by possibly performing a number of
    // reallocations. First a buffer of size input-code-units is used to try
    // to get the transcoding correct on the first try. If that fails the
    // maximum worst-case size is used and then that is resized down if it's
    // too large.
    //
    // The host transcoding function imported here will receive src ptr/len
    // and dst ptr/len and return how many code units were consumed on both
    // sides. The amount of code units consumed in the source dictates which
    // branches are taken in this conversion.
    //
    // Returns the destination string's ptr/len temporaries; the caller is
    // responsible for eventually freeing them.
    fn string_deflate_to_utf8<'c>(
        &mut self,
        src: &WasmString<'_>,
        src_enc: FE,
        dst_opts: &'c Options,
    ) -> WasmString<'c> {
        self.validate_string_length(src, src_enc);

        // Optimistically assume that the code unit length of the source is
        // all that's needed in the destination. Perform that allocation
        // here and proceed to transcoding below.
        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst_opts.ptr());
        let dst_len = self.local_tee_new_tmp(dst_opts.ptr());
        let dst_byte_len = self.local_set_new_tmp(dst_opts.ptr());

        let dst = {
            let dst_mem = self.malloc(dst_opts, MallocSize::Local(dst_byte_len.idx), 1);
            WasmString {
                ptr: dst_mem.addr,
                len: dst_len,
                opts: dst_opts,
            }
        };

        // Ensure buffers are all in-bounds
        let mut src_byte_len_tmp = None;
        let src_byte_len = match src_enc {
            // One byte per latin1 code unit; the length is already a byte
            // length.
            FE::Latin1 => src.len.idx,
            // Two bytes per utf16 code unit: byte length = code units << 1.
            FE::Utf16 => {
                self.instruction(LocalGet(src.len.idx));
                self.ptr_uconst(src.opts, 1);
                self.ptr_shl(src.opts);
                let tmp = self.local_set_new_tmp(src.opts.ptr());
                let ret = tmp.idx;
                src_byte_len_tmp = Some(tmp);
                ret
            }
            FE::Utf8 => unreachable!(),
        };
        self.validate_string_inbounds(src, src_byte_len);
        self.validate_string_inbounds(&dst, dst_byte_len.idx);

        // Perform the initial transcode
        let op = match src_enc {
            FE::Latin1 => Transcode::Latin1ToUtf8,
            FE::Utf16 => Transcode::Utf16ToUtf8,
            FE::Utf8 => unreachable!(),
        };
        let transcode = self.transcoder(src, &dst, op);
        self.instruction(LocalGet(src.ptr.idx));
        self.instruction(LocalGet(src.len.idx));
        self.instruction(LocalGet(dst.ptr.idx));
        self.instruction(LocalGet(dst_byte_len.idx));
        self.instruction(Call(transcode.as_u32()));
        // Two results are returned: the destination count is on top of the
        // stack, the source-units-consumed count beneath it.
        self.instruction(LocalSet(dst.len.idx));
        let src_len_tmp = self.local_set_new_tmp(src.opts.ptr());

        // Test if the source was entirely transcoded by comparing
        // `src_len_tmp`, the number of code units transcoded from the
        // source, with `src_len`, the original number of code units.
        self.instruction(LocalGet(src_len_tmp.idx));
        self.instruction(LocalGet(src.len.idx));
        self.ptr_ne(src.opts);
        self.instruction(If(BlockType::Empty));

        // Here a worst-case reallocation is performed to grow `dst_mem`.
        // In-line a check is also performed that the worst-case byte size
        // fits within the maximum size of strings.
        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
        self.ptr_uconst(dst.opts, 1); // align
        // Worst-case utf8 inflation: 2 bytes per latin1 code unit, 3 bytes
        // per utf16 code unit.
        let factor = match src_enc {
            FE::Latin1 => 2,
            FE::Utf16 => 3,
            _ => unreachable!(),
        };
        self.validate_string_length_u8(src, factor);
        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst_opts.ptr());
        self.ptr_uconst(dst_opts, factor.into());
        self.ptr_mul(dst_opts);
        self.instruction(LocalTee(dst_byte_len.idx));
        self.instruction(Call(dst_opts.realloc.unwrap().as_u32()));
        self.instruction(LocalSet(dst.ptr.idx));

        // Verify that the destination is still in-bounds
        self.validate_string_inbounds(&dst, dst_byte_len.idx);

        // Perform another round of transcoding that should be guaranteed
        // to succeed. Note that all the parameters here are offset by the
        // results of the first transcoding to only perform the remaining
        // transcode on the final units.
        self.instruction(LocalGet(src.ptr.idx));
        self.instruction(LocalGet(src_len_tmp.idx));
        if let FE::Utf16 = src_enc {
            // Scale the consumed code units to bytes for the pointer offset.
            self.ptr_uconst(src.opts, 1);
            self.ptr_shl(src.opts);
        }
        self.ptr_add(src.opts);
        self.instruction(LocalGet(src.len.idx));
        self.instruction(LocalGet(src_len_tmp.idx));
        self.ptr_sub(src.opts);
        self.instruction(LocalGet(dst.ptr.idx));
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_add(dst.opts);
        self.instruction(LocalGet(dst_byte_len.idx));
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_sub(dst.opts);
        self.instruction(Call(transcode.as_u32()));

        // Add the second result, the amount of destination units encoded,
        // to `dst_len` so it's an accurate reflection of the final size of
        // the destination buffer.
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_add(dst.opts);
        self.instruction(LocalSet(dst.len.idx));

        // In debug mode verify the first result consumed the entire string,
        // otherwise simply discard it.
        if self.module.debug {
            self.instruction(LocalGet(src.len.idx));
            self.instruction(LocalGet(src_len_tmp.idx));
            self.ptr_sub(src.opts);
            self.ptr_ne(src.opts);
            self.instruction(If(BlockType::Empty));
            self.trap(Trap::AssertFailed("should have finished encoding"));
            self.instruction(End);
        } else {
            self.instruction(Drop);
        }

        // Perform a downsizing if the worst-case size was too large
        self.instruction(LocalGet(dst.len.idx));
        self.instruction(LocalGet(dst_byte_len.idx));
        self.ptr_ne(dst.opts);
        self.instruction(If(BlockType::Empty));
        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
        self.ptr_uconst(dst.opts, 1); // align
        self.instruction(LocalGet(dst.len.idx)); // new_size
        self.instruction(Call(dst.opts.realloc.unwrap().as_u32()));
        self.instruction(LocalSet(dst.ptr.idx));
        self.instruction(End);

        // If the first transcode was enough then assert that the returned
        // amount of destination items written equals the byte size.
        //
        // Note that this `Else` attaches to the outer "source not entirely
        // transcoded" `If` above, since the downsizing `If` was already
        // closed by the preceding `End`.
        if self.module.debug {
            self.instruction(Else);

            self.instruction(LocalGet(dst.len.idx));
            self.instruction(LocalGet(dst_byte_len.idx));
            self.ptr_ne(dst_opts);
            self.instruction(If(BlockType::Empty));
            self.trap(Trap::AssertFailed("should have finished encoding"));
            self.instruction(End);
        }

        self.instruction(End); // end of "first transcode not enough"

        self.free_temp_local(src_len_tmp);
        self.free_temp_local(dst_byte_len);
        if let Some(tmp) = src_byte_len_tmp {
            self.free_temp_local(tmp);
        }

        dst
    }
1848
    // Corresponds to the `store_utf8_to_utf16` function in the spec.
    //
    // When converting utf-8 to utf-16 a pessimistic allocation is
    // done which is twice the byte length of the utf-8 string.
    // The host then transcodes and returns how many code units were
    // actually used during the transcoding and if it's beneath the
    // pessimistic maximum then the buffer is reallocated down to
    // a smaller amount.
    //
    // The host-imported transcoding function takes the src/dst pointer as
    // well as the code unit size of both the source and destination. The
    // destination should always be big enough to hold the result of the
    // transcode and so the result of the host function is how many code
    // units were written to the destination.
    //
    // Returns the destination string's ptr/len temporaries; the caller is
    // responsible for eventually freeing them.
    fn string_utf8_to_utf16<'c>(
        &mut self,
        src: &WasmString<'_>,
        dst_opts: &'c Options,
    ) -> WasmString<'c> {
        self.validate_string_length(src, FE::Utf16);
        // Pessimistic allocation: one utf-16 code unit (2 bytes) per source
        // utf-8 byte, hence the shift-left-by-one below.
        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst_opts.ptr());
        let dst_len = self.local_tee_new_tmp(dst_opts.ptr());
        self.ptr_uconst(dst_opts, 1);
        self.ptr_shl(dst_opts);
        let dst_byte_len = self.local_set_new_tmp(dst_opts.ptr());
        let dst = {
            let dst_mem = self.malloc(dst_opts, MallocSize::Local(dst_byte_len.idx), 2);
            WasmString {
                ptr: dst_mem.addr,
                len: dst_len,
                opts: dst_opts,
            }
        };

        self.validate_string_inbounds(src, src.len.idx);
        self.validate_string_inbounds(&dst, dst_byte_len.idx);

        let transcode = self.transcoder(src, &dst, Transcode::Utf8ToUtf16);
        self.instruction(LocalGet(src.ptr.idx));
        self.instruction(LocalGet(src.len.idx));
        self.instruction(LocalGet(dst.ptr.idx));
        self.instruction(Call(transcode.as_u32()));
        // The intrinsic returns the number of code units actually written.
        self.instruction(LocalSet(dst.len.idx));

        // If the number of code units returned by transcode is not
        // equal to the original number of code units then
        // the buffer must be shrunk.
        //
        // Note that the byte length of the final allocation we
        // want is twice the code unit length returned by the
        // transcoding function.
        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst.opts.ptr());
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_ne(dst_opts);
        self.instruction(If(BlockType::Empty));
        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
        self.ptr_uconst(dst.opts, 2); // align
        self.instruction(LocalGet(dst.len.idx)); // new_size = dst_len << 1
        self.ptr_uconst(dst.opts, 1);
        self.ptr_shl(dst.opts);
        self.instruction(Call(dst.opts.realloc.unwrap().as_u32()));
        self.instruction(LocalSet(dst.ptr.idx));
        self.instruction(End); // end of shrink-to-fit

        self.free_temp_local(dst_byte_len);

        dst
    }
1918
    // Corresponds to `store_probably_utf16_to_latin1_or_utf16` in the spec.
    //
    // This will try to transcode the input utf16 string to utf16 in the
    // destination. If utf16 isn't needed though and latin1 could be used
    // then that's used instead and a reallocation to downsize occurs
    // afterwards.
    //
    // The host transcode function here will take the src/dst pointers as
    // well as src length. The destination byte length is twice the src code
    // unit length. The return value is the tagged length of the returned
    // string. If the upper bit is set then utf16 was used and the
    // conversion is done. If the upper bit is not set then latin1 was used
    // and a downsizing needs to happen.
    //
    // Returns the destination string's ptr/len temporaries; the caller is
    // responsible for eventually freeing them.
    fn string_compact_utf16_to_compact<'c>(
        &mut self,
        src: &WasmString<'_>,
        dst_opts: &'c Options,
    ) -> WasmString<'c> {
        self.validate_string_length(src, FE::Utf16);
        // Allocate two bytes in the destination per source code unit.
        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst_opts.ptr());
        let dst_len = self.local_tee_new_tmp(dst_opts.ptr());
        self.ptr_uconst(dst_opts, 1);
        self.ptr_shl(dst_opts);
        let dst_byte_len = self.local_set_new_tmp(dst_opts.ptr());
        let dst = {
            let dst_mem = self.malloc(dst_opts, MallocSize::Local(dst_byte_len.idx), 2);
            WasmString {
                ptr: dst_mem.addr,
                len: dst_len,
                opts: dst_opts,
            }
        };

        // The source byte length equals the destination byte length here
        // (both are two bytes per source code unit), so convert it back to
        // the source pointer type for the in-bounds check below.
        self.convert_src_len_to_dst(dst_byte_len.idx, dst.opts.ptr(), src.opts.ptr());
        let src_byte_len = self.local_set_new_tmp(src.opts.ptr());

        self.validate_string_inbounds(src, src_byte_len.idx);
        self.validate_string_inbounds(&dst, dst_byte_len.idx);

        let transcode = self.transcoder(src, &dst, Transcode::Utf16ToCompactProbablyUtf16);
        self.instruction(LocalGet(src.ptr.idx));
        self.instruction(LocalGet(src.len.idx));
        self.instruction(LocalGet(dst.ptr.idx));
        self.instruction(Call(transcode.as_u32()));
        // The intrinsic returns the tagged code unit length.
        self.instruction(LocalSet(dst.len.idx));

        // Assert that the untagged code unit length is the same as the
        // source code unit length.
        if self.module.debug {
            self.instruction(LocalGet(dst.len.idx));
            self.ptr_uconst(dst.opts, !UTF16_TAG);
            self.ptr_and(dst.opts);
            self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst.opts.ptr());
            self.ptr_ne(dst.opts);
            self.instruction(If(BlockType::Empty));
            self.trap(Trap::AssertFailed("expected equal code units"));
            self.instruction(End);
        }

        // If the UTF16_TAG is set then utf16 was used and the destination
        // should be appropriately sized. Bail out of the enclosing block
        // and fall through otherwise to resizing.
        //
        // NOTE(review): this function opens no block of its own, so the
        // depth-0 branch here targets a block opened by the caller — confirm
        // against `translate_string`'s CompactUtf16 arm.
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_uconst(dst.opts, UTF16_TAG);
        self.ptr_and(dst.opts);
        self.ptr_br_if(dst.opts, 0);

        // Here `realloc` is used to downsize the string
        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
        self.ptr_uconst(dst.opts, 2); // align
        self.instruction(LocalGet(dst.len.idx)); // new_size
        self.instruction(Call(dst.opts.realloc.unwrap().as_u32()));
        self.instruction(LocalSet(dst.ptr.idx));

        self.free_temp_local(dst_byte_len);
        self.free_temp_local(src_byte_len);

        dst
    }
1999
2000    // Corresponds to `store_string_to_latin1_or_utf16` in the spec.
2001    //
2002    // This will attempt a first pass of transcoding to latin1 and on
2003    // failure a larger buffer is allocated for utf16 and then utf16 is
2004    // encoded in-place into the buffer. After either latin1 or utf16 the
2005    // buffer is then resized to fit the final string allocation.
2006    fn string_to_compact<'c>(
2007        &mut self,
2008        src: &WasmString<'_>,
2009        src_enc: FE,
2010        dst_opts: &'c Options,
2011    ) -> WasmString<'c> {
2012        self.validate_string_length(src, src_enc);
2013        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst_opts.ptr());
2014        let dst_len = self.local_tee_new_tmp(dst_opts.ptr());
2015        let dst_byte_len = self.local_set_new_tmp(dst_opts.ptr());
2016        let dst = {
2017            let dst_mem = self.malloc(dst_opts, MallocSize::Local(dst_byte_len.idx), 2);
2018            WasmString {
2019                ptr: dst_mem.addr,
2020                len: dst_len,
2021                opts: dst_opts,
2022            }
2023        };
2024
2025        self.validate_string_inbounds(src, src.len.idx);
2026        self.validate_string_inbounds(&dst, dst_byte_len.idx);
2027
2028        // Perform the initial latin1 transcode. This returns the number of
2029        // source code units consumed and the number of destination code
2030        // units (bytes) written.
2031        let (latin1, utf16) = match src_enc {
2032            FE::Utf8 => (Transcode::Utf8ToLatin1, Transcode::Utf8ToCompactUtf16),
2033            FE::Utf16 => (Transcode::Utf16ToLatin1, Transcode::Utf16ToCompactUtf16),
2034            FE::Latin1 => unreachable!(),
2035        };
2036        let transcode_latin1 = self.transcoder(src, &dst, latin1);
2037        let transcode_utf16 = self.transcoder(src, &dst, utf16);
2038        self.instruction(LocalGet(src.ptr.idx));
2039        self.instruction(LocalGet(src.len.idx));
2040        self.instruction(LocalGet(dst.ptr.idx));
2041        self.instruction(Call(transcode_latin1.as_u32()));
2042        self.instruction(LocalSet(dst.len.idx));
2043        let src_len_tmp = self.local_set_new_tmp(src.opts.ptr());
2044
2045        // If the source was entirely consumed then the transcode completed
2046        // and all that's necessary is to optionally shrink the buffer.
2047        self.instruction(LocalGet(src_len_tmp.idx));
2048        self.instruction(LocalGet(src.len.idx));
2049        self.ptr_eq(src.opts);
2050        self.instruction(If(BlockType::Empty)); // if latin1-or-utf16 block
2051
2052        // Test if the original byte length of the allocation is the same as
2053        // the number of written bytes, and if not then shrink the buffer
2054        // with a call to `realloc`.
2055        self.instruction(LocalGet(dst_byte_len.idx));
2056        self.instruction(LocalGet(dst.len.idx));
2057        self.ptr_ne(dst.opts);
2058        self.instruction(If(BlockType::Empty));
2059        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
2060        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
2061        self.ptr_uconst(dst.opts, 2); // align
2062        self.instruction(LocalGet(dst.len.idx)); // new_size
2063        self.instruction(Call(dst.opts.realloc.unwrap().as_u32()));
2064        self.instruction(LocalSet(dst.ptr.idx));
2065        self.instruction(End);
2066
2067        // In this block the latin1 encoding failed. The host transcode
2068        // returned how many units were consumed from the source and how
2069        // many bytes were written to the destination. Here the buffer is
2070        // inflated and sized and the second utf16 intrinsic is invoked to
2071        // perform the final inflation.
2072        self.instruction(Else); // else latin1-or-utf16 block
2073
2074        // For utf8 validate that the inflated size is still within bounds.
2075        if src_enc.width() == 1 {
2076            self.validate_string_length_u8(src, 2);
2077        }
2078
2079        // Reallocate the buffer with twice the source code units in byte
2080        // size.
2081        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
2082        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
2083        self.ptr_uconst(dst.opts, 2); // align
2084        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst.opts.ptr());
2085        self.ptr_uconst(dst.opts, 1);
2086        self.ptr_shl(dst.opts);
2087        self.instruction(LocalTee(dst_byte_len.idx));
2088        self.instruction(Call(dst.opts.realloc.unwrap().as_u32()));
2089        self.instruction(LocalSet(dst.ptr.idx));
2090
2091        // Call the host utf16 transcoding function. This will inflate the
2092        // prior latin1 bytes and then encode the rest of the source string
2093        // as utf16 into the remaining space in the destination buffer.
2094        self.instruction(LocalGet(src.ptr.idx));
2095        self.instruction(LocalGet(src_len_tmp.idx));
2096        if let FE::Utf16 = src_enc {
2097            self.ptr_uconst(src.opts, 1);
2098            self.ptr_shl(src.opts);
2099        }
2100        self.ptr_add(src.opts);
2101        self.instruction(LocalGet(src.len.idx));
2102        self.instruction(LocalGet(src_len_tmp.idx));
2103        self.ptr_sub(src.opts);
2104        self.instruction(LocalGet(dst.ptr.idx));
2105        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst.opts.ptr());
2106        self.instruction(LocalGet(dst.len.idx));
2107        self.instruction(Call(transcode_utf16.as_u32()));
2108        self.instruction(LocalSet(dst.len.idx));
2109
2110        // If the returned number of code units written to the destination
2111        // is not equal to the size of the allocation then the allocation is
2112        // resized down to the appropriate size.
2113        //
2114        // Note that the byte size desired is `2*dst_len` and the current
2115        // byte buffer size is `2*src_len` so the `2` factor isn't checked
2116        // here, just the lengths.
2117        self.instruction(LocalGet(dst.len.idx));
2118        self.convert_src_len_to_dst(src.len.idx, src.opts.ptr(), dst.opts.ptr());
2119        self.ptr_ne(dst.opts);
2120        self.instruction(If(BlockType::Empty));
2121        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
2122        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
2123        self.ptr_uconst(dst.opts, 2); // align
2124        self.instruction(LocalGet(dst.len.idx));
2125        self.ptr_uconst(dst.opts, 1);
2126        self.ptr_shl(dst.opts);
2127        self.instruction(Call(dst.opts.realloc.unwrap().as_u32()));
2128        self.instruction(LocalSet(dst.ptr.idx));
2129        self.instruction(End);
2130
2131        // Tag the returned pointer as utf16
2132        self.instruction(LocalGet(dst.len.idx));
2133        self.ptr_uconst(dst.opts, UTF16_TAG);
2134        self.ptr_or(dst.opts);
2135        self.instruction(LocalSet(dst.len.idx));
2136
2137        self.instruction(End); // end latin1-or-utf16 block
2138
2139        self.free_temp_local(src_len_tmp);
2140        self.free_temp_local(dst_byte_len);
2141
2142        dst
2143    }
2144
2145    fn validate_string_length(&mut self, src: &WasmString<'_>, dst: FE) {
2146        self.validate_string_length_u8(src, dst.width())
2147    }
2148
    /// Emits a runtime check that the code-unit length of `s` is strictly
    /// less than `MAX_STRING_BYTE_LENGTH / dst`, where `dst` is the byte
    /// width of one code unit in the destination encoding.
    ///
    /// Traps with `Trap::StringLengthTooBig` when the length is too large.
    fn validate_string_length_u8(&mut self, s: &WasmString<'_>, dst: u8) {
        // Check to see if the source byte length is out of bounds in
        // which case a trap is generated.
        self.instruction(LocalGet(s.len.idx));
        // Maximum number of code units representable such that the encoded
        // byte size stays within `MAX_STRING_BYTE_LENGTH`.
        let max = MAX_STRING_BYTE_LENGTH / u32::from(dst);
        self.ptr_uconst(s.opts, max);
        // Note `>=`: a length exactly equal to `max` is also rejected.
        self.ptr_ge_u(s.opts);
        self.instruction(If(BlockType::Empty));
        self.trap(Trap::StringLengthTooBig);
        self.instruction(End);
    }
2160
2161    fn transcoder(
2162        &mut self,
2163        src: &WasmString<'_>,
2164        dst: &WasmString<'_>,
2165        op: Transcode,
2166    ) -> FuncIndex {
2167        self.module.import_transcoder(Transcoder {
2168            from_memory: src.opts.memory.unwrap(),
2169            from_memory64: src.opts.memory64,
2170            to_memory: dst.opts.memory.unwrap(),
2171            to_memory64: dst.opts.memory64,
2172            op,
2173        })
2174    }
2175
2176    fn validate_string_inbounds(&mut self, s: &WasmString<'_>, byte_len: u32) {
2177        self.validate_memory_inbounds(s.opts, s.ptr.idx, byte_len, Trap::StringLengthOverflow)
2178    }
2179
    /// Emits a bounds check that the byte range starting at the pointer in
    /// local `ptr_local` and spanning the byte count in local
    /// `byte_len_local` lies entirely within the linear memory described by
    /// `opts`, emitting `trap` at runtime otherwise.
    ///
    /// Both locals are typed according to `opts.ptr()`. All arithmetic is
    /// performed in 64-bits so that 32-bit address computations cannot
    /// overflow; 64-bit address computations get an explicit overflow check.
    fn validate_memory_inbounds(
        &mut self,
        opts: &Options,
        ptr_local: u32,
        byte_len_local: u32,
        trap: Trap,
    ) {
        // Helper: zero-extend a 32-bit pointer/length to 64-bits; a no-op
        // for 64-bit memories whose values are already `i64`.
        let extend_to_64 = |me: &mut Self| {
            if !opts.memory64 {
                me.instruction(I64ExtendI32U);
            }
        };

        // Two nested blocks: a branch to depth 0 falls into the trap below
        // while a branch to depth 1 skips past it (success).
        self.instruction(Block(BlockType::Empty));
        self.instruction(Block(BlockType::Empty));

        // Calculate the full byte size of memory with `memory.size`. Note that
        // arithmetic here is done always in 64-bits to accommodate 4G memories.
        // Additionally it's assumed that 64-bit memories never fill up
        // entirely.
        self.instruction(MemorySize(opts.memory.unwrap().as_u32()));
        extend_to_64(self);
        // Pages are 64KiB, so shift the page count left by 16 for bytes.
        self.instruction(I64Const(16));
        self.instruction(I64Shl);

        // Calculate the end address of the string. This is done by adding the
        // base pointer to the byte length. For 32-bit memories there's no need
        // to check for overflow since everything is extended to 64-bit, but for
        // 64-bit memories overflow is checked.
        self.instruction(LocalGet(ptr_local));
        extend_to_64(self);
        self.instruction(LocalGet(byte_len_local));
        extend_to_64(self);
        self.instruction(I64Add);
        if opts.memory64 {
            // If `ptr + len < ptr` then the 64-bit addition wrapped around,
            // so branch to the trap (depth 0).
            let tmp = self.local_tee_new_tmp(ValType::I64);
            self.instruction(LocalGet(ptr_local));
            self.ptr_lt_u(opts);
            self.instruction(BrIf(0));
            self.instruction(LocalGet(tmp.idx));
            self.free_temp_local(tmp);
        }

        // If the byte size of memory is greater than the final address of the
        // string then the string is invalid. Note that if it's precisely equal
        // then that's ok.
        self.instruction(I64GeU);
        self.instruction(BrIf(1));

        self.instruction(End);
        self.trap(trap);
        self.instruction(End);
    }
2233
    /// Translates a `list<T>` value from `src` to `dst`.
    ///
    /// Loads the source pointer/length pair, allocates an appropriately
    /// sized buffer in the destination memory with `realloc`, validates that
    /// both buffers are in-bounds, translates each element in a loop, and
    /// finally stores the destination pointer/length pair into `dst`.
    fn translate_list(
        &mut self,
        src_ty: TypeListIndex,
        src: &Source<'_>,
        dst_ty: &InterfaceType,
        dst: &Destination,
    ) {
        let src_element_ty = &self.types[src_ty].element;
        let dst_element_ty = match dst_ty {
            InterfaceType::List(r) => &self.types[*r].element,
            _ => panic!("expected a list"),
        };
        let src_opts = src.opts();
        let dst_opts = dst.opts();
        // Per-element byte size/alignment in each side's memory; these can
        // differ between source and destination.
        let (src_size, src_align) = self.types.size_align(src_opts, src_element_ty);
        let (dst_size, dst_align) = self.types.size_align(dst_opts, dst_element_ty);

        // Load the pointer/length of this list into temporary locals. These
        // will be referenced a good deal so this just makes it easier to deal
        // with them consistently below rather than trying to reload from memory
        // for example.
        match src {
            Source::Stack(s) => {
                assert_eq!(s.locals.len(), 2);
                self.stack_get(&s.slice(0..1), src_opts.ptr());
                self.stack_get(&s.slice(1..2), src_opts.ptr());
            }
            Source::Memory(mem) => {
                self.ptr_load(mem);
                self.ptr_load(&mem.bump(src_opts.ptr_size().into()));
            }
        }
        // Note the order: length was pushed last so it's popped first.
        let src_len = self.local_set_new_tmp(src_opts.ptr());
        let src_ptr = self.local_set_new_tmp(src_opts.ptr());

        // Create a `Memory` operand which will internally assert that the
        // `src_ptr` value is properly aligned.
        let src_mem = self.memory_operand(src_opts, src_ptr, src_align);

        // Calculate the source/destination byte lengths into unique locals.
        let src_byte_len = self.calculate_list_byte_len(src_opts, src_len.idx, src_size);
        let dst_byte_len = if src_size == dst_size {
            // Same element size: reuse the source byte length, converted to
            // the destination's pointer width.
            self.convert_src_len_to_dst(src_byte_len.idx, src_opts.ptr(), dst_opts.ptr());
            self.local_set_new_tmp(dst_opts.ptr())
        } else if src_opts.ptr() == dst_opts.ptr() {
            // Same pointer width: recompute from the element count directly.
            self.calculate_list_byte_len(dst_opts, src_len.idx, dst_size)
        } else {
            // Different sizes and widths: convert the length first, then
            // compute the byte length in the destination's width.
            self.convert_src_len_to_dst(src_byte_len.idx, src_opts.ptr(), dst_opts.ptr());
            let tmp = self.local_set_new_tmp(dst_opts.ptr());
            let ret = self.calculate_list_byte_len(dst_opts, tmp.idx, dst_size);
            self.free_temp_local(tmp);
            ret
        };

        // Here `realloc` is invoked (in a `malloc`-like fashion) to allocate
        // space for the list in the destination memory. This will also
        // internally insert checks that the returned pointer is aligned
        // correctly for the destination.
        let dst_mem = self.malloc(dst_opts, MallocSize::Local(dst_byte_len.idx), dst_align);

        // With all the pointers and byte lengths verify that both the source
        // and the destination buffers are in-bounds.
        self.validate_memory_inbounds(
            src_opts,
            src_mem.addr.idx,
            src_byte_len.idx,
            Trap::ListByteLengthOverflow,
        );
        self.validate_memory_inbounds(
            dst_opts,
            dst_mem.addr.idx,
            dst_byte_len.idx,
            Trap::ListByteLengthOverflow,
        );

        self.free_temp_local(src_byte_len);
        self.free_temp_local(dst_byte_len);

        // This is the main body of the loop to actually translate list types.
        // Note that if both element sizes are 0 then this won't actually do
        // anything so the loop is removed entirely.
        if src_size > 0 || dst_size > 0 {
            // This block encompasses the entire loop and is used to exit before even
            // entering the loop if the list size is zero.
            self.instruction(Block(BlockType::Empty));

            // Set the `remaining` local and only continue if it's > 0
            self.instruction(LocalGet(src_len.idx));
            let remaining = self.local_tee_new_tmp(src_opts.ptr());
            self.ptr_eqz(src_opts);
            self.instruction(BrIf(0));

            // Initialize the two destination pointers to their initial values
            self.instruction(LocalGet(src_mem.addr.idx));
            let cur_src_ptr = self.local_set_new_tmp(src_opts.ptr());
            self.instruction(LocalGet(dst_mem.addr.idx));
            let cur_dst_ptr = self.local_set_new_tmp(dst_opts.ptr());

            self.instruction(Loop(BlockType::Empty));

            // Translate the next element in the list
            let element_src = Source::Memory(Memory {
                opts: src_opts,
                offset: 0,
                addr: TempLocal::new(cur_src_ptr.idx, cur_src_ptr.ty),
            });
            let element_dst = Destination::Memory(Memory {
                opts: dst_opts,
                offset: 0,
                addr: TempLocal::new(cur_dst_ptr.idx, cur_dst_ptr.ty),
            });
            self.translate(src_element_ty, &element_src, dst_element_ty, &element_dst);

            // Update the two loop pointers
            if src_size > 0 {
                self.instruction(LocalGet(cur_src_ptr.idx));
                self.ptr_uconst(src_opts, src_size);
                self.ptr_add(src_opts);
                self.instruction(LocalSet(cur_src_ptr.idx));
            }
            if dst_size > 0 {
                self.instruction(LocalGet(cur_dst_ptr.idx));
                self.ptr_uconst(dst_opts, dst_size);
                self.ptr_add(dst_opts);
                self.instruction(LocalSet(cur_dst_ptr.idx));
            }

            // Update the remaining count, falling through to break out if it's zero
            // now.
            self.instruction(LocalGet(remaining.idx));
            self.ptr_iconst(src_opts, -1);
            self.ptr_add(src_opts);
            self.instruction(LocalTee(remaining.idx));
            self.ptr_br_if(src_opts, 0);
            self.instruction(End); // end of loop
            self.instruction(End); // end of block

            self.free_temp_local(cur_dst_ptr);
            self.free_temp_local(cur_src_ptr);
            self.free_temp_local(remaining);
        }

        // Store the ptr/length in the desired destination
        match dst {
            Destination::Stack(s, _) => {
                self.instruction(LocalGet(dst_mem.addr.idx));
                self.stack_set(&s[..1], dst_opts.ptr());
                self.convert_src_len_to_dst(src_len.idx, src_opts.ptr(), dst_opts.ptr());
                self.stack_set(&s[1..], dst_opts.ptr());
            }
            Destination::Memory(mem) => {
                self.instruction(LocalGet(mem.addr.idx));
                self.instruction(LocalGet(dst_mem.addr.idx));
                self.ptr_store(mem);
                self.instruction(LocalGet(mem.addr.idx));
                self.convert_src_len_to_dst(src_len.idx, src_opts.ptr(), dst_opts.ptr());
                self.ptr_store(&mem.bump(dst_opts.ptr_size().into()));
            }
        }

        self.free_temp_local(src_len);
        self.free_temp_local(src_mem.addr);
        self.free_temp_local(dst_mem.addr);
    }
2398
    /// Computes the total byte length of a list (`len * elt_size`) in the
    /// pointer width of `opts`, returning a fresh temporary local holding the
    /// result. The element count is read from `len_local`, which is typed
    /// according to `opts.ptr()`.
    ///
    /// Emits a runtime `Trap::ListByteLengthOverflow` whenever the byte
    /// length would exceed 32-bits, since list byte sizes must always fit in
    /// 32-bits to get transferred to 32-bit memories.
    fn calculate_list_byte_len(
        &mut self,
        opts: &Options,
        len_local: u32,
        elt_size: u32,
    ) -> TempLocal {
        // Zero-size types are easy to handle here because the byte size of the
        // destination is always zero.
        if elt_size == 0 {
            self.ptr_uconst(opts, 0);
            return self.local_set_new_tmp(opts.ptr());
        }

        // For one-byte elements in the destination the check here can be a bit
        // more optimal than the general case below. In these situations if the
        // source pointer type is 32-bit then we're guaranteed to not overflow,
        // so the source length is simply casted to the destination's type.
        //
        // If the source is 64-bit then all that needs to be checked is to
        // ensure that it does not have the upper 32-bits set.
        if elt_size == 1 {
            if let ValType::I64 = opts.ptr() {
                // Trap if any of the upper 32 bits of the length are set.
                self.instruction(LocalGet(len_local));
                self.instruction(I64Const(32));
                self.instruction(I64ShrU);
                self.instruction(I32WrapI64);
                self.instruction(If(BlockType::Empty));
                self.trap(Trap::ListByteLengthOverflow);
                self.instruction(End);
            }
            self.instruction(LocalGet(len_local));
            return self.local_set_new_tmp(opts.ptr());
        }

        // The main check implemented by this function is to verify that
        // `src_len_local` does not exceed the 32-bit range. Byte sizes for
        // lists must always fit in 32-bits to get transferred to 32-bit
        // memories.
        self.instruction(Block(BlockType::Empty));
        self.instruction(Block(BlockType::Empty));
        self.instruction(LocalGet(len_local));
        match opts.ptr() {
            // The source's list length is guaranteed to be less than 32-bits
            // so simply extend it up to a 64-bit type for the multiplication
            // below.
            ValType::I32 => self.instruction(I64ExtendI32U),

            // If the source is a 64-bit memory then if the item length doesn't
            // fit in 32-bits the byte length definitely won't, so generate a
            // branch to our overflow trap here if any of the upper 32-bits are set.
            ValType::I64 => {
                self.instruction(I64Const(32));
                self.instruction(I64ShrU);
                self.instruction(I32WrapI64);
                self.instruction(BrIf(0));
                self.instruction(LocalGet(len_local));
            }

            _ => unreachable!(),
        }

        // Next perform a 64-bit multiplication with the element byte size that
        // is itself guaranteed to fit in 32-bits. The result is then checked
        // to see if we overflowed the 32-bit space. The two input operands to
        // the multiplication are guaranteed to be 32-bits at most which means
        // that this multiplication shouldn't overflow.
        //
        // The result of the multiplication is saved into a local as well to
        // get the result afterwards.
        self.instruction(I64Const(elt_size.into()));
        self.instruction(I64Mul);
        let tmp = self.local_tee_new_tmp(ValType::I64);
        // Branch to success if the upper 32-bits are zero, otherwise
        // fall-through to the trap.
        self.instruction(I64Const(32));
        self.instruction(I64ShrU);
        self.instruction(I64Eqz);
        self.instruction(BrIf(1));
        self.instruction(End);
        self.trap(Trap::ListByteLengthOverflow);
        self.instruction(End);

        // If a fresh local was used to store the result of the multiplication
        // then convert it down to 32-bits which should be guaranteed to not
        // lose information at this point.
        if opts.ptr() == ValType::I64 {
            tmp
        } else {
            self.instruction(LocalGet(tmp.idx));
            self.instruction(I32WrapI64);
            self.free_temp_local(tmp);
            self.local_set_new_tmp(ValType::I32)
        }
    }
2493
    /// Pushes the list length stored in `src_len_local` onto the operand
    /// stack, converted from the source pointer width `src_ptr_ty` to the
    /// destination pointer width `dst_ptr_ty`.
    ///
    /// Lengths are zero-extended when going 32 -> 64 and truncated when
    /// going 64 -> 32; callers are responsible for having already validated
    /// that any truncation is lossless.
    fn convert_src_len_to_dst(
        &mut self,
        src_len_local: u32,
        src_ptr_ty: ValType,
        dst_ptr_ty: ValType,
    ) {
        self.instruction(LocalGet(src_len_local));
        match (src_ptr_ty, dst_ptr_ty) {
            (ValType::I32, ValType::I64) => self.instruction(I64ExtendI32U),
            (ValType::I64, ValType::I32) => self.instruction(I32WrapI64),
            // Same width on both sides: no conversion needed.
            (src, dst) => assert_eq!(src, dst),
        }
    }
2507
2508    fn translate_record(
2509        &mut self,
2510        src_ty: TypeRecordIndex,
2511        src: &Source<'_>,
2512        dst_ty: &InterfaceType,
2513        dst: &Destination,
2514    ) {
2515        let src_ty = &self.types[src_ty];
2516        let dst_ty = match dst_ty {
2517            InterfaceType::Record(r) => &self.types[*r],
2518            _ => panic!("expected a record"),
2519        };
2520
2521        // TODO: subtyping
2522        assert_eq!(src_ty.fields.len(), dst_ty.fields.len());
2523
2524        // First a map is made of the source fields to where they're coming
2525        // from (e.g. which offset or which locals). This map is keyed by the
2526        // fields' names
2527        let mut src_fields = HashMap::new();
2528        for (i, src) in src
2529            .record_field_srcs(self.types, src_ty.fields.iter().map(|f| f.ty))
2530            .enumerate()
2531        {
2532            let field = &src_ty.fields[i];
2533            src_fields.insert(&field.name, (src, &field.ty));
2534        }
2535
2536        // .. and next translation is performed in the order of the destination
2537        // fields in case the destination is the stack to ensure that the stack
2538        // has the fields all in the right order.
2539        //
2540        // Note that the lookup in `src_fields` is an infallible lookup which
2541        // will panic if the field isn't found.
2542        //
2543        // TODO: should that lookup be fallible with subtyping?
2544        for (i, dst) in dst
2545            .record_field_dsts(self.types, dst_ty.fields.iter().map(|f| f.ty))
2546            .enumerate()
2547        {
2548            let field = &dst_ty.fields[i];
2549            let (src, src_ty) = &src_fields[&field.name];
2550            self.translate(src_ty, src, &field.ty, &dst);
2551        }
2552    }
2553
    /// Translates a flags value from `src` to `dst`.
    ///
    /// Flags are copied in bulk in their integer representation (0, 1, 2, or
    /// `n` 4-byte integers depending on the flag count), masking off any
    /// bits beyond the defined flags.
    fn translate_flags(
        &mut self,
        src_ty: TypeFlagsIndex,
        src: &Source<'_>,
        dst_ty: &InterfaceType,
        dst: &Destination,
    ) {
        let src_ty = &self.types[src_ty];
        let dst_ty = match dst_ty {
            InterfaceType::Flags(r) => &self.types[*r],
            _ => panic!("expected a record"),
        };

        // TODO: subtyping
        //
        // Notably this implementation does not support reordering flags from
        // the source to the destination nor having more flags in the
        // destination. Currently this is a copy from source to destination
        // in-bulk. Otherwise reordering indices would have to have some sort of
        // fancy bit twiddling tricks or something like that.
        assert_eq!(src_ty.names, dst_ty.names);
        let cnt = src_ty.names.len();
        match FlagsSize::from_count(cnt) {
            // No flags at all: nothing to copy.
            FlagsSize::Size0 => {}
            FlagsSize::Size1 => {
                // Mask off any bits beyond the `cnt` defined flags (the
                // shift would overflow for a full 8 bits, hence the special
                // case).
                let mask = if cnt == 8 { 0xff } else { (1 << cnt) - 1 };
                self.convert_u8_mask(src, dst, mask);
            }
            FlagsSize::Size2 => {
                let mask = if cnt == 16 { 0xffff } else { (1 << cnt) - 1 };
                self.convert_u16_mask(src, dst, mask);
            }
            FlagsSize::Size4Plus(n) => {
                // Flags are represented as `n` consecutive 32-bit integers,
                // copied one at a time; only the final integer may need a
                // partial mask.
                let srcs = src.record_field_srcs(self.types, (0..n).map(|_| InterfaceType::U32));
                let dsts = dst.record_field_dsts(self.types, (0..n).map(|_| InterfaceType::U32));
                let n = usize::from(n);
                for (i, (src, dst)) in srcs.zip(dsts).enumerate() {
                    let mask = if i == n - 1 && (cnt % 32 != 0) {
                        (1 << (cnt % 32)) - 1
                    } else {
                        0xffffffff
                    };
                    self.convert_u32_mask(&src, &dst, mask);
                }
            }
        }
    }
2601
2602    fn translate_tuple(
2603        &mut self,
2604        src_ty: TypeTupleIndex,
2605        src: &Source<'_>,
2606        dst_ty: &InterfaceType,
2607        dst: &Destination,
2608    ) {
2609        let src_ty = &self.types[src_ty];
2610        let dst_ty = match dst_ty {
2611            InterfaceType::Tuple(t) => &self.types[*t],
2612            _ => panic!("expected a tuple"),
2613        };
2614
2615        // TODO: subtyping
2616        assert_eq!(src_ty.types.len(), dst_ty.types.len());
2617
2618        let srcs = src
2619            .record_field_srcs(self.types, src_ty.types.iter().copied())
2620            .zip(src_ty.types.iter());
2621        let dsts = dst
2622            .record_field_dsts(self.types, dst_ty.types.iter().copied())
2623            .zip(dst_ty.types.iter());
2624        for ((src, src_ty), (dst, dst_ty)) in srcs.zip(dsts) {
2625            self.translate(src_ty, &src, dst_ty, &dst);
2626        }
2627    }
2628
2629    fn translate_variant(
2630        &mut self,
2631        src_ty: TypeVariantIndex,
2632        src: &Source<'_>,
2633        dst_ty: &InterfaceType,
2634        dst: &Destination,
2635    ) {
2636        let src_ty = &self.types[src_ty];
2637        let dst_ty = match dst_ty {
2638            InterfaceType::Variant(t) => &self.types[*t],
2639            _ => panic!("expected a variant"),
2640        };
2641
2642        let src_info = variant_info(self.types, src_ty.cases.iter().map(|(_, c)| c.as_ref()));
2643        let dst_info = variant_info(self.types, dst_ty.cases.iter().map(|(_, c)| c.as_ref()));
2644
2645        let iter = src_ty
2646            .cases
2647            .iter()
2648            .enumerate()
2649            .map(|(src_i, (src_case, src_case_ty))| {
2650                let dst_i = dst_ty
2651                    .cases
2652                    .iter()
2653                    .position(|(c, _)| c == src_case)
2654                    .unwrap();
2655                let dst_case_ty = &dst_ty.cases[dst_i];
2656                let src_i = u32::try_from(src_i).unwrap();
2657                let dst_i = u32::try_from(dst_i).unwrap();
2658                VariantCase {
2659                    src_i,
2660                    src_ty: src_case_ty.as_ref(),
2661                    dst_i,
2662                    dst_ty: dst_case_ty.as_ref(),
2663                }
2664            });
2665        self.convert_variant(src, &src_info, dst, &dst_info, iter);
2666    }
2667
2668    fn translate_enum(
2669        &mut self,
2670        src_ty: TypeEnumIndex,
2671        src: &Source<'_>,
2672        dst_ty: &InterfaceType,
2673        dst: &Destination,
2674    ) {
2675        let src_ty = &self.types[src_ty];
2676        let dst_ty = match dst_ty {
2677            InterfaceType::Enum(t) => &self.types[*t],
2678            _ => panic!("expected an option"),
2679        };
2680
2681        debug_assert_eq!(src_ty.info.size, dst_ty.info.size);
2682        debug_assert_eq!(src_ty.names.len(), dst_ty.names.len());
2683        debug_assert!(
2684            src_ty
2685                .names
2686                .iter()
2687                .zip(dst_ty.names.iter())
2688                .all(|(a, b)| a == b)
2689        );
2690
2691        // Get the discriminant.
2692        match src {
2693            Source::Stack(s) => self.stack_get(&s.slice(0..1), ValType::I32),
2694            Source::Memory(mem) => match src_ty.info.size {
2695                DiscriminantSize::Size1 => self.i32_load8u(mem),
2696                DiscriminantSize::Size2 => self.i32_load16u(mem),
2697                DiscriminantSize::Size4 => self.i32_load(mem),
2698            },
2699        }
2700        let tmp = self.local_tee_new_tmp(ValType::I32);
2701
2702        // Assert that the discriminant is valid.
2703        self.instruction(I32Const(i32::try_from(src_ty.names.len()).unwrap()));
2704        self.instruction(I32GtU);
2705        self.instruction(If(BlockType::Empty));
2706        self.trap(Trap::InvalidDiscriminant);
2707        self.instruction(End);
2708
2709        // Save the discriminant to the destination.
2710        match dst {
2711            Destination::Stack(stack, _) => {
2712                self.local_get_tmp(&tmp);
2713                self.stack_set(&stack[..1], ValType::I32)
2714            }
2715            Destination::Memory(mem) => {
2716                self.push_dst_addr(dst);
2717                self.local_get_tmp(&tmp);
2718                match dst_ty.info.size {
2719                    DiscriminantSize::Size1 => self.i32_store8(mem),
2720                    DiscriminantSize::Size2 => self.i32_store16(mem),
2721                    DiscriminantSize::Size4 => self.i32_store(mem),
2722                }
2723            }
2724        }
2725        self.free_temp_local(tmp);
2726    }
2727
2728    fn translate_option(
2729        &mut self,
2730        src_ty: TypeOptionIndex,
2731        src: &Source<'_>,
2732        dst_ty: &InterfaceType,
2733        dst: &Destination,
2734    ) {
2735        let src_ty = &self.types[src_ty].ty;
2736        let dst_ty = match dst_ty {
2737            InterfaceType::Option(t) => &self.types[*t].ty,
2738            _ => panic!("expected an option"),
2739        };
2740        let src_ty = Some(src_ty);
2741        let dst_ty = Some(dst_ty);
2742
2743        let src_info = variant_info(self.types, [None, src_ty]);
2744        let dst_info = variant_info(self.types, [None, dst_ty]);
2745
2746        self.convert_variant(
2747            src,
2748            &src_info,
2749            dst,
2750            &dst_info,
2751            [
2752                VariantCase {
2753                    src_i: 0,
2754                    dst_i: 0,
2755                    src_ty: None,
2756                    dst_ty: None,
2757                },
2758                VariantCase {
2759                    src_i: 1,
2760                    dst_i: 1,
2761                    src_ty,
2762                    dst_ty,
2763                },
2764            ]
2765            .into_iter(),
2766        );
2767    }
2768
2769    fn translate_result(
2770        &mut self,
2771        src_ty: TypeResultIndex,
2772        src: &Source<'_>,
2773        dst_ty: &InterfaceType,
2774        dst: &Destination,
2775    ) {
2776        let src_ty = &self.types[src_ty];
2777        let dst_ty = match dst_ty {
2778            InterfaceType::Result(t) => &self.types[*t],
2779            _ => panic!("expected a result"),
2780        };
2781
2782        let src_info = variant_info(self.types, [src_ty.ok.as_ref(), src_ty.err.as_ref()]);
2783        let dst_info = variant_info(self.types, [dst_ty.ok.as_ref(), dst_ty.err.as_ref()]);
2784
2785        self.convert_variant(
2786            src,
2787            &src_info,
2788            dst,
2789            &dst_info,
2790            [
2791                VariantCase {
2792                    src_i: 0,
2793                    dst_i: 0,
2794                    src_ty: src_ty.ok.as_ref(),
2795                    dst_ty: dst_ty.ok.as_ref(),
2796                },
2797                VariantCase {
2798                    src_i: 1,
2799                    dst_i: 1,
2800                    src_ty: src_ty.err.as_ref(),
2801                    dst_ty: dst_ty.err.as_ref(),
2802                },
2803            ]
2804            .into_iter(),
2805        );
2806    }
2807
    /// Translates a variant-shaped value from `src` to `dst`, where
    /// `src_cases` pairs each source case with its corresponding destination
    /// case.
    ///
    /// Emits a `br_table`-based dispatch over the source discriminant: one
    /// nested block per case plus one trapping block for an invalid
    /// discriminant, with the outermost block carrying the flattened result
    /// type (if any).
    fn convert_variant<'c>(
        &mut self,
        src: &Source<'_>,
        src_info: &VariantInfo,
        dst: &Destination,
        dst_info: &VariantInfo,
        src_cases: impl ExactSizeIterator<Item = VariantCase<'c>>,
    ) {
        // The outermost block is special since it has the result type of the
        // translation here. That will depend on the `dst`.
        let outer_block_ty = match dst {
            Destination::Stack(dst_flat, _) => match dst_flat.len() {
                0 => BlockType::Empty,
                1 => BlockType::Result(dst_flat[0]),
                _ => {
                    let ty = self.module.core_types.function(&[], &dst_flat);
                    BlockType::FunctionType(ty)
                }
            },
            Destination::Memory(_) => BlockType::Empty,
        };
        self.instruction(Block(outer_block_ty));

        // After the outermost block generate a new block for each of the
        // remaining cases.
        let src_cases_len = src_cases.len();
        for _ in 0..src_cases_len - 1 {
            self.instruction(Block(BlockType::Empty));
        }

        // Generate a block for an invalid variant discriminant
        self.instruction(Block(BlockType::Empty));

        // And generate one final block that we'll be jumping out of with the
        // `br_table`
        self.instruction(Block(BlockType::Empty));

        // Load the discriminant
        match src {
            Source::Stack(s) => self.stack_get(&s.slice(0..1), ValType::I32),
            Source::Memory(mem) => match src_info.size {
                DiscriminantSize::Size1 => self.i32_load8u(mem),
                DiscriminantSize::Size2 => self.i32_load16u(mem),
                DiscriminantSize::Size4 => self.i32_load(mem),
            },
        }

        // Generate the `br_table` for the discriminant. Each case has an
        // offset of 1 to skip the trapping block.
        let mut targets = Vec::new();
        for i in 0..src_cases_len {
            targets.push((i + 1) as u32);
        }
        self.instruction(BrTable(targets[..].into(), 0));
        self.instruction(End); // end the `br_table` block

        self.trap(Trap::InvalidDiscriminant);
        self.instruction(End); // end the "invalid discriminant" block

        // Translate each case individually within its own block. Note that the
        // iteration order here places the first case in the innermost block
        // and the last case in the outermost block. This matches the order
        // of the jump targets in the `br_table` instruction.
        let src_cases_len = u32::try_from(src_cases_len).unwrap();
        for case in src_cases {
            let VariantCase {
                src_i,
                src_ty,
                dst_i,
                dst_ty,
            } = case;

            // Translate the discriminant here, noting that `dst_i` may be
            // different than `src_i`.
            self.push_dst_addr(dst);
            self.instruction(I32Const(dst_i as i32));
            match dst {
                Destination::Stack(stack, _) => self.stack_set(&stack[..1], ValType::I32),
                Destination::Memory(mem) => match dst_info.size {
                    DiscriminantSize::Size1 => self.i32_store8(mem),
                    DiscriminantSize::Size2 => self.i32_store16(mem),
                    DiscriminantSize::Size4 => self.i32_store(mem),
                },
            }

            let src_payload = src.payload_src(self.types, src_info, src_ty);
            let dst_payload = dst.payload_dst(self.types, dst_info, dst_ty);

            // Translate the payload of this case using the various types from
            // the dst/src.
            match (src_ty, dst_ty) {
                (Some(src_ty), Some(dst_ty)) => {
                    self.translate(src_ty, &src_payload, dst_ty, &dst_payload);
                }
                (None, None) => {}
                _ => unimplemented!(),
            }

            // If the results of this translation were placed on the stack then
            // the stack values may need to be padded with more zeros due to
            // this particular case being possibly smaller than the entire
            // variant. That's handled here by pushing remaining zeros after
            // accounting for the discriminant pushed as well as the results of
            // this individual payload.
            if let Destination::Stack(payload_results, _) = dst_payload {
                if let Destination::Stack(dst_results, _) = dst {
                    let remaining = &dst_results[1..][payload_results.len()..];
                    for ty in remaining {
                        match ty {
                            ValType::I32 => self.instruction(I32Const(0)),
                            ValType::I64 => self.instruction(I64Const(0)),
                            ValType::F32 => self.instruction(F32Const(0.0.into())),
                            ValType::F64 => self.instruction(F64Const(0.0.into())),
                            _ => unreachable!(),
                        }
                    }
                }
            }

            // Branch to the outermost block. Note that this isn't needed for
            // the outermost case since it simply falls through.
            if src_i != src_cases_len - 1 {
                self.instruction(Br(src_cases_len - src_i - 1));
            }
            self.instruction(End); // end this case's block
        }
    }
2935
2936    fn translate_future(
2937        &mut self,
2938        src_ty: TypeFutureTableIndex,
2939        src: &Source<'_>,
2940        dst_ty: &InterfaceType,
2941        dst: &Destination,
2942    ) {
2943        let dst_ty = match dst_ty {
2944            InterfaceType::Future(t) => *t,
2945            _ => panic!("expected a `Future`"),
2946        };
2947        let transfer = self.module.import_future_transfer();
2948        self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer);
2949    }
2950
2951    fn translate_stream(
2952        &mut self,
2953        src_ty: TypeStreamTableIndex,
2954        src: &Source<'_>,
2955        dst_ty: &InterfaceType,
2956        dst: &Destination,
2957    ) {
2958        let dst_ty = match dst_ty {
2959            InterfaceType::Stream(t) => *t,
2960            _ => panic!("expected a `Stream`"),
2961        };
2962        let transfer = self.module.import_stream_transfer();
2963        self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer);
2964    }
2965
2966    fn translate_error_context(
2967        &mut self,
2968        src_ty: TypeComponentLocalErrorContextTableIndex,
2969        src: &Source<'_>,
2970        dst_ty: &InterfaceType,
2971        dst: &Destination,
2972    ) {
2973        let dst_ty = match dst_ty {
2974            InterfaceType::ErrorContext(t) => *t,
2975            _ => panic!("expected an `ErrorContext`"),
2976        };
2977        let transfer = self.module.import_error_context_transfer();
2978        self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer);
2979    }
2980
2981    fn translate_own(
2982        &mut self,
2983        src_ty: TypeResourceTableIndex,
2984        src: &Source<'_>,
2985        dst_ty: &InterfaceType,
2986        dst: &Destination,
2987    ) {
2988        let dst_ty = match dst_ty {
2989            InterfaceType::Own(t) => *t,
2990            _ => panic!("expected an `Own`"),
2991        };
2992        let transfer = self.module.import_resource_transfer_own();
2993        self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer);
2994    }
2995
2996    fn translate_borrow(
2997        &mut self,
2998        src_ty: TypeResourceTableIndex,
2999        src: &Source<'_>,
3000        dst_ty: &InterfaceType,
3001        dst: &Destination,
3002    ) {
3003        let dst_ty = match dst_ty {
3004            InterfaceType::Borrow(t) => *t,
3005            _ => panic!("expected an `Borrow`"),
3006        };
3007
3008        let transfer = self.module.import_resource_transfer_borrow();
3009        self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer);
3010    }
3011
3012    /// Translates the index `src`, which resides in the table `src_ty`, into
3013    /// and index within `dst_ty` and is stored at `dst`.
3014    ///
3015    /// Actual translation of the index happens in a wasmtime libcall, which a
3016    /// cranelift-generated trampoline to satisfy this import will call. The
3017    /// `transfer` function is an imported function which takes the src, src_ty,
3018    /// and dst_ty, and returns the dst index.
3019    fn translate_handle(
3020        &mut self,
3021        src_ty: u32,
3022        src: &Source<'_>,
3023        dst_ty: u32,
3024        dst: &Destination,
3025        transfer: FuncIndex,
3026    ) {
3027        self.push_dst_addr(dst);
3028        match src {
3029            Source::Memory(mem) => self.i32_load(mem),
3030            Source::Stack(stack) => self.stack_get(stack, ValType::I32),
3031        }
3032        self.instruction(I32Const(src_ty as i32));
3033        self.instruction(I32Const(dst_ty as i32));
3034        self.instruction(Call(transfer.as_u32()));
3035        match dst {
3036            Destination::Memory(mem) => self.i32_store(mem),
3037            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
3038        }
3039    }
3040
3041    fn trap_if_not_flag(&mut self, flags_global: GlobalIndex, flag_to_test: i32, trap: Trap) {
3042        self.instruction(GlobalGet(flags_global.as_u32()));
3043        self.instruction(I32Const(flag_to_test));
3044        self.instruction(I32And);
3045        self.instruction(I32Eqz);
3046        self.instruction(If(BlockType::Empty));
3047        self.trap(trap);
3048        self.instruction(End);
3049    }
3050
3051    fn assert_not_flag(&mut self, flags_global: GlobalIndex, flag_to_test: i32, msg: &'static str) {
3052        self.instruction(GlobalGet(flags_global.as_u32()));
3053        self.instruction(I32Const(flag_to_test));
3054        self.instruction(I32And);
3055        self.instruction(If(BlockType::Empty));
3056        self.trap(Trap::AssertFailed(msg));
3057        self.instruction(End);
3058    }
3059
3060    fn set_flag(&mut self, flags_global: GlobalIndex, flag_to_set: i32, value: bool) {
3061        self.instruction(GlobalGet(flags_global.as_u32()));
3062        if value {
3063            self.instruction(I32Const(flag_to_set));
3064            self.instruction(I32Or);
3065        } else {
3066            self.instruction(I32Const(!flag_to_set));
3067            self.instruction(I32And);
3068        }
3069        self.instruction(GlobalSet(flags_global.as_u32()));
3070    }
3071
3072    fn verify_aligned(&mut self, opts: &Options, addr_local: u32, align: u32) {
3073        // If the alignment is 1 then everything is trivially aligned and the
3074        // check can be omitted.
3075        if align == 1 {
3076            return;
3077        }
3078        self.instruction(LocalGet(addr_local));
3079        assert!(align.is_power_of_two());
3080        self.ptr_uconst(opts, align - 1);
3081        self.ptr_and(opts);
3082        self.ptr_if(opts, BlockType::Empty);
3083        self.trap(Trap::UnalignedPointer);
3084        self.instruction(End);
3085    }
3086
    /// Debug-only variant of `verify_aligned` checking that the effective
    /// address `mem.addr + mem.offset` is aligned for the type `ty`, trapping
    /// with an assertion failure otherwise.
    ///
    /// This is a no-op unless the module was built with `debug` enabled.
    fn assert_aligned(&mut self, ty: &InterfaceType, mem: &Memory) {
        if !self.module.debug {
            return;
        }
        let align = self.types.align(mem.opts, ty);
        if align == 1 {
            return;
        }
        assert!(align.is_power_of_two());
        // if ((addr + offset) & (align - 1)) != 0 { assert-trap }
        self.instruction(LocalGet(mem.addr.idx));
        self.ptr_uconst(mem.opts, mem.offset);
        self.ptr_add(mem.opts);
        self.ptr_uconst(mem.opts, align - 1);
        self.ptr_and(mem.opts);
        self.ptr_if(mem.opts, BlockType::Empty);
        self.trap(Trap::AssertFailed("pointer not aligned"));
        self.instruction(End);
    }
3105
    /// Emits a call to the destination's `realloc` to allocate `size` bytes
    /// aligned to `align`, returning the result wrapped as a `Memory`
    /// operand (which includes an alignment check on the returned pointer).
    ///
    /// Panics if `opts` has no `realloc` function configured.
    fn malloc<'c>(&mut self, opts: &'c Options, size: MallocSize, align: u32) -> Memory<'c> {
        let realloc = opts.realloc.unwrap();
        // realloc(old_ptr = 0, old_size = 0, align, size)
        self.ptr_uconst(opts, 0);
        self.ptr_uconst(opts, 0);
        self.ptr_uconst(opts, align);
        match size {
            MallocSize::Const(size) => self.ptr_uconst(opts, size),
            MallocSize::Local(idx) => self.instruction(LocalGet(idx)),
        }
        self.instruction(Call(realloc.as_u32()));
        // Stash the returned pointer in a fresh temporary local.
        let addr = self.local_set_new_tmp(opts.ptr());
        self.memory_operand(opts, addr, align)
    }
3119
3120    fn memory_operand<'c>(&mut self, opts: &'c Options, addr: TempLocal, align: u32) -> Memory<'c> {
3121        let ret = Memory {
3122            addr,
3123            offset: 0,
3124            opts,
3125        };
3126        self.verify_aligned(opts, ret.addr.idx, align);
3127        ret
3128    }
3129
    /// Generates a new local in this function of the `ty` specified,
    /// initializing it with the top value on the current wasm stack.
    ///
    /// Because this uses `local.tee` the value remains on the wasm stack in
    /// addition to being stored in the local.
    ///
    /// The returned `TempLocal` must be freed after it is finished with
    /// `free_temp_local`.
    fn local_tee_new_tmp(&mut self, ty: ValType) -> TempLocal {
        self.gen_temp_local(ty, LocalTee)
    }
3138
    /// Same as `local_tee_new_tmp` but initializes the local with `LocalSet`
    /// instead of `LocalTee`, popping the value off the wasm stack.
    fn local_set_new_tmp(&mut self, ty: ValType) -> TempLocal {
        self.gen_temp_local(ty, LocalSet)
    }
3144
    /// Pushes the value of the temporary `local` onto the wasm stack.
    fn local_get_tmp(&mut self, local: &TempLocal) {
        self.instruction(LocalGet(local.idx));
    }
3148
3149    fn gen_temp_local(&mut self, ty: ValType, insn: fn(u32) -> Instruction<'static>) -> TempLocal {
3150        // First check to see if any locals are available in this function which
3151        // were previously generated but are no longer in use.
3152        if let Some(idx) = self.free_locals.get_mut(&ty).and_then(|v| v.pop()) {
3153            self.instruction(insn(idx));
3154            return TempLocal {
3155                ty,
3156                idx,
3157                needs_free: true,
3158            };
3159        }
3160
3161        // Failing that generate a fresh new local.
3162        let locals = &mut self.module.funcs[self.result].locals;
3163        match locals.last_mut() {
3164            Some((cnt, prev_ty)) if ty == *prev_ty => *cnt += 1,
3165            _ => locals.push((1, ty)),
3166        }
3167        self.nlocals += 1;
3168        let idx = self.nlocals - 1;
3169        self.instruction(insn(idx));
3170        TempLocal {
3171            ty,
3172            idx,
3173            needs_free: true,
3174        }
3175    }
3176
3177    /// Used to release a `TempLocal` from a particular lexical scope to allow
3178    /// its possible reuse in later scopes.
3179    fn free_temp_local(&mut self, mut local: TempLocal) {
3180        assert!(local.needs_free);
3181        self.free_locals
3182            .entry(local.ty)
3183            .or_insert(Vec::new())
3184            .push(local.idx);
3185        local.needs_free = false;
3186    }
3187
    /// Encodes a single instruction into this compiler's in-progress code
    /// buffer.
    fn instruction(&mut self, instr: Instruction) {
        instr.encode(&mut self.code);
    }
3191
    /// Records `trap` as occurring at the current code offset and emits an
    /// `unreachable` instruction in its place.
    fn trap(&mut self, trap: Trap) {
        self.traps.push((self.code.len(), trap));
        self.instruction(Unreachable);
    }
3196
    /// Flushes out the current `code` instructions (and `traps` if there are
    /// any) into the destination function.
    ///
    /// This is a noop if no instructions have been encoded yet.
    fn flush_code(&mut self) {
        if self.code.is_empty() {
            return;
        }
        // Move the buffered bytes/traps out, leaving empty buffers behind for
        // any subsequently encoded instructions.
        self.module.funcs[self.result].body.push(Body::Raw(
            mem::take(&mut self.code),
            mem::take(&mut self.traps),
        ));
    }
3210
    /// Completes compilation of this function, emitting the final `end`
    /// opcode and marking the destination function as filled in.
    fn finish(mut self) {
        // Append the final `end` instruction which all functions require, and
        // then empty out the temporary buffer in `Compiler`.
        self.instruction(End);
        self.flush_code();

        // Flag the function as "done" which helps with an assert later on in
        // emission that everything was eventually finished.
        self.module.funcs[self.result].filled_in = true;
    }
3221
    /// Fetches the value contained with the local specified by `stack` and
    /// converts it to `dst_ty`.
    ///
    /// This is only intended for use in primitive operations where `stack` is
    /// guaranteed to have only one local. The type of the local on the stack is
    /// then converted to `dst_ty` appropriately. Note that the types may be
    /// different due to the "flattening" of variant types.
    fn stack_get(&mut self, stack: &Stack<'_>, dst_ty: ValType) {
        assert_eq!(stack.locals.len(), 1);
        let (idx, src_ty) = stack.locals[0];
        self.instruction(LocalGet(idx));
        match (src_ty, dst_ty) {
            // Identical representations require no conversion.
            (ValType::I32, ValType::I32)
            | (ValType::I64, ValType::I64)
            | (ValType::F32, ValType::F32)
            | (ValType::F64, ValType::F64) => {}

            // Bit-preserving narrowing/reinterpretation. When the upper 32
            // bits of an `i64` are discarded a debug-only assertion first
            // checks that they were zero.
            (ValType::I32, ValType::F32) => self.instruction(F32ReinterpretI32),
            (ValType::I64, ValType::I32) => {
                self.assert_i64_upper_bits_not_set(idx);
                self.instruction(I32WrapI64);
            }
            (ValType::I64, ValType::F64) => self.instruction(F64ReinterpretI64),
            (ValType::I64, ValType::F32) => {
                self.assert_i64_upper_bits_not_set(idx);
                self.instruction(I32WrapI64);
                self.instruction(F32ReinterpretI32);
            }

            // should not be possible given the `join` function for variants
            (ValType::I32, ValType::I64)
            | (ValType::I32, ValType::F64)
            | (ValType::F32, ValType::I32)
            | (ValType::F32, ValType::I64)
            | (ValType::F32, ValType::F64)
            | (ValType::F64, ValType::I32)
            | (ValType::F64, ValType::I64)
            | (ValType::F64, ValType::F32)

            // not used in the component model
            | (ValType::Ref(_), _)
            | (_, ValType::Ref(_))
            | (ValType::V128, _)
            | (_, ValType::V128) => {
                panic!("cannot get {dst_ty:?} from {src_ty:?} local");
            }
        }
    }
3270
3271    fn assert_i64_upper_bits_not_set(&mut self, local: u32) {
3272        if !self.module.debug {
3273            return;
3274        }
3275        self.instruction(LocalGet(local));
3276        self.instruction(I64Const(32));
3277        self.instruction(I64ShrU);
3278        self.instruction(I32WrapI64);
3279        self.instruction(If(BlockType::Empty));
3280        self.trap(Trap::AssertFailed("upper bits are unexpectedly set"));
3281        self.instruction(End);
3282    }
3283
3284    /// Converts the top value on the WebAssembly stack which has type
3285    /// `src_ty` to `dst_tys[0]`.
3286    ///
3287    /// This is only intended for conversion of primitives where the `dst_tys`
3288    /// list is known to be of length 1.
3289    fn stack_set(&mut self, dst_tys: &[ValType], src_ty: ValType) {
3290        assert_eq!(dst_tys.len(), 1);
3291        let dst_ty = dst_tys[0];
3292        match (src_ty, dst_ty) {
3293            (ValType::I32, ValType::I32)
3294            | (ValType::I64, ValType::I64)
3295            | (ValType::F32, ValType::F32)
3296            | (ValType::F64, ValType::F64) => {}
3297
3298            (ValType::F32, ValType::I32) => self.instruction(I32ReinterpretF32),
3299            (ValType::I32, ValType::I64) => self.instruction(I64ExtendI32U),
3300            (ValType::F64, ValType::I64) => self.instruction(I64ReinterpretF64),
3301            (ValType::F32, ValType::I64) => {
3302                self.instruction(I32ReinterpretF32);
3303                self.instruction(I64ExtendI32U);
3304            }
3305
3306            // should not be possible given the `join` function for variants
3307            (ValType::I64, ValType::I32)
3308            | (ValType::F64, ValType::I32)
3309            | (ValType::I32, ValType::F32)
3310            | (ValType::I64, ValType::F32)
3311            | (ValType::F64, ValType::F32)
3312            | (ValType::I32, ValType::F64)
3313            | (ValType::I64, ValType::F64)
3314            | (ValType::F32, ValType::F64)
3315
3316            // not used in the component model
3317            | (ValType::Ref(_), _)
3318            | (_, ValType::Ref(_))
3319            | (ValType::V128, _)
3320            | (_, ValType::V128) => {
3321                panic!("cannot get {dst_ty:?} from {src_ty:?} local");
3322            }
3323        }
3324    }
3325
    /// Loads a byte from `mem` and zero-extends it to an `i32`.
    fn i32_load8u(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(I32Load8U(mem.memarg(0)));
    }

    /// Loads a byte from `mem` and sign-extends it to an `i32`.
    fn i32_load8s(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(I32Load8S(mem.memarg(0)));
    }

    /// Loads a 16-bit value from `mem` and zero-extends it to an `i32`.
    fn i32_load16u(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(I32Load16U(mem.memarg(1)));
    }

    /// Loads a 16-bit value from `mem` and sign-extends it to an `i32`.
    fn i32_load16s(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(I32Load16S(mem.memarg(1)));
    }

    /// Loads an `i32` from `mem` (log2-alignment of 2, i.e. 4 bytes).
    fn i32_load(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(I32Load(mem.memarg(2)));
    }

    /// Loads an `i64` from `mem` (log2-alignment of 3, i.e. 8 bytes).
    fn i64_load(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(I64Load(mem.memarg(3)));
    }

    /// Loads a pointer-sized integer from `mem`, depending on whether the
    /// memory in question is 64-bit.
    fn ptr_load(&mut self, mem: &Memory) {
        if mem.opts.memory64 {
            self.i64_load(mem);
        } else {
            self.i32_load(mem);
        }
    }
3363
    /// Emits a pointer-width add (`i64.add` for 64-bit memories, `i32.add`
    /// otherwise). The remaining `ptr_*` helpers below follow the same
    /// pattern for their respective operations.
    fn ptr_add(&mut self, opts: &Options) {
        if opts.memory64 {
            self.instruction(I64Add);
        } else {
            self.instruction(I32Add);
        }
    }

    /// Emits a pointer-width subtraction.
    fn ptr_sub(&mut self, opts: &Options) {
        if opts.memory64 {
            self.instruction(I64Sub);
        } else {
            self.instruction(I32Sub);
        }
    }

    /// Emits a pointer-width multiplication.
    fn ptr_mul(&mut self, opts: &Options) {
        if opts.memory64 {
            self.instruction(I64Mul);
        } else {
            self.instruction(I32Mul);
        }
    }

    /// Emits a pointer-width unsigned `>=` comparison.
    fn ptr_ge_u(&mut self, opts: &Options) {
        if opts.memory64 {
            self.instruction(I64GeU);
        } else {
            self.instruction(I32GeU);
        }
    }

    /// Emits a pointer-width unsigned `<` comparison.
    fn ptr_lt_u(&mut self, opts: &Options) {
        if opts.memory64 {
            self.instruction(I64LtU);
        } else {
            self.instruction(I32LtU);
        }
    }

    /// Emits a pointer-width left shift.
    fn ptr_shl(&mut self, opts: &Options) {
        if opts.memory64 {
            self.instruction(I64Shl);
        } else {
            self.instruction(I32Shl);
        }
    }

    /// Emits a pointer-width test-for-zero.
    fn ptr_eqz(&mut self, opts: &Options) {
        if opts.memory64 {
            self.instruction(I64Eqz);
        } else {
            self.instruction(I32Eqz);
        }
    }

    /// Pushes the unsigned constant `val` at pointer width (zero-extended
    /// for 64-bit memories).
    fn ptr_uconst(&mut self, opts: &Options, val: u32) {
        if opts.memory64 {
            self.instruction(I64Const(val.into()));
        } else {
            self.instruction(I32Const(val as i32));
        }
    }

    /// Pushes the signed constant `val` at pointer width (sign-extended for
    /// 64-bit memories).
    fn ptr_iconst(&mut self, opts: &Options, val: i32) {
        if opts.memory64 {
            self.instruction(I64Const(val.into()));
        } else {
            self.instruction(I32Const(val));
        }
    }

    /// Emits a pointer-width equality comparison.
    fn ptr_eq(&mut self, opts: &Options) {
        if opts.memory64 {
            self.instruction(I64Eq);
        } else {
            self.instruction(I32Eq);
        }
    }

    /// Emits a pointer-width inequality comparison.
    fn ptr_ne(&mut self, opts: &Options) {
        if opts.memory64 {
            self.instruction(I64Ne);
        } else {
            self.instruction(I32Ne);
        }
    }

    /// Emits a pointer-width bitwise AND.
    fn ptr_and(&mut self, opts: &Options) {
        if opts.memory64 {
            self.instruction(I64And);
        } else {
            self.instruction(I32And);
        }
    }

    /// Emits a pointer-width bitwise OR.
    fn ptr_or(&mut self, opts: &Options) {
        if opts.memory64 {
            self.instruction(I64Or);
        } else {
            self.instruction(I32Or);
        }
    }

    /// Emits a pointer-width bitwise XOR.
    fn ptr_xor(&mut self, opts: &Options) {
        if opts.memory64 {
            self.instruction(I64Xor);
        } else {
            self.instruction(I32Xor);
        }
    }

    /// Emits an `if` consuming a pointer-width condition; for 64-bit
    /// memories the `i64` is first compared against zero to produce the
    /// `i32` condition `if` requires.
    fn ptr_if(&mut self, opts: &Options, ty: BlockType) {
        if opts.memory64 {
            self.instruction(I64Const(0));
            self.instruction(I64Ne);
        }
        self.instruction(If(ty));
    }

    /// Emits a `br_if` consuming a pointer-width condition, converting it to
    /// `i32` for 64-bit memories as in `ptr_if`.
    fn ptr_br_if(&mut self, opts: &Options, depth: u32) {
        if opts.memory64 {
            self.instruction(I64Const(0));
            self.instruction(I64Ne);
        }
        self.instruction(BrIf(depth));
    }
3491
    /// Loads an `f32` from `mem`.
    fn f32_load(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(F32Load(mem.memarg(2)));
    }

    /// Loads an `f64` from `mem`.
    fn f64_load(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(F64Load(mem.memarg(3)));
    }

    /// Pushes the destination address onto the wasm stack if `dst` is a
    /// memory destination; stack destinations need no address.
    fn push_dst_addr(&mut self, dst: &Destination) {
        if let Destination::Memory(mem) = dst {
            self.instruction(LocalGet(mem.addr.idx));
        }
    }

    // Note for all `*_store` helpers below: unlike the load helpers, the
    // address is *not* pushed here — it is expected to already be on the wasm
    // stack beneath the value (typically via `push_dst_addr`).

    /// Stores the low 8 bits of an `i32` to `mem`.
    fn i32_store8(&mut self, mem: &Memory) {
        self.instruction(I32Store8(mem.memarg(0)));
    }

    /// Stores the low 16 bits of an `i32` to `mem`.
    fn i32_store16(&mut self, mem: &Memory) {
        self.instruction(I32Store16(mem.memarg(1)));
    }

    /// Stores an `i32` to `mem`.
    fn i32_store(&mut self, mem: &Memory) {
        self.instruction(I32Store(mem.memarg(2)));
    }

    /// Stores an `i64` to `mem`.
    fn i64_store(&mut self, mem: &Memory) {
        self.instruction(I64Store(mem.memarg(3)));
    }

    /// Stores a pointer-sized integer to `mem`, depending on whether the
    /// memory in question is 64-bit.
    fn ptr_store(&mut self, mem: &Memory) {
        if mem.opts.memory64 {
            self.i64_store(mem);
        } else {
            self.i32_store(mem);
        }
    }

    /// Stores an `f32` to `mem`.
    fn f32_store(&mut self, mem: &Memory) {
        self.instruction(F32Store(mem.memarg(2)));
    }

    /// Stores an `f64` to `mem`.
    fn f64_store(&mut self, mem: &Memory) {
        self.instruction(F64Store(mem.memarg(3)));
    }
3539}
3540
impl<'a> Source<'a> {
    /// Given this `Source` returns an iterator over the `Source` for each of
    /// the component `fields` specified.
    ///
    /// This will automatically slice stack-based locals to the appropriate
    /// width for each component type and additionally calculate the appropriate
    /// offset for each memory-based type.
    fn record_field_srcs<'b>(
        &'b self,
        types: &'b ComponentTypesBuilder,
        fields: impl IntoIterator<Item = InterfaceType> + 'b,
    ) -> impl Iterator<Item = Source<'a>> + 'b
    where
        'a: 'b,
    {
        // Running position: a byte offset for memory sources and a count of
        // flattened locals for stack sources.
        let mut offset = 0;
        fields.into_iter().map(move |ty| match self {
            Source::Memory(mem) => {
                let mem = next_field_offset(&mut offset, types, &ty, mem);
                Source::Memory(mem)
            }
            Source::Stack(stack) => {
                let cnt = types.flat_types(&ty).unwrap().len() as u32;
                offset += cnt;
                Source::Stack(stack.slice((offset - cnt) as usize..offset as usize))
            }
        })
    }

    /// Returns the `Source` of the payload of a variant described by `info`
    /// when translating the case with payload type `case` (`None` for a
    /// payload-less case).
    fn payload_src(
        &self,
        types: &ComponentTypesBuilder,
        info: &VariantInfo,
        case: Option<&InterfaceType>,
    ) -> Source<'a> {
        match self {
            Source::Stack(s) => {
                // Skip the discriminant local, then take only as many locals
                // as this case's flattened representation needs.
                let flat_len = match case {
                    Some(case) => types.flat_types(case).unwrap().len(),
                    None => 0,
                };
                Source::Stack(s.slice(1..s.locals.len()).slice(0..flat_len))
            }
            Source::Memory(mem) => {
                // The payload offset depends on the memory's pointer width.
                let mem = if mem.opts.memory64 {
                    mem.bump(info.payload_offset64)
                } else {
                    mem.bump(info.payload_offset32)
                };
                Source::Memory(mem)
            }
        }
    }

    /// Returns the `Options` this source belongs to.
    fn opts(&self) -> &'a Options {
        match self {
            Source::Stack(s) => s.opts,
            Source::Memory(mem) => mem.opts,
        }
    }
}
3603
impl<'a> Destination<'a> {
    /// Same as `Source::record_field_srcs` but for destinations.
    fn record_field_dsts<'b, I>(
        &'b self,
        types: &'b ComponentTypesBuilder,
        fields: I,
    ) -> impl Iterator<Item = Destination<'b>> + use<'b, I>
    where
        'a: 'b,
        I: IntoIterator<Item = InterfaceType> + 'b,
    {
        // Running position: a byte offset for memory destinations and a count
        // of flattened core types for stack destinations.
        let mut offset = 0;
        fields.into_iter().map(move |ty| match self {
            Destination::Memory(mem) => {
                let mem = next_field_offset(&mut offset, types, &ty, mem);
                Destination::Memory(mem)
            }
            Destination::Stack(s, opts) => {
                let cnt = types.flat_types(&ty).unwrap().len() as u32;
                offset += cnt;
                Destination::Stack(&s[(offset - cnt) as usize..offset as usize], opts)
            }
        })
    }

    /// Returns the `Destination` of the payload of a variant described by
    /// `info` when translating the case with payload type `case` (`None` for
    /// a payload-less case).
    fn payload_dst(
        &self,
        types: &ComponentTypesBuilder,
        info: &VariantInfo,
        case: Option<&InterfaceType>,
    ) -> Destination<'_> {
        match self {
            Destination::Stack(s, opts) => {
                // Skip the discriminant slot, then take only as many types as
                // this case's flattened representation needs.
                let flat_len = match case {
                    Some(case) => types.flat_types(case).unwrap().len(),
                    None => 0,
                };
                Destination::Stack(&s[1..][..flat_len], opts)
            }
            Destination::Memory(mem) => {
                // The payload offset depends on the memory's pointer width.
                let mem = if mem.opts.memory64 {
                    mem.bump(info.payload_offset64)
                } else {
                    mem.bump(info.payload_offset32)
                };
                Destination::Memory(mem)
            }
        }
    }

    /// Returns the `Options` this destination belongs to.
    fn opts(&self) -> &'a Options {
        match self {
            Destination::Stack(_, opts) => opts,
            Destination::Memory(mem) => mem.opts,
        }
    }
}
3662
3663fn next_field_offset<'a>(
3664    offset: &mut u32,
3665    types: &ComponentTypesBuilder,
3666    field: &InterfaceType,
3667    mem: &Memory<'a>,
3668) -> Memory<'a> {
3669    let abi = types.canonical_abi(field);
3670    let offset = if mem.opts.memory64 {
3671        abi.next_field64(offset)
3672    } else {
3673        abi.next_field32(offset)
3674    };
3675    mem.bump(offset)
3676}
3677
impl<'a> Memory<'a> {
    /// Builds a `MemArg` for this memory operand with the given (log2)
    /// alignment.
    ///
    /// Panics if the options have no linear memory configured.
    fn memarg(&self, align: u32) -> MemArg {
        MemArg {
            offset: u64::from(self.offset),
            align,
            memory_index: self.opts.memory.unwrap().as_u32(),
        }
    }

    /// Returns a new `Memory` addressing `offset` more bytes, sharing the
    /// same base-address local (the new `TempLocal` is a non-owning view).
    fn bump(&self, offset: u32) -> Memory<'a> {
        Memory {
            opts: self.opts,
            addr: TempLocal::new(self.addr.idx, self.addr.ty),
            offset: self.offset + offset,
        }
    }
}
3695
impl<'a> Stack<'a> {
    /// Returns a sub-`Stack` covering only the locals within `range`.
    fn slice(&self, range: Range<usize>) -> Stack<'a> {
        Stack {
            locals: &self.locals[range],
            opts: self.opts,
        }
    }
}
3704
/// Pairs up one case of a source variant with its corresponding case in the
/// destination variant during variant translation (see `convert_variant`).
struct VariantCase<'a> {
    /// Index of this case within the source variant.
    src_i: u32,
    /// Payload type of the source case, if any.
    src_ty: Option<&'a InterfaceType>,
    /// Index of the matching case within the destination variant.
    dst_i: u32,
    /// Payload type of the destination case, if any.
    dst_ty: Option<&'a InterfaceType>,
}
3711
/// Computes the `VariantInfo` for a variant whose case payload types are
/// given by `cases` (`None` for payload-less cases).
fn variant_info<'a, I>(types: &ComponentTypesBuilder, cases: I) -> VariantInfo
where
    I: IntoIterator<Item = Option<&'a InterfaceType>>,
    I::IntoIter: ExactSizeIterator,
{
    VariantInfo::new(
        cases
            .into_iter()
            .map(|ty| ty.map(|ty| types.canonical_abi(ty))),
    )
    // Only the `VariantInfo` half of the returned tuple is needed here.
    .0
}
3724
/// Size argument for `Compiler::malloc` when calling `realloc`.
enum MallocSize {
    /// A byte size known when the adapter is compiled.
    Const(u32),
    /// Index of a local holding the byte size at runtime.
    Local(u32),
}
3729
/// A string value held in temporary locals: a pointer and a length, along
/// with the `Options` describing the memory it lives in.
struct WasmString<'a> {
    ptr: TempLocal,
    len: TempLocal,
    opts: &'a Options,
}
3735
/// A handle to a function-local variable used as scratch space during
/// adapter compilation.
struct TempLocal {
    /// Index of the local within the generated function.
    idx: u32,
    /// Core wasm type of the local.
    ty: ValType,
    /// Whether this local must be released with `free_temp_local` before
    /// being dropped; violating this panics in the `Drop` impl.
    needs_free: bool,
}
3741
impl TempLocal {
    /// Creates a non-owning view of the local `idx`. Unlike locals created by
    /// `gen_temp_local` this does not need to be explicitly freed.
    fn new(idx: u32, ty: ValType) -> TempLocal {
        TempLocal {
            idx,
            ty,
            needs_free: false,
        }
    }
}
3751
3752impl std::ops::Drop for TempLocal {
3753    fn drop(&mut self) {
3754        if self.needs_free {
3755            panic!("temporary local not free'd");
3756        }
3757    }
3758}
3759
impl From<FlatType> for ValType {
    /// Converts a component-model flattened type into the corresponding core
    /// wasm value type.
    fn from(ty: FlatType) -> ValType {
        match ty {
            FlatType::I32 => ValType::I32,
            FlatType::I64 => ValType::I64,
            FlatType::F32 => ValType::F32,
            FlatType::F64 => ValType::F64,
        }
    }
}