wasmtime_cranelift/compiler/component.rs

1//! Compilation support for the component model.
2
3use crate::{compiler::Compiler, TRAP_ALWAYS, TRAP_CANNOT_ENTER, TRAP_INTERNAL_ASSERT};
4use anyhow::Result;
5use cranelift_codegen::ir::condcodes::IntCC;
6use cranelift_codegen::ir::{self, InstBuilder, MemFlags, Value};
7use cranelift_codegen::isa::{CallConv, TargetIsa};
8use cranelift_frontend::FunctionBuilder;
9use std::any::Any;
10use wasmtime_environ::component::*;
11use wasmtime_environ::fact::SYNC_ENTER_FIXED_PARAMS;
12use wasmtime_environ::{
13    HostCall, ModuleInternedTypeIndex, PtrSize, TrapSentinel, Tunables, WasmFuncType, WasmValType,
14};
15
/// Helper for compiling a single component-model trampoline into a Cranelift
/// `ir::Function`.
struct TrampolineCompiler<'a> {
    /// The enclosing compiler, used for shared helpers (spilling arguments,
    /// indirect host calls, etc.).
    compiler: &'a Compiler,
    /// The target ISA this trampoline is compiled for.
    isa: &'a (dyn TargetIsa + 'static),
    /// Builder for the function currently being generated.
    builder: FunctionBuilder<'a>,
    /// The component this trampoline belongs to.
    component: &'a Component,
    /// Type information for `component`.
    types: &'a ComponentTypesBuilder,
    /// Offsets of fields within the `VMComponentContext` for this component.
    offsets: VMComponentOffsets<u8>,
    /// Which ABI (wasm or array) this trampoline is being generated for.
    abi: Abi,
    /// Entry block of the generated function, holding its parameters.
    block0: ir::Block,
    /// Interned core-wasm signature of this trampoline.
    signature: ModuleInternedTypeIndex,
    /// Compilation tunables (e.g. whether signals-based traps are enabled).
    tunables: &'a Tunables,
}
28
/// Which calling convention a trampoline is generated for.
#[derive(Debug, Copy, Clone)]
enum Abi {
    /// The native wasm calling convention, used when wasm code calls the
    /// trampoline directly.
    Wasm,
    /// The "array" calling convention where arguments/results are exchanged
    /// through a pointer/length pair of raw values.
    Array,
}
34
/// Signature of the `host::*` accessor functions used to look up a builtin
/// libcall's imported signature and builtin index for the current function.
type GetLibcallFn =
    fn(&dyn TargetIsa, &mut ir::Function) -> (ir::SigRef, ComponentBuiltinFunctionIndex);
37
38impl<'a> TrampolineCompiler<'a> {
39    fn new(
40        compiler: &'a Compiler,
41        func_compiler: &'a mut super::FunctionCompiler<'_>,
42        component: &'a Component,
43        types: &'a ComponentTypesBuilder,
44        index: TrampolineIndex,
45        abi: Abi,
46        tunables: &'a Tunables,
47    ) -> TrampolineCompiler<'a> {
48        let isa = &*compiler.isa;
49        let signature = component.trampolines[index];
50        let ty = types[signature].unwrap_func();
51        let func = ir::Function::with_name_signature(
52            ir::UserFuncName::user(0, 0),
53            match abi {
54                Abi::Wasm => crate::wasm_call_signature(isa, ty, &compiler.tunables),
55                Abi::Array => crate::array_call_signature(isa),
56            },
57        );
58        let (builder, block0) = func_compiler.builder(func);
59        TrampolineCompiler {
60            compiler,
61            isa,
62            builder,
63            component,
64            types,
65            offsets: VMComponentOffsets::new(isa.pointer_bytes(), component),
66            abi,
67            block0,
68            signature,
69            tunables,
70        }
71    }
72
73    fn translate(&mut self, trampoline: &Trampoline) {
74        match trampoline {
75            Trampoline::Transcoder {
76                op,
77                from,
78                from64,
79                to,
80                to64,
81            } => {
82                match self.abi {
83                    Abi::Wasm => {
84                        self.translate_transcode(*op, *from, *from64, *to, *to64);
85                    }
86                    // Transcoders can only actually be called by Wasm, so let's assert
87                    // that here.
88                    Abi::Array => {
89                        self.builder.ins().trap(TRAP_INTERNAL_ASSERT);
90                    }
91                }
92            }
93            Trampoline::LowerImport {
94                index,
95                options,
96                lower_ty,
97            } => {
98                self.translate_lower_import(*index, options, *lower_ty);
99            }
100            Trampoline::AlwaysTrap => {
101                self.translate_always_trap();
102            }
103            Trampoline::ResourceNew(ty) => self.translate_resource_new(*ty),
104            Trampoline::ResourceRep(ty) => self.translate_resource_rep(*ty),
105            Trampoline::ResourceDrop(ty) => self.translate_resource_drop(*ty),
106            Trampoline::BackpressureSet { instance } => {
107                self.translate_backpressure_set_call(*instance)
108            }
109            Trampoline::TaskReturn { results, options } => {
110                self.translate_task_return_call(*results, options)
111            }
112            Trampoline::WaitableSetNew { instance } => self.translate_waitable_set_new(*instance),
113            Trampoline::WaitableSetWait {
114                instance,
115                async_,
116                memory,
117            } => self.translate_task_wait_or_poll_call(
118                *instance,
119                *async_,
120                *memory,
121                host::waitable_set_wait,
122            ),
123            Trampoline::WaitableSetPoll {
124                instance,
125                async_,
126                memory,
127            } => self.translate_task_wait_or_poll_call(
128                *instance,
129                *async_,
130                *memory,
131                host::waitable_set_poll,
132            ),
133            Trampoline::WaitableSetDrop { instance } => self.translate_waitable_set_drop(*instance),
134            Trampoline::WaitableJoin { instance } => self.translate_waitable_join(*instance),
135            Trampoline::Yield { async_ } => self.translate_yield_call(*async_),
136            Trampoline::SubtaskDrop { instance } => self.translate_subtask_drop_call(*instance),
137            Trampoline::StreamNew { ty } => self.translate_future_or_stream_call(
138                &[ty.as_u32()],
139                None,
140                host::stream_new,
141                TrapSentinel::NegativeOne,
142            ),
143            Trampoline::StreamRead {
144                ty,
145                err_ctx_ty,
146                options,
147            } => {
148                let tys = &[ty.as_u32(), err_ctx_ty.as_u32()];
149                if let Some(info) = self.flat_stream_element_info(*ty).cloned() {
150                    self.translate_flat_stream_call(tys, options, host::flat_stream_read, &info)
151                } else {
152                    self.translate_future_or_stream_call(
153                        tys,
154                        Some(options),
155                        host::stream_read,
156                        TrapSentinel::NegativeOne,
157                    )
158                }
159            }
160            Trampoline::StreamWrite { ty, options } => {
161                let tys = &[ty.as_u32()];
162                if let Some(info) = self.flat_stream_element_info(*ty).cloned() {
163                    self.translate_flat_stream_call(tys, options, host::flat_stream_write, &info)
164                } else {
165                    self.translate_future_or_stream_call(
166                        tys,
167                        Some(options),
168                        host::stream_write,
169                        TrapSentinel::NegativeOne,
170                    )
171                }
172            }
173            Trampoline::StreamCancelRead { ty, async_ } => {
174                self.translate_cancel_call(ty.as_u32(), *async_, host::stream_cancel_read)
175            }
176            Trampoline::StreamCancelWrite { ty, async_ } => {
177                self.translate_cancel_call(ty.as_u32(), *async_, host::stream_cancel_write)
178            }
179            Trampoline::StreamCloseReadable { ty } => self.translate_future_or_stream_call(
180                &[ty.as_u32()],
181                None,
182                host::stream_close_readable,
183                TrapSentinel::Falsy,
184            ),
185            Trampoline::StreamCloseWritable { ty, err_ctx_ty } => self
186                .translate_future_or_stream_call(
187                    &[ty.as_u32(), err_ctx_ty.as_u32()],
188                    None,
189                    host::stream_close_writable,
190                    TrapSentinel::Falsy,
191                ),
192            Trampoline::FutureNew { ty } => self.translate_future_or_stream_call(
193                &[ty.as_u32()],
194                None,
195                host::future_new,
196                TrapSentinel::NegativeOne,
197            ),
198            Trampoline::FutureRead {
199                ty,
200                err_ctx_ty,
201                options,
202            } => self.translate_future_or_stream_call(
203                &[ty.as_u32(), err_ctx_ty.as_u32()],
204                Some(&options),
205                host::future_read,
206                TrapSentinel::NegativeOne,
207            ),
208            Trampoline::FutureWrite { ty, options } => self.translate_future_or_stream_call(
209                &[ty.as_u32()],
210                Some(options),
211                host::future_write,
212                TrapSentinel::NegativeOne,
213            ),
214            Trampoline::FutureCancelRead { ty, async_ } => {
215                self.translate_cancel_call(ty.as_u32(), *async_, host::future_cancel_read)
216            }
217            Trampoline::FutureCancelWrite { ty, async_ } => {
218                self.translate_cancel_call(ty.as_u32(), *async_, host::future_cancel_write)
219            }
220            Trampoline::FutureCloseReadable { ty } => self.translate_future_or_stream_call(
221                &[ty.as_u32()],
222                None,
223                host::future_close_readable,
224                TrapSentinel::Falsy,
225            ),
226            Trampoline::FutureCloseWritable { ty, err_ctx_ty } => self
227                .translate_future_or_stream_call(
228                    &[ty.as_u32(), err_ctx_ty.as_u32()],
229                    None,
230                    host::future_close_writable,
231                    TrapSentinel::Falsy,
232                ),
233            Trampoline::ErrorContextNew { ty, options } => self.translate_error_context_call(
234                *ty,
235                options,
236                host::error_context_new,
237                TrapSentinel::NegativeOne,
238            ),
239            Trampoline::ErrorContextDebugMessage { ty, options } => self
240                .translate_error_context_call(
241                    *ty,
242                    options,
243                    host::error_context_debug_message,
244                    TrapSentinel::Falsy,
245                ),
246            Trampoline::ErrorContextDrop { ty } => self.translate_error_context_drop_call(*ty),
247            Trampoline::ResourceTransferOwn => {
248                self.translate_host_libcall(host::resource_transfer_own, |me, rets| {
249                    rets[0] = me.raise_if_negative_one(rets[0]);
250                })
251            }
252            Trampoline::ResourceTransferBorrow => {
253                self.translate_host_libcall(host::resource_transfer_borrow, |me, rets| {
254                    rets[0] = me.raise_if_negative_one(rets[0]);
255                })
256            }
257            Trampoline::ResourceEnterCall => {
258                self.translate_host_libcall(host::resource_enter_call, |_, _| {})
259            }
260            Trampoline::ResourceExitCall => {
261                self.translate_host_libcall(host::resource_exit_call, |me, rets| {
262                    me.raise_if_host_trapped(rets.pop().unwrap());
263                })
264            }
265            Trampoline::SyncEnterCall => self.translate_sync_enter(),
266            Trampoline::SyncExitCall { callback } => self.translate_sync_exit(*callback),
267            Trampoline::AsyncEnterCall => {
268                self.translate_async_enter_or_exit(host::async_enter, None, TrapSentinel::Falsy)
269            }
270            Trampoline::AsyncExitCall {
271                callback,
272                post_return,
273            } => self.translate_async_enter_or_exit(
274                host::async_exit,
275                Some((*callback, *post_return)),
276                TrapSentinel::NegativeOne,
277            ),
278            Trampoline::FutureTransfer => {
279                self.translate_host_libcall(host::future_transfer, |me, rets| {
280                    rets[0] = me.raise_if_negative_one(rets[0]);
281                })
282            }
283            Trampoline::StreamTransfer => {
284                self.translate_host_libcall(host::stream_transfer, |me, rets| {
285                    rets[0] = me.raise_if_negative_one(rets[0]);
286                })
287            }
288            Trampoline::ErrorContextTransfer => {
289                self.translate_host_libcall(host::error_context_transfer, |me, rets| {
290                    rets[0] = me.raise_if_negative_one(rets[0]);
291                })
292            }
293        }
294    }
295
296    /// Determine whether the specified type can be optimized as a stream
297    /// payload by lifting and lowering with a simple `memcpy`.
298    ///
299    /// Any type containing only "flat", primitive data (i.e. no pointers or
300    /// handles) should qualify for this optimization, but it's also okay to
301    /// conservatively return `None` here; the fallback slow path will always
302    /// work -- it just won't be as efficient.
303    fn flat_stream_element_info(&self, ty: TypeStreamTableIndex) -> Option<&CanonicalAbiInfo> {
304        let payload = self.types[self.types[ty].ty].payload;
305        match payload {
306            None => Some(&CanonicalAbiInfo::ZERO),
307            Some(
308                payload @ (InterfaceType::Bool
309                | InterfaceType::S8
310                | InterfaceType::U8
311                | InterfaceType::S16
312                | InterfaceType::U16
313                | InterfaceType::S32
314                | InterfaceType::U32
315                | InterfaceType::S64
316                | InterfaceType::U64
317                | InterfaceType::Float32
318                | InterfaceType::Float64
319                | InterfaceType::Char),
320            ) => Some(self.types.canonical_abi(&payload)),
321            // TODO: Recursively check for other "flat" types (i.e. those without pointers or handles),
322            // e.g. `record`s, `variant`s, etc. which contain only flat types.
323            _ => None,
324        }
325    }
326
327    fn store_wasm_arguments(&mut self, args: &[Value]) -> (Value, Value) {
328        let pointer_type = self.isa.pointer_type();
329        let wasm_func_ty = &self.types[self.signature].unwrap_func();
330
331        // Start off by spilling all the wasm arguments into a stack slot to be
332        // passed to the host function.
333        match self.abi {
334            Abi::Wasm => {
335                let (ptr, len) = self.compiler.allocate_stack_array_and_spill_args(
336                    wasm_func_ty,
337                    &mut self.builder,
338                    args,
339                );
340                let len = self.builder.ins().iconst(pointer_type, i64::from(len));
341                (ptr, len)
342            }
343            Abi::Array => {
344                let params = self.builder.func.dfg.block_params(self.block0);
345                (params[2], params[3])
346            }
347        }
348    }
349
    /// Emits a call to the builtin libcall identified by `get_libcall`,
    /// passing `args`, and decodes the libcall's single return value
    /// according to `sentinel` to detect host traps.
    fn translate_intrinsic_libcall(
        &mut self,
        vmctx: ir::Value,
        get_libcall: GetLibcallFn,
        args: &[ir::Value],
        sentinel: TrapSentinel,
    ) {
        match self.abi {
            Abi::Wasm => {}

            Abi::Array => {
                // TODO: A guest could hypothetically export the same intrinsic
                // it imported, allowing the host to call it directly.  We need
                // to support that here (except for `sync-enter`, `sync-exit`,
                // `async-enter`, and `async-exit`, which are only ever called
                // from FACT-generated Wasm code and never exported).
                //
                // https://github.com/bytecodealliance/wasmtime/issues/10143
                self.builder.ins().trap(TRAP_INTERNAL_ASSERT);
                return;
            }
        }

        let call = self.call_libcall(vmctx, get_libcall, args);

        // The libcall's lone result encodes success/failure; `sentinel`
        // determines how it's interpreted.
        let result = self.builder.func.dfg.inst_results(call)[0];
        match sentinel {
            TrapSentinel::NegativeOne => {
                // -1 means the host trapped; otherwise the value is this
                // trampoline's wasm-level result.
                let result = self.raise_if_negative_one(result);
                self.abi_store_results(&[result]);
            }
            TrapSentinel::Falsy => {
                // A false/zero return means the host trapped; nothing is
                // returned to wasm on success.
                self.raise_if_host_trapped(result);
                self.builder.ins().return_(&[]);
            }
            _ => todo!("support additional return types if/when necessary"),
        }
    }
388
389    fn translate_task_return_call(&mut self, results: TypeTupleIndex, options: &CanonicalOptions) {
390        // FIXME(#10338) shouldn't ignore options here.
391        let _ = options;
392        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
393        let vmctx = args[0];
394
395        let (values_vec_ptr, values_vec_len) = self.store_wasm_arguments(&args[2..]);
396
397        let ty = self
398            .builder
399            .ins()
400            .iconst(ir::types::I32, i64::from(results.as_u32()));
401
402        self.translate_intrinsic_libcall(
403            vmctx,
404            host::task_return,
405            &[vmctx, ty, values_vec_ptr, values_vec_len],
406            TrapSentinel::Falsy,
407        );
408    }
409
410    fn translate_waitable_set_new(&mut self, instance: RuntimeComponentInstanceIndex) {
411        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
412        let vmctx = args[0];
413
414        let instance = self
415            .builder
416            .ins()
417            .iconst(ir::types::I32, i64::from(instance.as_u32()));
418
419        self.translate_intrinsic_libcall(
420            vmctx,
421            host::waitable_set_new,
422            &[vmctx, instance],
423            TrapSentinel::NegativeOne,
424        );
425    }
426
427    fn translate_waitable_set_drop(&mut self, instance: RuntimeComponentInstanceIndex) {
428        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
429        let vmctx = args[0];
430        let set = args[2];
431
432        let instance = self
433            .builder
434            .ins()
435            .iconst(ir::types::I32, i64::from(instance.as_u32()));
436
437        self.translate_intrinsic_libcall(
438            vmctx,
439            host::waitable_set_drop,
440            &[vmctx, instance, set],
441            TrapSentinel::Falsy,
442        );
443    }
444
445    fn translate_waitable_join(&mut self, instance: RuntimeComponentInstanceIndex) {
446        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
447        let vmctx = args[0];
448        let set = args[2];
449        let waitable = args[3];
450
451        let instance = self
452            .builder
453            .ins()
454            .iconst(ir::types::I32, i64::from(instance.as_u32()));
455
456        self.translate_intrinsic_libcall(
457            vmctx,
458            host::waitable_join,
459            &[vmctx, instance, set, waitable],
460            TrapSentinel::Falsy,
461        );
462    }
463
    /// Compiles the `sync-enter` intrinsic, which is only ever invoked from
    /// FACT-generated Wasm code (never exported), so only the wasm ABI is
    /// supported.
    fn translate_sync_enter(&mut self) {
        match self.abi {
            Abi::Wasm => {}

            Abi::Array => {
                // This code can only be called from (FACT-generated) Wasm, so
                // we don't need to support the array ABI.
                self.builder.ins().trap(TRAP_INTERNAL_ASSERT);
                return;
            }
        }

        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
        let vmctx = args[0];

        let pointer_type = self.isa.pointer_type();
        let wasm_func_ty = &self.types[self.signature].unwrap_func();

        // The first `SYNC_ENTER_FIXED_PARAMS.len()` wasm-level parameters are
        // forwarded to the host directly; everything after them is spilled to
        // a stack array.  The `+ 2` skips the two leading vmctx parameters of
        // the Cranelift-level function.
        let param_offset = SYNC_ENTER_FIXED_PARAMS.len();
        let spill_offset = param_offset + 2;

        // Spill the variable tail of the parameters; the synthesized function
        // type has no results since only arguments are being stored.
        let (values_vec_ptr, len) = self.compiler.allocate_stack_array_and_spill_args(
            &WasmFuncType::new(
                wasm_func_ty
                    .params()
                    .iter()
                    .skip(param_offset)
                    .copied()
                    .collect(),
                Box::new([]),
            ),
            &mut self.builder,
            &args[spill_offset..],
        );
        let values_vec_len = self.builder.ins().iconst(pointer_type, i64::from(len));

        let mut callee_args = vec![vmctx];

        // remaining non-Wasm parameters
        callee_args.extend(args[2..spill_offset].iter().copied());

        callee_args.push(values_vec_ptr);
        callee_args.push(values_vec_len);

        self.translate_intrinsic_libcall(
            vmctx,
            host::sync_enter,
            &callee_args,
            TrapSentinel::Falsy,
        );
    }
515
    /// Compiles the `sync-exit` intrinsic, which is only ever invoked from
    /// FACT-generated Wasm code (never exported), so only the wasm ABI is
    /// supported.
    ///
    /// The host writes this trampoline's results into a stack array which is
    /// then loaded and returned to wasm.
    fn translate_sync_exit(&mut self, callback: Option<RuntimeCallbackIndex>) {
        match self.abi {
            Abi::Wasm => {}

            Abi::Array => {
                // This code can only be called from (FACT-generated) Wasm, so
                // we don't need to support the array ABI.
                self.builder.ins().trap(TRAP_INTERNAL_ASSERT);
                return;
            }
        }

        let pointer_type = self.isa.pointer_type();
        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
        let vmctx = args[0];
        let wasm_func_ty = &self.types[self.signature].unwrap_func();

        // The host receives vmctx plus the (possibly null) callback funcref
        // up front.
        let mut callee_args = vec![vmctx, self.load_callback(vmctx, callback)];

        // remaining non-Wasm parameters
        callee_args.extend(args[2..].iter().copied());

        // Allocate a stack array for the host to write results into; the
        // synthesized type has no parameters since nothing is spilled here.
        let (values_vec_ptr, len) = self.compiler.allocate_stack_array_and_spill_args(
            &WasmFuncType::new(
                Box::new([]),
                wasm_func_ty.returns().iter().copied().collect(),
            ),
            &mut self.builder,
            &[],
        );
        let values_vec_len = self.builder.ins().iconst(pointer_type, i64::from(len));

        callee_args.push(values_vec_ptr);
        callee_args.push(values_vec_len);

        let call = self.call_libcall(vmctx, host::sync_exit, &callee_args);

        // A false/zero return value indicates the host trapped.
        let succeeded = self.builder.func.dfg.inst_results(call)[0];
        self.raise_if_host_trapped(succeeded);
        // After the host function has returned the results are loaded from
        // `values_vec_ptr` and then returned.
        let results = self.compiler.load_values_from_array(
            wasm_func_ty.returns(),
            &mut self.builder,
            values_vec_ptr,
            values_vec_len,
        );
        self.builder.ins().return_(&results);
    }
565
566    fn translate_async_enter_or_exit(
567        &mut self,
568        get_libcall: GetLibcallFn,
569        callback_and_post_return: Option<(
570            Option<RuntimeCallbackIndex>,
571            Option<RuntimePostReturnIndex>,
572        )>,
573        sentinel: TrapSentinel,
574    ) {
575        match self.abi {
576            Abi::Wasm => {}
577
578            Abi::Array => {
579                // This code can only be called from (FACT-generated) Wasm, so
580                // we don't need to support the array ABI.
581                self.builder.ins().trap(TRAP_INTERNAL_ASSERT);
582                return;
583            }
584        }
585
586        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
587        let vmctx = args[0];
588
589        let mut callee_args = vec![vmctx];
590
591        if let Some((callback, post_return)) = callback_and_post_return {
592            // callback: *mut VMFuncRef
593            callee_args.push(self.load_callback(vmctx, callback));
594            // post_return: *mut VMFuncRef
595            callee_args.push(self.load_post_return(vmctx, post_return));
596        }
597
598        // remaining parameters
599        callee_args.extend(args[2..].iter().copied());
600
601        self.translate_intrinsic_libcall(vmctx, get_libcall, &callee_args, sentinel);
602    }
603
604    fn translate_backpressure_set_call(&mut self, caller_instance: RuntimeComponentInstanceIndex) {
605        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
606        let vmctx = args[0];
607
608        let mut callee_args = vec![
609            vmctx,
610            self.builder
611                .ins()
612                .iconst(ir::types::I32, i64::from(caller_instance.as_u32())),
613        ];
614
615        callee_args.extend(args[2..].iter().copied());
616
617        self.translate_intrinsic_libcall(
618            vmctx,
619            host::backpressure_set,
620            &callee_args,
621            TrapSentinel::Falsy,
622        );
623    }
624
625    fn translate_task_wait_or_poll_call(
626        &mut self,
627        caller_instance: RuntimeComponentInstanceIndex,
628        async_: bool,
629        memory: RuntimeMemoryIndex,
630        get_libcall: GetLibcallFn,
631    ) {
632        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
633        let vmctx = args[0];
634
635        let mut callee_args = vec![
636            vmctx,
637            self.builder
638                .ins()
639                .iconst(ir::types::I32, i64::from(caller_instance.as_u32())),
640            self.builder
641                .ins()
642                .iconst(ir::types::I8, if async_ { 1 } else { 0 }),
643            self.load_memory(vmctx, memory),
644        ];
645
646        callee_args.extend(args[2..].iter().copied());
647
648        self.translate_intrinsic_libcall(
649            vmctx,
650            get_libcall,
651            &callee_args,
652            TrapSentinel::NegativeOne,
653        );
654    }
655
656    fn translate_yield_call(&mut self, async_: bool) {
657        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
658        let vmctx = args[0];
659
660        let callee_args = [
661            vmctx,
662            self.builder
663                .ins()
664                .iconst(ir::types::I8, if async_ { 1 } else { 0 }),
665        ];
666
667        self.translate_intrinsic_libcall(vmctx, host::yield_, &callee_args, TrapSentinel::Falsy);
668    }
669
670    fn translate_subtask_drop_call(&mut self, caller_instance: RuntimeComponentInstanceIndex) {
671        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
672        let vmctx = args[0];
673
674        let mut callee_args = vec![
675            vmctx,
676            self.builder
677                .ins()
678                .iconst(ir::types::I32, i64::from(caller_instance.as_u32())),
679        ];
680
681        callee_args.extend(args[2..].iter().copied());
682
683        self.translate_intrinsic_libcall(
684            vmctx,
685            host::subtask_drop,
686            &callee_args,
687            TrapSentinel::Falsy,
688        );
689    }
690
    /// Compiles the trampoline for a lowered host import: spills the wasm
    /// arguments to a stack array, calls the host lowering function recorded
    /// in the `VMComponentContext`, and (for the wasm ABI) reloads the
    /// results from that array on success.
    fn translate_lower_import(
        &mut self,
        index: LoweredIndex,
        options: &CanonicalOptions,
        lower_ty: TypeFuncIndex,
    ) {
        let pointer_type = self.isa.pointer_type();
        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
        let vmctx = args[0];
        let wasm_func_ty = self.types[self.signature].unwrap_func();

        let (values_vec_ptr, values_vec_len) = self.store_wasm_arguments(&args[2..]);

        // Below this will incrementally build both the signature of the host
        // function we're calling as well as the list of arguments since the
        // list is somewhat long.  Note that `host_sig` and `callee_args` must
        // be pushed to in lock-step so parameters line up.
        let mut callee_args = Vec::new();
        let mut host_sig = ir::Signature::new(CallConv::triple_default(self.isa.triple()));

        let CanonicalOptions {
            instance,
            memory,
            realloc,
            callback,
            post_return,
            string_encoding,
            async_,
        } = *options;

        // Callbacks are not expected for lowered imports.
        assert!(callback.is_none());

        // vmctx: *mut VMComponentContext
        host_sig.params.push(ir::AbiParam::new(pointer_type));
        callee_args.push(vmctx);

        // data: *mut u8,
        host_sig.params.push(ir::AbiParam::new(pointer_type));
        callee_args.push(self.builder.ins().load(
            pointer_type,
            MemFlags::trusted(),
            vmctx,
            i32::try_from(self.offsets.lowering_data(index)).unwrap(),
        ));

        // ty: TypeFuncIndex,
        host_sig.params.push(ir::AbiParam::new(ir::types::I32));
        callee_args.push(
            self.builder
                .ins()
                .iconst(ir::types::I32, i64::from(lower_ty.as_u32())),
        );

        // caller_instance: RuntimeComponentInstanceIndex
        host_sig.params.push(ir::AbiParam::new(ir::types::I32));
        callee_args.push(
            self.builder
                .ins()
                .iconst(ir::types::I32, i64::from(instance.as_u32())),
        );

        // flags: *mut VMGlobalDefinition
        host_sig.params.push(ir::AbiParam::new(pointer_type));
        callee_args.push(
            self.builder
                .ins()
                .iadd_imm(vmctx, i64::from(self.offsets.instance_flags(instance))),
        );

        // memory: *mut VMMemoryDefinition
        host_sig.params.push(ir::AbiParam::new(pointer_type));
        callee_args.push(match memory {
            Some(idx) => self.load_memory(vmctx, idx),
            None => self.builder.ins().iconst(pointer_type, 0),
        });

        // realloc: *mut VMFuncRef
        host_sig.params.push(ir::AbiParam::new(pointer_type));
        callee_args.push(self.load_realloc(vmctx, realloc));

        // A post-return option is only valid on `canon.lift`'d functions so no
        // valid component should have this specified for a lowering which this
        // trampoline compiler is interested in.
        assert!(post_return.is_none());

        // string_encoding: StringEncoding
        host_sig.params.push(ir::AbiParam::new(ir::types::I8));
        callee_args.push(self.string_encoding(string_encoding));

        // async_: bool
        host_sig.params.push(ir::AbiParam::new(ir::types::I8));
        callee_args.push(
            self.builder
                .ins()
                .iconst(ir::types::I8, if async_ { 1 } else { 0 }),
        );

        // storage: *mut ValRaw
        host_sig.params.push(ir::AbiParam::new(pointer_type));
        callee_args.push(values_vec_ptr);

        // storage_len: usize
        host_sig.params.push(ir::AbiParam::new(pointer_type));
        callee_args.push(values_vec_len);

        // return value is a bool whether a trap was raised or not
        host_sig.returns.push(ir::AbiParam::new(ir::types::I8));

        // Load host function pointer from the vmcontext and then call that
        // indirect function pointer with the list of arguments.
        let host_fn = self.builder.ins().load(
            pointer_type,
            MemFlags::trusted(),
            vmctx,
            i32::try_from(self.offsets.lowering_callee(index)).unwrap(),
        );
        let host_sig = self.builder.import_signature(host_sig);
        let call = self.compiler.call_indirect_host(
            &mut self.builder,
            HostCall::ComponentLowerImport,
            host_sig,
            host_fn,
            &callee_args,
        );
        let succeeded = self.builder.func.dfg.inst_results(call)[0];

        match self.abi {
            Abi::Wasm => {
                self.raise_if_host_trapped(succeeded);
                // After the host function has returned the results are loaded from
                // `values_vec_ptr` and then returned.
                let results = self.compiler.load_values_from_array(
                    wasm_func_ty.returns(),
                    &mut self.builder,
                    values_vec_ptr,
                    values_vec_len,
                );
                self.builder.ins().return_(&results);
            }
            Abi::Array => {
                // The array ABI reports success/failure directly to the
                // caller rather than raising a trap here.
                self.builder.ins().return_(&[succeeded]);
            }
        }
    }
834
835    fn translate_always_trap(&mut self) {
836        if self.tunables.signals_based_traps {
837            self.builder.ins().trap(TRAP_ALWAYS);
838            return;
839        }
840
841        let args = self.abi_load_params();
842        let vmctx = args[0];
843
844        let (host_sig, index) = host::trap(self.isa, &mut self.builder.func);
845        let host_fn = self.load_libcall(vmctx, index);
846
847        let code = self.builder.ins().iconst(
848            ir::types::I8,
849            i64::from(wasmtime_environ::Trap::AlwaysTrapAdapter as u8),
850        );
851        self.compiler.call_indirect_host(
852            &mut self.builder,
853            index,
854            host_sig,
855            host_fn,
856            &[vmctx, code],
857        );
858        let succeeded = self.builder.ins().iconst(ir::types::I8, 0);
859        self.raise_if_host_trapped(succeeded);
860        // debug trap in case execution actually falls through, but this
861        // shouldn't ever get hit at runtime.
862        self.builder.ins().trap(TRAP_INTERNAL_ASSERT);
863    }
864
865    fn translate_resource_new(&mut self, resource: TypeResourceTableIndex) {
866        let args = self.abi_load_params();
867        let vmctx = args[0];
868
869        // The arguments this shim passes along to the libcall are:
870        //
871        //   * the vmctx
872        //   * a constant value for this `ResourceNew` intrinsic
873        //   * the wasm argument to wrap
874        let mut host_args = Vec::new();
875        host_args.push(vmctx);
876        host_args.push(
877            self.builder
878                .ins()
879                .iconst(ir::types::I32, i64::from(resource.as_u32())),
880        );
881        host_args.push(args[2]);
882
883        // Currently this only support resources represented by `i32`
884        assert_eq!(
885            self.types[self.signature].unwrap_func().params()[0],
886            WasmValType::I32
887        );
888        let call = self.call_libcall(vmctx, host::resource_new32, &host_args);
889        let result = self.builder.func.dfg.inst_results(call)[0];
890        let result = self.raise_if_negative_one(result);
891        self.abi_store_results(&[result]);
892    }
893
894    fn translate_resource_rep(&mut self, resource: TypeResourceTableIndex) {
895        let args = self.abi_load_params();
896        let vmctx = args[0];
897
898        // The arguments this shim passes along to the libcall are:
899        //
900        //   * the vmctx
901        //   * a constant value for this `ResourceRep` intrinsic
902        //   * the wasm argument to unwrap
903        let mut host_args = Vec::new();
904        host_args.push(vmctx);
905        host_args.push(
906            self.builder
907                .ins()
908                .iconst(ir::types::I32, i64::from(resource.as_u32())),
909        );
910        host_args.push(args[2]);
911
912        // Currently this only support resources represented by `i32`
913        assert_eq!(
914            self.types[self.signature].unwrap_func().returns()[0],
915            WasmValType::I32
916        );
917        let call = self.call_libcall(vmctx, host::resource_rep32, &host_args);
918        let result = self.builder.func.dfg.inst_results(call)[0];
919        let result = self.raise_if_negative_one(result);
920        self.abi_store_results(&[result]);
921    }
922
    /// Translates a `resource.drop` intrinsic.
    ///
    /// The host libcall removes the handle from the resource table and
    /// reports, via its return value, whether this trampoline must also
    /// invoke the resource's destructor. When required the destructor is
    /// called directly from this trampoline, subject to a `may_enter`
    /// check when crossing component-instance boundaries.
    fn translate_resource_drop(&mut self, resource: TypeResourceTableIndex) {
        let args = self.abi_load_params();
        let vmctx = args[0];
        let caller_vmctx = args[1];
        let pointer_type = self.isa.pointer_type();

        // The arguments this shim passes along to the libcall are:
        //
        //   * the vmctx
        //   * a constant value for this `ResourceDrop` intrinsic
        //   * the wasm handle index to drop
        let mut host_args = Vec::new();
        host_args.push(vmctx);
        host_args.push(
            self.builder
                .ins()
                .iconst(ir::types::I32, i64::from(resource.as_u32())),
        );
        host_args.push(args[2]);

        let call = self.call_libcall(vmctx, host::resource_drop, &host_args);
        let should_run_destructor = self.builder.func.dfg.inst_results(call)[0];

        // Immediately raise a trap if requested by the host, which is
        // signalled by a `-1` return value.
        let minus_one = self.builder.ins().iconst(ir::types::I64, -1);
        let succeeded = self
            .builder
            .ins()
            .icmp(IntCC::NotEqual, should_run_destructor, minus_one);
        self.raise_if_host_trapped(succeeded);

        // Look up, at compile time, whether this resource is defined by
        // this component and, if so, its `GlobalInitializer::Resource`
        // record which says whether a destructor was supplied.
        let resource_ty = self.types[resource].ty;
        let resource_def = self
            .component
            .defined_resource_index(resource_ty)
            .map(|idx| {
                self.component
                    .initializers
                    .iter()
                    .filter_map(|i| match i {
                        GlobalInitializer::Resource(r) if r.index == idx => Some(r),
                        _ => None,
                    })
                    .next()
                    .unwrap()
            });
        // For resources not defined here (`None`) the destructor's presence
        // isn't statically known, so conservatively assume there is one.
        let has_destructor = match resource_def {
            Some(def) => def.dtor.is_some(),
            None => true,
        };
        // Synthesize the following:
        //
        //      ...
        //      brif should_run_destructor, run_destructor_block, return_block
        //
        //    run_destructor_block:
        //      ;; test may_enter, but only if the component instances
        //      ;; differ
        //      flags = load.i32 vmctx+$offset
        //      masked = band flags, $FLAG_MAY_ENTER
        //      trapz masked, CANNOT_ENTER_CODE
        //
        //      ;; ============================================================
        //      ;; this is conditionally emitted based on whether the resource
        //      ;; has a destructor or not, and can be statically omitted
        //      ;; because that information is known at compile time here.
        //      rep = ushr.i64 rep, 1
        //      rep = ireduce.i32 rep
        //      dtor = load.ptr vmctx+$offset
        //      func_addr = load.ptr dtor+$offset
        //      callee_vmctx = load.ptr dtor+$offset
        //      call_indirect func_addr, callee_vmctx, vmctx, rep
        //      ;; ============================================================
        //
        //      jump return_block
        //
        //    return_block:
        //      return
        //
        // This will decode `should_run_destructor` and run the destructor
        // funcref if one is specified for this resource. Note that not all
        // resources have destructors, hence the null check.
        self.builder.ensure_inserted_block();
        let current_block = self.builder.current_block().unwrap();
        let run_destructor_block = self.builder.create_block();
        self.builder
            .insert_block_after(run_destructor_block, current_block);
        let return_block = self.builder.create_block();
        self.builder
            .insert_block_after(return_block, run_destructor_block);

        // A zero `should_run_destructor` means no destructor work is needed.
        self.builder.ins().brif(
            should_run_destructor,
            run_destructor_block,
            &[],
            return_block,
            &[],
        );

        let trusted = ir::MemFlags::trusted().with_readonly();

        self.builder.switch_to_block(run_destructor_block);

        // If this is a defined resource within the component itself then a
        // check needs to be emitted for the `may_enter` flag. Note though
        // that this check can be elided if the resource table resides in
        // the same component instance that defined the resource as the
        // component is calling itself.
        if let Some(def) = resource_def {
            if self.types[resource].instance != def.instance {
                let flags = self.builder.ins().load(
                    ir::types::I32,
                    trusted,
                    vmctx,
                    i32::try_from(self.offsets.instance_flags(def.instance)).unwrap(),
                );
                let masked = self
                    .builder
                    .ins()
                    .band_imm(flags, i64::from(FLAG_MAY_ENTER));
                self.builder.ins().trapz(masked, TRAP_CANNOT_ENTER);
            }
        }

        // Conditionally emit destructor-execution code based on whether we
        // statically know that a destructor exists or not.
        if has_destructor {
            // NOTE(review): the nonzero return value appears to carry the
            // 32-bit rep in its upper bits (hence the shift-by-one decode
            // here) -- confirm against the host `resource_drop` libcall.
            let rep = self.builder.ins().ushr_imm(should_run_destructor, 1);
            let rep = self.builder.ins().ireduce(ir::types::I32, rep);
            let index = self.types[resource].ty;
            // NB: despite the vmcontext storing nullable funcrefs for function
            // pointers we know this is statically never null due to the
            // `has_destructor` check above.
            let dtor_func_ref = self.builder.ins().load(
                pointer_type,
                trusted,
                vmctx,
                i32::try_from(self.offsets.resource_destructor(index)).unwrap(),
            );
            if cfg!(debug_assertions) {
                self.builder
                    .ins()
                    .trapz(dtor_func_ref, TRAP_INTERNAL_ASSERT);
            }
            let func_addr = self.builder.ins().load(
                pointer_type,
                trusted,
                dtor_func_ref,
                i32::from(self.offsets.ptr.vm_func_ref_wasm_call()),
            );
            let callee_vmctx = self.builder.ins().load(
                pointer_type,
                trusted,
                dtor_func_ref,
                i32::from(self.offsets.ptr.vm_func_ref_vmctx()),
            );

            let sig = crate::wasm_call_signature(
                self.isa,
                &self.types[self.signature].unwrap_func(),
                &self.compiler.tunables,
            );
            let sig_ref = self.builder.import_signature(sig);

            // NB: note that the "caller" vmctx here is the caller of this
            // intrinsic itself, not the `VMComponentContext`. This effectively
            // takes ourselves out of the chain here but that's ok since the
            // caller is only used for store/limits and that same info is
            // stored, but elsewhere, in the component context.
            self.builder.ins().call_indirect(
                sig_ref,
                func_addr,
                &[callee_vmctx, caller_vmctx, rep],
            );
        }
        self.builder.ins().jump(return_block, &[]);
        self.builder.seal_block(run_destructor_block);

        self.builder.switch_to_block(return_block);
        self.builder.seal_block(return_block);
        self.abi_store_results(&[]);
    }
1105
1106    /// Invokes a host libcall and returns the result.
1107    ///
1108    /// Only intended for simple trampolines and effectively acts as a bridge
1109    /// from the wasm abi to host.
1110    fn translate_host_libcall(
1111        &mut self,
1112        get_libcall: GetLibcallFn,
1113        handle_results: fn(&mut Self, &mut Vec<ir::Value>),
1114    ) {
1115        match self.abi {
1116            Abi::Wasm => {}
1117
1118            // These trampolines can only actually be called by Wasm, so
1119            // let's assert that here.
1120            Abi::Array => {
1121                self.builder.ins().trap(TRAP_INTERNAL_ASSERT);
1122                return;
1123            }
1124        }
1125
1126        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
1127        let vmctx = args[0];
1128        let mut host_args = vec![vmctx];
1129        host_args.extend(args[2..].iter().copied());
1130
1131        let call = self.call_libcall(vmctx, get_libcall, &host_args);
1132        let mut results = self.builder.func.dfg.inst_results(call).to_vec();
1133        handle_results(self, &mut results);
1134        self.builder.ins().return_(&results);
1135    }
1136
1137    fn translate_cancel_call(&mut self, ty: u32, async_: bool, get_libcall: GetLibcallFn) {
1138        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
1139        let vmctx = args[0];
1140        let mut callee_args = vec![
1141            vmctx,
1142            self.builder.ins().iconst(ir::types::I32, i64::from(ty)),
1143            self.builder
1144                .ins()
1145                .iconst(ir::types::I8, if async_ { 1 } else { 0 }),
1146        ];
1147
1148        callee_args.extend(args[2..].iter().copied());
1149
1150        self.translate_intrinsic_libcall(
1151            vmctx,
1152            get_libcall,
1153            &callee_args,
1154            TrapSentinel::NegativeOne,
1155        );
1156    }
1157
1158    fn load_memory(&mut self, vmctx: ir::Value, memory: RuntimeMemoryIndex) -> ir::Value {
1159        self.builder.ins().load(
1160            self.isa.pointer_type(),
1161            MemFlags::trusted(),
1162            vmctx,
1163            i32::try_from(self.offsets.runtime_memory(memory)).unwrap(),
1164        )
1165    }
1166
1167    fn load_realloc(
1168        &mut self,
1169        vmctx: ir::Value,
1170        realloc: Option<RuntimeReallocIndex>,
1171    ) -> ir::Value {
1172        let pointer_type = self.isa.pointer_type();
1173        match realloc {
1174            Some(idx) => self.builder.ins().load(
1175                pointer_type,
1176                MemFlags::trusted(),
1177                vmctx,
1178                i32::try_from(self.offsets.runtime_realloc(idx)).unwrap(),
1179            ),
1180            None => self.builder.ins().iconst(pointer_type, 0),
1181        }
1182    }
1183
1184    fn load_callback(
1185        &mut self,
1186        vmctx: ir::Value,
1187        callback: Option<RuntimeCallbackIndex>,
1188    ) -> ir::Value {
1189        let pointer_type = self.isa.pointer_type();
1190        match callback {
1191            Some(idx) => self.builder.ins().load(
1192                pointer_type,
1193                MemFlags::trusted(),
1194                vmctx,
1195                i32::try_from(self.offsets.runtime_callback(idx)).unwrap(),
1196            ),
1197            None => self.builder.ins().iconst(pointer_type, 0),
1198        }
1199    }
1200
1201    fn load_post_return(
1202        &mut self,
1203        vmctx: ir::Value,
1204        post_return: Option<RuntimePostReturnIndex>,
1205    ) -> ir::Value {
1206        let pointer_type = self.isa.pointer_type();
1207        match post_return {
1208            Some(idx) => self.builder.ins().load(
1209                pointer_type,
1210                MemFlags::trusted(),
1211                vmctx,
1212                i32::try_from(self.offsets.runtime_post_return(idx)).unwrap(),
1213            ),
1214            None => self.builder.ins().iconst(pointer_type, 0),
1215        }
1216    }
1217
1218    fn string_encoding(&mut self, string_encoding: StringEncoding) -> ir::Value {
1219        self.builder
1220            .ins()
1221            .iconst(ir::types::I8, i64::from(string_encoding as u8))
1222    }
1223
1224    fn translate_future_or_stream_call(
1225        &mut self,
1226        tys: &[u32],
1227        options: Option<&CanonicalOptions>,
1228        get_libcall: GetLibcallFn,
1229        sentinel: TrapSentinel,
1230    ) {
1231        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
1232        let vmctx = args[0];
1233        let mut callee_args = vec![vmctx];
1234
1235        if let Some(options) = options {
1236            // memory: *mut VMMemoryDefinition
1237            callee_args.push(self.load_memory(vmctx, options.memory.unwrap()));
1238            // realloc: *mut VMFuncRef
1239            callee_args.push(self.load_realloc(vmctx, options.realloc));
1240            // string_encoding: StringEncoding
1241            callee_args.push(self.string_encoding(options.string_encoding))
1242        }
1243
1244        for ty in tys {
1245            callee_args.push(self.builder.ins().iconst(ir::types::I32, i64::from(*ty)));
1246        }
1247
1248        callee_args.extend(args[2..].iter().copied());
1249
1250        self.translate_intrinsic_libcall(vmctx, get_libcall, &callee_args, sentinel);
1251    }
1252
1253    fn translate_flat_stream_call(
1254        &mut self,
1255        tys: &[u32],
1256        options: &CanonicalOptions,
1257        get_libcall: GetLibcallFn,
1258        info: &CanonicalAbiInfo,
1259    ) {
1260        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
1261        let vmctx = args[0];
1262        let mut callee_args = vec![
1263            vmctx,
1264            self.load_memory(vmctx, options.memory.unwrap()),
1265            self.load_realloc(vmctx, options.realloc),
1266        ];
1267        for ty in tys {
1268            callee_args.push(self.builder.ins().iconst(ir::types::I32, i64::from(*ty)));
1269        }
1270
1271        callee_args.extend([
1272            self.builder
1273                .ins()
1274                .iconst(ir::types::I32, i64::from(info.size32)),
1275            self.builder
1276                .ins()
1277                .iconst(ir::types::I32, i64::from(info.align32)),
1278        ]);
1279
1280        callee_args.extend(args[2..].iter().copied());
1281
1282        self.translate_intrinsic_libcall(
1283            vmctx,
1284            get_libcall,
1285            &callee_args,
1286            TrapSentinel::NegativeOne,
1287        );
1288    }
1289
1290    fn translate_error_context_call(
1291        &mut self,
1292        ty: TypeComponentLocalErrorContextTableIndex,
1293        options: &CanonicalOptions,
1294        get_libcall: GetLibcallFn,
1295        sentinel: TrapSentinel,
1296    ) {
1297        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
1298        let vmctx = args[0];
1299        let mut callee_args = vec![
1300            vmctx,
1301            self.load_memory(vmctx, options.memory.unwrap()),
1302            self.load_realloc(vmctx, options.realloc),
1303            self.string_encoding(options.string_encoding),
1304            self.builder
1305                .ins()
1306                .iconst(ir::types::I32, i64::from(ty.as_u32())),
1307        ];
1308
1309        callee_args.extend(args[2..].iter().copied());
1310
1311        self.translate_intrinsic_libcall(vmctx, get_libcall, &callee_args, sentinel);
1312    }
1313
1314    fn translate_error_context_drop_call(&mut self, ty: TypeComponentLocalErrorContextTableIndex) {
1315        let args = self.builder.func.dfg.block_params(self.block0).to_vec();
1316        let vmctx = args[0];
1317        let mut callee_args = vec![
1318            vmctx,
1319            self.builder
1320                .ins()
1321                .iconst(ir::types::I32, i64::from(ty.as_u32())),
1322        ];
1323
1324        callee_args.extend(args[2..].iter().copied());
1325
1326        self.translate_intrinsic_libcall(
1327            vmctx,
1328            host::error_context_drop,
1329            &callee_args,
1330            TrapSentinel::Falsy,
1331        );
1332    }
1333
1334    /// Loads a host function pointer for a libcall stored at the `offset`
1335    /// provided in the libcalls array.
1336    ///
1337    /// The offset is calculated in the `host` module below.
1338    fn load_libcall(
1339        &mut self,
1340        vmctx: ir::Value,
1341        index: ComponentBuiltinFunctionIndex,
1342    ) -> ir::Value {
1343        let pointer_type = self.isa.pointer_type();
1344        // First load the pointer to the builtins structure which is static
1345        // per-process.
1346        let builtins_array = self.builder.ins().load(
1347            pointer_type,
1348            MemFlags::trusted().with_readonly(),
1349            vmctx,
1350            i32::try_from(self.offsets.builtins()).unwrap(),
1351        );
1352        // Next load the function pointer at `offset` and return that.
1353        self.builder.ins().load(
1354            pointer_type,
1355            MemFlags::trusted().with_readonly(),
1356            builtins_array,
1357            i32::try_from(index.index() * u32::from(self.offsets.ptr.size())).unwrap(),
1358        )
1359    }
1360
1361    fn abi_load_params(&mut self) -> Vec<ir::Value> {
1362        let mut block0_params = self.builder.func.dfg.block_params(self.block0).to_vec();
1363        match self.abi {
1364            // Wasm and native ABIs pass parameters as normal function
1365            // parameters.
1366            Abi::Wasm => block0_params,
1367
1368            // The array ABI passes a pointer/length as the 3rd/4th arguments
1369            // and those are used to load the actual wasm parameters.
1370            Abi::Array => {
1371                let results = self.compiler.load_values_from_array(
1372                    self.types[self.signature].unwrap_func().params(),
1373                    &mut self.builder,
1374                    block0_params[2],
1375                    block0_params[3],
1376                );
1377                block0_params.truncate(2);
1378                block0_params.extend(results);
1379                block0_params
1380            }
1381        }
1382    }
1383
1384    fn abi_store_results(&mut self, results: &[ir::Value]) {
1385        match self.abi {
1386            // Wasm/native ABIs return values as usual.
1387            Abi::Wasm => {
1388                self.builder.ins().return_(results);
1389            }
1390
1391            // The array ABI stores all results in the pointer/length passed
1392            // as arguments to this function, which contractually are required
1393            // to have enough space for the results.
1394            Abi::Array => {
1395                let block0_params = self.builder.func.dfg.block_params(self.block0);
1396                let (ptr, len) = (block0_params[2], block0_params[3]);
1397                self.compiler.store_values_to_array(
1398                    &mut self.builder,
1399                    self.types[self.signature].unwrap_func().returns(),
1400                    results,
1401                    ptr,
1402                    len,
1403                );
1404                let true_value = self.builder.ins().iconst(ir::types::I8, 1);
1405                self.builder.ins().return_(&[true_value]);
1406            }
1407        }
1408    }
1409
1410    fn raise_if_host_trapped(&mut self, succeeded: ir::Value) {
1411        let caller_vmctx = self.builder.func.dfg.block_params(self.block0)[1];
1412        self.compiler
1413            .raise_if_host_trapped(&mut self.builder, caller_vmctx, succeeded);
1414    }
1415
1416    fn raise_if_transcode_trapped(&mut self, amount_copied: ir::Value) {
1417        let pointer_type = self.isa.pointer_type();
1418        let minus_one = self.builder.ins().iconst(pointer_type, -1);
1419        let succeeded = self
1420            .builder
1421            .ins()
1422            .icmp(IntCC::NotEqual, amount_copied, minus_one);
1423        self.raise_if_host_trapped(succeeded);
1424    }
1425
1426    fn raise_if_negative_one(&mut self, ret: ir::Value) -> ir::Value {
1427        let minus_one = self.builder.ins().iconst(ir::types::I64, -1);
1428        let succeeded = self.builder.ins().icmp(IntCC::NotEqual, ret, minus_one);
1429        self.raise_if_host_trapped(succeeded);
1430        self.builder.ins().ireduce(ir::types::I32, ret)
1431    }
1432
1433    fn call_libcall(
1434        &mut self,
1435        vmctx: ir::Value,
1436        get_libcall: GetLibcallFn,
1437        args: &[ir::Value],
1438    ) -> ir::Inst {
1439        let (host_sig, index) = get_libcall(self.isa, &mut self.builder.func);
1440        let host_fn = self.load_libcall(vmctx, index);
1441        self.compiler
1442            .call_indirect_host(&mut self.builder, index, host_sig, host_fn, args)
1443    }
1444}
1445
impl ComponentCompiler for Compiler {
    /// Compiles the trampoline identified by `index` once for each calling
    /// convention (wasm and array), returning both compiled functions.
    fn compile_trampoline(
        &self,
        component: &ComponentTranslation,
        types: &ComponentTypesBuilder,
        index: TrampolineIndex,
        tunables: &Tunables,
    ) -> Result<AllCallFunc<Box<dyn Any + Send>>> {
        let compile = |abi: Abi| -> Result<_> {
            let mut compiler = self.function_compiler();
            let mut c = TrampolineCompiler::new(
                self,
                &mut compiler,
                &component.component,
                types,
                index,
                abi,
                tunables,
            );

            // If we are crossing the Wasm-to-native boundary, we need to save the
            // exit FP and return address for stack walking purposes. However, we
            // always debug assert that our vmctx is a component context, regardless
            // whether we are actually crossing that boundary because it should
            // always hold.
            let vmctx = c.builder.block_params(c.block0)[0];
            let pointer_type = self.isa.pointer_type();
            super::debug_assert_vmctx_kind(
                &*self.isa,
                &mut c.builder,
                vmctx,
                wasmtime_environ::component::VMCOMPONENT_MAGIC,
            );
            if let Abi::Wasm = abi {
                // Record the exit frame pointer/return address in the store
                // context before any trampoline body code runs.
                let vm_store_context = c.builder.ins().load(
                    pointer_type,
                    MemFlags::trusted(),
                    vmctx,
                    i32::try_from(c.offsets.vm_store_context()).unwrap(),
                );
                super::save_last_wasm_exit_fp_and_pc(
                    &mut c.builder,
                    pointer_type,
                    &c.offsets.ptr,
                    vm_store_context,
                );
            }

            // Emit the trampoline body itself, then finalize the function.
            c.translate(&component.trampolines[index]);
            c.builder.finalize();

            Ok(Box::new(compiler.finish(&format!(
                "component_trampoline_{}_{abi:?}",
                index.as_u32(),
            ))?))
        };
        Ok(AllCallFunc {
            wasm_call: compile(Abi::Wasm)?,
            array_call: compile(Abi::Array)?,
        })
    }
}
1508
1509impl TrampolineCompiler<'_> {
    /// Translates a string-transcoding trampoline which bridges a wasm
    /// caller to one of the host's transcoder libcalls.
    ///
    /// `from`/`to` identify the source/destination linear memories and
    /// `from64`/`to64` indicate whether each memory uses 64-bit addresses.
    fn translate_transcode(
        &mut self,
        op: Transcode,
        from: RuntimeMemoryIndex,
        from64: bool,
        to: RuntimeMemoryIndex,
        to64: bool,
    ) {
        let pointer_type = self.isa.pointer_type();
        let vmctx = self.builder.func.dfg.block_params(self.block0)[0];

        // Determine the static signature of the host libcall for this transcode
        // operation and additionally calculate the static offset within the
        // transcode libcalls array.
        let get_libcall = match op {
            Transcode::Copy(FixedEncoding::Utf8) => host::utf8_to_utf8,
            Transcode::Copy(FixedEncoding::Utf16) => host::utf16_to_utf16,
            Transcode::Copy(FixedEncoding::Latin1) => host::latin1_to_latin1,
            Transcode::Latin1ToUtf16 => host::latin1_to_utf16,
            Transcode::Latin1ToUtf8 => host::latin1_to_utf8,
            Transcode::Utf16ToCompactProbablyUtf16 => host::utf16_to_compact_probably_utf16,
            Transcode::Utf16ToCompactUtf16 => host::utf16_to_compact_utf16,
            Transcode::Utf16ToLatin1 => host::utf16_to_latin1,
            Transcode::Utf16ToUtf8 => host::utf16_to_utf8,
            Transcode::Utf8ToCompactUtf16 => host::utf8_to_compact_utf16,
            Transcode::Utf8ToLatin1 => host::utf8_to_latin1,
            Transcode::Utf8ToUtf16 => host::utf8_to_utf16,
        };

        // Load the base pointers for the from/to linear memories.
        let from_base = self.load_runtime_memory_base(vmctx, from);
        let to_base = self.load_runtime_memory_base(vmctx, to);

        let mut args = Vec::new();

        // These libcalls produce a second result through an out-pointer
        // (a stack slot allocated below) in addition to the direct return.
        let uses_retptr = match op {
            Transcode::Utf16ToUtf8
            | Transcode::Latin1ToUtf8
            | Transcode::Utf8ToLatin1
            | Transcode::Utf16ToLatin1 => true,
            _ => false,
        };

        // Most transcoders share roughly the same signature despite doing very
        // different things internally, so most libcalls are lumped together
        // here.
        match op {
            // (from_ptr, from_len, to_ptr)
            Transcode::Copy(_)
            | Transcode::Latin1ToUtf16
            | Transcode::Utf16ToCompactProbablyUtf16
            | Transcode::Utf8ToLatin1
            | Transcode::Utf16ToLatin1
            | Transcode::Utf8ToUtf16 => {
                args.push(self.ptr_param(0, from64, from_base));
                args.push(self.len_param(1, from64));
                args.push(self.ptr_param(2, to64, to_base));
            }

            // (from_ptr, from_len, to_ptr, to_len)
            Transcode::Utf16ToUtf8 | Transcode::Latin1ToUtf8 => {
                args.push(self.ptr_param(0, from64, from_base));
                args.push(self.len_param(1, from64));
                args.push(self.ptr_param(2, to64, to_base));
                args.push(self.len_param(3, to64));
            }

            // (from_ptr, from_len, to_ptr, to_len, to_len2)
            Transcode::Utf8ToCompactUtf16 | Transcode::Utf16ToCompactUtf16 => {
                args.push(self.ptr_param(0, from64, from_base));
                args.push(self.len_param(1, from64));
                args.push(self.ptr_param(2, to64, to_base));
                args.push(self.len_param(3, to64));
                args.push(self.len_param(4, to64));
            }
        };
        if uses_retptr {
            // Allocate a pointer-sized stack slot for the out-parameter and
            // pass its address as the final libcall argument.
            let slot = self
                .builder
                .func
                .create_sized_stack_slot(ir::StackSlotData::new(
                    ir::StackSlotKind::ExplicitSlot,
                    pointer_type.bytes(),
                    0,
                ));
            args.push(self.builder.ins().stack_addr(pointer_type, slot, 0));
        }
        let call = self.call_libcall(vmctx, get_libcall, &args);
        let mut results = self.builder.func.dfg.inst_results(call).to_vec();
        if uses_retptr {
            // Read back the value the host wrote through the out-pointer.
            results.push(self.builder.ins().load(
                pointer_type,
                ir::MemFlags::trusted(),
                *args.last().unwrap(),
                0,
            ));
        }
        let mut raw_results = Vec::new();

        // Like the arguments the results are fairly similar across libcalls, so
        // they're lumped into various buckets here.
        match op {
            // Only a success/trap flag is returned.
            Transcode::Copy(_) | Transcode::Latin1ToUtf16 => {
                self.raise_if_host_trapped(results[0]);
            }

            // One amount-copied value (or -1 on trap) for the `to` memory.
            Transcode::Utf8ToUtf16
            | Transcode::Utf16ToCompactProbablyUtf16
            | Transcode::Utf8ToCompactUtf16
            | Transcode::Utf16ToCompactUtf16 => {
                self.raise_if_transcode_trapped(results[0]);
                raw_results.push(self.cast_from_pointer(results[0], to64));
            }

            // Two values (or -1 on trap): one for the `from` memory and one
            // for the `to` memory.
            Transcode::Latin1ToUtf8
            | Transcode::Utf16ToUtf8
            | Transcode::Utf8ToLatin1
            | Transcode::Utf16ToLatin1 => {
                self.raise_if_transcode_trapped(results[0]);
                raw_results.push(self.cast_from_pointer(results[0], from64));
                raw_results.push(self.cast_from_pointer(results[1], to64));
            }
        };

        self.builder.ins().return_(&raw_results);
    }
1633
1634    // Helper function to cast an input parameter to the host pointer type.
1635    fn len_param(&mut self, param: usize, is64: bool) -> ir::Value {
1636        let val = self.builder.func.dfg.block_params(self.block0)[2 + param];
1637        self.cast_to_pointer(val, is64)
1638    }
1639
1640    // Helper function to interpret an input parameter as a pointer into
1641    // linear memory. This will cast the input parameter to the host integer
1642    // type and then add that value to the base.
1643    //
1644    // Note that bounds-checking happens in adapter modules, and this
1645    // trampoline is simply calling the host libcall.
1646    fn ptr_param(&mut self, param: usize, is64: bool, base: ir::Value) -> ir::Value {
1647        let val = self.len_param(param, is64);
1648        self.builder.ins().iadd(base, val)
1649    }
1650
1651    // Helper function to cast a core wasm input to a host pointer type
1652    // which will go into the host libcall.
1653    fn cast_to_pointer(&mut self, val: ir::Value, is64: bool) -> ir::Value {
1654        let pointer_type = self.isa.pointer_type();
1655        let host64 = pointer_type == ir::types::I64;
1656        if is64 == host64 {
1657            val
1658        } else if !is64 {
1659            assert!(host64);
1660            self.builder.ins().uextend(pointer_type, val)
1661        } else {
1662            assert!(!host64);
1663            self.builder.ins().ireduce(pointer_type, val)
1664        }
1665    }
1666
1667    // Helper to cast a host pointer integer type to the destination type.
1668    fn cast_from_pointer(&mut self, val: ir::Value, is64: bool) -> ir::Value {
1669        let host64 = self.isa.pointer_type() == ir::types::I64;
1670        if is64 == host64 {
1671            val
1672        } else if !is64 {
1673            assert!(host64);
1674            self.builder.ins().ireduce(ir::types::I32, val)
1675        } else {
1676            assert!(!host64);
1677            self.builder.ins().uextend(ir::types::I64, val)
1678        }
1679    }
1680
1681    fn load_runtime_memory_base(&mut self, vmctx: ir::Value, mem: RuntimeMemoryIndex) -> ir::Value {
1682        let pointer_type = self.isa.pointer_type();
1683        let from_vmmemory_definition = self.load_memory(vmctx, mem);
1684        self.builder.ins().load(
1685            pointer_type,
1686            MemFlags::trusted(),
1687            from_vmmemory_definition,
1688            i32::from(self.offsets.ptr.vmmemory_definition_base()),
1689        )
1690    }
1691}
1692
/// Module with macro-generated contents that will return the signature and
/// offset for each of the host transcoder functions.
///
/// Note that a macro is used here to keep this in sync with the actual
/// transcoder functions themselves which are also defined via a macro.
mod host {
    use cranelift_codegen::ir::{self, AbiParam};
    use cranelift_codegen::isa::{CallConv, TargetIsa};
    use wasmtime_environ::component::ComponentBuiltinFunctionIndex;

    // For each builtin declaration `name(params...) -> result;` this expands
    // to a `pub(super) fn name(...)` that imports the builtin's native
    // signature into `func` and pairs it with the builtin's index.
    macro_rules! define {
        (
            $(
                $( #[$attr:meta] )*
                $name:ident( $( $pname:ident: $param:ident ),* ) $( -> $result:ident )?;
            )*
        ) => {
            $(
                pub(super) fn $name(isa: &dyn TargetIsa, func: &mut ir::Function) -> (ir::SigRef, ComponentBuiltinFunctionIndex) {
                    let pointer_type = isa.pointer_type();
                    // Map each declared parameter/result token to a concrete
                    // CLIF type via the `@ty` rules below.
                    let params = vec![
                        $( AbiParam::new(define!(@ty pointer_type $param)) ),*
                    ];
                    let returns = vec![
                        $( AbiParam::new(define!(@ty pointer_type $result)) )?
                    ];
                    // Builtins are host functions, so they use the native
                    // (non-wasm) calling convention for the target triple.
                    let sig = func.import_signature(ir::Signature {
                        params,
                        returns,
                        call_conv: CallConv::triple_default(isa.triple()),
                    });

                    (sig, ComponentBuiltinFunctionIndex::$name())
                }
            )*
        };

        // Type-mapping rules: pointer-like and size-like tokens become the
        // host pointer type; fixed-width integer tokens become the
        // corresponding CLIF integer type (`bool` is passed as an i8).
        (@ty $ptr:ident size) => ($ptr);
        (@ty $ptr:ident ptr_u8) => ($ptr);
        (@ty $ptr:ident ptr_u16) => ($ptr);
        (@ty $ptr:ident ptr_size) => ($ptr);
        (@ty $ptr:ident bool) => (ir::types::I8);
        (@ty $ptr:ident u8) => (ir::types::I8);
        (@ty $ptr:ident u32) => (ir::types::I32);
        (@ty $ptr:ident u64) => (ir::types::I64);
        (@ty $ptr:ident vmctx) => ($ptr);
    }

    // Instantiate `define!` once per builtin component function; the list of
    // builtins lives in `wasmtime_environ` so it stays in sync with the
    // runtime implementations.
    wasmtime_environ::foreach_builtin_component_function!(define);
}