wasmtime/runtime/vm/
libcalls.rs

1//! Runtime library calls.
2//!
3//! Note that Wasm compilers may sometimes perform these inline rather than
4//! calling them, particularly when CPUs have special instructions which compute
5//! them directly.
6//!
7//! These functions are called by compiled Wasm code, and therefore must take
8//! certain care about some things:
9//!
10//! * They must only contain basic, raw i32/i64/f32/f64/pointer parameters that
11//!   are safe to pass across the system ABI.
12//!
13//! * If any nested function propagates an `Err(trap)` out to the library
14//!   function frame, we need to raise it. This involves some nasty and quite
15//!   unsafe code under the covers! Notably, after raising the trap, drops
16//!   **will not** be run for local variables! This can lead to things like
17//!   leaking `InstanceHandle`s which leads to never deallocating JIT code,
18//!   instances, and modules if we are not careful!
19//!
20//! * The libcall must be entered via a Wasm-to-libcall trampoline that saves
21//!   the last Wasm FP and PC for stack walking purposes. (For more details, see
22//!   `crates/wasmtime/src/runtime/vm/backtrace.rs`.)
23//!
24//! To make it easier to correctly handle all these things, **all** libcalls
25//! must be defined via the `libcall!` helper macro! See its doc comments below
26//! for an example, or just look at the rest of the file.
27//!
28//! ## Dealing with `externref`s
29//!
30//! When receiving a raw `*mut u8` that is actually a `VMExternRef` reference,
31//! convert it into a proper `VMExternRef` with `VMExternRef::clone_from_raw` as
//! soon as possible. Any GC that happens before the raw pointer is converted
//! into a reference can potentially collect the referenced object, which could
//! lead to a use after free.
35//!
36//! Avoid this by eagerly converting into a proper `VMExternRef`! (Unfortunately
37//! there is no macro to help us automatically get this correct, so stay
38//! vigilant!)
39//!
40//! ```ignore
41//! pub unsafe extern "C" my_libcall_takes_ref(raw_extern_ref: *mut u8) {
42//!     // Before `clone_from_raw`, `raw_extern_ref` is potentially unrooted,
43//!     // and doing GC here could lead to use after free!
44//!
45//!     let my_extern_ref = if raw_extern_ref.is_null() {
46//!         None
47//!     } else {
48//!         Some(VMExternRef::clone_from_raw(raw_extern_ref))
49//!     };
50//!
51//!     // Now that we did `clone_from_raw`, it is safe to do a GC (or do
52//!     // anything else that might transitively GC, like call back into
53//!     // Wasm!)
54//! }
55//! ```
56
57#[cfg(feature = "stack-switching")]
58use super::stack_switching::VMContObj;
59use crate::prelude::*;
60#[cfg(feature = "gc")]
61use crate::runtime::vm::VMGcRef;
62use crate::runtime::vm::table::{Table, TableElementType};
63use crate::runtime::vm::vmcontext::VMFuncRef;
64use crate::runtime::vm::{
65    HostResultHasUnwindSentinel, Instance, TrapReason, VMStore, f32x4, f64x2, i8x16,
66};
67use core::convert::Infallible;
68use core::pin::Pin;
69use core::ptr::NonNull;
70#[cfg(feature = "threads")]
71use core::time::Duration;
72use wasmtime_environ::{
73    DataIndex, DefinedMemoryIndex, DefinedTableIndex, ElemIndex, FuncIndex, MemoryIndex,
74    TableIndex, Trap,
75};
76#[cfg(feature = "wmemcheck")]
77use wasmtime_wmemcheck::AccessError::{
78    DoubleMalloc, InvalidFree, InvalidRead, InvalidWrite, OutOfBounds,
79};
80
81/// Raw functions which are actually called from compiled code.
82///
83/// Invocation of a builtin currently looks like:
84///
85/// * A wasm function calls a cranelift-compiled trampoline that's generated
86///   once-per-builtin.
87/// * The cranelift-compiled trampoline performs any necessary actions to exit
88///   wasm, such as dealing with fp/pc/etc.
/// * The cranelift-compiled trampoline loads a function pointer from an array
///   stored in `VMContext`. That function pointer is defined in this module.
91/// * This module runs, handling things like `catch_unwind` and `Result` and
92///   such.
93/// * This module delegates to the outer module (this file) which has the actual
94///   implementation.
95///
96/// For more information on converting from host-defined values to Cranelift ABI
97/// values see the `catch_unwind_and_record_trap` function.
pub mod raw {
    use crate::runtime::vm::{InstanceAndStore, VMContext, f32x4, f64x2, i8x16};
    use core::ptr::NonNull;

    // Defines one raw `extern "C"` entrypoint per builtin listed by
    // `wasmtime_environ::foreach_builtin_function!`. Each entrypoint catches
    // unwinds/traps and forwards to the safe implementation in the parent
    // module.
    macro_rules! libcall {
        (
            $(
                $( #[cfg($attr:meta)] )?
                $name:ident( vmctx: vmctx $(, $pname:ident: $param:ident )* ) $(-> $result:ident)?;
            )*
        ) => {
            $(
                // This is the direct entrypoint from the compiled module which
                // still has the raw signature.
                //
                // This will delegate to the outer module to the actual
                // implementation and automatically perform `catch_unwind` along
                // with conversion of the return value in the face of traps.
                #[allow(improper_ctypes_definitions, reason = "__m128i known not FFI-safe")]
                pub unsafe extern "C" fn $name(
                    vmctx: NonNull<VMContext>,
                    $( $pname : libcall!(@ty $param), )*
                ) $(-> libcall!(@ty $result))? {
                    $(#[cfg($attr)])?
                    {
                        crate::runtime::vm::traphandlers::catch_unwind_and_record_trap(|| {
                            InstanceAndStore::from_vmctx(vmctx, |pair| {
                                let (instance, store) = pair.unpack_mut();
                                super::$name(store, instance, $($pname),*)
                            })
                        })
                    }
                    // When the builtin is compiled out by its `#[cfg]`, keep a
                    // well-formed body so the symbol still exists; reaching it
                    // at runtime would be a bug.
                    $(
                        #[cfg(not($attr))]
                        {
                            let _ = vmctx;
                            unreachable!();
                        }
                    )?
                }

                // This works around a `rustc` bug where compiling with LTO
                // will sometimes strip out some of these symbols resulting
                // in a linking failure.
                #[allow(improper_ctypes_definitions, reason = "__m128i known not FFI-safe")]
                const _: () = {
                    #[used]
                    static I_AM_USED: unsafe extern "C" fn(
                        NonNull<VMContext>,
                        $( $pname : libcall!(@ty $param), )*
                    ) $( -> libcall!(@ty $result))? = $name;
                };
            )*
        };

        // Translate the symbolic type names used in the builtin descriptions
        // into the concrete Rust types of the raw signatures.
        (@ty u32) => (u32);
        (@ty u64) => (u64);
        (@ty f32) => (f32);
        (@ty f64) => (f64);
        (@ty u8) => (u8);
        (@ty i8x16) => (i8x16);
        (@ty f32x4) => (f32x4);
        (@ty f64x2) => (f64x2);
        (@ty bool) => (bool);
        (@ty pointer) => (*mut u8);
    }

    // Instantiate the macro once over the full list of builtin functions.
    wasmtime_environ::foreach_builtin_function!(libcall);
}
167
168fn memory_grow(
169    store: &mut dyn VMStore,
170    mut instance: Pin<&mut Instance>,
171    delta: u64,
172    memory_index: u32,
173) -> Result<Option<AllocationSize>, TrapReason> {
174    let memory_index = DefinedMemoryIndex::from_u32(memory_index);
175    let module = instance.env_module();
176    let page_size_log2 = module.memories[module.memory_index(memory_index)].page_size_log2;
177
178    let result = instance
179        .as_mut()
180        .memory_grow(store, memory_index, delta)?
181        .map(|size_in_bytes| AllocationSize(size_in_bytes >> page_size_log2));
182
183    Ok(result)
184}
185
/// A helper structure to represent the return value of a memory or table growth
/// call.
///
/// This represents a byte or element-based count of the size of an item on the
/// host. For example a memory is how many bytes large the memory is, or a table
/// is how many elements large it is. It's assumed that the value here is never
/// -1 or -2 as that would mean the entire host address space is allocated which
/// is not possible. (Those two values are reserved as the failure and unwind
/// sentinels, respectively, by the `HostResultHasUnwindSentinel` impl below.)
struct AllocationSize(usize);
195
/// Special implementation for growth-related libcalls.
///
/// Here the optional return value means:
///
/// * `Some(val)` - the growth succeeded and the previous size of the item was
///   `val`.
/// * `None` - the growth failed.
///
/// The failure case returns -1 (or `usize::MAX` as an unsigned integer) and the
/// successful case returns the `val` itself. Note that -2 (`usize::MAX - 1`
/// when unsigned) is used as a sentinel to indicate an unwind, as no valid
/// allocation can be that large.
unsafe impl HostResultHasUnwindSentinel for Option<AllocationSize> {
    type Abi = *mut u8;
    const SENTINEL: *mut u8 = (usize::MAX - 1) as *mut u8;

    /// Convert this host-side result into the raw ABI value handed back to
    /// compiled code: the previous size on success, `usize::MAX` on failure.
    fn into_abi(self) -> *mut u8 {
        match self {
            Some(size) => {
                // A successful value must never collide with the -1 failure
                // value or the -2 unwind sentinel.
                debug_assert!(size.0 < (usize::MAX - 1));
                size.0 as *mut u8
            }
            None => usize::MAX as *mut u8,
        }
    }
}
222
223/// Implementation of `table.grow` for `funcref` tables.
224unsafe fn table_grow_func_ref(
225    store: &mut dyn VMStore,
226    mut instance: Pin<&mut Instance>,
227    defined_table_index: u32,
228    delta: u64,
229    init_value: *mut u8,
230) -> Result<Option<AllocationSize>> {
231    let defined_table_index = DefinedTableIndex::from_u32(defined_table_index);
232    let table_index = instance.env_module().table_index(defined_table_index);
233    debug_assert!(matches!(
234        instance.as_mut().table_element_type(table_index),
235        TableElementType::Func,
236    ));
237    let element = NonNull::new(init_value.cast::<VMFuncRef>()).into();
238    let result = instance
239        .defined_table_grow(store, defined_table_index, delta, element)?
240        .map(AllocationSize);
241    Ok(result)
242}
243
/// Implementation of `table.grow` for GC-reference tables.
#[cfg(feature = "gc")]
unsafe fn table_grow_gc_ref(
    store: &mut dyn VMStore,
    mut instance: Pin<&mut Instance>,
    defined_table_index: u32,
    delta: u64,
    init_value: u32,
) -> Result<Option<AllocationSize>> {
    let index = DefinedTableIndex::from_u32(defined_table_index);
    let module_index = instance.env_module().table_index(index);
    // Debug-only sanity check: this libcall must only be dispatched to
    // GC-reference tables.
    debug_assert!(matches!(
        instance.as_mut().table_element_type(module_index),
        TableElementType::GcRef,
    ));

    // Clone the (possibly null) GC reference so the table owns its own copy.
    let fill = VMGcRef::from_raw_u32(init_value)
        .map(|gc_ref| {
            store
                .store_opaque_mut()
                .unwrap_gc_store_mut()
                .clone_gc_ref(&gc_ref)
        })
        .into();

    Ok(instance
        .defined_table_grow(store, index, delta, fill)?
        .map(AllocationSize))
}
274
/// Implementation of `table.grow` for continuation tables.
#[cfg(feature = "stack-switching")]
unsafe fn table_grow_cont_obj(
    store: &mut dyn VMStore,
    mut instance: Pin<&mut Instance>,
    defined_table_index: u32,
    delta: u64,
    // The following two values together form the initial Option<VMContObj>.
    // A None value is indicated by the pointer being null.
    init_value_contref: *mut u8,
    init_value_revision: u64,
) -> Result<Option<AllocationSize>> {
    let index = DefinedTableIndex::from_u32(defined_table_index);
    let module_index = instance.env_module().table_index(index);
    // Debug-only sanity check: this libcall must only be dispatched to
    // continuation tables.
    debug_assert!(matches!(
        instance.as_mut().table_element_type(module_index),
        TableElementType::Cont,
    ));

    // Reassemble the optional continuation object from its two raw halves.
    let fill = VMContObj::from_raw_parts(init_value_contref, init_value_revision).into();
    Ok(instance
        .defined_table_grow(store, index, delta, fill)?
        .map(AllocationSize))
}
298
299/// Implementation of `table.fill` for `funcref`s.
300unsafe fn table_fill_func_ref(
301    store: &mut dyn VMStore,
302    instance: Pin<&mut Instance>,
303    table_index: u32,
304    dst: u64,
305    val: *mut u8,
306    len: u64,
307) -> Result<()> {
308    let table_index = TableIndex::from_u32(table_index);
309    let table = &mut *instance.get_table(table_index);
310    match table.element_type() {
311        TableElementType::Func => {
312            let val = NonNull::new(val.cast::<VMFuncRef>());
313            table.fill(store.optional_gc_store_mut(), dst, val.into(), len)?;
314            Ok(())
315        }
316        TableElementType::GcRef => unreachable!(),
317        TableElementType::Cont => unreachable!(),
318    }
319}
320
/// Implementation of `table.fill` for GC-reference tables.
#[cfg(feature = "gc")]
unsafe fn table_fill_gc_ref(
    store: &mut dyn VMStore,
    instance: Pin<&mut Instance>,
    table_index: u32,
    dst: u64,
    val: u32,
    len: u64,
) -> Result<()> {
    let table = &mut *instance.get_table(TableIndex::from_u32(table_index));
    match table.element_type() {
        // Dispatch in the caller guarantees this is a GC-reference table.
        TableElementType::Func | TableElementType::Cont => unreachable!(),
        TableElementType::GcRef => {
            // Clone the (possibly null) reference so the table owns its own
            // copy of it.
            let gc_store = store.store_opaque_mut().unwrap_gc_store_mut();
            let fill = VMGcRef::from_raw_u32(val).map(|r| gc_store.clone_gc_ref(&r));
            table.fill(Some(gc_store), dst, fill.into(), len)?;
            Ok(())
        }
    }
}
345
/// Implementation of `table.fill` for continuation tables.
#[cfg(feature = "stack-switching")]
unsafe fn table_fill_cont_obj(
    store: &mut dyn VMStore,
    instance: Pin<&mut Instance>,
    table_index: u32,
    dst: u64,
    value_contref: *mut u8,
    value_revision: u64,
    len: u64,
) -> Result<()> {
    let table_index = TableIndex::from_u32(table_index);
    let table = &mut *instance.get_table(table_index);
    match table.element_type() {
        TableElementType::Cont => {
            // `(contref, revision)` together encode an `Option<VMContObj>`;
            // a null `contref` pointer represents `None`.
            let contobj = VMContObj::from_raw_parts(value_contref, value_revision);
            table.fill(store.optional_gc_store_mut(), dst, contobj.into(), len)?;
            Ok(())
        }
        // Dispatch in the caller guarantees this libcall is only used with
        // continuation tables. Match explicitly (as the other `table_fill_*`
        // implementations do) so that adding a new element type is a compile
        // error here rather than a silently-hit catch-all.
        TableElementType::Func => unreachable!(),
        TableElementType::GcRef => unreachable!(),
    }
}
367
// Implementation of `table.copy`.
//
// Copies `len` elements from `src..src+len` in the source table to
// `dst..dst+len` in the destination table. `Table::copy` performs the actual
// bounds checks and element copy.
unsafe fn table_copy(
    store: &mut dyn VMStore,
    mut instance: Pin<&mut Instance>,
    dst_table_index: u32,
    src_table_index: u32,
    dst: u64,
    src: u64,
    len: u64,
) -> Result<()> {
    let dst_table_index = TableIndex::from_u32(dst_table_index);
    let src_table_index = TableIndex::from_u32(src_table_index);
    let store = store.store_opaque_mut();
    let dst_table = instance.as_mut().get_table(dst_table_index);
    // Lazy-initialize the whole range in the source table first.
    // The range saturates at `u64::MAX` on overflow; the real bounds check
    // (and resulting trap) happens inside `Table::copy` below.
    let src_range = src..(src.checked_add(len).unwrap_or(u64::MAX));
    let src_table = instance.get_table_with_lazy_init(src_table_index, src_range);
    let gc_store = store.optional_gc_store_mut();
    Table::copy(gc_store, dst_table, src_table, dst, src, len)?;
    Ok(())
}
389
390// Implementation of `table.init`.
391fn table_init(
392    store: &mut dyn VMStore,
393    instance: Pin<&mut Instance>,
394    table_index: u32,
395    elem_index: u32,
396    dst: u64,
397    src: u64,
398    len: u64,
399) -> Result<(), Trap> {
400    let table_index = TableIndex::from_u32(table_index);
401    let elem_index = ElemIndex::from_u32(elem_index);
402    instance.table_init(
403        store.store_opaque_mut(),
404        table_index,
405        elem_index,
406        dst,
407        src,
408        len,
409    )
410}
411
412// Implementation of `elem.drop`.
413fn elem_drop(_store: &mut dyn VMStore, instance: Pin<&mut Instance>, elem_index: u32) {
414    let elem_index = ElemIndex::from_u32(elem_index);
415    instance.elem_drop(elem_index)
416}
417
418// Implementation of `memory.copy`.
419fn memory_copy(
420    _store: &mut dyn VMStore,
421    instance: Pin<&mut Instance>,
422    dst_index: u32,
423    dst: u64,
424    src_index: u32,
425    src: u64,
426    len: u64,
427) -> Result<(), Trap> {
428    let src_index = MemoryIndex::from_u32(src_index);
429    let dst_index = MemoryIndex::from_u32(dst_index);
430    instance.memory_copy(dst_index, dst, src_index, src, len)
431}
432
433// Implementation of `memory.fill` for locally defined memories.
434fn memory_fill(
435    _store: &mut dyn VMStore,
436    instance: Pin<&mut Instance>,
437    memory_index: u32,
438    dst: u64,
439    val: u32,
440    len: u64,
441) -> Result<(), Trap> {
442    let memory_index = DefinedMemoryIndex::from_u32(memory_index);
443    #[expect(clippy::cast_possible_truncation, reason = "known to truncate here")]
444    instance.memory_fill(memory_index, dst, val as u8, len)
445}
446
447// Implementation of `memory.init`.
448fn memory_init(
449    _store: &mut dyn VMStore,
450    instance: Pin<&mut Instance>,
451    memory_index: u32,
452    data_index: u32,
453    dst: u64,
454    src: u32,
455    len: u32,
456) -> Result<(), Trap> {
457    let memory_index = MemoryIndex::from_u32(memory_index);
458    let data_index = DataIndex::from_u32(data_index);
459    instance.memory_init(memory_index, data_index, dst, src, len)
460}
461
462// Implementation of `ref.func`.
463fn ref_func(
464    _store: &mut dyn VMStore,
465    instance: Pin<&mut Instance>,
466    func_index: u32,
467) -> NonNull<u8> {
468    instance
469        .get_func_ref(FuncIndex::from_u32(func_index))
470        .expect("ref_func: funcref should always be available for given func index")
471        .cast()
472}
473
474// Implementation of `data.drop`.
475fn data_drop(_store: &mut dyn VMStore, instance: Pin<&mut Instance>, data_index: u32) {
476    let data_index = DataIndex::from_u32(data_index);
477    instance.data_drop(data_index)
478}
479
480// Returns a table entry after lazily initializing it.
481unsafe fn table_get_lazy_init_func_ref(
482    _store: &mut dyn VMStore,
483    instance: Pin<&mut Instance>,
484    table_index: u32,
485    index: u64,
486) -> *mut u8 {
487    let table_index = TableIndex::from_u32(table_index);
488    let table = instance.get_table_with_lazy_init(table_index, core::iter::once(index));
489    let elem = (*table)
490        .get(None, index)
491        .expect("table access already bounds-checked");
492
493    match elem.into_func_ref_asserting_initialized() {
494        Some(ptr) => ptr.as_ptr().cast(),
495        None => core::ptr::null_mut(),
496    }
497}
498
/// Drop a GC reference.
#[cfg(feature = "gc-drc")]
unsafe fn drop_gc_ref(store: &mut dyn VMStore, _instance: Pin<&mut Instance>, gc_ref: u32) {
    log::trace!("libcalls::drop_gc_ref({gc_ref:#x})");
    // Wasm only hands us non-null references here; a null raw value is a bug.
    let reference = VMGcRef::from_raw_u32(gc_ref).expect("non-null VMGcRef");
    let gc_store = store.store_opaque_mut().unwrap_gc_store_mut();
    gc_store.drop_gc_ref(reference);
}
509
/// Grow the GC heap by at least `bytes_needed` bytes.
#[cfg(feature = "gc-null")]
unsafe fn grow_gc_heap(
    store: &mut dyn VMStore,
    _instance: Pin<&mut Instance>,
    bytes_needed: u64,
) -> Result<()> {
    let len_before = u64::try_from(store.gc_store()?.gc_heap.vmmemory().current_length()).unwrap();

    store
        .maybe_async_gc(None, Some(bytes_needed))
        .context("failed to grow the GC heap")
        .context(crate::Trap::AllocationTooLarge)?;

    // JIT code relies on the memory having grown by `bytes_needed` bytes if
    // this libcall returns successfully, so trap if we didn't grow that much.
    let len_after = u64::try_from(store.gc_store()?.gc_heap.vmmemory().current_length()).unwrap();
    let grew_enough = match len_before.checked_add(bytes_needed) {
        Some(required) => len_after >= required,
        // Overflow means the required size exceeds the address space.
        None => false,
    };
    if !grew_enough {
        return Err(crate::Trap::AllocationTooLarge.into());
    }

    Ok(())
}
536
/// Allocate a raw, uninitialized GC object for Wasm code.
///
/// The Wasm code is responsible for initializing the object.
///
/// `kind_and_reserved` packs a `VMGcKind` into its high bits alongside
/// kind-specific "reserved" bits; the two halves are split apart below.
#[cfg(feature = "gc-drc")]
unsafe fn gc_alloc_raw(
    store: &mut dyn VMStore,
    instance: Pin<&mut Instance>,
    kind_and_reserved: u32,
    module_interned_type_index: u32,
    size: u32,
    align: u32,
) -> Result<core::num::NonZeroU32> {
    use crate::vm::VMGcHeader;
    use core::alloc::Layout;
    use wasmtime_environ::{ModuleInternedTypeIndex, VMGcKind};

    let kind = VMGcKind::from_high_bits_of_u32(kind_and_reserved);
    log::trace!("gc_alloc_raw(kind={kind:?}, size={size}, align={align})");

    let module = instance
        .runtime_module()
        .expect("should never allocate GC types defined in a dummy module");

    // Translate the module-local type index into the engine-wide shared type
    // index recorded in the object's header.
    let module_interned_type_index = ModuleInternedTypeIndex::from_u32(module_interned_type_index);
    let shared_type_index = module
        .signatures()
        .shared_type(module_interned_type_index)
        .expect("should have engine type index for module type index");

    let mut header = VMGcHeader::from_kind_and_index(kind, shared_type_index);
    header.set_reserved_u27(kind_and_reserved & VMGcKind::UNUSED_MASK);

    // An invalid size/align combination becomes an `AllocationTooLarge` trap
    // rather than a panic.
    let size = usize::try_from(size).unwrap();
    let align = usize::try_from(align).unwrap();
    assert!(align.is_power_of_two());
    let layout = Layout::from_size_align(size, align).map_err(|e| {
        let err = Error::from(crate::Trap::AllocationTooLarge);
        err.context(e)
    })?;

    // Attempt the allocation, retrying after a GC if the heap is exhausted.
    let store = store.store_opaque_mut();
    let gc_ref = unsafe {
        store.retry_after_gc_maybe_async((), |store, ()| {
            store
                .unwrap_gc_store_mut()
                .alloc_raw(header, layout)?
                .map_err(|bytes_needed| crate::GcHeapOutOfMemory::new((), bytes_needed).into())
        })?
    };

    // Hand the reference out to Wasm as a raw, non-zero u32.
    let raw = store.unwrap_gc_store_mut().expose_gc_ref_to_wasm(gc_ref);
    Ok(raw)
}
590
// Intern a `funcref` into the GC heap, returning its `FuncRefTableId`.
//
// This libcall may not GC.
#[cfg(feature = "gc")]
unsafe fn intern_func_ref_for_gc_heap(
    store: &mut dyn VMStore,
    _instance: Pin<&mut Instance>,
    func_ref: *mut u8,
) -> Result<u32> {
    use crate::{store::AutoAssertNoGc, vm::SendSyncPtr};
    use core::ptr::NonNull;

    // Assert, for the duration of this call, that no GC can happen.
    let mut store = AutoAssertNoGc::new(store.store_opaque_mut());

    // Null funcrefs are representable and intern like any other value.
    let ptr = NonNull::new(func_ref.cast::<VMFuncRef>()).map(SendSyncPtr::new);
    let id = store.gc_store_mut()?.func_ref_table.intern(ptr);
    Ok(id.into_raw())
}
611
// Get the raw `VMFuncRef` pointer associated with a `FuncRefTableId` from an
// earlier `intern_func_ref_for_gc_heap` call.
//
// This libcall may not GC.
#[cfg(feature = "gc")]
unsafe fn get_interned_func_ref(
    store: &mut dyn VMStore,
    instance: Pin<&mut Instance>,
    func_ref_id: u32,
    module_interned_type_index: u32,
) -> *mut u8 {
    use super::FuncRefTableId;
    use crate::store::AutoAssertNoGc;
    use wasmtime_environ::{ModuleInternedTypeIndex, packed_option::ReservedValue};

    let store = AutoAssertNoGc::new(store.store_opaque_mut());

    let id = FuncRefTableId::from_raw(func_ref_id);
    let ty = ModuleInternedTypeIndex::from_bits(module_interned_type_index);

    // A reserved type index means "no expected type": fetch the funcref
    // without a type check. Otherwise look it up checked against the
    // engine-level type.
    let func_ref = if ty.is_reserved_value() {
        store.unwrap_gc_store().func_ref_table.get_untyped(id)
    } else {
        let registry = store.engine().signatures();
        let engine_ty = instance.engine_type_index(ty);
        store
            .unwrap_gc_store()
            .func_ref_table
            .get_typed(registry, id, engine_ty)
    };

    // A null funcref is returned to Wasm as a null pointer.
    match func_ref {
        Some(f) => f.as_ptr().cast(),
        None => core::ptr::null_mut(),
    }
}
648
/// Implementation of the `array.new_data` instruction.
///
/// Allocates a new array of type `array_type_index` with `len` elements and
/// initializes it from bytes `src..` of the passive data segment
/// `data_index`. Out-of-bounds or overflowing ranges trap with
/// `MemoryOutOfBounds`.
#[cfg(feature = "gc")]
unsafe fn array_new_data(
    store: &mut dyn VMStore,
    instance: Pin<&mut Instance>,
    array_type_index: u32,
    data_index: u32,
    src: u32,
    len: u32,
) -> Result<core::num::NonZeroU32> {
    use crate::ArrayType;
    use wasmtime_environ::ModuleInternedTypeIndex;

    let store = store.store_opaque_mut();
    let array_type_index = ModuleInternedTypeIndex::from_u32(array_type_index);
    let data_index = DataIndex::from_u32(data_index);

    // Calculate the byte-length of the data (as opposed to the element-length
    // of the array).
    let data_range = instance.wasm_data_range(data_index);
    let shared_ty = instance.engine_type_index(array_type_index);
    let array_ty = ArrayType::from_shared_type_index(store.engine(), shared_ty);
    let one_elem_size = array_ty
        .element_type()
        .data_byte_size()
        .expect("Wasm validation ensures that this type have a defined byte size");
    // Multiplication overflow counts as out-of-bounds too.
    let byte_len = len
        .checked_mul(one_elem_size)
        .and_then(|x| usize::try_from(x).ok())
        .ok_or_else(|| Trap::MemoryOutOfBounds)?;

    // Get the data from the segment, checking bounds.
    let src = usize::try_from(src).map_err(|_| Trap::MemoryOutOfBounds)?;
    let data = instance
        .wasm_data(data_range)
        .get(src..)
        .and_then(|d| d.get(..byte_len))
        .ok_or_else(|| Trap::MemoryOutOfBounds)?;

    // Allocate the (uninitialized) array, retrying after a GC if the heap is
    // exhausted.
    let gc_layout = store
        .engine()
        .signatures()
        .layout(shared_ty)
        .expect("array types have GC layouts");
    let array_layout = gc_layout.unwrap_array();
    let array_ref = store.retry_after_gc_maybe_async((), |store, ()| {
        store
            .unwrap_gc_store_mut()
            .alloc_uninit_array(shared_ty, len, &array_layout)?
            .map_err(|bytes_needed| crate::GcHeapOutOfMemory::new((), bytes_needed).into())
    })?;

    // Copy the data into the array, initializing it.
    store
        .unwrap_gc_store_mut()
        .gc_object_data(array_ref.as_gc_ref())
        .copy_from_slice(array_layout.base_size, data);

    // Return the array to Wasm!
    let raw = store
        .unwrap_gc_store_mut()
        .expose_gc_ref_to_wasm(array_ref.into());
    Ok(raw)
}
714
/// Implementation of the `array.init_data` instruction.
///
/// Copies `len` elements' worth of bytes from offset `src` of the passive
/// data segment `data_index` into the existing array `array`, starting at
/// element index `dst`.
#[cfg(feature = "gc")]
unsafe fn array_init_data(
    store: &mut dyn VMStore,
    instance: Pin<&mut Instance>,
    array_type_index: u32,
    array: u32,
    dst: u32,
    data_index: u32,
    src: u32,
    len: u32,
) -> Result<()> {
    use crate::ArrayType;
    use wasmtime_environ::ModuleInternedTypeIndex;

    let array_type_index = ModuleInternedTypeIndex::from_u32(array_type_index);
    let data_index = DataIndex::from_u32(data_index);

    log::trace!(
        "array.init_data(array={array:#x}, dst={dst}, data_index={data_index:?}, src={src}, len={len})",
    );

    // Null check the array.
    let gc_ref = VMGcRef::from_raw_u32(array).ok_or_else(|| Trap::NullReference)?;
    let array = gc_ref
        .into_arrayref(&*store.unwrap_gc_store().gc_heap)
        .expect("gc ref should be an array");

    let dst = usize::try_from(dst).map_err(|_| Trap::MemoryOutOfBounds)?;
    let src = usize::try_from(src).map_err(|_| Trap::MemoryOutOfBounds)?;
    let len = usize::try_from(len).map_err(|_| Trap::MemoryOutOfBounds)?;

    // Bounds check the array.
    let array_len = array.len(store.store_opaque());
    let array_len = usize::try_from(array_len).map_err(|_| Trap::ArrayOutOfBounds)?;
    if dst.checked_add(len).ok_or_else(|| Trap::ArrayOutOfBounds)? > array_len {
        return Err(Trap::ArrayOutOfBounds.into());
    }

    // Calculate the byte length from the array length.
    let shared_ty = instance.engine_type_index(array_type_index);
    let array_ty = ArrayType::from_shared_type_index(store.engine(), shared_ty);
    let one_elem_size = array_ty
        .element_type()
        .data_byte_size()
        .expect("Wasm validation ensures that this type have a defined byte size");
    let data_len = len
        .checked_mul(usize::try_from(one_elem_size).unwrap())
        .ok_or_else(|| Trap::MemoryOutOfBounds)?;

    // Get the data from the segment, checking its bounds.
    let data_range = instance.wasm_data_range(data_index);
    let data = instance
        .wasm_data(data_range)
        .get(src..)
        .and_then(|d| d.get(..data_len))
        .ok_or_else(|| Trap::MemoryOutOfBounds)?;

    // Copy the data into the array.

    // `dst` was converted from a `u32` above, so the `try_from` cannot fail;
    // offset overflow past the checks above would indicate a bug, hence the
    // unwraps.
    let dst_offset = u32::try_from(dst)
        .unwrap()
        .checked_mul(one_elem_size)
        .unwrap();

    let array_layout = store
        .engine()
        .signatures()
        .layout(shared_ty)
        .expect("array types have GC layouts");
    let array_layout = array_layout.unwrap_array();

    let obj_offset = array_layout.base_size.checked_add(dst_offset).unwrap();

    store
        .unwrap_gc_store_mut()
        .gc_object_data(array.as_gc_ref())
        .copy_from_slice(obj_offset, data);

    Ok(())
}
796
/// Implementation of the `array.new_elem` instruction.
///
/// Allocates a new array of type `array_type_index` whose elements are taken
/// from range `src..src + len` of the passive element segment `elem_index`.
#[cfg(feature = "gc")]
unsafe fn array_new_elem(
    store: &mut dyn VMStore,
    mut instance: Pin<&mut Instance>,
    array_type_index: u32,
    elem_index: u32,
    src: u32,
    len: u32,
) -> Result<core::num::NonZeroU32> {
    use crate::{
        ArrayRef, ArrayRefPre, ArrayType, Func, RootSet, RootedGcRefImpl, Val,
        store::AutoAssertNoGc,
        vm::const_expr::{ConstEvalContext, ConstExprEvaluator},
    };
    use wasmtime_environ::{ModuleInternedTypeIndex, TableSegmentElements};

    // Convert indices to their typed forms.
    let array_type_index = ModuleInternedTypeIndex::from_u32(array_type_index);
    let elem_index = ElemIndex::from_u32(elem_index);

    let mut storage = None;
    let elements = instance.passive_element_segment(&mut storage, elem_index);

    let src = usize::try_from(src).map_err(|_| Trap::TableOutOfBounds)?;
    let len = usize::try_from(len).map_err(|_| Trap::TableOutOfBounds)?;

    let shared_ty = instance.engine_type_index(array_type_index);
    let array_ty = ArrayType::from_shared_type_index(store.engine(), shared_ty);
    let elem_ty = array_ty.element_type();
    let pre = ArrayRefPre::_new(store, array_ty);

    // Keep the intermediate `Val`s rooted for the duration of the scope.
    RootSet::with_lifo_scope(store, |store| {
        // Turn the elements into `Val`s.
        let mut vals = Vec::with_capacity(usize::try_from(elements.len()).unwrap());
        match elements {
            // Function segments: resolve each function index to a funcref.
            TableSegmentElements::Functions(fs) => {
                vals.extend(
                    fs.get(src..)
                        .and_then(|s| s.get(..len))
                        .ok_or_else(|| Trap::TableOutOfBounds)?
                        .iter()
                        .map(|f| {
                            let raw_func_ref = instance.as_mut().get_func_ref(*f);
                            let func = raw_func_ref.map(|p| Func::from_vm_func_ref(store, p));
                            Val::FuncRef(func)
                        }),
                );
            }
            // Expression segments: const-evaluate each element expression.
            TableSegmentElements::Expressions(xs) => {
                let xs = xs
                    .get(src..)
                    .and_then(|s| s.get(..len))
                    .ok_or_else(|| Trap::TableOutOfBounds)?;

                let mut const_context = ConstEvalContext::new(instance.id());
                let mut const_evaluator = ConstExprEvaluator::default();

                vals.extend(xs.iter().map(|x| unsafe {
                    let raw = const_evaluator
                        .eval(store, &mut const_context, x)
                        .expect("const expr should be valid");
                    let mut store = AutoAssertNoGc::new(store);
                    Val::_from_raw(&mut store, raw, elem_ty.unwrap_val_type())
                }));
            }
        }

        let array = unsafe { ArrayRef::new_fixed_maybe_async(store, &pre, &vals)? };

        // Hand the array out to Wasm as a raw, non-zero u32.
        let mut store = AutoAssertNoGc::new(store);
        let gc_ref = array.try_clone_gc_ref(&mut store)?;
        let raw = store.unwrap_gc_store_mut().expose_gc_ref_to_wasm(gc_ref);
        Ok(raw)
    })
}
872
// Implementation of the `array.init_elem` instruction: copy `len` entries of
// the passive element segment `elem_index` (starting at `src`) into the
// existing `array` starting at index `dst`.
#[cfg(feature = "gc")]
unsafe fn array_init_elem(
    store: &mut dyn VMStore,
    mut instance: Pin<&mut Instance>,
    array_type_index: u32,
    array: u32,
    dst: u32,
    elem_index: u32,
    src: u32,
    len: u32,
) -> Result<()> {
    use crate::{
        ArrayRef, Func, OpaqueRootScope, Val,
        store::AutoAssertNoGc,
        vm::const_expr::{ConstEvalContext, ConstExprEvaluator},
    };
    use wasmtime_environ::{ModuleInternedTypeIndex, TableSegmentElements};

    // Scope all GC roots created below to this libcall.
    let mut store = OpaqueRootScope::new(store.store_opaque_mut());

    // Convert the indices into their typed forms. The array type index is
    // unused here (the destination array already carries its type).
    let _array_type_index = ModuleInternedTypeIndex::from_u32(array_type_index);
    let elem_index = ElemIndex::from_u32(elem_index);

    log::trace!(
        "array.init_elem(array={array:#x}, dst={dst}, elem_index={elem_index:?}, src={src}, len={len})",
    );

    // Convert the raw GC ref into a `Rooted<ArrayRef>`. A raw value of zero
    // is a null reference and traps.
    let array = VMGcRef::from_raw_u32(array).ok_or_else(|| Trap::NullReference)?;
    let array = store.unwrap_gc_store_mut().clone_gc_ref(&array);
    let array = {
        let mut no_gc = AutoAssertNoGc::new(&mut store);
        ArrayRef::from_cloned_gc_ref(&mut no_gc, array)
    };

    // Bounds check the destination within the array. `checked_add` also
    // catches `dst + len` overflowing `u32`.
    let array_len = array._len(&store)?;
    log::trace!("array_len = {array_len}");
    if dst.checked_add(len).ok_or_else(|| Trap::ArrayOutOfBounds)? > array_len {
        return Err(Trap::ArrayOutOfBounds.into());
    }

    // Get the passive element segment.
    let mut storage = None;
    let elements = instance.passive_element_segment(&mut storage, elem_index);

    // Convert array offsets into `usize`s.
    let src = usize::try_from(src).map_err(|_| Trap::TableOutOfBounds)?;
    let len = usize::try_from(len).map_err(|_| Trap::TableOutOfBounds)?;

    // Turn the elements into `Val`s.
    let vals = match elements {
        // Function-index segments: resolve each index to a funcref `Val`.
        TableSegmentElements::Functions(fs) => fs
            .get(src..)
            .and_then(|s| s.get(..len))
            .ok_or_else(|| Trap::TableOutOfBounds)?
            .iter()
            .map(|f| {
                let raw_func_ref = instance.as_mut().get_func_ref(*f);
                let func = raw_func_ref.map(|p| Func::from_vm_func_ref(&store, p));
                Val::FuncRef(func)
            })
            .collect::<Vec<_>>(),
        // Const-expression segments: evaluate each expression and
        // reinterpret the raw result as the array's element type.
        TableSegmentElements::Expressions(xs) => {
            let elem_ty = array._ty(&store)?.element_type();
            let elem_ty = elem_ty.unwrap_val_type();

            let mut const_context = ConstEvalContext::new(instance.id());
            let mut const_evaluator = ConstExprEvaluator::default();

            // NOTE(review): the `unsafe` covers `Val::_from_raw`, which
            // trusts that the evaluated value has type `elem_ty` — relies on
            // module validation; confirm.
            xs.get(src..)
                .and_then(|s| s.get(..len))
                .ok_or_else(|| Trap::TableOutOfBounds)?
                .iter()
                .map(|x| unsafe {
                    let raw = const_evaluator
                        .eval(&mut store, &mut const_context, x)
                        .expect("const expr should be valid");
                    let mut store = AutoAssertNoGc::new(&mut store);
                    Val::_from_raw(&mut store, raw, elem_ty)
                })
                .collect::<Vec<_>>()
        }
    };

    // Copy the values into the array. The destination indices were bounds
    // checked above, so the `unwrap`s on index arithmetic cannot fire.
    for (i, val) in vals.into_iter().enumerate() {
        let i = u32::try_from(i).unwrap();
        let j = dst.checked_add(i).unwrap();
        array._set(&mut store, j, val)?;
    }

    Ok(())
}
968
// Implementation of the `array.copy` instruction: copy `len` elements from
// `src_array[src..]` into `dst_array[dst..]`, handling the case where both
// refer to the same array (overlapping copy).
//
// TODO: Specialize this libcall for only non-GC array elements, so we never
// have to do GC barriers and their associated indirect calls through the `dyn
// GcHeap`. Instead, implement those copies inline in Wasm code. Then, use bulk
// `memcpy`-style APIs to do the actual copies here.
#[cfg(feature = "gc")]
unsafe fn array_copy(
    store: &mut dyn VMStore,
    _instance: Pin<&mut Instance>,
    dst_array: u32,
    dst: u32,
    src_array: u32,
    src: u32,
    len: u32,
) -> Result<()> {
    use crate::{ArrayRef, OpaqueRootScope, store::AutoAssertNoGc};

    log::trace!(
        "array.copy(dst_array={dst_array:#x}, dst_index={dst}, src_array={src_array:#x}, src_index={src}, len={len})",
    );

    // Scope all GC roots created below to this libcall, and assert that no
    // GC happens while the raw refs are being converted and copied.
    let mut store = OpaqueRootScope::new(store.store_opaque_mut());
    let mut store = AutoAssertNoGc::new(&mut store);

    // Convert the raw GC refs into `Rooted<ArrayRef>`s. Raw zero values are
    // null references and trap.
    let dst_array = VMGcRef::from_raw_u32(dst_array).ok_or_else(|| Trap::NullReference)?;
    let dst_array = store.unwrap_gc_store_mut().clone_gc_ref(&dst_array);
    let dst_array = ArrayRef::from_cloned_gc_ref(&mut store, dst_array);
    let src_array = VMGcRef::from_raw_u32(src_array).ok_or_else(|| Trap::NullReference)?;
    let src_array = store.unwrap_gc_store_mut().clone_gc_ref(&src_array);
    let src_array = ArrayRef::from_cloned_gc_ref(&mut store, src_array);

    // Bounds check the destination array's elements. `checked_add` also
    // catches `dst + len` overflowing `u32`.
    let dst_array_len = dst_array._len(&store)?;
    if dst.checked_add(len).ok_or_else(|| Trap::ArrayOutOfBounds)? > dst_array_len {
        return Err(Trap::ArrayOutOfBounds.into());
    }

    // Bounds check the source array's elements.
    let src_array_len = src_array._len(&store)?;
    if src.checked_add(len).ok_or_else(|| Trap::ArrayOutOfBounds)? > src_array_len {
        return Err(Trap::ArrayOutOfBounds.into());
    }

    // NOTE(review): `store` is already an `AutoAssertNoGc` from above, so
    // this second wrapper looks redundant — confirm whether it is needed.
    let mut store = AutoAssertNoGc::new(&mut store);
    // If `src_array` and `dst_array` are the same array, then we are
    // potentially doing an overlapping copy, so make sure to copy elements in
    // the order that doesn't clobber the source elements before they are
    // copied. If they are different arrays, the order doesn't matter, but we
    // simply don't bother checking.
    if src > dst {
        // Reads stay ahead of writes, so a forward copy is safe.
        for i in 0..len {
            let src_elem = src_array._get(&mut store, src + i)?;
            let dst_i = dst + i;
            dst_array._set(&mut store, dst_i, src_elem)?;
        }
    } else {
        // Writes would overtake reads going forward, so copy backward.
        for i in (0..len).rev() {
            let src_elem = src_array._get(&mut store, src + i)?;
            let dst_i = dst + i;
            dst_array._set(&mut store, dst_i, src_elem)?;
        }
    }
    Ok(())
}
1033
// Runtime subtype check used by GC instructions such as casts: returns 1 if
// the engine type `actual_engine_type` is a subtype of
// `expected_engine_type`, and 0 otherwise.
#[cfg(feature = "gc")]
unsafe fn is_subtype(
    store: &mut dyn VMStore,
    _instance: Pin<&mut Instance>,
    actual_engine_type: u32,
    expected_engine_type: u32,
) -> u32 {
    use wasmtime_environ::VMSharedTypeIndex;

    // Reconstruct the engine-level type indices from their raw `u32` forms.
    let actual = VMSharedTypeIndex::from_u32(actual_engine_type);
    let expected = VMSharedTypeIndex::from_u32(expected_engine_type);

    // Consult the engine's type registry for the subtype relation.
    let result = store.engine().signatures().is_subtype(actual, expected);
    log::trace!("is_subtype(actual={actual:?}, expected={expected:?}) -> {result}");

    // Compiled code expects a 0/1 integer rather than a `bool`.
    if result { 1 } else { 0 }
}
1051
// Implementation of `memory.atomic.notify` for locally defined memories.
//
// Wakes up to `count` threads waiting on `addr_index`, returning the number
// that were actually notified (or a trap, e.g. for an invalid address).
#[cfg(feature = "threads")]
fn memory_atomic_notify(
    _store: &mut dyn VMStore,
    instance: Pin<&mut Instance>,
    memory_index: u32,
    addr_index: u64,
    count: u32,
) -> Result<u32, Trap> {
    // The index coming out of compiled code always names a defined memory.
    let index = DefinedMemoryIndex::from_u32(memory_index);
    let memory = instance.get_defined_memory(index);
    memory.atomic_notify(addr_index, count)
}
1066
// Implementation of `memory.atomic.wait32` for locally defined memories.
//
// Blocks until notified, until `*addr_index != expected`, or until the
// timeout elapses; the returned integer is the wait outcome code.
#[cfg(feature = "threads")]
fn memory_atomic_wait32(
    _store: &mut dyn VMStore,
    instance: Pin<&mut Instance>,
    memory_index: u32,
    addr_index: u64,
    expected: u32,
    timeout: u64,
) -> Result<u32, Trap> {
    // A negative timeout (interpreted as `i64`) encodes "wait forever";
    // otherwise it is a relative timeout in nanoseconds.
    let timeout = if (timeout as i64) < 0 {
        None
    } else {
        Some(Duration::from_nanos(timeout))
    };
    let index = DefinedMemoryIndex::from_u32(memory_index);
    let outcome = instance
        .get_defined_memory(index)
        .atomic_wait32(addr_index, expected, timeout)?;
    Ok(outcome as u32)
}
1083
// Implementation of `memory.atomic.wait64` for locally defined memories.
//
// Same as `memory_atomic_wait32`, but comparing a 64-bit expected value.
#[cfg(feature = "threads")]
fn memory_atomic_wait64(
    _store: &mut dyn VMStore,
    instance: Pin<&mut Instance>,
    memory_index: u32,
    addr_index: u64,
    expected: u64,
    timeout: u64,
) -> Result<u32, Trap> {
    // A negative timeout (interpreted as `i64`) encodes "wait forever";
    // otherwise it is a relative timeout in nanoseconds.
    let timeout = if (timeout as i64) < 0 {
        None
    } else {
        Some(Duration::from_nanos(timeout))
    };
    let index = DefinedMemoryIndex::from_u32(memory_index);
    let outcome = instance
        .get_defined_memory(index)
        .atomic_wait64(addr_index, expected, timeout)?;
    Ok(outcome as u32)
}
1100
// Hook for when an instance runs out of fuel.
//
// Delegates to the store's out-of-gas handler; if that returns an error the
// libcall machinery raises it as a trap in the running Wasm.
fn out_of_gas(store: &mut dyn VMStore, _instance: Pin<&mut Instance>) -> Result<()> {
    store.out_of_gas()
}
1105
// Hook for when an instance observes that the epoch has changed.
//
// Returns the next epoch deadline wrapped in `NextEpoch` so the value can
// cross the libcall ABI via the unwind-sentinel machinery.
#[cfg(target_has_atomic = "64")]
fn new_epoch(store: &mut dyn VMStore, _instance: Pin<&mut Instance>) -> Result<NextEpoch> {
    store.new_epoch().map(NextEpoch)
}
1111
/// Newtype around the next epoch deadline returned by `new_epoch`, giving it
/// a `HostResultHasUnwindSentinel` impl so it can be returned to compiled
/// code as a raw `u64`.
struct NextEpoch(u64);

// SAFETY: `u64::MAX` is reserved as the sentinel value distinguishing an
// unwind from a successful return, so the epoch value itself must never be
// `u64::MAX`. NOTE(review): this relies on `Store::new_epoch` never
// producing `u64::MAX` — confirm at its definition.
unsafe impl HostResultHasUnwindSentinel for NextEpoch {
    type Abi = u64;
    const SENTINEL: u64 = u64::MAX;
    fn into_abi(self) -> u64 {
        self.0
    }
}
1121
// Hook for validating malloc using wmemcheck_state.
//
// Records the new allocation `[addr, addr + len)` and re-enables load/store
// checking (which `malloc_start` turned off on entry to malloc).
#[cfg(feature = "wmemcheck")]
unsafe fn check_malloc(
    _store: &mut dyn VMStore,
    instance: Pin<&mut Instance>,
    addr: u32,
    len: u32,
) -> Result<()> {
    // Nothing to do unless wmemcheck is enabled for this instance.
    let Some(wmemcheck_state) = instance.wmemcheck_state_mut() else {
        return Ok(());
    };
    let result = wmemcheck_state.malloc(addr as usize, len as usize);
    // Checking was suspended while malloc ran; turn it back on regardless of
    // whether the allocation itself was valid.
    wmemcheck_state.memcheck_on();
    match result {
        Ok(()) => Ok(()),
        Err(DoubleMalloc { addr, len }) => {
            bail!("Double malloc at addr {:#x} of size {}", addr, len)
        }
        Err(OutOfBounds { addr, len }) => {
            bail!("Malloc out of bounds at addr {:#x} of size {}", addr, len)
        }
        _ => panic!("unreachable"),
    }
}
1148
// Hook for validating free using wmemcheck_state.
//
// Records the deallocation at `addr` and re-enables load/store checking
// (which `free_start` turned off on entry to free).
#[cfg(feature = "wmemcheck")]
unsafe fn check_free(
    _store: &mut dyn VMStore,
    instance: Pin<&mut Instance>,
    addr: u32,
) -> Result<()> {
    // Nothing to do unless wmemcheck is enabled for this instance.
    let Some(wmemcheck_state) = instance.wmemcheck_state_mut() else {
        return Ok(());
    };
    let result = wmemcheck_state.free(addr as usize);
    // Checking was suspended while free ran; turn it back on regardless of
    // whether the free itself was valid.
    wmemcheck_state.memcheck_on();
    match result {
        Ok(()) => Ok(()),
        Err(InvalidFree { addr }) => {
            bail!("Invalid free at addr {:#x}", addr)
        }
        _ => panic!("unreachable"),
    }
}
1171
// Hook for validating load using wmemcheck_state.
//
// Verifies that the `num_bytes`-wide read at effective address
// `addr + offset` touches only valid, initialized memory.
#[cfg(feature = "wmemcheck")]
fn check_load(
    _store: &mut dyn VMStore,
    instance: Pin<&mut Instance>,
    num_bytes: u32,
    addr: u32,
    offset: u32,
) -> Result<()> {
    // Nothing to validate unless wmemcheck is enabled for this instance.
    let Some(wmemcheck_state) = instance.wmemcheck_state_mut() else {
        return Ok(());
    };
    match wmemcheck_state.read(addr as usize + offset as usize, num_bytes as usize) {
        Ok(()) => Ok(()),
        Err(InvalidRead { addr, len }) => {
            bail!("Invalid load at addr {:#x} of size {}", addr, len)
        }
        Err(OutOfBounds { addr, len }) => {
            bail!("Load out of bounds at addr {:#x} of size {}", addr, len)
        }
        _ => panic!("unreachable"),
    }
}
1198
// Hook for validating store using wmemcheck_state.
//
// Verifies that the `num_bytes`-wide write at effective address
// `addr + offset` targets only valid memory, and marks it initialized.
#[cfg(feature = "wmemcheck")]
fn check_store(
    _store: &mut dyn VMStore,
    instance: Pin<&mut Instance>,
    num_bytes: u32,
    addr: u32,
    offset: u32,
) -> Result<()> {
    // Nothing to validate unless wmemcheck is enabled for this instance.
    let Some(wmemcheck_state) = instance.wmemcheck_state_mut() else {
        return Ok(());
    };
    match wmemcheck_state.write(addr as usize + offset as usize, num_bytes as usize) {
        Ok(()) => Ok(()),
        Err(InvalidWrite { addr, len }) => {
            bail!("Invalid store at addr {:#x} of size {}", addr, len)
        }
        Err(OutOfBounds { addr, len }) => {
            bail!("Store out of bounds at addr {:#x} of size {}", addr, len)
        }
        _ => panic!("unreachable"),
    }
}
1225
// Hook for turning wmemcheck load/store validation off when entering a malloc function.
//
// `check_malloc` turns validation back on when malloc returns.
#[cfg(feature = "wmemcheck")]
fn malloc_start(_store: &mut dyn VMStore, instance: Pin<&mut Instance>) {
    let Some(state) = instance.wmemcheck_state_mut() else {
        return;
    };
    state.memcheck_off();
}
1233
// Hook for turning wmemcheck load/store validation off when entering a free function.
//
// `check_free` turns validation back on when free returns.
#[cfg(feature = "wmemcheck")]
fn free_start(_store: &mut dyn VMStore, instance: Pin<&mut Instance>) {
    let Some(state) = instance.wmemcheck_state_mut() else {
        return;
    };
    state.memcheck_off();
}
1241
// Hook for tracking wasm stack updates using wmemcheck_state.
//
// Currently a deliberate no-op; the arguments are accepted so the libcall
// signature is stable once stack tracing is implemented.
fn update_stack_pointer(_store: &mut dyn VMStore, _instance: Pin<&mut Instance>, _value: u32) {
    // TODO: stack-tracing has yet to be finalized. All memory below
    // the address of the top of the stack is marked as valid for
    // loads and stores.
    // if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
    //     instance.wmemcheck_state.update_stack_pointer(value as usize);
    // }
}
1252
// Hook updating wmemcheck_state memory state vector every time memory.grow is called.
#[cfg(feature = "wmemcheck")]
fn update_mem_size(_store: &mut dyn VMStore, instance: Pin<&mut Instance>, num_pages: u32) {
    // A Wasm page is 64 KiB.
    const PAGE_SIZE: usize = 64 * 1024;
    if let Some(state) = instance.wmemcheck_state_mut() {
        state.update_mem_size(num_pages as usize * PAGE_SIZE);
    }
}
1262
1263fn floor_f32(_store: &mut dyn VMStore, _instance: Pin<&mut Instance>, val: f32) -> f32 {
1264    wasmtime_math::WasmFloat::wasm_floor(val)
1265}
1266
1267fn floor_f64(_store: &mut dyn VMStore, _instance: Pin<&mut Instance>, val: f64) -> f64 {
1268    wasmtime_math::WasmFloat::wasm_floor(val)
1269}
1270
1271fn ceil_f32(_store: &mut dyn VMStore, _instance: Pin<&mut Instance>, val: f32) -> f32 {
1272    wasmtime_math::WasmFloat::wasm_ceil(val)
1273}
1274
1275fn ceil_f64(_store: &mut dyn VMStore, _instance: Pin<&mut Instance>, val: f64) -> f64 {
1276    wasmtime_math::WasmFloat::wasm_ceil(val)
1277}
1278
1279fn trunc_f32(_store: &mut dyn VMStore, _instance: Pin<&mut Instance>, val: f32) -> f32 {
1280    wasmtime_math::WasmFloat::wasm_trunc(val)
1281}
1282
1283fn trunc_f64(_store: &mut dyn VMStore, _instance: Pin<&mut Instance>, val: f64) -> f64 {
1284    wasmtime_math::WasmFloat::wasm_trunc(val)
1285}
1286
1287fn nearest_f32(_store: &mut dyn VMStore, _instance: Pin<&mut Instance>, val: f32) -> f32 {
1288    wasmtime_math::WasmFloat::wasm_nearest(val)
1289}
1290
1291fn nearest_f64(_store: &mut dyn VMStore, _instance: Pin<&mut Instance>, val: f64) -> f64 {
1292    wasmtime_math::WasmFloat::wasm_nearest(val)
1293}
1294
1295// This intrinsic is only used on x86_64 platforms as an implementation of
1296// the `i8x16.swizzle` instruction when `pshufb` in SSSE3 is not available.
1297#[cfg(all(target_arch = "x86_64", target_feature = "sse"))]
1298fn i8x16_swizzle(
1299    _store: &mut dyn VMStore,
1300    _instance: Pin<&mut Instance>,
1301    a: i8x16,
1302    b: i8x16,
1303) -> i8x16 {
1304    union U {
1305        reg: i8x16,
1306        mem: [u8; 16],
1307    }
1308
1309    unsafe {
1310        let a = U { reg: a }.mem;
1311        let b = U { reg: b }.mem;
1312
1313        // Use the `swizzle` semantics of returning 0 on any out-of-bounds
1314        // index, rather than the x86 pshufb semantics, since Wasmtime uses
1315        // this to implement `i8x16.swizzle`.
1316        let select = |arr: &[u8; 16], byte: u8| {
1317            if byte >= 16 { 0x00 } else { arr[byte as usize] }
1318        };
1319
1320        U {
1321            mem: [
1322                select(&a, b[0]),
1323                select(&a, b[1]),
1324                select(&a, b[2]),
1325                select(&a, b[3]),
1326                select(&a, b[4]),
1327                select(&a, b[5]),
1328                select(&a, b[6]),
1329                select(&a, b[7]),
1330                select(&a, b[8]),
1331                select(&a, b[9]),
1332                select(&a, b[10]),
1333                select(&a, b[11]),
1334                select(&a, b[12]),
1335                select(&a, b[13]),
1336                select(&a, b[14]),
1337                select(&a, b[15]),
1338            ],
1339        }
1340        .reg
1341    }
1342}
1343
// Stub version of `i8x16_swizzle` for targets where the SSE-based variant
// above is not compiled in; it should never actually be called there, so
// reaching this body indicates a bug.
#[cfg(not(all(target_arch = "x86_64", target_feature = "sse")))]
fn i8x16_swizzle(
    _store: &mut dyn VMStore,
    _instance: Pin<&mut Instance>,
    _a: i8x16,
    _b: i8x16,
) -> i8x16 {
    unreachable!()
}
1353
1354// This intrinsic is only used on x86_64 platforms as an implementation of
1355// the `i8x16.shuffle` instruction when `pshufb` in SSSE3 is not available.
1356#[cfg(all(target_arch = "x86_64", target_feature = "sse"))]
1357fn i8x16_shuffle(
1358    _store: &mut dyn VMStore,
1359    _instance: Pin<&mut Instance>,
1360    a: i8x16,
1361    b: i8x16,
1362    c: i8x16,
1363) -> i8x16 {
1364    union U {
1365        reg: i8x16,
1366        mem: [u8; 16],
1367    }
1368
1369    unsafe {
1370        let ab = [U { reg: a }.mem, U { reg: b }.mem];
1371        let c = U { reg: c }.mem;
1372
1373        // Use the `shuffle` semantics of returning 0 on any out-of-bounds
1374        // index, rather than the x86 pshufb semantics, since Wasmtime uses
1375        // this to implement `i8x16.shuffle`.
1376        let select = |arr: &[[u8; 16]; 2], byte: u8| {
1377            if byte >= 32 {
1378                0x00
1379            } else if byte >= 16 {
1380                arr[1][byte as usize - 16]
1381            } else {
1382                arr[0][byte as usize]
1383            }
1384        };
1385
1386        U {
1387            mem: [
1388                select(&ab, c[0]),
1389                select(&ab, c[1]),
1390                select(&ab, c[2]),
1391                select(&ab, c[3]),
1392                select(&ab, c[4]),
1393                select(&ab, c[5]),
1394                select(&ab, c[6]),
1395                select(&ab, c[7]),
1396                select(&ab, c[8]),
1397                select(&ab, c[9]),
1398                select(&ab, c[10]),
1399                select(&ab, c[11]),
1400                select(&ab, c[12]),
1401                select(&ab, c[13]),
1402                select(&ab, c[14]),
1403                select(&ab, c[15]),
1404            ],
1405        }
1406        .reg
1407    }
1408}
1409
// Stub version of `i8x16_shuffle` for targets where the SSE-based variant
// above is not compiled in; it should never actually be called there, so
// reaching this body indicates a bug.
#[cfg(not(all(target_arch = "x86_64", target_feature = "sse")))]
fn i8x16_shuffle(
    _store: &mut dyn VMStore,
    _instance: Pin<&mut Instance>,
    _a: i8x16,
    _b: i8x16,
    _c: i8x16,
) -> i8x16 {
    unreachable!()
}
1420
1421fn fma_f32x4(
1422    _store: &mut dyn VMStore,
1423    _instance: Pin<&mut Instance>,
1424    x: f32x4,
1425    y: f32x4,
1426    z: f32x4,
1427) -> f32x4 {
1428    union U {
1429        reg: f32x4,
1430        mem: [f32; 4],
1431    }
1432
1433    unsafe {
1434        let x = U { reg: x }.mem;
1435        let y = U { reg: y }.mem;
1436        let z = U { reg: z }.mem;
1437
1438        U {
1439            mem: [
1440                wasmtime_math::WasmFloat::wasm_mul_add(x[0], y[0], z[0]),
1441                wasmtime_math::WasmFloat::wasm_mul_add(x[1], y[1], z[1]),
1442                wasmtime_math::WasmFloat::wasm_mul_add(x[2], y[2], z[2]),
1443                wasmtime_math::WasmFloat::wasm_mul_add(x[3], y[3], z[3]),
1444            ],
1445        }
1446        .reg
1447    }
1448}
1449
1450fn fma_f64x2(
1451    _store: &mut dyn VMStore,
1452    _instance: Pin<&mut Instance>,
1453    x: f64x2,
1454    y: f64x2,
1455    z: f64x2,
1456) -> f64x2 {
1457    union U {
1458        reg: f64x2,
1459        mem: [f64; 2],
1460    }
1461
1462    unsafe {
1463        let x = U { reg: x }.mem;
1464        let y = U { reg: y }.mem;
1465        let z = U { reg: z }.mem;
1466
1467        U {
1468            mem: [
1469                wasmtime_math::WasmFloat::wasm_mul_add(x[0], y[0], z[0]),
1470                wasmtime_math::WasmFloat::wasm_mul_add(x[1], y[1], z[1]),
1471            ],
1472        }
1473        .reg
1474    }
1475}
1476
1477/// This intrinsic is just used to record trap information.
1478///
1479/// The `Infallible` "ok" type here means that this never returns success, it
1480/// only ever returns an error, and this hooks into the machinery to handle
1481/// `Result` values to record such trap information.
1482fn trap(
1483    _store: &mut dyn VMStore,
1484    _instance: Pin<&mut Instance>,
1485    code: u8,
1486) -> Result<Infallible, TrapReason> {
1487    Err(TrapReason::Wasm(
1488        wasmtime_environ::Trap::from_u8(code).unwrap(),
1489    ))
1490}
1491
// Resumes a previously-recorded trap by unwinding out of compiled Wasm
// frames back to the host.
fn raise(_store: &mut dyn VMStore, _instance: Pin<&mut Instance>) {
    // SAFETY: this is only called from compiled wasm so we know that wasm has
    // already been entered. It's a dynamic safety precondition that the trap
    // information has already been arranged to be present.
    #[cfg(has_host_compiler_backend)]
    unsafe {
        crate::runtime::vm::traphandlers::raise_preexisting_trap()
    }

    // When Cranelift isn't in use then this is an unused libcall for Pulley, so
    // just insert a stub to catch bugs if it's accidentally called.
    #[cfg(not(has_host_compiler_backend))]
    unreachable!()
}
1506
1507// Builtins for continuations. These are thin wrappers around the
1508// respective definitions in stack_switching.rs.
1509#[cfg(feature = "stack-switching")]
1510fn cont_new(
1511    store: &mut dyn VMStore,
1512    instance: Pin<&mut Instance>,
1513    func: *mut u8,
1514    param_count: u32,
1515    result_count: u32,
1516) -> Result<Option<AllocationSize>, TrapReason> {
1517    let ans =
1518        crate::vm::stack_switching::cont_new(store, instance, func, param_count, result_count)?;
1519    Ok(Some(AllocationSize(ans.cast::<u8>() as usize)))
1520}