use crate::prelude::*;
use crate::runtime::vm::table::{Table, TableElementType};
use crate::runtime::vm::vmcontext::VMFuncRef;
#[cfg(feature = "gc")]
use crate::runtime::vm::VMGcRef;
use crate::runtime::vm::{HostResultHasUnwindSentinel, Instance, TrapReason, VMStore};
use core::convert::Infallible;
use core::ptr::NonNull;
#[cfg(feature = "threads")]
use core::time::Duration;
use wasmtime_environ::{DataIndex, ElemIndex, FuncIndex, MemoryIndex, TableIndex, Trap};
#[cfg(feature = "wmemcheck")]
use wasmtime_wmemcheck::AccessError::{
    DoubleMalloc, InvalidFree, InvalidRead, InvalidWrite, OutOfBounds,
};

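/// Raw libcall entrypoints that compiled Wasm code invokes directly.
///
/// Each entrypoint is generated by the `libcall!` macro below from the
/// signatures provided by `wasmtime_environ::foreach_builtin_function!`. The
/// generated wrapper recovers the instance and store from the `vmctx`
/// pointer, forwards to the corresponding implementation in the parent
/// module, and catches any unwind so traps and panics are recorded rather
/// than propagated across the Wasm frame boundary.
///
/// As an illustrative sketch (not the literal expansion), a builtin declared
/// as `memory32_grow(vmctx: vmctx, delta: u64, index: u32) -> pointer;`
/// expands to roughly:
///
/// ```ignore
/// pub unsafe extern "C" fn memory32_grow(
///     vmctx: NonNull<VMContext>,
///     delta: u64,
///     index: u32,
/// ) -> *mut u8 {
///     catch_unwind_and_record_trap(|| {
///         InstanceAndStore::from_vmctx(vmctx, |pair| {
///             let (instance, store) = pair.unpack_mut();
///             super::memory32_grow(store, instance, delta, index)
///         })
///     })
/// }
/// ```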
pub mod raw {
    #![allow(unused_doc_comments, unused_attributes)]

    use crate::runtime::vm::{InstanceAndStore, VMContext};
    use core::ptr::NonNull;

    macro_rules! libcall {
        (
            $(
                $( #[cfg($attr:meta)] )?
                $name:ident( vmctx: vmctx $(, $pname:ident: $param:ident )* ) $(-> $result:ident)?;
            )*
        ) => {
            $(
                #[allow(unused_variables, missing_docs)]
                pub unsafe extern "C" fn $name(
                    vmctx: NonNull<VMContext>,
                    $( $pname : libcall!(@ty $param), )*
                ) $(-> libcall!(@ty $result))? {
                    $(#[cfg($attr)])?
                    {
                        crate::runtime::vm::traphandlers::catch_unwind_and_record_trap(|| {
                            InstanceAndStore::from_vmctx(vmctx, |pair| {
                                let (instance, store) = pair.unpack_mut();
                                super::$name(store, instance, $($pname),*)
                            })
                        })
                    }
                    $(
                        #[cfg(not($attr))]
                        unreachable!();
                    )?
                }

                #[allow(non_upper_case_globals)]
                const _: () = {
                    #[used]
                    static I_AM_USED: unsafe extern "C" fn(
                        NonNull<VMContext>,
                        $( $pname : libcall!(@ty $param), )*
                    ) $( -> libcall!(@ty $result))? = $name;
                };
            )*
        };

        (@ty u32) => (u32);
        (@ty u64) => (u64);
        (@ty u8) => (u8);
        (@ty bool) => (bool);
        (@ty pointer) => (*mut u8);
    }

    wasmtime_environ::foreach_builtin_function!(libcall);
}

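/// Implementation of `memory.grow` for locally-defined memories.
///
/// Returns the previous size of the memory, in pages, or `None` (observed
/// by compiled code as `-1`) if the grow failed.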
fn memory32_grow(
    store: &mut dyn VMStore,
    instance: &mut Instance,
    delta: u64,
    memory_index: u32,
) -> Result<Option<AllocationSize>, TrapReason> {
    let memory_index = MemoryIndex::from_u32(memory_index);
    let result = instance
        .memory_grow(store, memory_index, delta)?
        .map(|size_in_bytes| {
            AllocationSize(size_in_bytes / instance.memory_page_size(memory_index))
        });

    Ok(result)
}

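/// The previous size of a memory or table after a successful grow, measured
/// in pages (for memories) or elements (for tables).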
struct AllocationSize(usize);

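/// Encodes `Option<AllocationSize>` into a single pointer-sized return value
/// for compiled code: a successful grow returns the old size, a failed grow
/// returns `usize::MAX` (which Wasm observes as `-1`), and `usize::MAX - 1`
/// is reserved as the sentinel meaning a trap or panic is being recorded.
/// The `debug_assert!` below guarantees the three cases never collide.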
unsafe impl HostResultHasUnwindSentinel for Option<AllocationSize> {
    type Abi = *mut u8;
    const SENTINEL: *mut u8 = (usize::MAX - 1) as *mut u8;

    fn into_abi(self) -> *mut u8 {
        match self {
            Some(size) => {
                debug_assert!(size.0 < (usize::MAX - 1));
                size.0 as *mut u8
            }
            None => usize::MAX as *mut u8,
        }
    }
}

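/// Implementation of `table.grow` for `funcref` tables.
///
/// # Safety
///
/// `init_value` must be a valid, possibly-null `*mut VMFuncRef`; reaching
/// this libcall for a GC-reference table is a compiler bug.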
unsafe fn table_grow_func_ref(
    store: &mut dyn VMStore,
    instance: &mut Instance,
    table_index: u32,
    delta: u64,
    init_value: *mut u8,
) -> Result<Option<AllocationSize>> {
    let table_index = TableIndex::from_u32(table_index);

    let element = match instance.table_element_type(table_index) {
        TableElementType::Func => NonNull::new(init_value.cast::<VMFuncRef>()).into(),
        TableElementType::GcRef => unreachable!(),
    };

    let result = instance
        .table_grow(store, table_index, delta, element)?
        .map(AllocationSize);
    Ok(result)
}

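/// Implementation of `table.grow` for GC-reference tables.
///
/// The raw `init_value` is cloned into the GC store so the new table
/// elements hold their own reference.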
#[cfg(feature = "gc")]
unsafe fn table_grow_gc_ref(
    store: &mut dyn VMStore,
    instance: &mut Instance,
    table_index: u32,
    delta: u64,
    init_value: u32,
) -> Result<Option<AllocationSize>> {
    let table_index = TableIndex::from_u32(table_index);

    let element = match instance.table_element_type(table_index) {
        TableElementType::Func => unreachable!(),
        TableElementType::GcRef => VMGcRef::from_raw_u32(init_value)
            .map(|r| {
                store
                    .store_opaque_mut()
                    .unwrap_gc_store_mut()
                    .clone_gc_ref(&r)
            })
            .into(),
    };

    let result = instance
        .table_grow(store, table_index, delta, element)?
        .map(AllocationSize);
    Ok(result)
}

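/// Implementation of `table.fill` for `funcref` tables: writes `len` copies
/// of `val` starting at index `dst`.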
unsafe fn table_fill_func_ref(
    store: &mut dyn VMStore,
    instance: &mut Instance,
    table_index: u32,
    dst: u64,
    val: *mut u8,
    len: u64,
) -> Result<()> {
    let table_index = TableIndex::from_u32(table_index);
    let table = &mut *instance.get_table(table_index);
    match table.element_type() {
        TableElementType::Func => {
            let val = NonNull::new(val.cast::<VMFuncRef>());
            table.fill(store.optional_gc_store_mut()?, dst, val.into(), len)?;
            Ok(())
        }
        TableElementType::GcRef => unreachable!(),
    }
}

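/// Implementation of `table.fill` for GC-reference tables.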
#[cfg(feature = "gc")]
unsafe fn table_fill_gc_ref(
    store: &mut dyn VMStore,
    instance: &mut Instance,
    table_index: u32,
    dst: u64,
    val: u32,
    len: u64,
) -> Result<()> {
    let table_index = TableIndex::from_u32(table_index);
    let table = &mut *instance.get_table(table_index);
    match table.element_type() {
        TableElementType::Func => unreachable!(),
        TableElementType::GcRef => {
            let gc_store = store.store_opaque_mut().unwrap_gc_store_mut();
            let gc_ref = VMGcRef::from_raw_u32(val);
            let gc_ref = gc_ref.map(|r| gc_store.clone_gc_ref(&r));
            table.fill(Some(gc_store), dst, gc_ref.into(), len)?;
            Ok(())
        }
    }
}

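/// Implementation of `table.copy` between two (possibly identical) tables.
///
/// The source range is lazily initialized up front via
/// `get_table_with_lazy_init`; the range computation saturates at `u64::MAX`
/// because out-of-bounds ranges are rejected by `Table::copy` itself.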
unsafe fn table_copy(
    store: &mut dyn VMStore,
    instance: &mut Instance,
    dst_table_index: u32,
    src_table_index: u32,
    dst: u64,
    src: u64,
    len: u64,
) -> Result<()> {
    let dst_table_index = TableIndex::from_u32(dst_table_index);
    let src_table_index = TableIndex::from_u32(src_table_index);
    let store = store.store_opaque_mut();
    let dst_table = instance.get_table(dst_table_index);
    let src_range = src..(src.checked_add(len).unwrap_or(u64::MAX));
    let src_table = instance.get_table_with_lazy_init(src_table_index, src_range);
    let gc_store = store.optional_gc_store_mut()?;
    Table::copy(gc_store, dst_table, src_table, dst, src, len)?;
    Ok(())
}

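/// Implementation of `table.init`, copying elements of the passive element
/// segment `elem_index` into the given table.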
fn table_init(
    store: &mut dyn VMStore,
    instance: &mut Instance,
    table_index: u32,
    elem_index: u32,
    dst: u64,
    src: u64,
    len: u64,
) -> Result<(), Trap> {
    let table_index = TableIndex::from_u32(table_index);
    let elem_index = ElemIndex::from_u32(elem_index);
    instance.table_init(
        store.store_opaque_mut(),
        table_index,
        elem_index,
        dst,
        src,
        len,
    )
}

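/// Implementation of `elem.drop`, releasing a passive element segment.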
fn elem_drop(_store: &mut dyn VMStore, instance: &mut Instance, elem_index: u32) {
    let elem_index = ElemIndex::from_u32(elem_index);
    instance.elem_drop(elem_index)
}

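/// Implementation of `memory.copy`, which may copy within one memory or
/// between two different memories.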
fn memory_copy(
    _store: &mut dyn VMStore,
    instance: &mut Instance,
    dst_index: u32,
    dst: u64,
    src_index: u32,
    src: u64,
    len: u64,
) -> Result<(), Trap> {
    let src_index = MemoryIndex::from_u32(src_index);
    let dst_index = MemoryIndex::from_u32(dst_index);
    instance.memory_copy(dst_index, dst, src_index, src, len)
}

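/// Implementation of `memory.fill` for locally-defined memories. Only the
/// low byte of `val` is used, matching Wasm semantics.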
fn memory_fill(
    _store: &mut dyn VMStore,
    instance: &mut Instance,
    memory_index: u32,
    dst: u64,
    val: u32,
    len: u64,
) -> Result<(), Trap> {
    let memory_index = MemoryIndex::from_u32(memory_index);
    #[allow(clippy::cast_possible_truncation)]
    instance.memory_fill(memory_index, dst, val as u8, len)
}

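/// Implementation of `memory.init`, copying a range of a passive data
/// segment into linear memory.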
fn memory_init(
    _store: &mut dyn VMStore,
    instance: &mut Instance,
    memory_index: u32,
    data_index: u32,
    dst: u64,
    src: u32,
    len: u32,
) -> Result<(), Trap> {
    let memory_index = MemoryIndex::from_u32(memory_index);
    let data_index = DataIndex::from_u32(data_index);
    instance.memory_init(memory_index, data_index, dst, src, len)
}

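/// Implementation of `ref.func`: returns the `VMFuncRef` for the given
/// function index, which validation guarantees exists.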
fn ref_func(_store: &mut dyn VMStore, instance: &mut Instance, func_index: u32) -> NonNull<u8> {
    instance
        .get_func_ref(FuncIndex::from_u32(func_index))
        .expect("ref_func: funcref should always be available for given func index")
        .cast()
}

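/// Implementation of `data.drop`, releasing a passive data segment.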
fn data_drop(_store: &mut dyn VMStore, instance: &mut Instance, data_index: u32) {
    let data_index = DataIndex::from_u32(data_index);
    instance.data_drop(data_index)
}

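/// Lazily initializes (if necessary) and returns a `funcref` table element.
/// Returns a null pointer for a null element; bounds were already checked by
/// compiled code.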
unsafe fn table_get_lazy_init_func_ref(
    _store: &mut dyn VMStore,
    instance: &mut Instance,
    table_index: u32,
    index: u64,
) -> *mut u8 {
    let table_index = TableIndex::from_u32(table_index);
    let table = instance.get_table_with_lazy_init(table_index, core::iter::once(index));
    let elem = (*table)
        .get(None, index)
        .expect("table access already bounds-checked");

    match elem.into_func_ref_asserting_initialized() {
        Some(ptr) => ptr.as_ptr().cast(),
        None => core::ptr::null_mut(),
    }
}

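/// Drops a GC reference that compiled code is done with, releasing its
/// reference under the DRC collector. The reference must be non-null.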
#[cfg(feature = "gc-drc")]
unsafe fn drop_gc_ref(store: &mut dyn VMStore, _instance: &mut Instance, gc_ref: u32) {
    log::trace!("libcalls::drop_gc_ref({gc_ref:#x})");
    let gc_ref = VMGcRef::from_raw_u32(gc_ref).expect("non-null VMGcRef");
    store
        .store_opaque_mut()
        .unwrap_gc_store_mut()
        .drop_gc_ref(gc_ref);
}

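/// Triggers a garbage collection.
///
/// If `gc_ref` is non-null it is cloned and exposed to Wasm before the
/// collection so it stays rooted, and its (possibly updated) raw
/// representation is returned afterwards; a null `gc_ref` returns `0`.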
#[cfg(feature = "gc-drc")]
unsafe fn gc(store: &mut dyn VMStore, _instance: &mut Instance, gc_ref: u32) -> Result<u32> {
    let gc_ref = VMGcRef::from_raw_u32(gc_ref);
    let gc_ref = gc_ref.map(|r| {
        store
            .store_opaque_mut()
            .unwrap_gc_store_mut()
            .clone_gc_ref(&r)
    });

    if let Some(gc_ref) = &gc_ref {
        let gc_store = store.store_opaque_mut().unwrap_gc_store_mut();
        let gc_ref = gc_store.clone_gc_ref(gc_ref);
        let _ = gc_store.expose_gc_ref_to_wasm(gc_ref);
    }

    match store.maybe_async_gc(gc_ref)? {
        None => Ok(0),
        Some(r) => {
            let raw = store
                .store_opaque_mut()
                .unwrap_gc_store_mut()
                .expose_gc_ref_to_wasm(r);
            Ok(raw)
        }
    }
}

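/// Allocates a raw, uninitialized GC object with the given kind, type,
/// size, and alignment, retrying once after a collection before reporting
/// the heap as out of memory. Returns the raw GC reference exposed to Wasm.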
#[cfg(feature = "gc-drc")]
unsafe fn gc_alloc_raw(
    store: &mut dyn VMStore,
    instance: &mut Instance,
    kind: u32,
    module_interned_type_index: u32,
    size: u32,
    align: u32,
) -> Result<u32> {
    use crate::{vm::VMGcHeader, GcHeapOutOfMemory};
    use core::alloc::Layout;
    use wasmtime_environ::{ModuleInternedTypeIndex, VMGcKind};

    let kind = VMGcKind::from_high_bits_of_u32(kind);
    log::trace!("gc_alloc_raw(kind={kind:?}, size={size}, align={align})");

    let module = instance
        .runtime_module()
        .expect("should never allocate GC types defined in a dummy module");

    let module_interned_type_index = ModuleInternedTypeIndex::from_u32(module_interned_type_index);
    let shared_type_index = module
        .signatures()
        .shared_type(module_interned_type_index)
        .expect("should have engine type index for module type index");

    let header = VMGcHeader::from_kind_and_index(kind, shared_type_index);

    let size = usize::try_from(size).unwrap();
    let align = usize::try_from(align).unwrap();
    let layout = Layout::from_size_align(size, align).unwrap();

    let gc_ref = match store
        .store_opaque_mut()
        .unwrap_gc_store_mut()
        .alloc_raw(header, layout)?
    {
        Some(r) => r,
        None => {
            store.maybe_async_gc(None)?;

            store
                .unwrap_gc_store_mut()
                .alloc_raw(header, layout)?
                .ok_or_else(|| GcHeapOutOfMemory::new(()))?
        }
    };

    let raw = store
        .store_opaque_mut()
        .unwrap_gc_store_mut()
        .expose_gc_ref_to_wasm(gc_ref);

    Ok(raw)
}

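/// Interns a `VMFuncRef` pointer into the GC heap's function-reference
/// table, returning the table identifier that GC objects store in place of
/// the raw pointer.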
#[cfg(feature = "gc")]
unsafe fn intern_func_ref_for_gc_heap(
    store: &mut dyn VMStore,
    _instance: &mut Instance,
    func_ref: *mut u8,
) -> Result<u32> {
    use crate::{store::AutoAssertNoGc, vm::SendSyncPtr};
    use core::ptr::NonNull;

    let mut store = AutoAssertNoGc::new(store.store_opaque_mut());

    let func_ref = func_ref.cast::<VMFuncRef>();
    let func_ref = NonNull::new(func_ref).map(SendSyncPtr::new);

    let func_ref_id = store.gc_store_mut()?.func_ref_table.intern(func_ref);
    Ok(func_ref_id.into_raw())
}

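/// Resolves a previously interned function reference by its table
/// identifier, type-checking it against the expected type unless the
/// module-interned type index is the reserved "untyped" value. Returns null
/// for a null reference.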
#[cfg(feature = "gc")]
unsafe fn get_interned_func_ref(
    store: &mut dyn VMStore,
    instance: &mut Instance,
    func_ref_id: u32,
    module_interned_type_index: u32,
) -> *mut u8 {
    use super::FuncRefTableId;
    use crate::store::AutoAssertNoGc;
    use wasmtime_environ::{packed_option::ReservedValue, ModuleInternedTypeIndex};

    let store = AutoAssertNoGc::new(store.store_opaque_mut());

    let func_ref_id = FuncRefTableId::from_raw(func_ref_id);
    let module_interned_type_index = ModuleInternedTypeIndex::from_bits(module_interned_type_index);

    let func_ref = if module_interned_type_index.is_reserved_value() {
        store
            .unwrap_gc_store()
            .func_ref_table
            .get_untyped(func_ref_id)
    } else {
        let types = store.engine().signatures();
        let engine_ty = instance.engine_type_index(module_interned_type_index);
        store
            .unwrap_gc_store()
            .func_ref_table
            .get_typed(types, func_ref_id, engine_ty)
    };

    func_ref.map_or(core::ptr::null_mut(), |f| f.as_ptr().cast())
}

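/// Implementation of `array.new_data`: allocates a new array whose elements
/// are copied out of the passive data segment `data_index`.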
#[cfg(feature = "gc")]
unsafe fn array_new_data(
    store: &mut dyn VMStore,
    instance: &mut Instance,
    array_type_index: u32,
    data_index: u32,
    src: u32,
    len: u32,
) -> Result<u32> {
    use crate::{ArrayType, GcHeapOutOfMemory};
    use wasmtime_environ::ModuleInternedTypeIndex;

    let array_type_index = ModuleInternedTypeIndex::from_u32(array_type_index);
    let data_index = DataIndex::from_u32(data_index);

    let data_range = instance.wasm_data_range(data_index);
    let shared_ty = instance.engine_type_index(array_type_index);
    let array_ty = ArrayType::from_shared_type_index(store.store_opaque_mut().engine(), shared_ty);
    let one_elem_size = array_ty
        .element_type()
        .data_byte_size()
        .expect("Wasm validation ensures that this type has a defined byte size");
    let byte_len = len
        .checked_mul(one_elem_size)
        .and_then(|x| usize::try_from(x).ok())
        .ok_or_else(|| Trap::MemoryOutOfBounds)?;

    let src = usize::try_from(src).map_err(|_| Trap::MemoryOutOfBounds)?;
    let data = instance
        .wasm_data(data_range)
        .get(src..)
        .and_then(|d| d.get(..byte_len))
        .ok_or_else(|| Trap::MemoryOutOfBounds)?;

    let gc_layout = store
        .store_opaque_mut()
        .engine()
        .signatures()
        .layout(shared_ty)
        .expect("array types have GC layouts");
    let array_layout = gc_layout.unwrap_array();
    let array_ref = match store
        .store_opaque_mut()
        .unwrap_gc_store_mut()
        .alloc_uninit_array(shared_ty, len, &array_layout)?
    {
        Some(a) => a,
        None => {
            store.maybe_async_gc(None)?;
            // Retry the allocation with the same element count (`len`, not
            // `byte_len`) after collecting garbage.
            store
                .store_opaque_mut()
                .unwrap_gc_store_mut()
                .alloc_uninit_array(shared_ty, len, &array_layout)?
                .ok_or_else(|| GcHeapOutOfMemory::new(()))?
        }
    };

    store
        .store_opaque_mut()
        .unwrap_gc_store_mut()
        .gc_object_data(array_ref.as_gc_ref())
        .copy_from_slice(array_layout.base_size, data);

    let raw = store
        .store_opaque_mut()
        .unwrap_gc_store_mut()
        .expose_gc_ref_to_wasm(array_ref.into());
    Ok(raw)
}

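/// Implementation of `array.init_data`: copies a range of a passive data
/// segment into an existing array, bounds-checking both sides.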
#[cfg(feature = "gc")]
unsafe fn array_init_data(
    store: &mut dyn VMStore,
    instance: &mut Instance,
    array_type_index: u32,
    array: u32,
    dst: u32,
    data_index: u32,
    src: u32,
    len: u32,
) -> Result<()> {
    use crate::ArrayType;
    use wasmtime_environ::ModuleInternedTypeIndex;

    let array_type_index = ModuleInternedTypeIndex::from_u32(array_type_index);
    let data_index = DataIndex::from_u32(data_index);

    log::trace!(
        "array.init_data(array={array:#x}, dst={dst}, data_index={data_index:?}, src={src}, len={len})",
    );

    let gc_ref = VMGcRef::from_raw_u32(array).ok_or_else(|| Trap::NullReference)?;
    let array = gc_ref
        .into_arrayref(&*store.unwrap_gc_store().gc_heap)
        .expect("gc ref should be an array");

    let dst = usize::try_from(dst).map_err(|_| Trap::MemoryOutOfBounds)?;
    let src = usize::try_from(src).map_err(|_| Trap::MemoryOutOfBounds)?;
    let len = usize::try_from(len).map_err(|_| Trap::MemoryOutOfBounds)?;

    let array_len = array.len(store.store_opaque());
    let array_len = usize::try_from(array_len).map_err(|_| Trap::ArrayOutOfBounds)?;
    if dst.checked_add(len).ok_or_else(|| Trap::ArrayOutOfBounds)? > array_len {
        return Err(Trap::ArrayOutOfBounds.into());
    }

    let shared_ty = instance.engine_type_index(array_type_index);
    let array_ty = ArrayType::from_shared_type_index(store.engine(), shared_ty);
    let one_elem_size = array_ty
        .element_type()
        .data_byte_size()
        .expect("Wasm validation ensures that this type has a defined byte size");
    let data_len = len
        .checked_mul(usize::try_from(one_elem_size).unwrap())
        .ok_or_else(|| Trap::MemoryOutOfBounds)?;

    let data_range = instance.wasm_data_range(data_index);
    let data = instance
        .wasm_data(data_range)
        .get(src..)
        .and_then(|d| d.get(..data_len))
        .ok_or_else(|| Trap::MemoryOutOfBounds)?;

    let dst_offset = u32::try_from(dst)
        .unwrap()
        .checked_mul(one_elem_size)
        .unwrap();

    let array_layout = store
        .engine()
        .signatures()
        .layout(shared_ty)
        .expect("array types have GC layouts");
    let array_layout = array_layout.unwrap_array();

    let obj_offset = array_layout.base_size.checked_add(dst_offset).unwrap();

    store
        .unwrap_gc_store_mut()
        .gc_object_data(array.as_gc_ref())
        .copy_from_slice(obj_offset, data);

    Ok(())
}

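/// Implementation of `array.new_elem`: allocates a new array whose elements
/// are taken from the passive element segment `elem_index`.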
#[cfg(feature = "gc")]
unsafe fn array_new_elem(
    store: &mut dyn VMStore,
    instance: &mut Instance,
    array_type_index: u32,
    elem_index: u32,
    src: u32,
    len: u32,
) -> Result<u32> {
    use crate::{
        store::AutoAssertNoGc,
        vm::const_expr::{ConstEvalContext, ConstExprEvaluator},
        ArrayRef, ArrayRefPre, ArrayType, Func, GcHeapOutOfMemory, RootSet, RootedGcRefImpl, Val,
    };
    use wasmtime_environ::{ModuleInternedTypeIndex, TableSegmentElements};

    let array_type_index = ModuleInternedTypeIndex::from_u32(array_type_index);
    let elem_index = ElemIndex::from_u32(elem_index);

    let mut storage = None;
    let elements = instance.passive_element_segment(&mut storage, elem_index);

    let src = usize::try_from(src).map_err(|_| Trap::TableOutOfBounds)?;
    let len = usize::try_from(len).map_err(|_| Trap::TableOutOfBounds)?;

    let shared_ty = instance.engine_type_index(array_type_index);
    let array_ty = ArrayType::from_shared_type_index(store.engine(), shared_ty);
    let elem_ty = array_ty.element_type();
    let pre = ArrayRefPre::_new(store, array_ty);

    RootSet::with_lifo_scope(store, |store| {
        let mut vals = Vec::with_capacity(usize::try_from(elements.len()).unwrap());
        match elements {
            TableSegmentElements::Functions(fs) => {
                vals.extend(
                    fs.get(src..)
                        .and_then(|s| s.get(..len))
                        .ok_or_else(|| Trap::TableOutOfBounds)?
                        .iter()
                        .map(|f| {
                            let raw_func_ref = instance.get_func_ref(*f);
                            let func = raw_func_ref.map(|p| Func::from_vm_func_ref(store, p));
                            Val::FuncRef(func)
                        }),
                );
            }
            TableSegmentElements::Expressions(xs) => {
                let xs = xs
                    .get(src..)
                    .and_then(|s| s.get(..len))
                    .ok_or_else(|| Trap::TableOutOfBounds)?;

                let mut const_context = ConstEvalContext::new(instance);
                let mut const_evaluator = ConstExprEvaluator::default();

                vals.extend(xs.iter().map(|x| unsafe {
                    let raw = const_evaluator
                        .eval(store, &mut const_context, x)
                        .expect("const expr should be valid");
                    let mut store = AutoAssertNoGc::new(store);
                    Val::_from_raw(&mut store, raw, elem_ty.unwrap_val_type())
                }));
            }
        }

        let array = match ArrayRef::_new_fixed(store, &pre, &vals) {
            Ok(a) => a,
            Err(e) if e.is::<GcHeapOutOfMemory<()>>() => {
                store.maybe_async_gc(None)?;
                ArrayRef::_new_fixed(store, &pre, &vals)?
            }
            Err(e) => return Err(e),
        };

        let mut store = AutoAssertNoGc::new(store);
        let gc_ref = array.try_clone_gc_ref(&mut store)?;
        let raw = store.unwrap_gc_store_mut().expose_gc_ref_to_wasm(gc_ref);
        Ok(raw)
    })
}

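/// Implementation of `array.init_elem`: copies a range of a passive element
/// segment into an existing array, bounds-checking both sides.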
#[cfg(feature = "gc")]
unsafe fn array_init_elem(
    store: &mut dyn VMStore,
    instance: &mut Instance,
    array_type_index: u32,
    array: u32,
    dst: u32,
    elem_index: u32,
    src: u32,
    len: u32,
) -> Result<()> {
    use crate::{
        store::AutoAssertNoGc,
        vm::const_expr::{ConstEvalContext, ConstExprEvaluator},
        ArrayRef, Func, OpaqueRootScope, Val,
    };
    use wasmtime_environ::{ModuleInternedTypeIndex, TableSegmentElements};

    let mut store = OpaqueRootScope::new(store.store_opaque_mut());

    let _array_type_index = ModuleInternedTypeIndex::from_u32(array_type_index);
    let elem_index = ElemIndex::from_u32(elem_index);

    log::trace!(
        "array.init_elem(array={array:#x}, dst={dst}, elem_index={elem_index:?}, src={src}, len={len})",
    );

    let array = VMGcRef::from_raw_u32(array).ok_or_else(|| Trap::NullReference)?;
    let array = store.unwrap_gc_store_mut().clone_gc_ref(&array);
    let array = {
        let mut no_gc = AutoAssertNoGc::new(&mut store);
        ArrayRef::from_cloned_gc_ref(&mut no_gc, array)
    };

    let array_len = array._len(&store)?;
    log::trace!("array_len = {array_len}");
    if dst.checked_add(len).ok_or_else(|| Trap::ArrayOutOfBounds)? > array_len {
        return Err(Trap::ArrayOutOfBounds.into());
    }

    let mut storage = None;
    let elements = instance.passive_element_segment(&mut storage, elem_index);

    let src = usize::try_from(src).map_err(|_| Trap::TableOutOfBounds)?;
    let len = usize::try_from(len).map_err(|_| Trap::TableOutOfBounds)?;

    let vals = match elements {
        TableSegmentElements::Functions(fs) => fs
            .get(src..)
            .and_then(|s| s.get(..len))
            .ok_or_else(|| Trap::TableOutOfBounds)?
            .iter()
            .map(|f| {
                let raw_func_ref = instance.get_func_ref(*f);
                let func = raw_func_ref.map(|p| Func::from_vm_func_ref(&mut store, p));
                Val::FuncRef(func)
            })
            .collect::<Vec<_>>(),
        TableSegmentElements::Expressions(xs) => {
            let elem_ty = array._ty(&store)?.element_type();
            let elem_ty = elem_ty.unwrap_val_type();

            let mut const_context = ConstEvalContext::new(instance);
            let mut const_evaluator = ConstExprEvaluator::default();

            xs.get(src..)
                .and_then(|s| s.get(..len))
                .ok_or_else(|| Trap::TableOutOfBounds)?
                .iter()
                .map(|x| unsafe {
                    let raw = const_evaluator
                        .eval(&mut store, &mut const_context, x)
                        .expect("const expr should be valid");
                    let mut store = AutoAssertNoGc::new(&mut store);
                    Val::_from_raw(&mut store, raw, elem_ty)
                })
                .collect::<Vec<_>>()
        }
    };

    for (i, val) in vals.into_iter().enumerate() {
        let i = u32::try_from(i).unwrap();
        let j = dst.checked_add(i).unwrap();
        array._set(&mut store, j, val)?;
    }

    Ok(())
}

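/// Implementation of `array.copy`, with `memmove`-style overlap handling:
/// when `src > dst` elements are copied in increasing index order, and
/// otherwise in decreasing order, so overlapping ranges within a single
/// array are copied correctly.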
#[cfg(feature = "gc")]
unsafe fn array_copy(
    store: &mut dyn VMStore,
    _instance: &mut Instance,
    dst_array: u32,
    dst: u32,
    src_array: u32,
    src: u32,
    len: u32,
) -> Result<()> {
    use crate::{store::AutoAssertNoGc, ArrayRef, OpaqueRootScope};

    log::trace!(
        "array.copy(dst_array={dst_array:#x}, dst_index={dst}, src_array={src_array:#x}, src_index={src}, len={len})",
    );

    let mut store = OpaqueRootScope::new(store.store_opaque_mut());
    let mut store = AutoAssertNoGc::new(&mut store);

    let dst_array = VMGcRef::from_raw_u32(dst_array).ok_or_else(|| Trap::NullReference)?;
    let dst_array = store.unwrap_gc_store_mut().clone_gc_ref(&dst_array);
    let dst_array = ArrayRef::from_cloned_gc_ref(&mut store, dst_array);
    let src_array = VMGcRef::from_raw_u32(src_array).ok_or_else(|| Trap::NullReference)?;
    let src_array = store.unwrap_gc_store_mut().clone_gc_ref(&src_array);
    let src_array = ArrayRef::from_cloned_gc_ref(&mut store, src_array);

    let dst_array_len = dst_array._len(&store)?;
    if dst.checked_add(len).ok_or_else(|| Trap::ArrayOutOfBounds)? > dst_array_len {
        return Err(Trap::ArrayOutOfBounds.into());
    }

    let src_array_len = src_array._len(&store)?;
    if src.checked_add(len).ok_or_else(|| Trap::ArrayOutOfBounds)? > src_array_len {
        return Err(Trap::ArrayOutOfBounds.into());
    }

    let mut store = AutoAssertNoGc::new(&mut store);
    if src > dst {
        for i in 0..len {
            let src_elem = src_array._get(&mut store, src + i)?;
            let dst_i = dst + i;
            dst_array._set(&mut store, dst_i, src_elem)?;
        }
    } else {
        for i in (0..len).rev() {
            let src_elem = src_array._get(&mut store, src + i)?;
            let dst_i = dst + i;
            dst_array._set(&mut store, dst_i, src_elem)?;
        }
    }
    Ok(())
}

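/// Returns whether `actual_engine_type` is a subtype of
/// `expected_engine_type`, as `0` or `1` for compiled code's benefit.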
#[cfg(feature = "gc")]
unsafe fn is_subtype(
    store: &mut dyn VMStore,
    _instance: &mut Instance,
    actual_engine_type: u32,
    expected_engine_type: u32,
) -> u32 {
    use wasmtime_environ::VMSharedTypeIndex;

    let actual = VMSharedTypeIndex::from_u32(actual_engine_type);
    let expected = VMSharedTypeIndex::from_u32(expected_engine_type);

    let is_subtype: bool = store
        .engine()
        .signatures()
        .is_subtype(actual, expected)
        .into();

    log::trace!("is_subtype(actual={actual:?}, expected={expected:?}) -> {is_subtype}");
    is_subtype as u32
}

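/// Implementation of `memory.atomic.notify` for locally-defined memories.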
#[cfg(feature = "threads")]
fn memory_atomic_notify(
    _store: &mut dyn VMStore,
    instance: &mut Instance,
    memory_index: u32,
    addr_index: u64,
    count: u32,
) -> Result<u32, Trap> {
    let memory = MemoryIndex::from_u32(memory_index);
    instance
        .get_runtime_memory(memory)
        .atomic_notify(addr_index, count)
}

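/// Implementation of `memory.atomic.wait32`. A negative `timeout` (when
/// reinterpreted as `i64`) means wait forever; otherwise it is a duration
/// in nanoseconds.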
#[cfg(feature = "threads")]
fn memory_atomic_wait32(
    _store: &mut dyn VMStore,
    instance: &mut Instance,
    memory_index: u32,
    addr_index: u64,
    expected: u32,
    timeout: u64,
) -> Result<u32, Trap> {
    let timeout = (timeout as i64 >= 0).then(|| Duration::from_nanos(timeout));
    let memory = MemoryIndex::from_u32(memory_index);
    Ok(instance
        .get_runtime_memory(memory)
        .atomic_wait32(addr_index, expected, timeout)? as u32)
}

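/// Implementation of `memory.atomic.wait64`, with the same timeout encoding
/// as `memory_atomic_wait32`.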
#[cfg(feature = "threads")]
fn memory_atomic_wait64(
    _store: &mut dyn VMStore,
    instance: &mut Instance,
    memory_index: u32,
    addr_index: u64,
    expected: u64,
    timeout: u64,
) -> Result<u32, Trap> {
    let timeout = (timeout as i64 >= 0).then(|| Duration::from_nanos(timeout));
    let memory = MemoryIndex::from_u32(memory_index);
    Ok(instance
        .get_runtime_memory(memory)
        .atomic_wait64(addr_index, expected, timeout)? as u32)
}

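/// Libcall invoked when the store's fuel runs out; whether that traps,
/// refuels, or yields is delegated to the store.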
fn out_of_gas(store: &mut dyn VMStore, _instance: &mut Instance) -> Result<()> {
    store.out_of_gas()
}

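/// Libcall invoked when the current epoch deadline is reached; the store
/// decides how to proceed and returns the next deadline, which is passed
/// back to compiled code via `NextEpoch` (with `u64::MAX` reserved as the
/// unwind sentinel).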
#[cfg(target_has_atomic = "64")]
fn new_epoch(store: &mut dyn VMStore, _instance: &mut Instance) -> Result<NextEpoch> {
    store.new_epoch().map(NextEpoch)
}

struct NextEpoch(u64);

unsafe impl HostResultHasUnwindSentinel for NextEpoch {
    type Abi = u64;
    const SENTINEL: u64 = u64::MAX;
    fn into_abi(self) -> u64 {
        self.0
    }
}

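/// `wmemcheck` hook: records a guest `malloc` of `len` bytes at `addr` and
/// re-enables shadow-memory checking, reporting double mallocs and
/// out-of-bounds allocations as errors.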
#[cfg(feature = "wmemcheck")]
unsafe fn check_malloc(
    _store: &mut dyn VMStore,
    instance: &mut Instance,
    addr: u32,
    len: u32,
) -> Result<()> {
    if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
        let result = wmemcheck_state.malloc(addr as usize, len as usize);
        wmemcheck_state.memcheck_on();
        match result {
            Ok(()) => {}
            Err(DoubleMalloc { addr, len }) => {
                bail!("Double malloc at addr {:#x} of size {}", addr, len)
            }
            Err(OutOfBounds { addr, len }) => {
                bail!("Malloc out of bounds at addr {:#x} of size {}", addr, len);
            }
            _ => {
                panic!("unreachable")
            }
        }
    }
    Ok(())
}

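/// `wmemcheck` hook: records a guest `free` at `addr` and re-enables
/// shadow-memory checking, reporting invalid frees as errors.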
#[cfg(feature = "wmemcheck")]
unsafe fn check_free(_store: &mut dyn VMStore, instance: &mut Instance, addr: u32) -> Result<()> {
    if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
        let result = wmemcheck_state.free(addr as usize);
        wmemcheck_state.memcheck_on();
        match result {
            Ok(()) => {}
            Err(InvalidFree { addr }) => {
                bail!("Invalid free at addr {:#x}", addr)
            }
            _ => {
                panic!("unreachable")
            }
        }
    }
    Ok(())
}

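/// `wmemcheck` hook: validates a guest load of `num_bytes` at
/// `addr + offset`.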
#[cfg(feature = "wmemcheck")]
fn check_load(
    _store: &mut dyn VMStore,
    instance: &mut Instance,
    num_bytes: u32,
    addr: u32,
    offset: u32,
) -> Result<()> {
    if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
        let result = wmemcheck_state.read(addr as usize + offset as usize, num_bytes as usize);
        match result {
            Ok(()) => {}
            Err(InvalidRead { addr, len }) => {
                bail!("Invalid load at addr {:#x} of size {}", addr, len);
            }
            Err(OutOfBounds { addr, len }) => {
                bail!("Load out of bounds at addr {:#x} of size {}", addr, len);
            }
            _ => {
                panic!("unreachable")
            }
        }
    }
    Ok(())
}

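/// `wmemcheck` hook: validates a guest store of `num_bytes` at
/// `addr + offset`.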
#[cfg(feature = "wmemcheck")]
fn check_store(
    _store: &mut dyn VMStore,
    instance: &mut Instance,
    num_bytes: u32,
    addr: u32,
    offset: u32,
) -> Result<()> {
    if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
        let result = wmemcheck_state.write(addr as usize + offset as usize, num_bytes as usize);
        match result {
            Ok(()) => {}
            Err(InvalidWrite { addr, len }) => {
                bail!("Invalid store at addr {:#x} of size {}", addr, len)
            }
            Err(OutOfBounds { addr, len }) => {
                bail!("Store out of bounds at addr {:#x} of size {}", addr, len)
            }
            _ => {
                panic!("unreachable")
            }
        }
    }
    Ok(())
}

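/// `wmemcheck` hook: disables shadow-memory checking while the guest's
/// `malloc` implementation itself runs.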
#[cfg(feature = "wmemcheck")]
fn malloc_start(_store: &mut dyn VMStore, instance: &mut Instance) {
    if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
        wmemcheck_state.memcheck_off();
    }
}

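/// `wmemcheck` hook: disables shadow-memory checking while the guest's
/// `free` implementation itself runs.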
#[cfg(feature = "wmemcheck")]
fn free_start(_store: &mut dyn VMStore, instance: &mut Instance) {
    if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
        wmemcheck_state.memcheck_off();
    }
}

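/// `wmemcheck` hook: notification that the guest stack pointer changed.
/// Currently a no-op.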
#[cfg(feature = "wmemcheck")]
fn update_stack_pointer(_store: &mut dyn VMStore, _instance: &mut Instance, _value: u32) {}

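/// `wmemcheck` hook: informs the checker of the memory's new size after a
/// grow, converting from 64 KiB Wasm pages to bytes.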
#[cfg(feature = "wmemcheck")]
fn update_mem_size(_store: &mut dyn VMStore, instance: &mut Instance, num_pages: u32) {
    if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
        const KIB: usize = 1024;
        let num_bytes = num_pages as usize * 64 * KIB;
        wmemcheck_state.update_mem_size(num_bytes);
    }
}

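/// Unconditionally raises the trap whose code is given, returning
/// `Infallible` because it never returns successfully.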
fn trap(
    _store: &mut dyn VMStore,
    _instance: &mut Instance,
    code: u8,
) -> Result<Infallible, TrapReason> {
    Err(TrapReason::Wasm(
        wasmtime_environ::Trap::from_u8(code).unwrap(),
    ))
}

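/// Resumes a previously recorded trap or panic. With a host compiler
/// backend this longjmps via `raise_preexisting_trap`; in other
/// configurations this libcall is never invoked, hence the `unreachable!`.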
fn raise(_store: &mut dyn VMStore, _instance: &mut Instance) {
    #[cfg(has_host_compiler_backend)]
    unsafe {
        crate::runtime::vm::traphandlers::raise_preexisting_trap()
    }

    #[cfg(not(has_host_compiler_backend))]
    unreachable!()
}

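/// Helpers invoked from compiled code via relocations rather than through
/// `VMContext` libcalls: Wasm-semantics floating-point rounding and fused
/// multiply-add operations, plus a software `pshufb` fallback on x86_64.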
#[allow(missing_docs)]
pub mod relocs {
    pub extern "C" fn floorf32(f: f32) -> f32 {
        wasmtime_math::WasmFloat::wasm_floor(f)
    }

    pub extern "C" fn floorf64(f: f64) -> f64 {
        wasmtime_math::WasmFloat::wasm_floor(f)
    }

    pub extern "C" fn ceilf32(f: f32) -> f32 {
        wasmtime_math::WasmFloat::wasm_ceil(f)
    }

    pub extern "C" fn ceilf64(f: f64) -> f64 {
        wasmtime_math::WasmFloat::wasm_ceil(f)
    }

    pub extern "C" fn truncf32(f: f32) -> f32 {
        wasmtime_math::WasmFloat::wasm_trunc(f)
    }

    pub extern "C" fn truncf64(f: f64) -> f64 {
        wasmtime_math::WasmFloat::wasm_trunc(f)
    }

    pub extern "C" fn nearestf32(x: f32) -> f32 {
        wasmtime_math::WasmFloat::wasm_nearest(x)
    }

    pub extern "C" fn nearestf64(x: f64) -> f64 {
        wasmtime_math::WasmFloat::wasm_nearest(x)
    }

    pub extern "C" fn fmaf32(a: f32, b: f32, c: f32) -> f32 {
        wasmtime_math::WasmFloat::wasm_mul_add(a, b, c)
    }

    pub extern "C" fn fmaf64(a: f64, b: f64, c: f64) -> f64 {
        wasmtime_math::WasmFloat::wasm_mul_add(a, b, c)
    }

    #[cfg(target_arch = "x86_64")]
    use core::arch::x86_64::__m128i;
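    /// Software implementation of the x86 `pshufb` byte shuffle: each of the
    /// 16 output bytes is `0x00` if the high bit of the corresponding
    /// selector byte in `b` is set, and otherwise the byte of `a` indexed by
    /// the selector's low four bits (e.g. a selector byte of `0x03` picks
    /// `a[3]`, while `0x83` yields `0x00`).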
    #[cfg(target_arch = "x86_64")]
    #[target_feature(enable = "sse")]
    #[allow(improper_ctypes_definitions)]
    pub unsafe extern "C" fn x86_pshufb(a: __m128i, b: __m128i) -> __m128i {
        union U {
            reg: __m128i,
            mem: [u8; 16],
        }

        unsafe {
            let a = U { reg: a }.mem;
            let b = U { reg: b }.mem;

            let select = |arr: &[u8; 16], byte: u8| {
                if byte & 0x80 != 0 {
                    0x00
                } else {
                    arr[(byte & 0xf) as usize]
                }
            };

            U {
                mem: [
                    select(&a, b[0]),
                    select(&a, b[1]),
                    select(&a, b[2]),
                    select(&a, b[3]),
                    select(&a, b[4]),
                    select(&a, b[5]),
                    select(&a, b[6]),
                    select(&a, b[7]),
                    select(&a, b[8]),
                    select(&a, b[9]),
                    select(&a, b[10]),
                    select(&a, b[11]),
                    select(&a, b[12]),
                    select(&a, b[13]),
                    select(&a, b[14]),
                    select(&a, b[15]),
                ],
            }
            .reg
        }
    }
}