use crate::prelude::*;
use crate::runtime::vm::const_expr::{ConstEvalContext, ConstExprEvaluator};
use crate::runtime::vm::imports::Imports;
use crate::runtime::vm::instance::{Instance, InstanceHandle};
use crate::runtime::vm::memory::Memory;
use crate::runtime::vm::mpk::ProtectionKey;
use crate::runtime::vm::table::Table;
use crate::runtime::vm::{CompiledModuleId, ModuleRuntimeInfo, VMFuncRef, VMGcRef, VMStore};
use crate::store::{AutoAssertNoGc, StoreOpaque};
use crate::vm::VMGlobalDefinition;
use core::ptr::NonNull;
use core::{any::Any, mem, ptr};
use wasmtime_environ::{
    DefinedMemoryIndex, DefinedTableIndex, HostPtr, InitMemory, MemoryInitialization,
    MemoryInitializer, Module, PrimaryMap, SizeOverflow, TableInitialValue, Trap, Tunables,
    VMOffsets, WasmHeapTopType,
};

#[cfg(feature = "gc")]
use crate::runtime::vm::{GcHeap, GcRuntime};

#[cfg(feature = "component-model")]
use wasmtime_environ::{
    component::{Component, VMComponentOffsets},
    StaticModuleIndex,
};

mod on_demand;
pub use self::on_demand::OnDemandInstanceAllocator;

#[cfg(feature = "pooling-allocator")]
mod pooling;
#[cfg(feature = "pooling-allocator")]
pub use self::pooling::{
    InstanceLimits, PoolConcurrencyLimitError, PoolingInstanceAllocator,
    PoolingInstanceAllocatorConfig,
};

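/// A request for a new runtime instance, bundling everything an allocator
/// needs: the compiled module's runtime info, its resolved imports, host
/// state, a pointer back to the owning store, and the tunables in effect.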
pub struct InstanceAllocationRequest<'a> {
    /// The compiled module's runtime information needed for instantiation.
    pub runtime_info: &'a ModuleRuntimeInfo,

    /// The resolved imports for the instance being allocated.
    pub imports: Imports<'a>,

    /// Arbitrary host state to associate with the instance.
    pub host_state: Box<dyn Any + Send + Sync>,

    /// A pointer to the store that will own this instance, if any.
    pub store: StorePtr,

    /// Whether wmemcheck instrumentation is enabled for this instance.
    #[cfg_attr(not(feature = "wmemcheck"), allow(dead_code))]
    pub wmemcheck: bool,

    /// The protection key to use for this instance's linear memories, if any.
    #[cfg_attr(
        not(feature = "pooling-allocator"),
        expect(
            dead_code,
            reason = "easier to keep this field than remove it, not perf-critical to remove"
        )
    )]
    pub pkey: Option<ProtectionKey>,

    /// The tunables in effect for this instantiation.
    pub tunables: &'a Tunables,
}

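/// A raw pointer to the `Store` that will own an allocated instance, if any.
///
/// Wrapping the `Option<NonNull<dyn VMStore>>` in its own type lets callers
/// borrow the store through a single member of `InstanceAllocationRequest`
/// rather than mutably borrowing the whole request.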
pub struct StorePtr(Option<NonNull<dyn VMStore>>);

impl StorePtr {
    /// A null store pointer.
    pub fn empty() -> Self {
        Self(None)
    }

    /// A pointer to the given store.
    pub fn new(ptr: NonNull<dyn VMStore>) -> Self {
        Self(Some(ptr))
    }

    /// The raw pointer to the store, if any.
    pub fn as_raw(&self) -> Option<NonNull<dyn VMStore>> {
        self.0
    }

    /// Use the pointer as a mutable reference to the store.
    ///
    /// # Safety
    ///
    /// The pointer, if present, must point to a live store for the duration
    /// of the returned borrow.
    pub(crate) unsafe fn get(&mut self) -> Option<&mut dyn VMStore> {
        let ptr = self.0?.as_mut();
        Some(ptr)
    }
}

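/// The index of a memory allocation within an `InstanceAllocator`.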
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub struct MemoryAllocationIndex(u32);

impl Default for MemoryAllocationIndex {
    fn default() -> Self {
        // A sentinel value, used where an allocator does not assign real
        // indices (only the pooling allocator compiles the `index` accessor).
        MemoryAllocationIndex(u32::MAX)
    }
}

impl MemoryAllocationIndex {
    /// Get the underlying index of this `MemoryAllocationIndex`.
    #[cfg(feature = "pooling-allocator")]
    pub fn index(&self) -> usize {
        self.0 as usize
    }
}

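/// The index of a table allocation within an `InstanceAllocator`.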
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub struct TableAllocationIndex(u32);

impl Default for TableAllocationIndex {
    fn default() -> Self {
        // Same sentinel convention as `MemoryAllocationIndex`.
        TableAllocationIndex(u32::MAX)
    }
}

impl TableAllocationIndex {
    /// Get the underlying index of this `TableAllocationIndex`.
    #[cfg(feature = "pooling-allocator")]
    pub fn index(&self) -> usize {
        self.0 as usize
    }
}

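/// The index of a GC heap allocation within an `InstanceAllocator`.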
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub struct GcHeapAllocationIndex(u32);

impl Default for GcHeapAllocationIndex {
    fn default() -> Self {
        // Same sentinel convention as `MemoryAllocationIndex`.
        GcHeapAllocationIndex(u32::MAX)
    }
}

impl GcHeapAllocationIndex {
    /// Get the underlying index of this `GcHeapAllocationIndex`.
    pub fn index(&self) -> usize {
        self.0 as usize
    }
}

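/// The hooks an instance allocator must implement.
///
/// Implement this trait when writing a new allocator; callers should use the
/// blanket-implemented `InstanceAllocator` extension trait below instead.
///
/// # Safety
///
/// This trait is unsafe: implementations must uphold the invariants noted on
/// each method (validation before allocation, matching allocate/deallocate
/// pairs, and so on) for instantiation to be sound.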
pub unsafe trait InstanceAllocatorImpl {
    /// Validate whether a component (including all of its contained core
    /// modules) is allocatable by this allocator.
    #[cfg(feature = "component-model")]
    fn validate_component_impl<'a>(
        &self,
        component: &Component,
        offsets: &VMComponentOffsets<HostPtr>,
        get_module: &'a dyn Fn(StaticModuleIndex) -> &'a Module,
    ) -> Result<()>;

    /// Validate whether a module is allocatable by this allocator.
    fn validate_module_impl(&self, module: &Module, offsets: &VMOffsets<HostPtr>) -> Result<()>;

    /// Increment the count of concurrent component instances, failing if an
    /// allocator-defined limit is reached.
    fn increment_component_instance_count(&self) -> Result<()>;

    /// The dual of `increment_component_instance_count`.
    fn decrement_component_instance_count(&self);

    /// Increment the count of concurrent core instances, failing if an
    /// allocator-defined limit is reached.
    fn increment_core_instance_count(&self) -> Result<()>;

    /// The dual of `increment_core_instance_count`.
    fn decrement_core_instance_count(&self);

    /// Allocate a memory for an instance.
    ///
    /// # Unsafety
    ///
    /// The memory and its associated module must have already passed
    /// validation via `Self::validate_module_impl`.
    unsafe fn allocate_memory(
        &self,
        request: &mut InstanceAllocationRequest,
        ty: &wasmtime_environ::Memory,
        tunables: &Tunables,
        memory_index: DefinedMemoryIndex,
    ) -> Result<(MemoryAllocationIndex, Memory)>;

    /// Deallocate an instance's previously allocated memory.
    ///
    /// # Unsafety
    ///
    /// The memory must have been allocated by `Self::allocate_memory`, not
    /// yet deallocated, and given back with the same allocation index.
    unsafe fn deallocate_memory(
        &self,
        memory_index: DefinedMemoryIndex,
        allocation_index: MemoryAllocationIndex,
        memory: Memory,
    );

    /// Allocate a table for an instance.
    ///
    /// # Unsafety
    ///
    /// The table and its associated module must have already passed
    /// validation via `Self::validate_module_impl`.
    unsafe fn allocate_table(
        &self,
        req: &mut InstanceAllocationRequest,
        table: &wasmtime_environ::Table,
        tunables: &Tunables,
        table_index: DefinedTableIndex,
    ) -> Result<(TableAllocationIndex, Table)>;

    /// Deallocate an instance's previously allocated table.
    ///
    /// # Unsafety
    ///
    /// The table must have been allocated by `Self::allocate_table`, not yet
    /// deallocated, and given back with the same allocation index.
    unsafe fn deallocate_table(
        &self,
        table_index: DefinedTableIndex,
        allocation_index: TableAllocationIndex,
        table: Table,
    );

    /// Allocate a fiber stack for calling async functions on.
    #[cfg(feature = "async")]
    fn allocate_fiber_stack(&self) -> Result<wasmtime_fiber::FiberStack>;

    /// Deallocate a fiber stack that was previously allocated by
    /// `Self::allocate_fiber_stack` and is no longer in use.
    #[cfg(feature = "async")]
    unsafe fn deallocate_fiber_stack(&self, stack: wasmtime_fiber::FiberStack);

    /// Allocate a GC heap for allocating Wasm GC objects within.
    #[cfg(feature = "gc")]
    fn allocate_gc_heap(
        &self,
        gc_runtime: &dyn GcRuntime,
    ) -> Result<(GcHeapAllocationIndex, Box<dyn GcHeap>)>;

    /// Deallocate a previously allocated GC heap.
    #[cfg(feature = "gc")]
    fn deallocate_gc_heap(&self, allocation_index: GcHeapAllocationIndex, gc_heap: Box<dyn GcHeap>);

    /// Purge all lingering state related to `module` from this allocator.
    fn purge_module(&self, module: CompiledModuleId);

    /// Use the next available protection key, if any.
    fn next_available_pkey(&self) -> Option<ProtectionKey>;

    /// Restrict access to memory regions protected by `pkey`.
    fn restrict_to_pkey(&self, pkey: ProtectionKey);

    /// Allow access to memory regions protected by any protection key.
    fn allow_all_pkeys(&self);
}

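/// A convenience extension trait, blanket-implemented for every
/// `InstanceAllocatorImpl`, that drives the per-memory and per-table hooks to
/// allocate and deallocate whole modules.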
pub trait InstanceAllocator: InstanceAllocatorImpl {
    /// Validate whether a component (including all of its contained core
    /// modules) is allocatable with this allocator.
    #[cfg(feature = "component-model")]
    fn validate_component<'a>(
        &self,
        component: &Component,
        offsets: &VMComponentOffsets<HostPtr>,
        get_module: &'a dyn Fn(StaticModuleIndex) -> &'a Module,
    ) -> Result<()> {
        InstanceAllocatorImpl::validate_component_impl(self, component, offsets, get_module)
    }

    /// Validate whether a module is allocatable with this allocator.
    fn validate_module(&self, module: &Module, offsets: &VMOffsets<HostPtr>) -> Result<()> {
        InstanceAllocatorImpl::validate_module_impl(self, module, offsets)
    }

    /// Allocate a fresh `InstanceHandle` for the given request.
    ///
    /// # Unsafety
    ///
    /// The request's associated module must have already passed validation
    /// via `Self::validate_module`.
    unsafe fn allocate_module(
        &self,
        mut request: InstanceAllocationRequest,
    ) -> Result<InstanceHandle> {
        let module = request.runtime_info.env_module();

        #[cfg(debug_assertions)]
        InstanceAllocatorImpl::validate_module_impl(self, module, request.runtime_info.offsets())
            .expect("module should have already been validated before allocation");

        self.increment_core_instance_count()?;

        let num_defined_memories = module.num_defined_memories();
        let mut memories = PrimaryMap::with_capacity(num_defined_memories);

        let num_defined_tables = module.num_defined_tables();
        let mut tables = PrimaryMap::with_capacity(num_defined_tables);

        // Allocate memories and tables inside a closure so that, on error,
        // any partial allocations are released and the instance count is
        // rolled back before the error is propagated.
        match (|| {
            self.allocate_memories(&mut request, &mut memories)?;
            self.allocate_tables(&mut request, &mut tables)?;
            Ok(())
        })() {
            Ok(_) => Ok(Instance::new(request, memories, tables, &module.memories)),
            Err(e) => {
                self.deallocate_memories(&mut memories);
                self.deallocate_tables(&mut tables);
                self.decrement_core_instance_count();
                Err(e)
            }
        }
    }

    /// Deallocate an instance previously allocated by
    /// `Self::allocate_module`, reclaiming its memories, tables, and the
    /// instance storage itself.
    unsafe fn deallocate_module(&self, handle: &mut InstanceHandle) {
        self.deallocate_memories(&mut handle.instance_mut().memories);
        self.deallocate_tables(&mut handle.instance_mut().tables);

        let layout = Instance::alloc_layout(handle.instance().offsets());
        let ptr = handle.instance.take().unwrap();
        ptr::drop_in_place(ptr.as_ptr());
        alloc::alloc::dealloc(ptr.as_ptr().cast(), layout);

        self.decrement_core_instance_count();
    }

    /// Allocate the memories for the given instance allocation request,
    /// pushing them into `memories`.
    unsafe fn allocate_memories(
        &self,
        request: &mut InstanceAllocationRequest,
        memories: &mut PrimaryMap<DefinedMemoryIndex, (MemoryAllocationIndex, Memory)>,
    ) -> Result<()> {
        let module = request.runtime_info.env_module();

        #[cfg(debug_assertions)]
        InstanceAllocatorImpl::validate_module_impl(self, module, request.runtime_info.offsets())
            .expect("module should have already been validated before allocation");

        for (memory_index, ty) in module.memories.iter().skip(module.num_imported_memories) {
            let memory_index = module
                .defined_memory_index(memory_index)
                .expect("should be a defined memory since we skipped imported ones");

            memories.push(self.allocate_memory(request, ty, request.tunables, memory_index)?);
        }

        Ok(())
    }

    /// Deallocate all memories in the given primary map, which must have been
    /// allocated by this allocator.
    unsafe fn deallocate_memories(
        &self,
        memories: &mut PrimaryMap<DefinedMemoryIndex, (MemoryAllocationIndex, Memory)>,
    ) {
        for (memory_index, (allocation_index, memory)) in mem::take(memories) {
            self.deallocate_memory(memory_index, allocation_index, memory);
        }
    }

    /// Allocate the tables for the given instance allocation request, pushing
    /// them into `tables`.
    unsafe fn allocate_tables(
        &self,
        request: &mut InstanceAllocationRequest,
        tables: &mut PrimaryMap<DefinedTableIndex, (TableAllocationIndex, Table)>,
    ) -> Result<()> {
        let module = request.runtime_info.env_module();

        #[cfg(debug_assertions)]
        InstanceAllocatorImpl::validate_module_impl(self, module, request.runtime_info.offsets())
            .expect("module should have already been validated before allocation");

        for (index, table) in module.tables.iter().skip(module.num_imported_tables) {
            let def_index = module
                .defined_table_index(index)
                .expect("should be a defined table since we skipped imported ones");

            tables.push(self.allocate_table(request, table, request.tunables, def_index)?);
        }

        Ok(())
    }

    /// Deallocate all tables in the given primary map, which must have been
    /// allocated by this allocator.
    unsafe fn deallocate_tables(
        &self,
        tables: &mut PrimaryMap<DefinedTableIndex, (TableAllocationIndex, Table)>,
    ) {
        for (table_index, (allocation_index, table)) in mem::take(tables) {
            self.deallocate_table(table_index, allocation_index, table);
        }
    }
}

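// Every `InstanceAllocatorImpl` is an `InstanceAllocator`: the extension
// trait only adds provided methods.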
impl<T: InstanceAllocatorImpl> InstanceAllocator for T {}

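/// Check that every table-element segment in `module` fits within its table
/// at the offset computed by its const expression.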
fn check_table_init_bounds(
    store: &mut StoreOpaque,
    instance: &mut Instance,
    module: &Module,
) -> Result<()> {
    let mut const_evaluator = ConstExprEvaluator::default();

    for segment in module.table_initialization.segments.iter() {
        let table = unsafe { &*instance.get_table(segment.table_index) };
        let mut context = ConstEvalContext::new(instance);
        let start = unsafe {
            const_evaluator
                .eval(store, &mut context, &segment.offset)
                .expect("const expression should be valid")
        };
        let start = usize::try_from(start.get_u32()).unwrap();
        let end = start.checked_add(usize::try_from(segment.elements.len()).unwrap());

        match end {
            // The segment fits in the table; nothing to do.
            Some(end) if end <= table.size() => {}
            _ => {
                bail!("table out of bounds: elements segment does not fit")
            }
        }
    }

    Ok(())
}

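/// Initialize this instance's tables: fill in any non-null initial values,
/// then apply the module's element segments.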
fn initialize_tables(
    store: &mut StoreOpaque,
    context: &mut ConstEvalContext<'_>,
    const_evaluator: &mut ConstExprEvaluator,
    module: &Module,
) -> Result<()> {
    for (table, init) in module.table_initialization.initial_values.iter() {
        match init {
            // Tables are null-initialized by default, so there's nothing to
            // write here.
            TableInitialValue::Null { precomputed: _ } => {}

            TableInitialValue::Expr(expr) => {
                let raw = unsafe {
                    const_evaluator
                        .eval(store, context, expr)
                        .expect("const expression should be valid")
                };
                let idx = module.table_index(table);
                let table = unsafe { context.instance.get_defined_table(table).as_mut().unwrap() };
                match module.tables[idx].ref_type.heap_type.top() {
                    WasmHeapTopType::Extern => {
                        let gc_ref = VMGcRef::from_raw_u32(raw.get_externref());
                        let gc_store = store.gc_store_mut()?;
                        let items = (0..table.size())
                            .map(|_| gc_ref.as_ref().map(|r| gc_store.clone_gc_ref(r)));
                        table.init_gc_refs(0, items)?;
                    }

                    WasmHeapTopType::Any => {
                        let gc_ref = VMGcRef::from_raw_u32(raw.get_anyref());
                        let gc_store = store.gc_store_mut()?;
                        let items = (0..table.size())
                            .map(|_| gc_ref.as_ref().map(|r| gc_store.clone_gc_ref(r)));
                        table.init_gc_refs(0, items)?;
                    }

                    WasmHeapTopType::Func => {
                        let funcref = NonNull::new(raw.get_funcref().cast::<VMFuncRef>());
                        let items = (0..table.size()).map(|_| funcref);
                        table.init_func(0, items)?;
                    }

                    // Stack-switching continuations are not supported yet.
                    WasmHeapTopType::Cont => todo!(),
                }
            }
        }
    }

    for segment in module.table_initialization.segments.iter() {
        let start = unsafe {
            const_evaluator
                .eval(store, context, &segment.offset)
                .expect("const expression should be valid")
        };
        context.instance.table_init_segment(
            store,
            const_evaluator,
            segment.table_index,
            &segment.elements,
            start.get_u64(),
            0,
            segment.elements.len(),
        )?;
    }

    Ok(())
}

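/// Evaluate a data segment's offset expression, widening 32-bit offsets of
/// 32-bit-indexed memories to `u64`.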
fn get_memory_init_start(
    store: &mut StoreOpaque,
    init: &MemoryInitializer,
    instance: &mut Instance,
) -> Result<u64> {
    let mut context = ConstEvalContext::new(instance);
    let mut const_evaluator = ConstExprEvaluator::default();
    unsafe { const_evaluator.eval(store, &mut context, &init.offset) }.map(|v| {
        match instance.env_module().memories[init.memory_index].idx_type {
            wasmtime_environ::IndexType::I32 => v.get_u32().into(),
            wasmtime_environ::IndexType::I64 => v.get_u64(),
        }
    })
}

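/// Check that every data segment fits within its memory at the computed
/// offset.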
fn check_memory_init_bounds(
    store: &mut StoreOpaque,
    instance: &mut Instance,
    initializers: &[MemoryInitializer],
) -> Result<()> {
    for init in initializers {
        let memory = instance.get_memory(init.memory_index);
        let start = get_memory_init_start(store, init, instance)?;
        let end = usize::try_from(start)
            .ok()
            .and_then(|start| start.checked_add(init.data.len()));

        match end {
            // The segment fits in the memory; nothing to do.
            Some(end) if end <= memory.current_length() => {}
            _ => {
                bail!("memory out of bounds: data segment does not fit")
            }
        }
    }

    Ok(())
}

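/// Initialize this instance's memories, delegating to the module's
/// `MemoryInitialization` strategy for the actual writes.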
fn initialize_memories(
    store: &mut StoreOpaque,
    context: &mut ConstEvalContext<'_>,
    const_evaluator: &mut ConstExprEvaluator,
    module: &Module,
) -> Result<()> {
    // An `InitMemory` implementation that writes data segments into this
    // instance's memories as part of instantiation.
    struct InitMemoryAtInstantiation<'a, 'b> {
        module: &'a Module,
        store: &'a mut StoreOpaque,
        context: &'a mut ConstEvalContext<'b>,
        const_evaluator: &'a mut ConstExprEvaluator,
    }

    impl InitMemory for InitMemoryAtInstantiation<'_, '_> {
        fn memory_size_in_bytes(
            &mut self,
            memory: wasmtime_environ::MemoryIndex,
        ) -> Result<u64, SizeOverflow> {
            let len = self.context.instance.get_memory(memory).current_length();
            let len = u64::try_from(len).unwrap();
            Ok(len)
        }

        fn eval_offset(
            &mut self,
            memory: wasmtime_environ::MemoryIndex,
            expr: &wasmtime_environ::ConstExpr,
        ) -> Option<u64> {
            let val = unsafe { self.const_evaluator.eval(self.store, self.context, expr) }
                .expect("const expression should be valid");
            Some(
                match self.context.instance.env_module().memories[memory].idx_type {
                    wasmtime_environ::IndexType::I32 => val.get_u32().into(),
                    wasmtime_environ::IndexType::I64 => val.get_u64(),
                },
            )
        }

        fn write(
            &mut self,
            memory_index: wasmtime_environ::MemoryIndex,
            init: &wasmtime_environ::StaticMemoryInitializer,
        ) -> bool {
            // Skip the write if this defined memory doesn't need explicit
            // initialization (e.g. it is already backed by an initialization
            // image).
            if let Some(memory_index) = self.module.defined_memory_index(memory_index) {
                if !self.context.instance.memories[memory_index].1.needs_init() {
                    return true;
                }
            }
            let memory = self.context.instance.get_memory(memory_index);

            unsafe {
                let src = self.context.instance.wasm_data(init.data.clone());
                let offset = usize::try_from(init.offset).unwrap();
                let dst = memory.base.as_ptr().add(offset);

                assert!(offset + src.len() <= memory.current_length());

                ptr::copy_nonoverlapping(src.as_ptr(), dst, src.len())
            }
            true
        }
    }

    let ok = module
        .memory_initialization
        .init_memory(&mut InitMemoryAtInstantiation {
            module,
            store,
            context,
            const_evaluator,
        });
    if !ok {
        return Err(Trap::MemoryOutOfBounds.into());
    }

    Ok(())
}

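/// Bounds-check all table-element and data segments before any of them are
/// applied (used when bulk memory is disabled).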
fn check_init_bounds(
    store: &mut StoreOpaque,
    instance: &mut Instance,
    module: &Module,
) -> Result<()> {
    check_table_init_bounds(store, instance, module)?;

    match &module.memory_initialization {
        MemoryInitialization::Segmented(initializers) => {
            check_memory_init_bounds(store, instance, initializers)?;
        }
        // Statically initialized memories need no bounds checks here.
        MemoryInitialization::Static { .. } => {}
    }

    Ok(())
}

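/// Evaluate each global initializer and write the result into the instance's
/// global storage.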
fn initialize_globals(
    store: &mut StoreOpaque,
    context: &mut ConstEvalContext<'_>,
    const_evaluator: &mut ConstExprEvaluator,
    module: &Module,
) -> Result<()> {
    assert!(core::ptr::eq(&**context.instance.env_module(), module));

    let mut store = AutoAssertNoGc::new(store);

    for (index, init) in module.global_initializers.iter() {
        let raw = unsafe {
            const_evaluator
                .eval(&mut store, context, init)
                .expect("should be a valid const expr")
        };

        let to = context.instance.global_ptr(index);
        let wasm_ty = module.globals[module.global_index(index)].wasm_ty;

        // wmemcheck interprets the first i32 global as the stack size.
        #[cfg(feature = "wmemcheck")]
        if index.as_u32() == 0 && wasm_ty == wasmtime_environ::WasmValType::I32 {
            if let Some(wmemcheck) = &mut context.instance.wmemcheck_state {
                let size = usize::try_from(raw.get_i32()).unwrap();
                wmemcheck.set_stack_size(size);
            }
        }

        unsafe {
            to.write(VMGlobalDefinition::from_val_raw(&mut store, wasm_ty, raw)?);
        };
    }
    Ok(())
}

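/// Run all instance initialization: globals, then tables, then memories.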
pub(super) fn initialize_instance(
    store: &mut StoreOpaque,
    instance: &mut Instance,
    module: &Module,
    is_bulk_memory: bool,
) -> Result<()> {
    // Without bulk memory, bounds-check all data and element segments before
    // performing any initialization, so a trap cannot leave the instance
    // partially initialized.
    if !is_bulk_memory {
        check_init_bounds(store, instance, module)?;
    }

    let mut context = ConstEvalContext::new(instance);
    let mut const_evaluator = ConstExprEvaluator::default();

    initialize_globals(store, &mut context, &mut const_evaluator, module)?;
    initialize_tables(store, &mut context, &mut const_evaluator, module)?;
    initialize_memories(store, &mut context, &mut const_evaluator, module)?;

    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn allocator_traits_are_object_safe() {
        fn _instance_allocator(_: &dyn InstanceAllocatorImpl) {}
        fn _instance_allocator_ext(_: &dyn InstanceAllocator) {}
    }
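
    // A minimal sketch, assuming the `u32::MAX` defaults above are sentinels
    // for "no allocation index": each allocation-index type defaults to that
    // marker value.
    #[test]
    fn default_allocation_indices_are_sentinels() {
        assert_eq!(MemoryAllocationIndex::default().0, u32::MAX);
        assert_eq!(TableAllocationIndex::default().0, u32::MAX);
        assert_eq!(GcHeapAllocationIndex::default().0, u32::MAX);
    }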
}