use crate::compiler::Compiler;
use crate::translate::{
    FuncTranslationState, GlobalVariable, Heap, HeapData, StructFieldsVec, TableData, TableSize,
    TargetEnvironment,
};
use crate::{gc, BuiltinFunctionSignatures, TRAP_INTERNAL_ASSERT};
use cranelift_codegen::cursor::FuncCursor;
use cranelift_codegen::ir::condcodes::{FloatCC, IntCC};
use cranelift_codegen::ir::immediates::{Imm64, Offset32};
use cranelift_codegen::ir::pcc::Fact;
use cranelift_codegen::ir::types::*;
use cranelift_codegen::ir::{self, types};
use cranelift_codegen::ir::{ArgumentPurpose, Function, InstBuilder, MemFlags};
use cranelift_codegen::isa::{TargetFrontendConfig, TargetIsa};
use cranelift_entity::packed_option::ReservedValue;
use cranelift_entity::{EntityRef, PrimaryMap, SecondaryMap};
use cranelift_frontend::FunctionBuilder;
use cranelift_frontend::Variable;
use smallvec::SmallVec;
use std::mem;
use wasmparser::{Operator, WasmFeatures};
use wasmtime_environ::{
    BuiltinFunctionIndex, DataIndex, ElemIndex, EngineOrModuleTypeIndex, FuncIndex, GlobalIndex,
    IndexType, Memory, MemoryIndex, Module, ModuleInternedTypeIndex, ModuleTranslation,
    ModuleTypesBuilder, PtrSize, Table, TableIndex, TripleExt, Tunables, TypeConvert, TypeIndex,
    VMOffsets, WasmCompositeInnerType, WasmFuncType, WasmHeapTopType, WasmHeapType, WasmRefType,
    WasmResult, WasmValType,
};
use wasmtime_environ::{FUNCREF_INIT_BIT, FUNCREF_MASK};

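/// Whether a value loaded from a (possibly packed) struct or array field
/// should be sign- or zero-extended up to the destination type.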
#[derive(Debug)]
pub(crate) enum Extension {
    Sign,
    Zero,
}

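/// A per-function cache of `ir::FuncRef`s for the Wasmtime builtin functions,
/// so each builtin's signature and name are declared at most once per
/// compiled function.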
pub(crate) struct BuiltinFunctions {
    types: BuiltinFunctionSignatures,

    builtins: [Option<ir::FuncRef>; BuiltinFunctionIndex::len() as usize],
}

impl BuiltinFunctions {
    fn new(compiler: &Compiler) -> Self {
        Self {
            types: BuiltinFunctionSignatures::new(compiler),
            builtins: [None; BuiltinFunctionIndex::len() as usize],
        }
    }

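    /// Imports the given builtin into `func` (declaring its signature and
    /// external name) the first time it is requested, and returns the cached
    /// `ir::FuncRef` on every subsequent request.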
    fn load_builtin(&mut self, func: &mut Function, index: BuiltinFunctionIndex) -> ir::FuncRef {
        let cache = &mut self.builtins[index.index() as usize];
        if let Some(f) = cache {
            return *f;
        }
        let signature = func.import_signature(self.types.wasm_signature(index));
        let name =
            ir::ExternalName::User(func.declare_imported_user_function(ir::UserExternalName {
                namespace: crate::NS_WASMTIME_BUILTIN,
                index: index.index(),
            }));
        let f = func.import_function(ir::ExtFuncData {
            name,
            signature,
            colocated: true,
        });
        *cache = Some(f);
        f
    }
}

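// Generate a `BuiltinFunctions` accessor method for every builtin function
// declared by `wasmtime_environ::foreach_builtin_function!`.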
macro_rules! declare_function_signatures {
    ($(
        $( #[$attr:meta] )*
        $name:ident( $( $pname:ident: $param:ident ),* ) $( -> $result:ident )?;
    )*) => {
        $(impl BuiltinFunctions {
            $( #[$attr] )*
            pub(crate) fn $name(&mut self, func: &mut Function) -> ir::FuncRef {
                self.load_builtin(func, BuiltinFunctionIndex::$name())
            }
        })*
    };
}
wasmtime_environ::foreach_builtin_function!(declare_function_signatures);

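/// All of the Wasmtime-specific state and helpers needed while translating a
/// single Wasm function into Cranelift IR.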
pub struct FuncEnvironment<'module_environment> {
    compiler: &'module_environment Compiler,
    isa: &'module_environment (dyn TargetIsa + 'module_environment),
    pub(crate) module: &'module_environment Module,
    pub(crate) types: &'module_environment ModuleTypesBuilder,
    wasm_func_ty: &'module_environment WasmFuncType,
    sig_ref_to_ty: SecondaryMap<ir::SigRef, Option<&'module_environment WasmFuncType>>,

    #[cfg(feature = "gc")]
    pub(crate) ty_to_gc_layout: std::collections::HashMap<
        wasmtime_environ::ModuleInternedTypeIndex,
        wasmtime_environ::GcLayout,
    >,

    #[cfg(feature = "wmemcheck")]
    translation: &'module_environment ModuleTranslation<'module_environment>,

    heaps: PrimaryMap<Heap, HeapData>,

    tables: SecondaryMap<TableIndex, Option<TableData>>,

    vmctx: Option<ir::GlobalValue>,

    pcc_vmctx_memtype: Option<ir::MemoryType>,

    pub(crate) builtin_functions: BuiltinFunctions,

    pub(crate) offsets: VMOffsets<u8>,

    pub(crate) tunables: &'module_environment Tunables,

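    /// A CLIF variable holding the running fuel-consumed counter; it is
    /// loaded from and flushed back to the store context around calls and
    /// control-flow points.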
    fuel_var: cranelift_frontend::Variable,

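    /// A cached pointer to the `VMStoreContext`, loaded at function entry
    /// when fuel or epoch instrumentation needs it.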
    vmstore_context_ptr: ir::Value,

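    /// A CLIF variable caching the current epoch deadline; it is refreshed
    /// after each call to the `new_epoch` builtin.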
    epoch_deadline_var: cranelift_frontend::Variable,

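    /// A CLIF variable caching the pointer to the engine's epoch counter,
    /// loaded once at function entry.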
    epoch_ptr_var: cranelift_frontend::Variable,

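    /// Fuel consumed at compile time since the last point at which fuel was
    /// charged to `fuel_var`; see `fuel_before_op` for the accounting.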
    fuel_consumed: i64,

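    /// The stack limit loaded for this function, if any; used by
    /// stack-overflow checks at function entry.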
    pub(crate) stack_limit_at_function_entry: Option<ir::GlobalValue>,
}

impl<'module_environment> FuncEnvironment<'module_environment> {
    pub fn new(
        compiler: &'module_environment Compiler,
        translation: &'module_environment ModuleTranslation<'module_environment>,
        types: &'module_environment ModuleTypesBuilder,
        wasm_func_ty: &'module_environment WasmFuncType,
    ) -> Self {
        let tunables = compiler.tunables();
        let builtin_functions = BuiltinFunctions::new(compiler);

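        // Mark the generated `raise` accessor as used so it isn't flagged as
        // dead code in configurations that never emit calls to it.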
        let _ = BuiltinFunctions::raise;

        Self {
            isa: compiler.isa(),
            module: &translation.module,
            compiler,
            types,
            wasm_func_ty,
            sig_ref_to_ty: SecondaryMap::default(),

            #[cfg(feature = "gc")]
            ty_to_gc_layout: std::collections::HashMap::new(),

            heaps: PrimaryMap::default(),
            tables: SecondaryMap::default(),
            vmctx: None,
            pcc_vmctx_memtype: None,
            builtin_functions,
            offsets: VMOffsets::new(compiler.isa().pointer_bytes(), &translation.module),
            tunables,
            fuel_var: Variable::new(0),
            epoch_deadline_var: Variable::new(0),
            epoch_ptr_var: Variable::new(0),
            vmstore_context_ptr: ir::Value::reserved_value(),

            // Start with at least one unit of fuel consumed: even empty
            // functions should consume some fuel.
            fuel_consumed: 1,

            #[cfg(feature = "wmemcheck")]
            translation,

            stack_limit_at_function_entry: None,
        }
    }

    pub(crate) fn pointer_type(&self) -> ir::Type {
        self.isa.pointer_type()
    }

    pub(crate) fn vmctx(&mut self, func: &mut Function) -> ir::GlobalValue {
        self.vmctx.unwrap_or_else(|| {
            let vmctx = func.create_global_value(ir::GlobalValueData::VMContext);
            if self.isa.flags().enable_pcc() {
                // Create a placeholder memtype for the vmctx; fields are
                // added to it lazily as typed loads are emitted.
                let vmctx_memtype = func.create_memory_type(ir::MemoryTypeData::Struct {
                    size: 0,
                    fields: vec![],
                });

                self.pcc_vmctx_memtype = Some(vmctx_memtype);
                func.global_value_facts[vmctx] = Some(Fact::Mem {
                    ty: vmctx_memtype,
                    min_offset: 0,
                    max_offset: 0,
                    nullable: false,
                });
            }

            self.vmctx = Some(vmctx);
            vmctx
        })
    }

    pub(crate) fn vmctx_val(&mut self, pos: &mut FuncCursor<'_>) -> ir::Value {
        let pointer_type = self.pointer_type();
        let vmctx = self.vmctx(&mut pos.func);
        pos.ins().global_value(pointer_type, vmctx)
    }

    fn get_table_copy_func(
        &mut self,
        func: &mut Function,
        dst_table_index: TableIndex,
        src_table_index: TableIndex,
    ) -> (ir::FuncRef, usize, usize) {
        let sig = self.builtin_functions.table_copy(func);
        (
            sig,
            dst_table_index.as_u32() as usize,
            src_table_index.as_u32() as usize,
        )
    }

    #[cfg(feature = "threads")]
    fn get_memory_atomic_wait(
        &mut self,
        func: &mut Function,
        memory_index: MemoryIndex,
        ty: ir::Type,
    ) -> (ir::FuncRef, usize) {
        match ty {
            I32 => (
                self.builtin_functions.memory_atomic_wait32(func),
                memory_index.index(),
            ),
            I64 => (
                self.builtin_functions.memory_atomic_wait64(func),
                memory_index.index(),
            ),
            x => panic!("get_memory_atomic_wait unsupported type: {x:?}"),
        }
    }

    fn get_global_location(
        &mut self,
        func: &mut ir::Function,
        index: GlobalIndex,
    ) -> (ir::GlobalValue, i32) {
        let pointer_type = self.pointer_type();
        let vmctx = self.vmctx(func);
        if let Some(def_index) = self.module.defined_global_index(index) {
            let offset = i32::try_from(self.offsets.vmctx_vmglobal_definition(def_index)).unwrap();
            (vmctx, offset)
        } else {
            let from_offset = self.offsets.vmctx_vmglobal_import_from(index);
            let global = func.create_global_value(ir::GlobalValueData::Load {
                base: vmctx,
                offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                global_type: pointer_type,
                flags: MemFlags::trusted().with_readonly().with_can_move(),
            });
            (global, 0)
        }
    }

    fn declare_vmstore_context_ptr(&mut self, builder: &mut FunctionBuilder<'_>) {
        let pointer_type = self.pointer_type();
        let vmctx = self.vmctx(builder.func);
        let base = builder.ins().global_value(pointer_type, vmctx);
        let offset = i32::from(self.offsets.ptr.vmctx_runtime_limits());
        debug_assert!(self.vmstore_context_ptr.is_reserved_value());
        self.vmstore_context_ptr = builder.ins().load(
            pointer_type,
            ir::MemFlags::trusted().with_readonly().with_can_move(),
            base,
            offset,
        );
    }

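    /// Sets up fuel instrumentation at function entry: declares the fuel
    /// variable, loads the current counter out of the store context, and
    /// emits an initial out-of-fuel check.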
    fn fuel_function_entry(&mut self, builder: &mut FunctionBuilder<'_>) {
        builder.declare_var(self.fuel_var, ir::types::I64);
        self.fuel_load_into_var(builder);
        self.fuel_check(builder);
    }

    fn fuel_function_exit(&mut self, builder: &mut FunctionBuilder<'_>) {
        self.fuel_save_from_var(builder);
    }

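    /// Accounts for the fuel cost of `op` before it is translated. Fuel is
    /// accumulated in `self.fuel_consumed` at compile time and only charged
    /// to the CLIF fuel variable (and flushed to memory) at control-flow and
    /// call boundaries, keeping the instrumentation overhead low.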
    fn fuel_before_op(
        &mut self,
        op: &Operator<'_>,
        builder: &mut FunctionBuilder<'_>,
        reachable: bool,
    ) {
        if !reachable {
            // Unreachable code is never executed, so no fuel should have
            // been accumulated for it.
            debug_assert_eq!(self.fuel_consumed, 0);
            return;
        }

        self.fuel_consumed += match op {
            // Nop and drop generate no machine code, so they cost no fuel.
            Operator::Nop | Operator::Drop => 0,

            // Pure control-flow bookkeeping operators are likewise free.
            Operator::Block { .. }
            | Operator::Loop { .. }
            | Operator::Unreachable
            | Operator::Return
            | Operator::Else
            | Operator::End => 0,

            _ => 1,
        };

        match op {
            // Flush the accumulated fuel to memory before anything that can
            // transfer control out of this function, so the store's counter
            // is up to date across the call or return.
            Operator::Unreachable
            | Operator::Return
            | Operator::CallIndirect { .. }
            | Operator::Call { .. }
            | Operator::ReturnCall { .. }
            | Operator::ReturnCallRef { .. }
            | Operator::ReturnCallIndirect { .. } => {
                self.fuel_increment_var(builder);
                self.fuel_save_from_var(builder);
            }

            // Charge accumulated fuel to the fuel variable before control
            // flow splits or merges, so each basic block's consumption is
            // accounted exactly once.
            Operator::Loop { .. }
            | Operator::If { .. }
            | Operator::Br { .. }
            | Operator::BrIf { .. }
            | Operator::BrTable { .. }
            | Operator::End
            | Operator::Else => self.fuel_increment_var(builder),

            _ => {}
        }
    }

    fn fuel_after_op(&mut self, op: &Operator<'_>, builder: &mut FunctionBuilder<'_>) {
        match op {
            // After a call returns, the callee may have updated the
            // in-memory fuel counter, so reload it into the local variable.
            Operator::Call { .. } | Operator::CallIndirect { .. } => {
                self.fuel_load_into_var(builder);
            }
            _ => {}
        }
    }

    fn fuel_increment_var(&mut self, builder: &mut FunctionBuilder<'_>) {
        let consumption = mem::replace(&mut self.fuel_consumed, 0);
        if consumption == 0 {
            return;
        }

        let fuel = builder.use_var(self.fuel_var);
        let fuel = builder.ins().iadd_imm(fuel, consumption);
        builder.def_var(self.fuel_var, fuel);
    }

    fn fuel_load_into_var(&mut self, builder: &mut FunctionBuilder<'_>) {
        let (addr, offset) = self.fuel_addr_offset();
        let fuel = builder
            .ins()
            .load(ir::types::I64, ir::MemFlags::trusted(), addr, offset);
        builder.def_var(self.fuel_var, fuel);
    }

    fn fuel_save_from_var(&mut self, builder: &mut FunctionBuilder<'_>) {
        let (addr, offset) = self.fuel_addr_offset();
        let fuel_consumed = builder.use_var(self.fuel_var);
        builder
            .ins()
            .store(ir::MemFlags::trusted(), fuel_consumed, addr, offset);
    }

    fn fuel_addr_offset(&mut self) -> (ir::Value, ir::immediates::Offset32) {
        debug_assert!(!self.vmstore_context_ptr.is_reserved_value());
        (
            self.vmstore_context_ptr,
            i32::from(self.offsets.ptr.vmstore_context_fuel_consumed()).into(),
        )
    }

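    /// Emits an out-of-fuel check. The fuel counter counts up toward zero,
    /// so once it is `>= 0` the generated code spills the counter, calls the
    /// `out_of_gas` builtin (which either traps or refills fuel), and then
    /// reloads the counter before continuing.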
    fn fuel_check(&mut self, builder: &mut FunctionBuilder) {
        self.fuel_increment_var(builder);
        let out_of_gas_block = builder.create_block();
        let continuation_block = builder.create_block();

        let zero = builder.ins().iconst(ir::types::I64, 0);
        let fuel = builder.use_var(self.fuel_var);
        let cmp = builder
            .ins()
            .icmp(IntCC::SignedGreaterThanOrEqual, fuel, zero);
        builder
            .ins()
            .brif(cmp, out_of_gas_block, &[], continuation_block, &[]);
        builder.seal_block(out_of_gas_block);

        builder.switch_to_block(out_of_gas_block);
        self.fuel_save_from_var(builder);
        let out_of_gas = self.builtin_functions.out_of_gas(builder.func);
        let vmctx = self.vmctx_val(&mut builder.cursor());
        builder.ins().call(out_of_gas, &[vmctx]);
        self.fuel_load_into_var(builder);
        builder.ins().jump(continuation_block, &[]);
        builder.seal_block(continuation_block);

        builder.switch_to_block(continuation_block);
    }

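    /// Sets up epoch-interruption instrumentation at function entry: caches
    /// the pointer to the engine's epoch counter and performs an initial
    /// (full) deadline check.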
    fn epoch_function_entry(&mut self, builder: &mut FunctionBuilder<'_>) {
        builder.declare_var(self.epoch_deadline_var, ir::types::I64);
        builder.declare_var(self.epoch_ptr_var, self.pointer_type());
        let epoch_ptr = self.epoch_ptr(builder);
        builder.def_var(self.epoch_ptr_var, epoch_ptr);

        let continuation_block = builder.create_block();
        let cur_epoch_value = self.epoch_load_current(builder);
        self.epoch_check_full(builder, cur_epoch_value, continuation_block);
    }

    #[cfg(feature = "wmemcheck")]
    fn hook_malloc_exit(&mut self, builder: &mut FunctionBuilder, retvals: &[ir::Value]) {
        let check_malloc = self.builtin_functions.check_malloc(builder.func);
        let vmctx = self.vmctx_val(&mut builder.cursor());
        let func_args = builder
            .func
            .dfg
            .block_params(builder.func.layout.entry_block().unwrap());
        let len = if func_args.len() < 3 {
            return;
        } else {
            func_args[2]
        };
        let retval = if retvals.len() < 1 {
            return;
        } else {
            retvals[0]
        };
        builder.ins().call(check_malloc, &[vmctx, retval, len]);
    }

    #[cfg(feature = "wmemcheck")]
    fn hook_free_exit(&mut self, builder: &mut FunctionBuilder) {
        let check_free = self.builtin_functions.check_free(builder.func);
        let vmctx = self.vmctx_val(&mut builder.cursor());
        let func_args = builder
            .func
            .dfg
            .block_params(builder.func.layout.entry_block().unwrap());
        let ptr = if func_args.len() < 3 {
            return;
        } else {
            func_args[2]
        };
        builder.ins().call(check_free, &[vmctx, ptr]);
    }

    fn epoch_ptr(&mut self, builder: &mut FunctionBuilder<'_>) -> ir::Value {
        let vmctx = self.vmctx(builder.func);
        let pointer_type = self.pointer_type();
        let base = builder.ins().global_value(pointer_type, vmctx);
        let offset = i32::from(self.offsets.ptr.vmctx_epoch_ptr());
        let epoch_ptr = builder
            .ins()
            .load(pointer_type, ir::MemFlags::trusted(), base, offset);
        epoch_ptr
    }

    fn epoch_load_current(&mut self, builder: &mut FunctionBuilder<'_>) -> ir::Value {
        let addr = builder.use_var(self.epoch_ptr_var);
        builder.ins().load(
            ir::types::I64,
            ir::MemFlags::trusted(),
            addr,
            ir::immediates::Offset32::new(0),
        )
    }

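    /// Emits an epoch check: first against the deadline cached in
    /// `epoch_deadline_var` (the cheap, common case), and only on failure
    /// against the up-to-date deadline in the store context, calling the
    /// `new_epoch` builtin if that check fails too.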
    fn epoch_check(&mut self, builder: &mut FunctionBuilder<'_>) {
        let continuation_block = builder.create_block();

        let cur_epoch_value = self.epoch_load_current(builder);
        self.epoch_check_cached(builder, cur_epoch_value, continuation_block);

        self.epoch_check_full(builder, cur_epoch_value, continuation_block);
    }

    fn epoch_check_cached(
        &mut self,
        builder: &mut FunctionBuilder,
        cur_epoch_value: ir::Value,
        continuation_block: ir::Block,
    ) {
        let new_epoch_block = builder.create_block();
        builder.set_cold_block(new_epoch_block);

        let epoch_deadline = builder.use_var(self.epoch_deadline_var);
        let cmp = builder.ins().icmp(
            IntCC::UnsignedGreaterThanOrEqual,
            cur_epoch_value,
            epoch_deadline,
        );
        builder
            .ins()
            .brif(cmp, new_epoch_block, &[], continuation_block, &[]);
        builder.seal_block(new_epoch_block);

        builder.switch_to_block(new_epoch_block);
    }

    fn epoch_check_full(
        &mut self,
        builder: &mut FunctionBuilder,
        cur_epoch_value: ir::Value,
        continuation_block: ir::Block,
    ) {
        let deadline = builder.ins().load(
            ir::types::I64,
            ir::MemFlags::trusted(),
            self.vmstore_context_ptr,
            ir::immediates::Offset32::new(self.offsets.ptr.vmstore_context_epoch_deadline() as i32),
        );
        builder.def_var(self.epoch_deadline_var, deadline);
        self.epoch_check_cached(builder, cur_epoch_value, continuation_block);

        let new_epoch = self.builtin_functions.new_epoch(builder.func);
        let vmctx = self.vmctx_val(&mut builder.cursor());
        let call = builder.ins().call(new_epoch, &[vmctx]);
        let new_deadline = *builder.func.dfg.inst_results(call).first().unwrap();
        builder.def_var(self.epoch_deadline_var, new_deadline);
        builder.ins().jump(continuation_block, &[]);
        builder.seal_block(continuation_block);

        builder.switch_to_block(continuation_block);
    }

    fn memory(&self, index: MemoryIndex) -> Memory {
        self.module.memories[index]
    }

    fn table(&self, index: TableIndex) -> Table {
        self.module.tables[index]
    }

    fn cast_index_to_i64(
        &self,
        pos: &mut FuncCursor<'_>,
        val: ir::Value,
        index_type: IndexType,
    ) -> ir::Value {
        match index_type {
            IndexType::I32 => pos.ins().uextend(I64, val),
            IndexType::I64 => val,
        }
    }

    fn convert_pointer_to_index_type(
        &self,
        mut pos: FuncCursor<'_>,
        val: ir::Value,
        index_type: IndexType,
        single_byte_pages: bool,
    ) -> ir::Value {
        let desired_type = index_type_to_ir_type(index_type);
        let pointer_type = self.pointer_type();
        assert_eq!(pos.func.dfg.value_type(val), pointer_type);

        if pointer_type == desired_type {
            val
        } else if pointer_type.bits() > desired_type.bits() {
            pos.ins().ireduce(desired_type, val)
        } else {
            // The only widening case is a 32-bit pointer extended to a
            // 64-bit index. With the default page size valid results fit in
            // the positive i32 range, so sign-extension preserves both valid
            // values and the -1 failure sentinel. With single-byte pages the
            // full 32-bit range is valid, so zero-extend and special-case -1
            // explicitly.
            match single_byte_pages {
                false => pos.ins().sextend(desired_type, val),
                true => {
                    let extended = pos.ins().uextend(desired_type, val);
                    let neg_one = pos.ins().iconst(desired_type, -1);
                    let is_failure = pos.ins().icmp_imm(IntCC::Equal, val, -1);
                    pos.ins().select(is_failure, neg_one, extended)
                }
            }
        }
    }

    fn ensure_table_exists(&mut self, func: &mut ir::Function, index: TableIndex) {
        if self.tables[index].is_some() {
            return;
        }

        let pointer_type = self.pointer_type();

        let (ptr, base_offset, current_elements_offset) = {
            let vmctx = self.vmctx(func);
            if let Some(def_index) = self.module.defined_table_index(index) {
                let base_offset =
                    i32::try_from(self.offsets.vmctx_vmtable_definition_base(def_index)).unwrap();
                let current_elements_offset = i32::try_from(
                    self.offsets
                        .vmctx_vmtable_definition_current_elements(def_index),
                )
                .unwrap();
                (vmctx, base_offset, current_elements_offset)
            } else {
                let from_offset = self.offsets.vmctx_vmtable_import_from(index);
                let table = func.create_global_value(ir::GlobalValueData::Load {
                    base: vmctx,
                    offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                    global_type: pointer_type,
                    flags: MemFlags::trusted().with_readonly().with_can_move(),
                });
                let base_offset = i32::from(self.offsets.vmtable_definition_base());
                let current_elements_offset =
                    i32::from(self.offsets.vmtable_definition_current_elements());
                (table, base_offset, current_elements_offset)
            }
        };

        let table = &self.module.tables[index];
        let element_size = if table.ref_type.is_vmgcref_type() {
            ir::types::I32.bytes()
        } else {
            self.reference_type(table.ref_type.heap_type).0.bytes()
        };

        let base_gv = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(base_offset),
            global_type: pointer_type,
            flags: if Some(table.limits.min) == table.limits.max {
                MemFlags::trusted().with_readonly().with_can_move()
            } else {
                MemFlags::trusted()
            },
        });

        let bound = if Some(table.limits.min) == table.limits.max {
            TableSize::Static {
                bound: table.limits.min,
            }
        } else {
            TableSize::Dynamic {
                bound_gv: func.create_global_value(ir::GlobalValueData::Load {
                    base: ptr,
                    offset: Offset32::new(current_elements_offset),
                    global_type: ir::Type::int(
                        u16::from(self.offsets.size_of_vmtable_definition_current_elements()) * 8,
                    )
                    .unwrap(),
                    flags: MemFlags::trusted(),
                }),
            }
        };

        self.tables[index] = Some(TableData {
            base_gv,
            bound,
            element_size,
        });
    }

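    /// Loads a funcref table element, lazily initializing it if necessary.
    /// With lazy table init enabled, initialized entries are tagged with
    /// `FUNCREF_INIT_BIT`: the loaded value is masked with `FUNCREF_MASK`,
    /// and a null/uninitialized entry takes a cold slow path that calls the
    /// `table_get_lazy_init_func_ref` builtin to compute the element.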
    fn get_or_init_func_ref_table_elem(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        index: ir::Value,
        cold_blocks: bool,
    ) -> ir::Value {
        let pointer_type = self.pointer_type();
        self.ensure_table_exists(builder.func, table_index);
        let table_data = self.tables[table_index].clone().unwrap();

        let (table_entry_addr, flags) = table_data.prepare_table_addr(self, builder, index);
        let value = builder.ins().load(pointer_type, flags, table_entry_addr, 0);

        if !self.tunables.table_lazy_init {
            return value;
        }

        assert_eq!(FUNCREF_MASK as isize, -2);
        let value_masked = builder.ins().band_imm(value, Imm64::from(-2));

        let null_block = builder.create_block();
        let continuation_block = builder.create_block();
        if cold_blocks {
            builder.set_cold_block(null_block);
            builder.set_cold_block(continuation_block);
        }
        let result_param = builder.append_block_param(continuation_block, pointer_type);
        builder.set_cold_block(null_block);

        builder
            .ins()
            .brif(value, continuation_block, &[value_masked], null_block, &[]);
        builder.seal_block(null_block);

        builder.switch_to_block(null_block);
        let index_type = self.table(table_index).idx_type;
        let table_index = builder.ins().iconst(I32, table_index.index() as i64);
        let lazy_init = self
            .builtin_functions
            .table_get_lazy_init_func_ref(builder.func);
        let vmctx = self.vmctx_val(&mut builder.cursor());
        let index = self.cast_index_to_i64(&mut builder.cursor(), index, index_type);
        let call_inst = builder.ins().call(lazy_init, &[vmctx, table_index, index]);
        let returned_entry = builder.func.dfg.inst_results(call_inst)[0];
        builder.ins().jump(continuation_block, &[returned_entry]);
        builder.seal_block(continuation_block);

        builder.switch_to_block(continuation_block);
        result_param
    }

    #[cfg(feature = "wmemcheck")]
    fn check_malloc_start(&mut self, builder: &mut FunctionBuilder) {
        let malloc_start = self.builtin_functions.malloc_start(builder.func);
        let vmctx = self.vmctx_val(&mut builder.cursor());
        builder.ins().call(malloc_start, &[vmctx]);
    }

    #[cfg(feature = "wmemcheck")]
    fn check_free_start(&mut self, builder: &mut FunctionBuilder) {
        let free_start = self.builtin_functions.free_start(builder.func);
        let vmctx = self.vmctx_val(&mut builder.cursor());
        builder.ins().call(free_start, &[vmctx]);
    }

    #[cfg(feature = "wmemcheck")]
    fn current_func_name(&self, builder: &mut FunctionBuilder) -> Option<&str> {
        let func_index = match &builder.func.name {
            ir::UserFuncName::User(user) => FuncIndex::from_u32(user.index),
            _ => {
                panic!("function name not a UserFuncName::User as expected")
            }
        };
        self.translation
            .debuginfo
            .name_section
            .func_names
            .get(&func_index)
            .copied()
    }

    fn create_empty_struct_memtype(&self, func: &mut ir::Function) -> ir::MemoryType {
        func.create_memory_type(ir::MemoryTypeData::Struct {
            size: 0,
            fields: vec![],
        })
    }

    fn add_field_to_memtype(
        &self,
        func: &mut ir::Function,
        memtype: ir::MemoryType,
        offset: u32,
        pointee: ir::MemoryType,
        readonly: bool,
    ) {
        let ptr_size = self.pointer_type().bytes();
        match &mut func.memory_types[memtype] {
            ir::MemoryTypeData::Struct { size, fields } => {
                *size = std::cmp::max(*size, offset.checked_add(ptr_size).unwrap().into());
                fields.push(ir::MemoryTypeField {
                    ty: self.pointer_type(),
                    offset: offset.into(),
                    readonly,
                    fact: Some(ir::Fact::Mem {
                        ty: pointee,
                        min_offset: 0,
                        max_offset: 0,
                        nullable: false,
                    }),
                });

                fields.sort_by_key(|f| f.offset);
            }
            _ => panic!("Cannot add field to non-struct memtype"),
        }
    }

    fn load_pointer_with_memtypes(
        &mut self,
        func: &mut ir::Function,
        offset: u32,
        readonly: bool,
        memtype: Option<ir::MemoryType>,
    ) -> (ir::GlobalValue, Option<ir::MemoryType>) {
        let vmctx = self.vmctx(func);
        let pointee = func.create_global_value(ir::GlobalValueData::Load {
            base: vmctx,
            offset: Offset32::new(i32::try_from(offset).unwrap()),
            global_type: self.pointer_type(),
            flags: MemFlags::trusted().with_readonly().with_can_move(),
        });

        let mt = memtype.map(|mt| {
            let pointee_mt = self.create_empty_struct_memtype(func);
            self.add_field_to_memtype(func, mt, offset, pointee_mt, readonly);
            func.global_value_facts[pointee] = Some(Fact::Mem {
                ty: pointee_mt,
                min_offset: 0,
                max_offset: 0,
                nullable: false,
            });
            pointee_mt
        });
        (pointee, mt)
    }

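    /// Emits an explicit branch-and-trap sequence for `trap_cond`, for
    /// configurations where CLIF instructions themselves are not allowed to
    /// trap.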
    pub fn conditionally_trap(
        &mut self,
        builder: &mut FunctionBuilder,
        trap_cond: ir::Value,
        trap: ir::TrapCode,
    ) {
        assert!(!self.clif_instruction_traps_enabled());

        let trap_block = builder.create_block();
        builder.set_cold_block(trap_block);
        let continuation_block = builder.create_block();

        builder
            .ins()
            .brif(trap_cond, trap_block, &[], continuation_block, &[]);

        builder.seal_block(trap_block);
        builder.seal_block(continuation_block);

        builder.switch_to_block(trap_block);
        self.trap(builder, trap);
        builder.switch_to_block(continuation_block);
    }

    fn guard_zero_divisor(&mut self, builder: &mut FunctionBuilder, rhs: ir::Value) {
        if self.clif_instruction_traps_enabled() {
            return;
        }
        self.trapz(builder, rhs, ir::TrapCode::INTEGER_DIVISION_BY_ZERO);
    }

    fn guard_signed_divide(
        &mut self,
        builder: &mut FunctionBuilder,
        lhs: ir::Value,
        rhs: ir::Value,
    ) {
        if self.clif_instruction_traps_enabled() {
            return;
        }
        self.trapz(builder, rhs, ir::TrapCode::INTEGER_DIVISION_BY_ZERO);

        let ty = builder.func.dfg.value_type(rhs);
        let minus_one = builder.ins().iconst(ty, -1);
        let rhs_is_minus_one = builder.ins().icmp(IntCC::Equal, rhs, minus_one);
        let int_min = builder.ins().iconst(
            ty,
            match ty {
                I32 => i64::from(i32::MIN),
                I64 => i64::MIN,
                _ => unreachable!(),
            },
        );
        let lhs_is_int_min = builder.ins().icmp(IntCC::Equal, lhs, int_min);
        let is_integer_overflow = builder.ins().band(rhs_is_minus_one, lhs_is_int_min);
        self.conditionally_trap(builder, is_integer_overflow, ir::TrapCode::INTEGER_OVERFLOW);
    }

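    /// Emits explicit NaN and range checks before a float-to-int conversion
    /// when CLIF instructions may not trap: NaN raises
    /// `BAD_CONVERSION_TO_INTEGER`, and a truncated value at or beyond the
    /// `range32`/`range64` bounds raises `INTEGER_OVERFLOW`.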
    fn guard_fcvt_to_int(
        &mut self,
        builder: &mut FunctionBuilder,
        ty: ir::Type,
        val: ir::Value,
        range32: (f64, f64),
        range64: (f64, f64),
    ) {
        assert!(!self.clif_instruction_traps_enabled());
        let val_ty = builder.func.dfg.value_type(val);
        let val = if val_ty == F64 {
            val
        } else {
            builder.ins().fpromote(F64, val)
        };
        let isnan = builder.ins().fcmp(FloatCC::NotEqual, val, val);
        self.trapnz(builder, isnan, ir::TrapCode::BAD_CONVERSION_TO_INTEGER);
        let val = builder.ins().trunc(val);
        let (lower_bound, upper_bound) = match ty {
            I32 => range32,
            I64 => range64,
            _ => unreachable!(),
        };
        let lower_bound = builder.ins().f64const(lower_bound);
        let too_small = builder
            .ins()
            .fcmp(FloatCC::LessThanOrEqual, val, lower_bound);
        self.trapnz(builder, too_small, ir::TrapCode::INTEGER_OVERFLOW);
        let upper_bound = builder.ins().f64const(upper_bound);
        let too_large = builder
            .ins()
            .fcmp(FloatCC::GreaterThanOrEqual, val, upper_bound);
        self.trapnz(builder, too_large, ir::TrapCode::INTEGER_OVERFLOW);
    }

    pub(crate) fn vmshared_type_index_ty(&self) -> Type {
        Type::int_with_byte_size(self.offsets.size_of_vmshared_type_index().into()).unwrap()
    }

    pub(crate) fn module_interned_to_shared_ty(
        &mut self,
        pos: &mut FuncCursor,
        interned_ty: ModuleInternedTypeIndex,
    ) -> ir::Value {
        let vmctx = self.vmctx_val(pos);
        let pointer_type = self.pointer_type();
        let mem_flags = ir::MemFlags::trusted().with_readonly().with_can_move();

        let shared_indices = pos.ins().load(
            pointer_type,
            mem_flags,
            vmctx,
            i32::from(self.offsets.ptr.vmctx_type_ids_array()),
        );

        // Index into the per-module array of `VMSharedTypeIndex`es to
        // translate the module-interned type into its engine-level index.
        let ty = self.vmshared_type_index_ty();
        let offset = i32::try_from(interned_ty.as_u32().checked_mul(ty.bytes()).unwrap()).unwrap();

        pos.ins().load(ty, mem_flags, shared_indices, offset)
    }

    pub(crate) fn load_funcref_type_index(
        &mut self,
        pos: &mut FuncCursor,
        mem_flags: ir::MemFlags,
        funcref: ir::Value,
    ) -> ir::Value {
        let ty = self.vmshared_type_index_ty();
        pos.ins().load(
            ty,
            mem_flags,
            funcref,
            i32::from(self.offsets.ptr.vm_func_ref_type_index()),
        )
    }
}

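/// A helper bundling the `FunctionBuilder` and `FuncEnvironment` together
/// while translating the various kinds of Wasm calls (direct, indirect,
/// `call_ref`, and their tail-call variants).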
struct Call<'a, 'func, 'module_env> {
    builder: &'a mut FunctionBuilder<'func>,
    env: &'a mut FuncEnvironment<'module_env>,
    tail: bool,
}

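/// The outcome of compile-time analysis of an indirect call's type check:
/// either a runtime signature comparison is required, the types statically
/// match (possibly still needing a null check), or the call statically traps.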
enum CheckIndirectCallTypeSignature {
    Runtime,
    StaticMatch {
        may_be_null: bool,
    },
    StaticTrap,
}

impl<'a, 'func, 'module_env> Call<'a, 'func, 'module_env> {
    pub fn new(
        builder: &'a mut FunctionBuilder<'func>,
        env: &'a mut FuncEnvironment<'module_env>,
    ) -> Self {
        Call {
            builder,
            env,
            tail: false,
        }
    }

    pub fn new_tail(
        builder: &'a mut FunctionBuilder<'func>,
        env: &'a mut FuncEnvironment<'module_env>,
    ) -> Self {
        Call {
            builder,
            env,
            tail: true,
        }
    }

    pub fn direct_call(
        mut self,
        callee_index: FuncIndex,
        callee: ir::FuncRef,
        call_args: &[ir::Value],
    ) -> WasmResult<ir::Inst> {
        let mut real_call_args = Vec::with_capacity(call_args.len() + 2);
        let caller_vmctx = self
            .builder
            .func
            .special_param(ArgumentPurpose::VMContext)
            .unwrap();

        // Locally-defined functions share the caller's vmctx, so it is
        // passed as both the callee-vmctx and caller-vmctx arguments.
        if !self.env.module.is_imported_function(callee_index) {
            real_call_args.push(caller_vmctx);

            real_call_args.push(caller_vmctx);

            real_call_args.extend_from_slice(call_args);

            return Ok(self.direct_call_inst(callee, &real_call_args));
        }

        // Imported functions go through the `VMFunctionImport`: load the raw
        // code pointer and the callee's vmctx out of the import's fields.
        let pointer_type = self.env.pointer_type();
        let sig_ref = self.builder.func.dfg.ext_funcs[callee].signature;
        let vmctx = self.env.vmctx(self.builder.func);
        let base = self.builder.ins().global_value(pointer_type, vmctx);

        let mem_flags = ir::MemFlags::trusted().with_readonly().with_can_move();

        let body_offset = i32::try_from(
            self.env
                .offsets
                .vmctx_vmfunction_import_wasm_call(callee_index),
        )
        .unwrap();
        let func_addr = self
            .builder
            .ins()
            .load(pointer_type, mem_flags, base, body_offset);

        let vmctx_offset =
            i32::try_from(self.env.offsets.vmctx_vmfunction_import_vmctx(callee_index)).unwrap();
        let vmctx = self
            .builder
            .ins()
            .load(pointer_type, mem_flags, base, vmctx_offset);
        real_call_args.push(vmctx);
        real_call_args.push(caller_vmctx);

        real_call_args.extend_from_slice(call_args);

        Ok(self.indirect_call_inst(sig_ref, func_addr, &real_call_args))
    }

    pub fn indirect_call(
        mut self,
        features: &WasmFeatures,
        table_index: TableIndex,
        ty_index: TypeIndex,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        call_args: &[ir::Value],
    ) -> WasmResult<Option<ir::Inst>> {
        let (code_ptr, callee_vmctx) = match self.check_and_load_code_and_callee_vmctx(
            features,
            table_index,
            ty_index,
            callee,
            false,
        )? {
            Some(pair) => pair,
            None => return Ok(None),
        };

        self.unchecked_call_impl(sig_ref, code_ptr, callee_vmctx, call_args)
            .map(Some)
    }

    fn check_and_load_code_and_callee_vmctx(
        &mut self,
        features: &WasmFeatures,
        table_index: TableIndex,
        ty_index: TypeIndex,
        callee: ir::Value,
        cold_blocks: bool,
    ) -> WasmResult<Option<(ir::Value, ir::Value)>> {
        let funcref_ptr = self.env.get_or_init_func_ref_table_elem(
            self.builder,
            table_index,
            callee,
            cold_blocks,
        );

        let check =
            self.check_indirect_call_type_signature(features, table_index, ty_index, funcref_ptr);

        let trap_code = match check {
            // A runtime signature check has already dereferenced the
            // funcref, so the code-pointer load below cannot trap on null.
            CheckIndirectCallTypeSignature::Runtime => None,

            // Statically matching types still need a null check if the
            // table may contain null entries; fold that into the load.
            CheckIndirectCallTypeSignature::StaticMatch { may_be_null } => {
                if may_be_null {
                    Some(crate::TRAP_INDIRECT_CALL_TO_NULL)
                } else {
                    None
                }
            }

            // The call statically traps; the trap has already been emitted,
            // so there is no call to translate.
            CheckIndirectCallTypeSignature::StaticTrap => return Ok(None),
        };

        Ok(Some(self.load_code_and_vmctx(funcref_ptr, trap_code)))
    }

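    /// Decides how the `call_indirect` type check is performed for this
    /// table's element type: statically matched (tables of one concrete
    /// type), statically trapping (provably wrong type, or null-only
    /// tables), or a runtime comparison of `VMSharedTypeIndex`es (with a
    /// full subtype check when the GC proposal is enabled).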
    fn check_indirect_call_type_signature(
        &mut self,
        features: &WasmFeatures,
        table_index: TableIndex,
        ty_index: TypeIndex,
        funcref_ptr: ir::Value,
    ) -> CheckIndirectCallTypeSignature {
        let table = &self.env.module.tables[table_index];
        let sig_id_size = self.env.offsets.size_of_vmshared_type_index();
        let sig_id_type = Type::int(u16::from(sig_id_size) * 8).unwrap();

        match table.ref_type.heap_type {
            // A `func` table can hold functions of any signature, so fall
            // through to the runtime signature check below.
            WasmHeapType::Func => {}

            WasmHeapType::ConcreteFunc(EngineOrModuleTypeIndex::Module(table_ty)) => {
                // The table holds elements of a single concrete type: when
                // it matches the type expected by this `call_indirect`
                // exactly, only a null check (at most) is required.
                let specified_ty = self.env.module.types[ty_index].unwrap_module_type_index();
                if specified_ty == table_ty {
                    return CheckIndirectCallTypeSignature::StaticMatch {
                        may_be_null: table.ref_type.nullable,
                    };
                }

                if features.gc() {
                    // With GC enabled, the table's element type may be a
                    // subtype of the expected type, so defer to the runtime
                    // subtype check below.
                } else {
                    // Without GC there is no subtyping, so a mismatch here
                    // always traps: either as an indirect-call-to-null (when
                    // the table is nullable and the entry is null) or as a
                    // bad-signature trap.
                    if table.ref_type.nullable {
                        if self.env.clif_memory_traps_enabled() {
                            self.builder.ins().load(
                                sig_id_type,
                                ir::MemFlags::trusted()
                                    .with_readonly()
                                    .with_trap_code(Some(crate::TRAP_INDIRECT_CALL_TO_NULL)),
                                funcref_ptr,
                                i32::from(self.env.offsets.ptr.vm_func_ref_type_index()),
                            );
                        } else {
                            self.env.trapz(
                                self.builder,
                                funcref_ptr,
                                crate::TRAP_INDIRECT_CALL_TO_NULL,
                            );
                        }
                    }
                    self.env.trap(self.builder, crate::TRAP_BAD_SIGNATURE);
                    return CheckIndirectCallTypeSignature::StaticTrap;
                }
            }

            // Calls into a table of `nofunc` references can only ever be
            // calls to null, which always trap.
            WasmHeapType::NoFunc => {
                assert!(table.ref_type.nullable);
                self.env
                    .trap(self.builder, crate::TRAP_INDIRECT_CALL_TO_NULL);
                return CheckIndirectCallTypeSignature::StaticTrap;
            }

            WasmHeapType::Cont | WasmHeapType::ConcreteCont(_) | WasmHeapType::NoCont => todo!(),

            // Tables of these types cannot be the target of a
            // `call_indirect`.
            WasmHeapType::ConcreteFunc(EngineOrModuleTypeIndex::Engine(_))
            | WasmHeapType::ConcreteFunc(EngineOrModuleTypeIndex::RecGroup(_))
            | WasmHeapType::Extern
            | WasmHeapType::NoExtern
            | WasmHeapType::Any
            | WasmHeapType::Eq
            | WasmHeapType::I31
            | WasmHeapType::Array
            | WasmHeapType::ConcreteArray(_)
            | WasmHeapType::Struct
            | WasmHeapType::ConcreteStruct(_)
            | WasmHeapType::None => {
                unreachable!()
            }
        }

        // Load the caller's and callee's type IDs and compare them; the load
        // from the funcref doubles as the null check when CLIF memory traps
        // are enabled.
        let interned_ty = self.env.module.types[ty_index].unwrap_module_type_index();
        let caller_sig_id = self
            .env
            .module_interned_to_shared_ty(&mut self.builder.cursor(), interned_ty);

        let mut mem_flags = ir::MemFlags::trusted().with_readonly();
        if self.env.clif_memory_traps_enabled() {
            mem_flags = mem_flags.with_trap_code(Some(crate::TRAP_INDIRECT_CALL_TO_NULL));
        } else {
            self.env
                .trapz(self.builder, funcref_ptr, crate::TRAP_INDIRECT_CALL_TO_NULL);
        }
        let callee_sig_id =
            self.env
                .load_funcref_type_index(&mut self.builder.cursor(), mem_flags, funcref_ptr);

        let matches = if features.gc() {
            #[cfg(feature = "gc")]
            {
                self.env
                    .is_subtype(self.builder, callee_sig_id, caller_sig_id)
            }
            #[cfg(not(feature = "gc"))]
            {
                unreachable!()
            }
        } else {
            self.builder
                .ins()
                .icmp(IntCC::Equal, callee_sig_id, caller_sig_id)
        };
        self.env
            .trapz(self.builder, matches, crate::TRAP_BAD_SIGNATURE);
        CheckIndirectCallTypeSignature::Runtime
    }

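    /// Translates a `call_ref`: the callee funcref may be null, so the load
    /// of its code pointer is annotated to trap with `TRAP_NULL_REFERENCE`
    /// (or an explicit null check is emitted when memory traps are
    /// disabled).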
1525 pub fn call_ref(
1527 mut self,
1528 sig_ref: ir::SigRef,
1529 callee: ir::Value,
1530 args: &[ir::Value],
1531 ) -> WasmResult<ir::Inst> {
1532 let callee_load_trap_code = Some(crate::TRAP_NULL_REFERENCE);
1539
1540 self.unchecked_call(sig_ref, callee, callee_load_trap_code, args)
1541 }
1542
1543 fn unchecked_call(
1549 &mut self,
1550 sig_ref: ir::SigRef,
1551 callee: ir::Value,
1552 callee_load_trap_code: Option<ir::TrapCode>,
1553 call_args: &[ir::Value],
1554 ) -> WasmResult<ir::Inst> {
1555 let (func_addr, callee_vmctx) = self.load_code_and_vmctx(callee, callee_load_trap_code);
1556 self.unchecked_call_impl(sig_ref, func_addr, callee_vmctx, call_args)
1557 }
1558
1559 fn load_code_and_vmctx(
1560 &mut self,
1561 callee: ir::Value,
1562 callee_load_trap_code: Option<ir::TrapCode>,
1563 ) -> (ir::Value, ir::Value) {
1564 let pointer_type = self.env.pointer_type();
1565
1566 let mem_flags = ir::MemFlags::trusted().with_readonly();
1574 let mut callee_flags = mem_flags;
1575 if self.env.clif_memory_traps_enabled() {
1576 callee_flags = callee_flags.with_trap_code(callee_load_trap_code);
1577 } else {
1578 if let Some(trap) = callee_load_trap_code {
1579 self.env.trapz(self.builder, callee, trap);
1580 }
1581 }
1582 let func_addr = self.builder.ins().load(
1583 pointer_type,
1584 callee_flags,
1585 callee,
1586 i32::from(self.env.offsets.ptr.vm_func_ref_wasm_call()),
1587 );
1588 let callee_vmctx = self.builder.ins().load(
1589 pointer_type,
1590 mem_flags,
1591 callee,
1592 i32::from(self.env.offsets.ptr.vm_func_ref_vmctx()),
1593 );
1594
1595 (func_addr, callee_vmctx)
1596 }
1597
1598 fn unchecked_call_impl(
1602 &mut self,
1603 sig_ref: ir::SigRef,
1604 func_addr: ir::Value,
1605 callee_vmctx: ir::Value,
1606 call_args: &[ir::Value],
1607 ) -> WasmResult<ir::Inst> {
1608 let mut real_call_args = Vec::with_capacity(call_args.len() + 2);
1609 let caller_vmctx = self
1610 .builder
1611 .func
1612 .special_param(ArgumentPurpose::VMContext)
1613 .unwrap();
1614
1615 real_call_args.push(callee_vmctx);
1617 real_call_args.push(caller_vmctx);
1618
1619 real_call_args.extend_from_slice(call_args);
1621
1622 Ok(self.indirect_call_inst(sig_ref, func_addr, &real_call_args))
1623 }
1624
1625 fn direct_call_inst(&mut self, callee: ir::FuncRef, args: &[ir::Value]) -> ir::Inst {
1626 if self.tail {
1627 self.builder.ins().return_call(callee, args)
1628 } else {
1629 let inst = self.builder.ins().call(callee, args);
1630 let results: SmallVec<[_; 4]> = self
1631 .builder
1632 .func
1633 .dfg
1634 .inst_results(inst)
1635 .iter()
1636 .copied()
1637 .collect();
1638 for (i, val) in results.into_iter().enumerate() {
1639 if self
1640 .env
1641 .func_ref_result_needs_stack_map(&self.builder.func, callee, i)
1642 {
1643 self.builder.declare_value_needs_stack_map(val);
1644 }
1645 }
1646 inst
1647 }
1648 }
1649
1650 fn indirect_call_inst(
1651 &mut self,
1652 sig_ref: ir::SigRef,
1653 func_addr: ir::Value,
1654 args: &[ir::Value],
1655 ) -> ir::Inst {
1656 if self.tail {
1657 self.builder
1658 .ins()
1659 .return_call_indirect(sig_ref, func_addr, args)
1660 } else {
1661 let inst = self.builder.ins().call_indirect(sig_ref, func_addr, args);
1662 let results: SmallVec<[_; 4]> = self
1663 .builder
1664 .func
1665 .dfg
1666 .inst_results(inst)
1667 .iter()
1668 .copied()
1669 .collect();
1670 for (i, val) in results.into_iter().enumerate() {
1671 if self.env.sig_ref_result_needs_stack_map(sig_ref, i) {
1672 self.builder.declare_value_needs_stack_map(val);
1673 }
1674 }
1675 inst
1676 }
1677 }
1678}
1679
1680impl TypeConvert for FuncEnvironment<'_> {
1681 fn lookup_heap_type(&self, ty: wasmparser::UnpackedIndex) -> WasmHeapType {
1682 wasmtime_environ::WasmparserTypeConverter::new(self.types, |idx| {
1683 self.module.types[idx].unwrap_module_type_index()
1684 })
1685 .lookup_heap_type(ty)
1686 }
1687
1688 fn lookup_type_index(&self, index: wasmparser::UnpackedIndex) -> EngineOrModuleTypeIndex {
1689 wasmtime_environ::WasmparserTypeConverter::new(self.types, |idx| {
1690 self.module.types[idx].unwrap_module_type_index()
1691 })
1692 .lookup_type_index(index)
1693 }
1694}
1695
1696impl<'module_environment> TargetEnvironment for FuncEnvironment<'module_environment> {
1697 fn target_config(&self) -> TargetFrontendConfig {
1698 self.isa.frontend_config()
1699 }
1700
1701 fn reference_type(&self, wasm_ty: WasmHeapType) -> (ir::Type, bool) {
1702 let ty = crate::reference_type(wasm_ty, self.pointer_type());
1703 let needs_stack_map = match wasm_ty.top() {
1704 WasmHeapTopType::Extern | WasmHeapTopType::Any => true,
1705 WasmHeapTopType::Func => false,
1706 WasmHeapTopType::Cont => todo!(), };
1708 (ty, needs_stack_map)
1709 }
1710
1711 fn heap_access_spectre_mitigation(&self) -> bool {
1712 self.isa.flags().enable_heap_access_spectre_mitigation()
1713 }
1714
1715 fn proof_carrying_code(&self) -> bool {
1716 self.isa.flags().enable_pcc()
1717 }
1718
1719 fn tunables(&self) -> &Tunables {
1720 self.compiler.tunables()
1721 }
1722}
1723
1724impl FuncEnvironment<'_> {
1725 pub fn heaps(&self) -> &PrimaryMap<Heap, HeapData> {
1726 &self.heaps
1727 }
1728
1729 pub fn is_wasm_parameter(&self, _signature: &ir::Signature, index: usize) -> bool {
1730 index >= 2
1733 }
1734
1735 pub fn param_needs_stack_map(&self, _signature: &ir::Signature, index: usize) -> bool {
1736 if index < 2 {
1738 return false;
1739 }
1740
1741 self.wasm_func_ty.params()[index - 2].is_vmgcref_type_and_not_i31()
1742 }
1743
1744 pub fn sig_ref_result_needs_stack_map(&self, sig_ref: ir::SigRef, index: usize) -> bool {
1745 let wasm_func_ty = self.sig_ref_to_ty[sig_ref].as_ref().unwrap();
1746 wasm_func_ty.returns()[index].is_vmgcref_type_and_not_i31()
1747 }
1748
1749 pub fn func_ref_result_needs_stack_map(
1750 &self,
1751 func: &ir::Function,
1752 func_ref: ir::FuncRef,
1753 index: usize,
1754 ) -> bool {
1755 let sig_ref = func.dfg.ext_funcs[func_ref].signature;
1756 let wasm_func_ty = self.sig_ref_to_ty[sig_ref].as_ref().unwrap();
1757 wasm_func_ty.returns()[index].is_vmgcref_type_and_not_i31()
1758 }
1759
1760 pub fn after_locals(&mut self, num_locals: usize) {
1761 self.fuel_var = Variable::new(num_locals);
1762 self.epoch_deadline_var = Variable::new(num_locals + 1);
1763 self.epoch_ptr_var = Variable::new(num_locals + 2);
1764 }
1765
1766 pub fn translate_table_grow(
1767 &mut self,
1768 builder: &mut FunctionBuilder<'_>,
1769 table_index: TableIndex,
1770 delta: ir::Value,
1771 init_value: ir::Value,
1772 ) -> WasmResult<ir::Value> {
1773 let mut pos = builder.cursor();
1774 let table = self.table(table_index);
1775 let ty = table.ref_type.heap_type;
1776 let grow = if ty.is_vmgcref_type() {
1777 gc::builtins::table_grow_gc_ref(self, &mut pos.func)?
1778 } else {
1779 debug_assert_eq!(ty.top(), WasmHeapTopType::Func);
1780 self.builtin_functions.table_grow_func_ref(&mut pos.func)
1781 };
1782
1783 let vmctx = self.vmctx_val(&mut pos);
1784
1785 let index_type = table.idx_type;
1786 let delta = self.cast_index_to_i64(&mut pos, delta, index_type);
1787 let table_index_arg = pos.ins().iconst(I32, table_index.as_u32() as i64);
1788 let call_inst = pos
1789 .ins()
1790 .call(grow, &[vmctx, table_index_arg, delta, init_value]);
1791 let result = pos.func.dfg.first_result(call_inst);
1792 Ok(self.convert_pointer_to_index_type(builder.cursor(), result, index_type, false))
1793 }
1794
1795 pub fn translate_table_get(
1796 &mut self,
1797 builder: &mut FunctionBuilder,
1798 table_index: TableIndex,
1799 index: ir::Value,
1800 ) -> WasmResult<ir::Value> {
1801 let table = self.module.tables[table_index];
1802 self.ensure_table_exists(builder.func, table_index);
1803 let table_data = self.tables[table_index].clone().unwrap();
1804 let heap_ty = table.ref_type.heap_type;
1805 match heap_ty.top() {
1806 WasmHeapTopType::Any | WasmHeapTopType::Extern => {
1808 let (src, flags) = table_data.prepare_table_addr(self, builder, index);
1809 gc::gc_compiler(self)?.translate_read_gc_reference(
1810 self,
1811 builder,
1812 table.ref_type,
1813 src,
1814 flags,
1815 )
1816 }
1817
1818 WasmHeapTopType::Func => {
1820 Ok(self.get_or_init_func_ref_table_elem(builder, table_index, index, false))
1821 }
1822
1823 WasmHeapTopType::Cont => todo!(), }
1826 }
1827
1828 pub fn translate_table_set(
1829 &mut self,
1830 builder: &mut FunctionBuilder,
1831 table_index: TableIndex,
1832 value: ir::Value,
1833 index: ir::Value,
1834 ) -> WasmResult<()> {
1835 let table = self.module.tables[table_index];
1836 self.ensure_table_exists(builder.func, table_index);
1837 let table_data = self.tables[table_index].clone().unwrap();
1838 let heap_ty = table.ref_type.heap_type;
1839 match heap_ty.top() {
1840 WasmHeapTopType::Any | WasmHeapTopType::Extern => {
1842 let (dst, flags) = table_data.prepare_table_addr(self, builder, index);
1843 gc::gc_compiler(self)?.translate_write_gc_reference(
1844 self,
1845 builder,
1846 table.ref_type,
1847 dst,
1848 value,
1849 flags,
1850 )
1851 }
1852
1853 WasmHeapTopType::Func => {
1855 let (elem_addr, flags) = table_data.prepare_table_addr(self, builder, index);
1856 let value_with_init_bit = if self.tunables.table_lazy_init {
1860 builder
1861 .ins()
1862 .bor_imm(value, Imm64::from(FUNCREF_INIT_BIT as i64))
1863 } else {
1864 value
1865 };
1866 builder
1867 .ins()
1868 .store(flags, value_with_init_bit, elem_addr, 0);
1869 Ok(())
1870 }
1871
1872 WasmHeapTopType::Cont => todo!(), }
1875 }
1876
1877 pub fn translate_table_fill(
1878 &mut self,
1879 builder: &mut FunctionBuilder<'_>,
1880 table_index: TableIndex,
1881 dst: ir::Value,
1882 val: ir::Value,
1883 len: ir::Value,
1884 ) -> WasmResult<()> {
1885 let mut pos = builder.cursor();
1886 let table = self.table(table_index);
1887 let index_type = table.idx_type;
1888 let dst = self.cast_index_to_i64(&mut pos, dst, index_type);
1889 let len = self.cast_index_to_i64(&mut pos, len, index_type);
1890 let ty = table.ref_type.heap_type;
1891 let libcall = if ty.is_vmgcref_type() {
1892 gc::builtins::table_fill_gc_ref(self, &mut pos.func)?
1893 } else {
1894 debug_assert_eq!(ty.top(), WasmHeapTopType::Func);
1895 self.builtin_functions.table_fill_func_ref(&mut pos.func)
1896 };
1897
1898 let vmctx = self.vmctx_val(&mut pos);
1899
1900 let table_index_arg = pos.ins().iconst(I32, table_index.as_u32() as i64);
1901 pos.ins()
1902 .call(libcall, &[vmctx, table_index_arg, dst, val, len]);
1903
1904 Ok(())
1905 }
1906
1907 pub fn translate_ref_i31(
1908 &mut self,
1909 mut pos: FuncCursor,
1910 val: ir::Value,
1911 ) -> WasmResult<ir::Value> {
1912 debug_assert_eq!(pos.func.dfg.value_type(val), ir::types::I32);
1913 let shifted = pos.ins().ishl_imm(val, 1);
1914 let tagged = pos
1915 .ins()
1916 .bor_imm(shifted, i64::from(crate::I31_REF_DISCRIMINANT));
1917 let (ref_ty, _needs_stack_map) = self.reference_type(WasmHeapType::I31);
1918 debug_assert_eq!(ref_ty, ir::types::I32);
1919 Ok(tagged)
1920 }
1921
1922 pub fn translate_i31_get_s(
1923 &mut self,
1924 builder: &mut FunctionBuilder,
1925 i31ref: ir::Value,
1926 ) -> WasmResult<ir::Value> {
1927 self.trapz(builder, i31ref, crate::TRAP_NULL_REFERENCE);
1931 Ok(builder.ins().sshr_imm(i31ref, 1))
1932 }
1933
1934 pub fn translate_i31_get_u(
1935 &mut self,
1936 builder: &mut FunctionBuilder,
1937 i31ref: ir::Value,
1938 ) -> WasmResult<ir::Value> {
1939 self.trapz(builder, i31ref, crate::TRAP_NULL_REFERENCE);
1943 Ok(builder.ins().ushr_imm(i31ref, 1))
1944 }
1945
1946 pub fn struct_fields_len(&mut self, struct_type_index: TypeIndex) -> WasmResult<usize> {
1947 let ty = self.module.types[struct_type_index].unwrap_module_type_index();
1948 match &self.types[ty].composite_type.inner {
1949 WasmCompositeInnerType::Struct(s) => Ok(s.fields.len()),
1950 _ => unreachable!(),
1951 }
1952 }
1953
1954 pub fn translate_struct_new(
1955 &mut self,
1956 builder: &mut FunctionBuilder,
1957 struct_type_index: TypeIndex,
1958 fields: StructFieldsVec,
1959 ) -> WasmResult<ir::Value> {
1960 gc::translate_struct_new(self, builder, struct_type_index, &fields)
1961 }
1962
1963 pub fn translate_struct_new_default(
1964 &mut self,
1965 builder: &mut FunctionBuilder,
1966 struct_type_index: TypeIndex,
1967 ) -> WasmResult<ir::Value> {
1968 gc::translate_struct_new_default(self, builder, struct_type_index)
1969 }
1970
1971 pub fn translate_struct_get(
1972 &mut self,
1973 builder: &mut FunctionBuilder,
1974 struct_type_index: TypeIndex,
1975 field_index: u32,
1976 struct_ref: ir::Value,
1977 extension: Option<Extension>,
1978 ) -> WasmResult<ir::Value> {
1979 gc::translate_struct_get(
1980 self,
1981 builder,
1982 struct_type_index,
1983 field_index,
1984 struct_ref,
1985 extension,
1986 )
1987 }
1988
1989 pub fn translate_struct_set(
1990 &mut self,
1991 builder: &mut FunctionBuilder,
1992 struct_type_index: TypeIndex,
1993 field_index: u32,
1994 struct_ref: ir::Value,
1995 value: ir::Value,
1996 ) -> WasmResult<()> {
1997 gc::translate_struct_set(
1998 self,
1999 builder,
2000 struct_type_index,
2001 field_index,
2002 struct_ref,
2003 value,
2004 )
2005 }
2006
2007 pub fn translate_array_new(
2008 &mut self,
2009 builder: &mut FunctionBuilder,
2010 array_type_index: TypeIndex,
2011 elem: ir::Value,
2012 len: ir::Value,
2013 ) -> WasmResult<ir::Value> {
2014 gc::translate_array_new(self, builder, array_type_index, elem, len)
2015 }
2016
2017 pub fn translate_array_new_default(
2018 &mut self,
2019 builder: &mut FunctionBuilder,
2020 array_type_index: TypeIndex,
2021 len: ir::Value,
2022 ) -> WasmResult<ir::Value> {
2023 gc::translate_array_new_default(self, builder, array_type_index, len)
2024 }
2025
2026 pub fn translate_array_new_fixed(
2027 &mut self,
2028 builder: &mut FunctionBuilder,
2029 array_type_index: TypeIndex,
2030 elems: &[ir::Value],
2031 ) -> WasmResult<ir::Value> {
2032 gc::translate_array_new_fixed(self, builder, array_type_index, elems)
2033 }
2034
2035 pub fn translate_array_new_data(
2036 &mut self,
2037 builder: &mut FunctionBuilder,
2038 array_type_index: TypeIndex,
2039 data_index: DataIndex,
2040 data_offset: ir::Value,
2041 len: ir::Value,
2042 ) -> WasmResult<ir::Value> {
2043 let libcall = gc::builtins::array_new_data(self, builder.func)?;
2044 let vmctx = self.vmctx_val(&mut builder.cursor());
2045 let interned_type_index = self.module.types[array_type_index].unwrap_module_type_index();
2046 let interned_type_index = builder
2047 .ins()
2048 .iconst(I32, i64::from(interned_type_index.as_u32()));
2049 let data_index = builder.ins().iconst(I32, i64::from(data_index.as_u32()));
2050 let call_inst = builder.ins().call(
2051 libcall,
2052 &[vmctx, interned_type_index, data_index, data_offset, len],
2053 );
2054 let result = builder.func.dfg.first_result(call_inst);
2055 Ok(builder.ins().ireduce(ir::types::I32, result))
2056 }
2057
2058 pub fn translate_array_new_elem(
2059 &mut self,
2060 builder: &mut FunctionBuilder,
2061 array_type_index: TypeIndex,
2062 elem_index: ElemIndex,
2063 elem_offset: ir::Value,
2064 len: ir::Value,
2065 ) -> WasmResult<ir::Value> {
2066 let libcall = gc::builtins::array_new_elem(self, builder.func)?;
2067 let vmctx = self.vmctx_val(&mut builder.cursor());
2068 let interned_type_index = self.module.types[array_type_index].unwrap_module_type_index();
2069 let interned_type_index = builder
2070 .ins()
2071 .iconst(I32, i64::from(interned_type_index.as_u32()));
2072 let elem_index = builder.ins().iconst(I32, i64::from(elem_index.as_u32()));
2073 let call_inst = builder.ins().call(
2074 libcall,
2075 &[vmctx, interned_type_index, elem_index, elem_offset, len],
2076 );
2077 let result = builder.func.dfg.first_result(call_inst);
2078 Ok(builder.ins().ireduce(ir::types::I32, result))
2079 }
2080
2081 pub fn translate_array_copy(
2082 &mut self,
2083 builder: &mut FunctionBuilder,
2084 _dst_array_type_index: TypeIndex,
2085 dst_array: ir::Value,
2086 dst_index: ir::Value,
2087 _src_array_type_index: TypeIndex,
2088 src_array: ir::Value,
2089 src_index: ir::Value,
2090 len: ir::Value,
2091 ) -> WasmResult<()> {
2092 let libcall = gc::builtins::array_copy(self, builder.func)?;
2093 let vmctx = self.vmctx_val(&mut builder.cursor());
2094 builder.ins().call(
2095 libcall,
2096 &[vmctx, dst_array, dst_index, src_array, src_index, len],
2097 );
2098 Ok(())
2099 }
2100
2101 pub fn translate_array_fill(
2102 &mut self,
2103 builder: &mut FunctionBuilder,
2104 array_type_index: TypeIndex,
2105 array: ir::Value,
2106 index: ir::Value,
2107 value: ir::Value,
2108 len: ir::Value,
2109 ) -> WasmResult<()> {
2110 gc::translate_array_fill(self, builder, array_type_index, array, index, value, len)
2111 }
2112
2113 pub fn translate_array_init_data(
2114 &mut self,
2115 builder: &mut FunctionBuilder,
2116 array_type_index: TypeIndex,
2117 array: ir::Value,
2118 dst_index: ir::Value,
2119 data_index: DataIndex,
2120 data_offset: ir::Value,
2121 len: ir::Value,
2122 ) -> WasmResult<()> {
2123 let libcall = gc::builtins::array_init_data(self, builder.func)?;
2124 let vmctx = self.vmctx_val(&mut builder.cursor());
2125 let interned_type_index = self.module.types[array_type_index].unwrap_module_type_index();
2126 let interned_type_index = builder
2127 .ins()
2128 .iconst(I32, i64::from(interned_type_index.as_u32()));
2129 let data_index = builder.ins().iconst(I32, i64::from(data_index.as_u32()));
2130 builder.ins().call(
2131 libcall,
2132 &[
2133 vmctx,
2134 interned_type_index,
2135 array,
2136 dst_index,
2137 data_index,
2138 data_offset,
2139 len,
2140 ],
2141 );
2142 Ok(())
2143 }
2144
2145 pub fn translate_array_init_elem(
2146 &mut self,
2147 builder: &mut FunctionBuilder,
2148 array_type_index: TypeIndex,
2149 array: ir::Value,
2150 dst_index: ir::Value,
2151 elem_index: ElemIndex,
2152 elem_offset: ir::Value,
2153 len: ir::Value,
2154 ) -> WasmResult<()> {
2155 let libcall = gc::builtins::array_init_elem(self, builder.func)?;
2156 let vmctx = self.vmctx_val(&mut builder.cursor());
2157 let interned_type_index = self.module.types[array_type_index].unwrap_module_type_index();
2158 let interned_type_index = builder
2159 .ins()
2160 .iconst(I32, i64::from(interned_type_index.as_u32()));
2161 let elem_index = builder.ins().iconst(I32, i64::from(elem_index.as_u32()));
2162 builder.ins().call(
2163 libcall,
2164 &[
2165 vmctx,
2166 interned_type_index,
2167 array,
2168 dst_index,
2169 elem_index,
2170 elem_offset,
2171 len,
2172 ],
2173 );
2174 Ok(())
2175 }
2176
2177 pub fn translate_array_len(
2178 &mut self,
2179 builder: &mut FunctionBuilder,
2180 array: ir::Value,
2181 ) -> WasmResult<ir::Value> {
2182 gc::translate_array_len(self, builder, array)
2183 }
2184
2185 pub fn translate_array_get(
2186 &mut self,
2187 builder: &mut FunctionBuilder,
2188 array_type_index: TypeIndex,
2189 array: ir::Value,
2190 index: ir::Value,
2191 extension: Option<Extension>,
2192 ) -> WasmResult<ir::Value> {
2193 gc::translate_array_get(self, builder, array_type_index, array, index, extension)
2194 }
2195
2196 pub fn translate_array_set(
2197 &mut self,
2198 builder: &mut FunctionBuilder,
2199 array_type_index: TypeIndex,
2200 array: ir::Value,
2201 index: ir::Value,
2202 value: ir::Value,
2203 ) -> WasmResult<()> {
2204 gc::translate_array_set(self, builder, array_type_index, array, index, value)
2205 }
2206
2207 pub fn translate_ref_test(
2208 &mut self,
2209 builder: &mut FunctionBuilder<'_>,
2210 ref_ty: WasmRefType,
2211 gc_ref: ir::Value,
2212 ) -> WasmResult<ir::Value> {
2213 gc::translate_ref_test(self, builder, ref_ty, gc_ref)
2214 }
2215
2216 pub fn translate_ref_null(
2217 &mut self,
2218 mut pos: cranelift_codegen::cursor::FuncCursor,
2219 ht: WasmHeapType,
2220 ) -> WasmResult<ir::Value> {
2221 Ok(match ht.top() {
2222 WasmHeapTopType::Func => pos.ins().iconst(self.pointer_type(), 0),
2223 WasmHeapTopType::Any | WasmHeapTopType::Extern => pos.ins().iconst(types::I32, 0),
2225 WasmHeapTopType::Cont => todo!(), })
2227 }
2228
    pub fn translate_ref_is_null(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor,
        value: ir::Value,
    ) -> WasmResult<ir::Value> {
        let byte_is_null = pos
            .ins()
            .icmp_imm(cranelift_codegen::ir::condcodes::IntCC::Equal, value, 0);
        Ok(pos.ins().uextend(ir::types::I32, byte_is_null))
    }

    pub fn translate_ref_func(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
        func_index: FuncIndex,
    ) -> WasmResult<ir::Value> {
        let func_index = pos.ins().iconst(I32, func_index.as_u32() as i64);
        let ref_func = self.builtin_functions.ref_func(&mut pos.func);
        let vmctx = self.vmctx_val(&mut pos);

        let call_inst = pos.ins().call(ref_func, &[vmctx, func_index]);
        Ok(pos.func.dfg.first_result(call_inst))
    }

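    /// Reads a GC-reference global. Such globals are handled as
    /// `GlobalVariable::Custom` because reads must go through the GC
    /// compiler's read barrier rather than a plain load.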
    pub fn translate_custom_global_get(
        &mut self,
        builder: &mut FunctionBuilder,
        index: GlobalIndex,
    ) -> WasmResult<ir::Value> {
        let global_ty = self.module.globals[index];
        let wasm_ty = global_ty.wasm_ty;
        debug_assert!(
            wasm_ty.is_vmgcref_type(),
            "We only use GlobalVariable::Custom for VMGcRef types"
        );
        let WasmValType::Ref(ref_ty) = wasm_ty else {
            unreachable!()
        };

        let (gv, offset) = self.get_global_location(builder.func, index);
        let gv = builder.ins().global_value(self.pointer_type(), gv);
        let src = builder.ins().iadd_imm(gv, i64::from(offset));

        gc::gc_compiler(self)?.translate_read_gc_reference(
            self,
            builder,
            ref_ty,
            src,
            if global_ty.mutability {
                ir::MemFlags::trusted()
            } else {
                ir::MemFlags::trusted().with_readonly().with_can_move()
            },
        )
    }

    pub fn translate_custom_global_set(
        &mut self,
        builder: &mut FunctionBuilder,
        index: GlobalIndex,
        value: ir::Value,
    ) -> WasmResult<()> {
        let ty = self.module.globals[index].wasm_ty;
        debug_assert!(
            ty.is_vmgcref_type(),
            "We only use GlobalVariable::Custom for VMGcRef types"
        );
        let WasmValType::Ref(ty) = ty else {
            unreachable!()
        };

        let (gv, offset) = self.get_global_location(builder.func, index);
        let gv = builder.ins().global_value(self.pointer_type(), gv);
        let src = builder.ins().iadd_imm(gv, i64::from(offset));

        gc::gc_compiler(self)?.translate_write_gc_reference(
            self,
            builder,
            ty,
            src,
            value,
            ir::MemFlags::trusted(),
        )
    }

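    /// Creates the `Heap` entry for the linear memory at `index`: loads the
    /// memory's base pointer and current length out of the vmctx (or out of
    /// an imported/shared `VMMemoryDefinition`) and, when proof-carrying-code
    /// verification is enabled, attaches facts describing those fields.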
    pub fn make_heap(&mut self, func: &mut ir::Function, index: MemoryIndex) -> WasmResult<Heap> {
        let pointer_type = self.pointer_type();
        let memory = self.module.memories[index];
        let is_shared = memory.shared;

        let (ptr, base_offset, current_length_offset, ptr_memtype) = {
            let vmctx = self.vmctx(func);
            if let Some(def_index) = self.module.defined_memory_index(index) {
                if is_shared {
                    // Shared memories store their `VMMemoryDefinition` out of
                    // line, so load the pointer to it first.
                    let from_offset = self.offsets.vmctx_vmmemory_pointer(def_index);
                    let (memory, def_mt) = self.load_pointer_with_memtypes(
                        func,
                        from_offset,
                        true,
                        self.pcc_vmctx_memtype,
                    );
                    let base_offset = i32::from(self.offsets.ptr.vmmemory_definition_base());
                    let current_length_offset =
                        i32::from(self.offsets.ptr.vmmemory_definition_current_length());
                    (memory, base_offset, current_length_offset, def_mt)
                } else {
                    // Owned, non-shared memories have their definition inline
                    // in the vmctx itself.
                    let owned_index = self.module.owned_memory_index(def_index);
                    let owned_base_offset =
                        self.offsets.vmctx_vmmemory_definition_base(owned_index);
                    let owned_length_offset = self
                        .offsets
                        .vmctx_vmmemory_definition_current_length(owned_index);
                    let current_base_offset = i32::try_from(owned_base_offset).unwrap();
                    let current_length_offset = i32::try_from(owned_length_offset).unwrap();
                    (
                        vmctx,
                        current_base_offset,
                        current_length_offset,
                        self.pcc_vmctx_memtype,
                    )
                }
            } else {
                // Imported memories require loading the pointer to the
                // `VMMemoryDefinition` from the vmctx first.
                let from_offset = self.offsets.vmctx_vmmemory_import_from(index);
                let (memory, def_mt) = self.load_pointer_with_memtypes(
                    func,
                    from_offset,
                    true,
                    self.pcc_vmctx_memtype,
                );
                let base_offset = i32::from(self.offsets.ptr.vmmemory_definition_base());
                let current_length_offset =
                    i32::from(self.offsets.ptr.vmmemory_definition_current_length());
                (memory, base_offset, current_length_offset, def_mt)
            }
        };

        let heap_bound = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(current_length_offset),
            global_type: pointer_type,
            flags: MemFlags::trusted(),
        });

        let host_page_size_log2 = self.target_config().page_size_align_log2;

        // When the bounds check cannot be elided the heap is dynamically
        // sized, so describe the base and length fields with dynamic-memory
        // facts; otherwise a static memory type with a fixed reservation
        // plus guard region suffices.
        let (base_fact, memory_type) = if !memory
            .can_elide_bounds_check(self.tunables, host_page_size_log2)
        {
            if let Some(ptr_memtype) = ptr_memtype {
                // Create a memtype representing the untyped memory region.
                let data_mt = func.create_memory_type(ir::MemoryTypeData::DynamicMemory {
                    gv: heap_bound,
                    size: self.tunables.memory_guard_size,
                });
                let base_fact = ir::Fact::dynamic_base_ptr(data_mt);
                let length_fact = ir::Fact::global_value(
                    u16::try_from(self.isa.pointer_type().bits()).unwrap(),
                    heap_bound,
                );
                // Attach the facts to the base and length fields of the
                // enclosing struct memtype.
                match &mut func.memory_types[ptr_memtype] {
                    ir::MemoryTypeData::Struct { size, fields } => {
                        let base_offset = u64::try_from(base_offset).unwrap();
                        fields.push(ir::MemoryTypeField {
                            offset: base_offset,
                            ty: self.isa.pointer_type(),
                            readonly: true,
                            fact: Some(base_fact.clone()),
                        });
                        let current_length_offset = u64::try_from(current_length_offset).unwrap();
                        fields.push(ir::MemoryTypeField {
                            offset: current_length_offset,
                            ty: self.isa.pointer_type(),
                            readonly: true,
                            fact: Some(length_fact),
                        });

                        let pointer_size = u64::from(self.isa.pointer_type().bytes());
                        let fields_end = std::cmp::max(
                            base_offset + pointer_size,
                            current_length_offset + pointer_size,
                        );
                        *size = std::cmp::max(*size, fields_end);
                    }
                    _ => {
                        panic!("Bad memtype");
                    }
                }
                (Some(base_fact), Some(data_mt))
            } else {
                (None, None)
            }
        } else {
            if let Some(ptr_memtype) = ptr_memtype {
                // Create a memtype representing the untyped memory region.
                let data_mt = func.create_memory_type(ir::MemoryTypeData::Memory {
                    size: self
                        .tunables
                        .memory_reservation
                        .checked_add(self.tunables.memory_guard_size)
                        .expect("Memory plan has overflowing size plus guard"),
                });
                // This fact applies to any pointer to the start of the memory.
                let base_fact = Fact::Mem {
                    ty: data_mt,
                    min_offset: 0,
                    max_offset: 0,
                    nullable: false,
                };
                // Attach the fact to the base-pointer field of the struct.
                match &mut func.memory_types[ptr_memtype] {
                    ir::MemoryTypeData::Struct { size, fields } => {
                        let offset = u64::try_from(base_offset).unwrap();
                        fields.push(ir::MemoryTypeField {
                            offset,
                            ty: self.isa.pointer_type(),
                            readonly: true,
                            fact: Some(base_fact.clone()),
                        });
                        *size = std::cmp::max(
                            *size,
                            offset + u64::from(self.isa.pointer_type().bytes()),
                        );
                    }
                    _ => {
                        panic!("Bad memtype");
                    }
                }
                (Some(base_fact), Some(data_mt))
            } else {
                (None, None)
            }
        };

        let mut flags = MemFlags::trusted().with_checked().with_can_move();
        if !memory.memory_may_move(self.tunables) {
            flags.set_readonly();
        }
        let heap_base = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(base_offset),
            global_type: pointer_type,
            flags,
        });
        func.global_value_facts[heap_base] = base_fact;

        Ok(self.heaps.push(HeapData {
            base: heap_base,
            bound: heap_bound,
            pcc_memory_type: memory_type,
            memory,
        }))
    }

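    /// Describes how the global at `index` is accessed: GC references get
    /// `GlobalVariable::Custom` so accesses go through GC barriers, while all
    /// other globals are plain loads/stores at a vmctx-relative location.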
    pub fn make_global(
        &mut self,
        func: &mut ir::Function,
        index: GlobalIndex,
    ) -> WasmResult<GlobalVariable> {
        let ty = self.module.globals[index].wasm_ty;

        if ty.is_vmgcref_type() {
            // Although reference-typed globals live at the same memory
            // location as any other global would, accessing them requires
            // GC barriers, so they are translated as custom globals.
            return Ok(GlobalVariable::Custom);
        }

        let (gv, offset) = self.get_global_location(func, index);
        Ok(GlobalVariable::Memory {
            gv,
            offset: offset.into(),
            ty: super::value_type(self.isa, ty),
        })
    }

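    /// Imports the native signature for the module-local type `index` into
    /// the function, recording the Wasm-level function type so that later
    /// indirect-call translation can consult it.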
    pub fn make_indirect_sig(
        &mut self,
        func: &mut ir::Function,
        index: TypeIndex,
    ) -> WasmResult<ir::SigRef> {
        let interned_index = self.module.types[index].unwrap_module_type_index();
        let wasm_func_ty = self.types[interned_index].unwrap_func();
        let sig = crate::wasm_call_signature(self.isa, wasm_func_ty, &self.tunables);
        let sig_ref = func.import_signature(sig);
        self.sig_ref_to_ty[sig_ref] = Some(wasm_func_ty);
        Ok(sig_ref)
    }

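    /// Imports the function at `index`, along with its signature, into the
    /// current function so that it can be called directly.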
    pub fn make_direct_func(
        &mut self,
        func: &mut ir::Function,
        index: FuncIndex,
    ) -> WasmResult<ir::FuncRef> {
        let sig = self.module.functions[index]
            .signature
            .unwrap_module_type_index();
        let wasm_func_ty = self.types[sig].unwrap_func();
        let sig = crate::wasm_call_signature(self.isa, wasm_func_ty, &self.tunables);
        let signature = func.import_signature(sig);
        self.sig_ref_to_ty[signature] = Some(wasm_func_ty);
        let name =
            ir::ExternalName::User(func.declare_imported_user_function(ir::UserExternalName {
                namespace: crate::NS_WASM_FUNC,
                index: index.as_u32(),
            }));
        Ok(func.import_function(ir::ExtFuncData {
            name,
            signature,

            // Functions defined in this module are compiled into the same
            // artifact, so mark them `colocated` to allow relative,
            // link-time-resolvable relocations; calls to imported functions
            // are resolved through the vmctx instead.
            colocated: self.module.defined_func_index(index).is_some(),
        }))
    }

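    /// Translates a `call_indirect` through `table_index`, including the
    /// necessary null and signature checks; returns `None` when translation
    /// determined the call unconditionally traps and emitted no call
    /// instruction.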
    pub fn translate_call_indirect(
        &mut self,
        builder: &mut FunctionBuilder,
        features: &WasmFeatures,
        table_index: TableIndex,
        ty_index: TypeIndex,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        call_args: &[ir::Value],
    ) -> WasmResult<Option<ir::Inst>> {
        Call::new(builder, self).indirect_call(
            features,
            table_index,
            ty_index,
            sig_ref,
            callee,
            call_args,
        )
    }

    pub fn translate_call(
        &mut self,
        builder: &mut FunctionBuilder,
        callee_index: FuncIndex,
        callee: ir::FuncRef,
        call_args: &[ir::Value],
    ) -> WasmResult<ir::Inst> {
        Call::new(builder, self).direct_call(callee_index, callee, call_args)
    }

    pub fn translate_call_ref(
        &mut self,
        builder: &mut FunctionBuilder,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        call_args: &[ir::Value],
    ) -> WasmResult<ir::Inst> {
        Call::new(builder, self).call_ref(sig_ref, callee, call_args)
    }

    pub fn translate_return_call(
        &mut self,
        builder: &mut FunctionBuilder,
        callee_index: FuncIndex,
        callee: ir::FuncRef,
        call_args: &[ir::Value],
    ) -> WasmResult<()> {
        Call::new_tail(builder, self).direct_call(callee_index, callee, call_args)?;
        Ok(())
    }

    pub fn translate_return_call_indirect(
        &mut self,
        builder: &mut FunctionBuilder,
        features: &WasmFeatures,
        table_index: TableIndex,
        ty_index: TypeIndex,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        call_args: &[ir::Value],
    ) -> WasmResult<()> {
        Call::new_tail(builder, self).indirect_call(
            features,
            table_index,
            ty_index,
            sig_ref,
            callee,
            call_args,
        )?;
        Ok(())
    }

    pub fn translate_return_call_ref(
        &mut self,
        builder: &mut FunctionBuilder,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        call_args: &[ir::Value],
    ) -> WasmResult<()> {
        Call::new_tail(builder, self).call_ref(sig_ref, callee, call_args)?;
        Ok(())
    }

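    /// Translates `memory.grow` as a call to the `memory32_grow` builtin,
    /// casting the page delta to an `i64` argument and converting the
    /// builtin's pointer-sized result back into the memory's index type.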
    pub fn translate_memory_grow(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        index: MemoryIndex,
        _heap: Heap,
        val: ir::Value,
    ) -> WasmResult<ir::Value> {
        let mut pos = builder.cursor();
        let memory_grow = self.builtin_functions.memory32_grow(&mut pos.func);
        let index_arg = index.index();

        let memory_index = pos.ins().iconst(I32, index_arg as i64);
        let vmctx = self.vmctx_val(&mut pos);

        let index_type = self.memory(index).idx_type;
        let val = self.cast_index_to_i64(&mut pos, val, index_type);
        let call_inst = pos.ins().call(memory_grow, &[vmctx, val, memory_index]);
        let result = *pos.func.dfg.inst_results(call_inst).first().unwrap();
        let single_byte_pages = match self.memory(index).page_size_log2 {
            16 => false,
            0 => true,
            _ => unreachable!("only page sizes 2**0 and 2**16 are currently valid"),
        };
        Ok(self.convert_pointer_to_index_type(
            builder.cursor(),
            result,
            index_type,
            single_byte_pages,
        ))
    }

    pub fn translate_memory_size(
        &mut self,
        mut pos: FuncCursor<'_>,
        index: MemoryIndex,
        _heap: Heap,
    ) -> WasmResult<ir::Value> {
        let pointer_type = self.pointer_type();
        let vmctx = self.vmctx(&mut pos.func);
        let is_shared = self.module.memories[index].shared;
        let base = pos.ins().global_value(pointer_type, vmctx);
        let current_length_in_bytes = match self.module.defined_memory_index(index) {
            Some(def_index) => {
                if is_shared {
                    let offset =
                        i32::try_from(self.offsets.vmctx_vmmemory_pointer(def_index)).unwrap();
                    let vmmemory_ptr =
                        pos.ins()
                            .load(pointer_type, ir::MemFlags::trusted(), base, offset);
                    let vmmemory_definition_offset =
                        i64::from(self.offsets.ptr.vmmemory_definition_current_length());
                    let vmmemory_definition_ptr =
                        pos.ins().iadd_imm(vmmemory_ptr, vmmemory_definition_offset);
                    // A shared memory's current length may be grown
                    // concurrently by other threads, so it must be read with
                    // an atomic load.
                    pos.ins().atomic_load(
                        pointer_type,
                        ir::MemFlags::trusted(),
                        vmmemory_definition_ptr,
                    )
                } else {
                    let owned_index = self.module.owned_memory_index(def_index);
                    let offset = i32::try_from(
                        self.offsets
                            .vmctx_vmmemory_definition_current_length(owned_index),
                    )
                    .unwrap();
                    pos.ins()
                        .load(pointer_type, ir::MemFlags::trusted(), base, offset)
                }
            }
            None => {
                let offset = i32::try_from(self.offsets.vmctx_vmmemory_import_from(index)).unwrap();
                let vmmemory_ptr =
                    pos.ins()
                        .load(pointer_type, ir::MemFlags::trusted(), base, offset);
                if is_shared {
                    let vmmemory_definition_offset =
                        i64::from(self.offsets.ptr.vmmemory_definition_current_length());
                    let vmmemory_definition_ptr =
                        pos.ins().iadd_imm(vmmemory_ptr, vmmemory_definition_offset);
                    pos.ins().atomic_load(
                        pointer_type,
                        ir::MemFlags::trusted(),
                        vmmemory_definition_ptr,
                    )
                } else {
                    pos.ins().load(
                        pointer_type,
                        ir::MemFlags::trusted(),
                        vmmemory_ptr,
                        i32::from(self.offsets.ptr.vmmemory_definition_current_length()),
                    )
                }
            }
        };

        let page_size_log2 = i64::from(self.module.memories[index].page_size_log2);
        let current_length_in_pages = pos.ins().ushr_imm(current_length_in_bytes, page_size_log2);
        let single_byte_pages = match page_size_log2 {
            16 => false,
            0 => true,
            _ => unreachable!("only page sizes 2**0 and 2**16 are currently valid"),
        };
        Ok(self.convert_pointer_to_index_type(
            pos,
            current_length_in_pages,
            self.memory(index).idx_type,
            single_byte_pages,
        ))
    }

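    /// Translates `memory.copy` as a call to the `memory_copy` builtin,
    /// which performs the bounds checks and the actual (possibly
    /// overlapping, possibly cross-memory) copy at runtime.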
    pub fn translate_memory_copy(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        src_index: MemoryIndex,
        _src_heap: Heap,
        dst_index: MemoryIndex,
        _dst_heap: Heap,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let mut pos = builder.cursor();
        let vmctx = self.vmctx_val(&mut pos);

        let memory_copy = self.builtin_functions.memory_copy(&mut pos.func);
        let dst = self.cast_index_to_i64(&mut pos, dst, self.memory(dst_index).idx_type);
        let src = self.cast_index_to_i64(&mut pos, src, self.memory(src_index).idx_type);
        // The builtin takes a 64-bit length, so extend it unless both
        // memories already use 64-bit indices.
        let len = if index_type_to_ir_type(self.memory(dst_index).idx_type) == I64
            && index_type_to_ir_type(self.memory(src_index).idx_type) == I64
        {
            len
        } else {
            pos.ins().uextend(I64, len)
        };
        let src_index = pos.ins().iconst(I32, i64::from(src_index.as_u32()));
        let dst_index = pos.ins().iconst(I32, i64::from(dst_index.as_u32()));
        pos.ins()
            .call(memory_copy, &[vmctx, dst_index, dst, src_index, src, len]);

        Ok(())
    }

    pub fn translate_memory_fill(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        memory_index: MemoryIndex,
        _heap: Heap,
        dst: ir::Value,
        val: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let mut pos = builder.cursor();
        let memory_fill = self.builtin_functions.memory_fill(&mut pos.func);
        let dst = self.cast_index_to_i64(&mut pos, dst, self.memory(memory_index).idx_type);
        let len = self.cast_index_to_i64(&mut pos, len, self.memory(memory_index).idx_type);
        let memory_index_arg = pos.ins().iconst(I32, i64::from(memory_index.as_u32()));

        let vmctx = self.vmctx_val(&mut pos);

        pos.ins()
            .call(memory_fill, &[vmctx, memory_index_arg, dst, val, len]);

        Ok(())
    }

    pub fn translate_memory_init(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        memory_index: MemoryIndex,
        _heap: Heap,
        seg_index: u32,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let mut pos = builder.cursor();
        let memory_init = self.builtin_functions.memory_init(&mut pos.func);

        let memory_index_arg = pos.ins().iconst(I32, memory_index.index() as i64);
        let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);

        let vmctx = self.vmctx_val(&mut pos);

        let dst = self.cast_index_to_i64(&mut pos, dst, self.memory(memory_index).idx_type);

        pos.ins().call(
            memory_init,
            &[vmctx, memory_index_arg, seg_index_arg, dst, src, len],
        );

        Ok(())
    }

    pub fn translate_data_drop(&mut self, mut pos: FuncCursor, seg_index: u32) -> WasmResult<()> {
        let data_drop = self.builtin_functions.data_drop(&mut pos.func);
        let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
        let vmctx = self.vmctx_val(&mut pos);
        pos.ins().call(data_drop, &[vmctx, seg_index_arg]);
        Ok(())
    }

    pub fn translate_table_size(
        &mut self,
        pos: FuncCursor,
        table_index: TableIndex,
    ) -> WasmResult<ir::Value> {
        self.ensure_table_exists(pos.func, table_index);
        let table_data = self.tables[table_index].as_ref().unwrap();
        let index_type = index_type_to_ir_type(self.table(table_index).idx_type);
        Ok(table_data.bound.bound(&*self.isa, pos, index_type))
    }

    pub fn translate_table_copy(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        dst_table_index: TableIndex,
        src_table_index: TableIndex,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let (table_copy, dst_table_index_arg, src_table_index_arg) =
            self.get_table_copy_func(&mut builder.func, dst_table_index, src_table_index);

        let mut pos = builder.cursor();
        let dst = self.cast_index_to_i64(&mut pos, dst, self.table(dst_table_index).idx_type);
        let src = self.cast_index_to_i64(&mut pos, src, self.table(src_table_index).idx_type);
        let len = if index_type_to_ir_type(self.table(dst_table_index).idx_type) == I64
            && index_type_to_ir_type(self.table(src_table_index).idx_type) == I64
        {
            len
        } else {
            pos.ins().uextend(I64, len)
        };
        let dst_table_index_arg = pos.ins().iconst(I32, dst_table_index_arg as i64);
        let src_table_index_arg = pos.ins().iconst(I32, src_table_index_arg as i64);
        let vmctx = self.vmctx_val(&mut pos);
        pos.ins().call(
            table_copy,
            &[
                vmctx,
                dst_table_index_arg,
                src_table_index_arg,
                dst,
                src,
                len,
            ],
        );

        Ok(())
    }

    pub fn translate_table_init(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        seg_index: u32,
        table_index: TableIndex,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let mut pos = builder.cursor();
        let table_init = self.builtin_functions.table_init(&mut pos.func);
        let table_index_arg = pos.ins().iconst(I32, i64::from(table_index.as_u32()));
        let seg_index_arg = pos.ins().iconst(I32, i64::from(seg_index));
        let vmctx = self.vmctx_val(&mut pos);
        let index_type = self.table(table_index).idx_type;
        let dst = self.cast_index_to_i64(&mut pos, dst, index_type);
        let src = pos.ins().uextend(I64, src);
        let len = pos.ins().uextend(I64, len);

        pos.ins().call(
            table_init,
            &[vmctx, table_index_arg, seg_index_arg, dst, src, len],
        );

        Ok(())
    }

    pub fn translate_elem_drop(&mut self, mut pos: FuncCursor, elem_index: u32) -> WasmResult<()> {
        let elem_drop = self.builtin_functions.elem_drop(&mut pos.func);
        let elem_index_arg = pos.ins().iconst(I32, elem_index as i64);
        let vmctx = self.vmctx_val(&mut pos);
        pos.ins().call(elem_drop, &[vmctx, elem_index_arg]);
        Ok(())
    }

    pub fn translate_atomic_wait(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        memory_index: MemoryIndex,
        _heap: Heap,
        addr: ir::Value,
        expected: ir::Value,
        timeout: ir::Value,
    ) -> WasmResult<ir::Value> {
        #[cfg(feature = "threads")]
        {
            let mut pos = builder.cursor();
            let addr = self.cast_index_to_i64(&mut pos, addr, self.memory(memory_index).idx_type);
            let implied_ty = pos.func.dfg.value_type(expected);
            let (wait_func, memory_index) =
                self.get_memory_atomic_wait(&mut pos.func, memory_index, implied_ty);

            let memory_index_arg = pos.ins().iconst(I32, memory_index as i64);

            let vmctx = self.vmctx_val(&mut pos);

            let call_inst = pos.ins().call(
                wait_func,
                &[vmctx, memory_index_arg, addr, expected, timeout],
            );
            let ret = pos.func.dfg.inst_results(call_inst)[0];
            Ok(builder.ins().ireduce(ir::types::I32, ret))
        }
        #[cfg(not(feature = "threads"))]
        {
            let _ = (builder, memory_index, addr, expected, timeout);
            Err(wasmtime_environ::WasmError::Unsupported(
                "threads support disabled at compile time".to_string(),
            ))
        }
    }

    pub fn translate_atomic_notify(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        memory_index: MemoryIndex,
        _heap: Heap,
        addr: ir::Value,
        count: ir::Value,
    ) -> WasmResult<ir::Value> {
        #[cfg(feature = "threads")]
        {
            let mut pos = builder.cursor();
            let addr = self.cast_index_to_i64(&mut pos, addr, self.memory(memory_index).idx_type);
            let atomic_notify = self.builtin_functions.memory_atomic_notify(&mut pos.func);

            let memory_index_arg = pos.ins().iconst(I32, memory_index.index() as i64);
            let vmctx = self.vmctx_val(&mut pos);
            let call_inst = pos
                .ins()
                .call(atomic_notify, &[vmctx, memory_index_arg, addr, count]);
            let ret = pos.func.dfg.inst_results(call_inst)[0];
            Ok(builder.ins().ireduce(ir::types::I32, ret))
        }
        #[cfg(not(feature = "threads"))]
        {
            let _ = (builder, memory_index, addr, count);
            Err(wasmtime_environ::WasmError::Unsupported(
                "threads support disabled at compile time".to_string(),
            ))
        }
    }

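    /// Called at the top of every translated loop so that fuel and/or epoch
    /// checks can interrupt long-running (or infinite) loops.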
    pub fn translate_loop_header(&mut self, builder: &mut FunctionBuilder) -> WasmResult<()> {
        // If enabled, check the fuel counter at loop back-edges.
        if self.tunables.consume_fuel {
            self.fuel_check(builder);
        }

        // If enabled, check whether the current epoch has changed.
        if self.tunables.epoch_interruption {
            self.epoch_check(builder);
        }

        Ok(())
    }

    pub fn before_translate_operator(
        &mut self,
        op: &Operator,
        builder: &mut FunctionBuilder,
        state: &FuncTranslationState,
    ) -> WasmResult<()> {
        if self.tunables.consume_fuel {
            self.fuel_before_op(op, builder, state.reachable());
        }
        Ok(())
    }

    pub fn after_translate_operator(
        &mut self,
        op: &Operator,
        builder: &mut FunctionBuilder,
        state: &FuncTranslationState,
    ) -> WasmResult<()> {
        if self.tunables.consume_fuel && state.reachable() {
            self.fuel_after_op(op, builder);
        }
        Ok(())
    }

    pub fn before_unconditionally_trapping_memory_access(
        &mut self,
        builder: &mut FunctionBuilder,
    ) -> WasmResult<()> {
        if self.tunables.consume_fuel {
            self.fuel_increment_var(builder);
            self.fuel_save_from_var(builder);
        }
        Ok(())
    }

    pub fn before_translate_function(
        &mut self,
        builder: &mut FunctionBuilder,
        _state: &FuncTranslationState,
    ) -> WasmResult<()> {
        // If an explicit stack limit is requested, emit a check against it at
        // the start of the function.
        if let Some(gv) = self.stack_limit_at_function_entry {
            let limit = builder.ins().global_value(self.pointer_type(), gv);
            let sp = builder.ins().get_stack_pointer(self.pointer_type());
            let overflow = builder.ins().icmp(IntCC::UnsignedLessThan, sp, limit);
            self.conditionally_trap(builder, overflow, ir::TrapCode::STACK_OVERFLOW);
        }

        // Both fuel accounting and epoch interruption need a pointer to the
        // store's context, so declare it once up front.
        if self.tunables.consume_fuel || self.tunables.epoch_interruption {
            self.declare_vmstore_context_ptr(builder);
        }
        if self.tunables.consume_fuel {
            self.fuel_function_entry(builder);
        }
        if self.tunables.epoch_interruption {
            self.epoch_function_entry(builder);
        }

        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck {
            let func_name = self.current_func_name(builder);
            if func_name == Some("malloc") {
                self.check_malloc_start(builder);
            } else if func_name == Some("free") {
                self.check_free_start(builder);
            }
        }

        Ok(())
    }

    pub fn after_translate_function(
        &mut self,
        builder: &mut FunctionBuilder,
        state: &FuncTranslationState,
    ) -> WasmResult<()> {
        if self.tunables.consume_fuel && state.reachable() {
            self.fuel_function_exit(builder);
        }
        Ok(())
    }

    pub fn relaxed_simd_deterministic(&self) -> bool {
        self.tunables.relaxed_simd_deterministic
    }

    pub fn has_native_fma(&self) -> bool {
        self.isa.has_native_fma()
    }

    pub fn is_x86(&self) -> bool {
        self.isa.triple().architecture == target_lexicon::Architecture::X86_64
    }

    pub fn use_x86_blendv_for_relaxed_laneselect(&self, ty: Type) -> bool {
        self.isa.has_x86_blendv_lowering(ty)
    }

    pub fn use_x86_pshufb_for_relaxed_swizzle(&self) -> bool {
        self.isa.has_x86_pshufb_lowering()
    }

    pub fn use_x86_pmulhrsw_for_relaxed_q15mul(&self) -> bool {
        self.isa.has_x86_pmulhrsw_lowering()
    }

    pub fn use_x86_pmaddubsw_for_dot(&self) -> bool {
        self.isa.has_x86_pmaddubsw_lowering()
    }

    pub fn handle_before_return(&mut self, retvals: &[ir::Value], builder: &mut FunctionBuilder) {
        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck {
            let func_name = self.current_func_name(builder);
            if func_name == Some("malloc") {
                self.hook_malloc_exit(builder, retvals);
            } else if func_name == Some("free") {
                self.hook_free_exit(builder);
            }
        }
        #[cfg(not(feature = "wmemcheck"))]
        let _ = (retvals, builder);
    }

    pub fn before_load(
        &mut self,
        builder: &mut FunctionBuilder,
        val_size: u8,
        addr: ir::Value,
        offset: u64,
    ) {
        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck {
            let check_load = self.builtin_functions.check_load(builder.func);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            let num_bytes = builder.ins().iconst(I32, val_size as i64);
            let offset_val = builder.ins().iconst(I64, offset as i64);
            builder
                .ins()
                .call(check_load, &[vmctx, num_bytes, addr, offset_val]);
        }
        #[cfg(not(feature = "wmemcheck"))]
        let _ = (builder, val_size, addr, offset);
    }

    pub fn before_store(
        &mut self,
        builder: &mut FunctionBuilder,
        val_size: u8,
        addr: ir::Value,
        offset: u64,
    ) {
        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck {
            let check_store = self.builtin_functions.check_store(builder.func);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            let num_bytes = builder.ins().iconst(I32, val_size as i64);
            let offset_val = builder.ins().iconst(I64, offset as i64);
            builder
                .ins()
                .call(check_store, &[vmctx, num_bytes, addr, offset_val]);
        }
        #[cfg(not(feature = "wmemcheck"))]
        let _ = (builder, val_size, addr, offset);
    }

    pub fn update_global(
        &mut self,
        builder: &mut FunctionBuilder,
        global_index: u32,
        value: ir::Value,
    ) {
        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck {
            if global_index == 0 {
                // Global 0 is typically the shadow stack pointer in
                // LLVM-compiled modules, so mirror updates to it for
                // wmemcheck's bookkeeping.
                let update_stack_pointer =
                    self.builtin_functions.update_stack_pointer(builder.func);
                let vmctx = self.vmctx_val(&mut builder.cursor());
                builder.ins().call(update_stack_pointer, &[vmctx, value]);
            }
        }
        #[cfg(not(feature = "wmemcheck"))]
        let _ = (builder, global_index, value);
    }

    pub fn before_memory_grow(
        &mut self,
        builder: &mut FunctionBuilder,
        num_pages: ir::Value,
        mem_index: MemoryIndex,
    ) {
        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck && mem_index.as_u32() == 0 {
            let update_mem_size = self.builtin_functions.update_mem_size(builder.func);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            builder.ins().call(update_mem_size, &[vmctx, num_pages]);
        }
        #[cfg(not(feature = "wmemcheck"))]
        let _ = (builder, num_pages, mem_index);
    }

    pub fn isa(&self) -> &dyn TargetIsa {
        &*self.isa
    }

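    /// Emits a trap with the given code, either as a native CLIF `trap`
    /// instruction or, when instruction traps are disabled, as calls to the
    /// trap/raise builtins followed by an unreachable marker.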
    pub fn trap(&mut self, builder: &mut FunctionBuilder, trap: ir::TrapCode) {
        match (
            self.clif_instruction_traps_enabled(),
            crate::clif_trap_to_env_trap(trap),
        ) {
            // If CLIF instruction traps are allowed, or if this trap has no
            // corresponding environment trap, emit a native trap instruction.
            (true, _) | (_, None) => {
                builder.ins().trap(trap);
            }
            // Otherwise reach out to the runtime: record the trap code and
            // raise, then terminate the block with an internal-assert trap
            // that should never be reached.
            (false, Some(trap)) => {
                let libcall = self.builtin_functions.trap(&mut builder.func);
                let vmctx = self.vmctx_val(&mut builder.cursor());
                let trap_code = builder.ins().iconst(I8, i64::from(trap as u8));
                builder.ins().call(libcall, &[vmctx, trap_code]);
                let raise = self.builtin_functions.raise(&mut builder.func);
                builder.ins().call(raise, &[vmctx]);
                builder.ins().trap(TRAP_INTERNAL_ASSERT);
            }
        }
    }

    pub fn trapz(&mut self, builder: &mut FunctionBuilder, value: ir::Value, trap: ir::TrapCode) {
        if self.clif_instruction_traps_enabled() {
            builder.ins().trapz(value, trap);
        } else {
            let ty = builder.func.dfg.value_type(value);
            let zero = builder.ins().iconst(ty, 0);
            let cmp = builder.ins().icmp(IntCC::Equal, value, zero);
            self.conditionally_trap(builder, cmp, trap);
        }
    }

    pub fn trapnz(&mut self, builder: &mut FunctionBuilder, value: ir::Value, trap: ir::TrapCode) {
        if self.clif_instruction_traps_enabled() {
            builder.ins().trapnz(value, trap);
        } else {
            let ty = builder.func.dfg.value_type(value);
            let zero = builder.ins().iconst(ty, 0);
            let cmp = builder.ins().icmp(IntCC::NotEqual, value, zero);
            self.conditionally_trap(builder, cmp, trap);
        }
    }

    pub fn uadd_overflow_trap(
        &mut self,
        builder: &mut FunctionBuilder,
        lhs: ir::Value,
        rhs: ir::Value,
        trap: ir::TrapCode,
    ) -> ir::Value {
        if self.clif_instruction_traps_enabled() {
            builder.ins().uadd_overflow_trap(lhs, rhs, trap)
        } else {
            let (ret, overflow) = builder.ins().uadd_overflow(lhs, rhs);
            self.conditionally_trap(builder, overflow, trap);
            ret
        }
    }

    pub fn translate_sdiv(
        &mut self,
        builder: &mut FunctionBuilder,
        lhs: ir::Value,
        rhs: ir::Value,
    ) -> ir::Value {
        self.guard_signed_divide(builder, lhs, rhs);
        builder.ins().sdiv(lhs, rhs)
    }

    pub fn translate_udiv(
        &mut self,
        builder: &mut FunctionBuilder,
        lhs: ir::Value,
        rhs: ir::Value,
    ) -> ir::Value {
        self.guard_zero_divisor(builder, rhs);
        builder.ins().udiv(lhs, rhs)
    }

    pub fn translate_srem(
        &mut self,
        builder: &mut FunctionBuilder,
        lhs: ir::Value,
        rhs: ir::Value,
    ) -> ir::Value {
        self.guard_zero_divisor(builder, rhs);
        builder.ins().srem(lhs, rhs)
    }

    pub fn translate_urem(
        &mut self,
        builder: &mut FunctionBuilder,
        lhs: ir::Value,
        rhs: ir::Value,
    ) -> ir::Value {
        self.guard_zero_divisor(builder, rhs);
        builder.ins().urem(lhs, rhs)
    }

    pub fn translate_fcvt_to_sint(
        &mut self,
        builder: &mut FunctionBuilder,
        ty: ir::Type,
        val: ir::Value,
    ) -> ir::Value {
        if !self.clif_instruction_traps_enabled() {
            // When native traps aren't available, guard the conversion's
            // out-of-range and NaN cases explicitly before emitting the
            // CLIF instruction.
            self.guard_fcvt_to_int(
                builder,
                ty,
                val,
                (-2147483649.0, 2147483648.0),
                (-9223372036854777856.0, 9223372036854775808.0),
            );
        }
        builder.ins().fcvt_to_sint(ty, val)
    }

    pub fn translate_fcvt_to_uint(
        &mut self,
        builder: &mut FunctionBuilder,
        ty: ir::Type,
        val: ir::Value,
    ) -> ir::Value {
        if !self.clif_instruction_traps_enabled() {
            self.guard_fcvt_to_int(
                builder,
                ty,
                val,
                (-1.0, 4294967296.0),
                (-1.0, 18446744073709551616.0),
            );
        }
        builder.ins().fcvt_to_uint(ty, val)
    }

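    /// Returns whether CLIF memory instructions are allowed to trap natively
    /// (e.g. out-of-bounds accesses relying on guard pages), which requires
    /// signals-based trap handling and a non-Pulley target.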
    pub fn clif_memory_traps_enabled(&self) -> bool {
        self.tunables.signals_based_traps && !self.is_pulley()
    }

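    /// Returns whether CLIF instructions such as division may trap natively.
    /// This holds when signals-based traps are enabled, and also on Pulley,
    /// whose interpreter handles these traps itself.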
    pub fn clif_instruction_traps_enabled(&self) -> bool {
        self.tunables.signals_based_traps || self.is_pulley()
    }

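    /// Returns whether loads from address zero are permitted: on Pulley, or
    /// when Spectre-mitigated heap accesses can clamp out-of-bounds addresses
    /// to zero and rely on the resulting signal-based trap.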
    pub fn load_from_zero_allowed(&self) -> bool {
        // Pulley allows loads from zero; otherwise Spectre-guarded accesses
        // may clamp out-of-bounds addresses to zero, with the resulting
        // access caught by the signal handler.
        self.is_pulley()
            || (self.clif_memory_traps_enabled() && self.heap_access_spectre_mitigation())
    }

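    /// Returns whether the current compilation target is Pulley, Wasmtime's
    /// portable interpreter.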
    pub fn is_pulley(&self) -> bool {
        self.isa.triple().is_pulley()
    }
}

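/// Maps a Wasm index type (the index type of a memory or table) to the
/// corresponding Cranelift IR type.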
fn index_type_to_ir_type(index_type: IndexType) -> ir::Type {
    match index_type {
        IndexType::I32 => I32,
        IndexType::I64 => I64,
    }
}