1use crate::{
2 Result,
3 abi::{ABIOperand, ABISig, RetArea, vmctx},
4 bail,
5 codegen::BlockSig,
6 ensure, format_err,
7 isa::reg::{Reg, RegClass, writable},
8 masm::{
9 AtomicWaitKind, Extend, Imm, IntCmpKind, IntScratch, LaneSelector, LoadKind,
10 MacroAssembler, OperandSize, RegImm, RmwOp, SPOffset, ShiftKind, StoreKind, TrapCode,
11 UNTRUSTED_FLAGS, Zero,
12 },
13 stack::{TypedReg, Val},
14};
15use cranelift_codegen::{
16 binemit::CodeOffset,
17 ir::{RelSourceLoc, SourceLoc},
18};
19use smallvec::SmallVec;
20use std::marker::PhantomData;
21use wasmparser::{
22 BinaryReader, FuncValidator, MemArg, Operator, OperatorsReader, ValidatorResources,
23 VisitOperator, VisitSimdOperator,
24};
25use wasmtime_cranelift::{TRAP_BAD_SIGNATURE, TRAP_HEAP_MISALIGNED, TRAP_TABLE_OUT_OF_BOUNDS};
26use wasmtime_environ::{
27 FUNCREF_MASK, GlobalIndex, MemoryIndex, PtrSize, TableIndex, Tunables, TypeIndex, WasmHeapType,
28 WasmValType,
29};
30
31mod context;
32pub(crate) use context::*;
33mod env;
34pub use env::*;
35mod call;
36pub(crate) use call::*;
37mod control;
38pub(crate) use control::*;
39mod builtin;
40pub use builtin::*;
41pub(crate) mod bounds;
42
43use bounds::{Bounds, ImmOffset, Index};
44
45mod phase;
46pub(crate) use phase::*;
47
48mod error;
49pub(crate) use error::*;
50
/// Branch classification used to update the reachability state
/// after a branch instruction is emitted.
pub(crate) trait BranchState {
    /// Whether the code following the emitted branch is unreachable.
    fn unreachable_state_after_emission() -> bool;
}
58
59pub(crate) struct ConditionalBranch;
61
impl BranchState for ConditionalBranch {
    // A conditional branch may fall through, so subsequent code stays reachable.
    fn unreachable_state_after_emission() -> bool {
        false
    }
}
67
68pub(crate) struct UnconditionalBranch;
70
impl BranchState for UnconditionalBranch {
    // An unconditional branch never falls through; following code is dead.
    fn unreachable_state_after_emission() -> bool {
        true
    }
}
76
/// Tracks source-location mapping state while emitting machine code.
#[derive(Default)]
pub(crate) struct SourceLocation {
    /// The first non-default source location seen; all relative locations
    /// are computed against this base (see `source_loc_from`).
    pub base: Option<SourceLoc>,
    /// The code offset and relative source location of the operator
    /// currently being visited.
    pub current: (CodeOffset, RelSourceLoc),
}
88
/// Per-function code generator, parameterized over the target
/// [`MacroAssembler`] and the current [`CodeGenPhase`].
pub(crate) struct CodeGen<'a, 'translation: 'a, 'data: 'translation, M, P>
where
    M: MacroAssembler,
    P: CodeGenPhase,
{
    /// The ABI signature of the function being compiled.
    pub sig: ABISig,

    /// Value-stack, register and frame state for the current function.
    pub context: CodeGenContext<'a, P>,

    /// Compilation environment: module translation, builtins, vmoffsets.
    pub env: FuncEnv<'a, 'translation, 'data, M::Ptr>,

    /// The target macro-assembler that machine code is emitted into.
    pub masm: &'a mut M,

    /// Stack of active control frames (blocks, loops, ifs).
    pub control_frames: SmallVec<[ControlStackFrame; 64]>,

    /// Source-location tracking state for debug info.
    pub source_location: SourceLocation,

    /// Compilation tunables (fuel, epochs, memory settings, ...).
    pub tunables: &'a Tunables,

    /// Fuel accumulated since the last fuel-increment flush;
    /// see `emit_fuel_increment` / `fuel_before_visit_op`.
    pub fuel_consumed: i64,
    // Zero-sized marker pinning the current codegen phase.
    phase: PhantomData<P>,
}
122
impl<'a, 'translation, 'data, M> CodeGen<'a, 'translation, 'data, M, Prologue>
where
    M: MacroAssembler,
{
    /// Creates a new code generator in the `Prologue` phase.
    pub fn new(
        tunables: &'a Tunables,
        masm: &'a mut M,
        context: CodeGenContext<'a, Prologue>,
        env: FuncEnv<'a, 'translation, 'data, M::Ptr>,
        sig: ABISig,
    ) -> CodeGen<'a, 'translation, 'data, M, Prologue> {
        Self {
            sig,
            context,
            masm,
            env,
            tunables,
            source_location: Default::default(),
            control_frames: Default::default(),
            // Start at 1 so that even an empty function consumes some fuel.
            fuel_consumed: 1,
            phase: PhantomData,
        }
    }

    /// Emits the function prologue and transitions the generator into the
    /// `Emission` phase: sets up the frame, spills register arguments,
    /// zeroes defined locals, and stashes the results-area pointer if the
    /// signature returns via a return pointer.
    pub fn emit_prologue(mut self) -> Result<CodeGen<'a, 'translation, 'data, M, Emission>> {
        // The vmctx is always the first ABI parameter.
        let vmctx = self
            .sig
            .params()
            .first()
            .ok_or_else(|| format_err!(CodeGenError::vmcontext_arg_expected()))?
            .unwrap_reg();

        self.masm.start_source_loc(Default::default())?;
        self.masm.prologue(vmctx)?;

        // Pin the vmctx argument into the designated vmctx register.
        self.masm.mov(
            writable!(vmctx!(M)),
            vmctx.into(),
            self.env.ptr_type().try_into()?,
        )?;

        self.masm.reserve_stack(self.context.frame.locals_size)?;
        self.spill_register_arguments()?;

        // Wasm requires locals to start zeroed.
        let defined_locals_range = &self.context.frame.defined_locals_range;
        self.masm.zero_mem_range(defined_locals_range.as_range())?;

        // If results are returned through a pointer, save that pointer into
        // its dedicated local slot so it survives the function body.
        if self.sig.params.has_retptr() {
            match self.sig.params.unwrap_results_area_operand() {
                ABIOperand::Reg { ty, reg, .. } => {
                    let results_base_slot = self.context.frame.results_base_slot.as_ref().unwrap();
                    ensure!(
                        results_base_slot.addressed_from_sp(),
                        CodeGenError::sp_addressing_expected(),
                    );
                    let addr = self.masm.local_address(results_base_slot)?;
                    self.masm.store((*reg).into(), addr, (*ty).try_into()?)?;
                }
                // A stack-passed results area needs no extra spill here.
                _ => {}
            }
        }

        self.masm.end_source_loc()?;

        Ok(CodeGen {
            sig: self.sig,
            context: self.context.for_emission(),
            masm: self.masm,
            env: self.env,
            tunables: self.tunables,
            source_location: self.source_location,
            control_frames: self.control_frames,
            fuel_consumed: self.fuel_consumed,
            phase: PhantomData,
        })
    }

    /// Stores every register-passed Wasm argument into its local slot.
    fn spill_register_arguments(&mut self) -> Result<()> {
        use WasmValType::*;
        for (operand, slot) in self
            .sig
            .params_without_retptr()
            .iter()
            .zip(self.context.frame.locals())
        {
            match (operand, slot) {
                (ABIOperand::Reg { ty, reg, .. }, slot) => {
                    let addr = self.masm.local_address(slot)?;
                    match &ty {
                        I32 | I64 | F32 | F64 | V128 => {
                            self.masm.store((*reg).into(), addr, (*ty).try_into()?)?;
                        }
                        Ref(rt) => match rt.heap_type {
                            WasmHeapType::Func | WasmHeapType::Extern => {
                                self.masm.store_ptr(*reg, addr)?;
                            }
                            _ => bail!(CodeGenError::unsupported_wasm_type()),
                        },
                    }
                }
                // Stack-passed arguments already live in their slots.
                _ => {}
            }
        }
        Ok(())
    }
}
238
239impl<'a, 'translation, 'data, M> CodeGen<'a, 'translation, 'data, M, Emission>
240where
241 M: MacroAssembler,
242{
243 pub fn emit(
245 &mut self,
246 body: BinaryReader<'a>,
247 validator: &mut FuncValidator<ValidatorResources>,
248 ) -> Result<()> {
249 self.emit_body(body, validator)
250 .and_then(|_| self.emit_end())?;
251
252 Ok(())
253 }
254
255 pub fn pop_control_frame(&mut self) -> Result<ControlStackFrame> {
257 self.control_frames
258 .pop()
259 .ok_or_else(|| format_err!(CodeGenError::control_frame_expected()))
260 }
261
262 pub fn source_loc_from(&mut self, loc: SourceLoc) -> RelSourceLoc {
264 if self.source_location.base.is_none() && !loc.is_default() {
265 self.source_location.base = Some(loc);
266 }
267
268 RelSourceLoc::from_base_offset(self.source_location.base.unwrap_or_default(), loc)
269 }
270
    /// Handles an `else` operator reached while in unreachable code.
    /// If the `if` frame recorded that the else-arm is reachable, restores
    /// reachability, re-syncs the stack state, and binds the else label.
    pub fn handle_unreachable_else(&mut self) -> Result<()> {
        let frame = self
            .control_frames
            .last_mut()
            .ok_or_else(|| CodeGenError::control_frame_expected())?;
        ensure!(frame.is_if(), CodeGenError::if_control_frame_expected());
        if frame.is_next_sequence_reachable() {
            self.context.reachable = true;
            frame.ensure_stack_state(self.masm, &mut self.context)?;
            frame.bind_else(self.masm, &mut self.context)?;
        }
        Ok(())
    }
294
295 pub fn handle_unreachable_end(&mut self) -> Result<()> {
296 let mut frame = self.pop_control_frame()?;
297 let is_outermost = self.control_frames.len() == 0;
299
300 if frame.is_next_sequence_reachable() {
301 self.context.reachable = true;
302 frame.ensure_stack_state(self.masm, &mut self.context)?;
303 frame.bind_end(self.masm, &mut self.context)
304 } else if is_outermost {
305 frame.ensure_stack_state(self.masm, &mut self.context)
310 } else {
311 Ok(())
312 }
313 }
314
    /// Emits the function body: optional fuel/epoch checks, the implicit
    /// outermost block frame, then every Wasm operator, each validated
    /// immediately before being visited by the code generator.
    fn emit_body(
        &mut self,
        body: BinaryReader<'a>,
        validator: &mut FuncValidator<ValidatorResources>,
    ) -> Result<()> {
        self.maybe_emit_fuel_check()?;

        self.maybe_emit_epoch_check()?;

        // The function body is an implicit block with the function's signature.
        self.control_frames.push(ControlStackFrame::block(
            BlockSig::from_sig(self.sig.clone()),
            self.masm,
            &mut self.context,
        )?);

        // If results flow through a return pointer, point the results at the
        // slot where the prologue stashed that pointer.
        if self.sig.params.has_retptr() {
            self.sig
                .results
                .set_ret_area(RetArea::slot(self.context.frame.results_base_slot.unwrap()));
        }

        let mut ops = OperatorsReader::new(body);
        while !ops.eof() {
            let offset = ops.original_position();
            // Double `??`: the outer Result comes from the reader, the inner
            // one from the visitor itself.
            ops.visit_operator(&mut ValidateThenVisit(
                validator.simd_visitor(offset),
                self,
                offset,
            ))??;
        }
        ops.finish()?;
        return Ok(());

        // Adapter that first validates an operator (field 0), then forwards
        // it to the code generator (field 1); field 2 is the byte offset.
        struct ValidateThenVisit<'a, T, U>(T, &'a mut U, usize);

        macro_rules! validate_then_visit {
            ($( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident $ann:tt)*) => {
                $(
                    fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output {
                        self.0.$visit($($($arg.clone()),*)?)?;
                        let op = Operator::$op $({ $($arg: $arg.clone()),* })?;
                        if self.1.visit(&op) {
                            self.1.before_visit_op(&op, self.2)?;
                            let res = self.1.$visit($($($arg),*)?)?;
                            self.1.after_visit_op()?;
                            Ok(res)
                        } else {
                            Ok(())
                        }
                    }
                )*
            };
        }

        // Control-flow operators must still be visited in unreachable code
        // so frame bookkeeping stays consistent.
        fn visit_op_when_unreachable(op: &Operator) -> bool {
            use Operator::*;
            match op {
                If { .. } | Block { .. } | Loop { .. } | Else | End => true,
                _ => false,
            }
        }

        // Hooks invoked around each visited operator.
        trait VisitorHooks {
            fn before_visit_op(&mut self, operator: &Operator, offset: usize) -> Result<()>;
            fn after_visit_op(&mut self) -> Result<()>;

            // Whether the operator should be visited at all.
            fn visit(&self, op: &Operator) -> bool;
        }

        impl<'a, 'translation, 'data, M: MacroAssembler> VisitorHooks
            for CodeGen<'a, 'translation, 'data, M, Emission>
        {
            fn visit(&self, op: &Operator) -> bool {
                self.context.reachable || visit_op_when_unreachable(op)
            }

            fn before_visit_op(&mut self, operator: &Operator, offset: usize) -> Result<()> {
                self.source_location_before_visit_op(offset)?;

                if self.tunables.consume_fuel {
                    self.fuel_before_visit_op(operator)?;
                }
                Ok(())
            }

            fn after_visit_op(&mut self) -> Result<()> {
                self.source_location_after_visit_op()
            }
        }

        impl<'a, T, U> VisitOperator<'a> for ValidateThenVisit<'_, T, U>
        where
            T: VisitSimdOperator<'a, Output = wasmparser::Result<()>>,
            U: VisitSimdOperator<'a, Output = Result<()>> + VisitorHooks,
        {
            type Output = U::Output;

            fn simd_visitor(
                &mut self,
            ) -> Option<&mut dyn VisitSimdOperator<'a, Output = Self::Output>>
            where
                T:,
            {
                Some(self)
            }

            wasmparser::for_each_visit_operator!(validate_then_visit);
        }

        impl<'a, T, U> VisitSimdOperator<'a> for ValidateThenVisit<'_, T, U>
        where
            T: VisitSimdOperator<'a, Output = wasmparser::Result<()>>,
            U: VisitSimdOperator<'a, Output = Result<()>> + VisitorHooks,
        {
            wasmparser::for_each_visit_simd_operator!(validate_then_visit);
        }
    }
453
    /// Emits a runtime signature check for an indirect call: compares the
    /// caller's expected type id (looked up from the vmctx type-ids array)
    /// against the callee funcref's type id, trapping with
    /// `TRAP_BAD_SIGNATURE` on mismatch.
    pub fn emit_typecheck_funcref(
        &mut self,
        funcref_ptr: Reg,
        type_index: TypeIndex,
    ) -> Result<()> {
        let ptr_size: OperandSize = self.env.ptr_type().try_into()?;
        let sig_index_bytes = self.env.vmoffsets.size_of_vmshared_type_index();
        let sig_size = OperandSize::from_bytes(sig_index_bytes);
        let sig_index = self.env.translation.module.types[type_index].unwrap_module_type_index();
        // Byte offset of this signature's id within the type-ids array.
        let sig_offset = sig_index
            .as_u32()
            .checked_mul(sig_index_bytes.into())
            .unwrap();
        let signatures_base_offset = self.env.vmoffsets.ptr.vmctx_type_ids_array();
        let funcref_sig_offset = self.env.vmoffsets.ptr.vm_func_ref_type_index();
        let caller_id = self.context.any_gpr(self.masm)?;

        // caller_id = type_ids_array[sig_index]
        self.masm.with_scratch::<IntScratch, _>(|masm, scratch| {
            masm.load(
                masm.address_at_vmctx(signatures_base_offset.into())?,
                scratch.writable(),
                ptr_size,
            )?;

            masm.load(
                masm.address_at_reg(scratch.inner(), sig_offset)?,
                writable!(caller_id),
                sig_size,
            )
        })?;

        // callee_id = funcref.type_index
        let callee_id = self.context.any_gpr(self.masm)?;
        self.masm.load(
            self.masm
                .address_at_reg(funcref_ptr, funcref_sig_offset.into())?,
            writable!(callee_id),
            sig_size,
        )?;

        self.masm
            .cmp(caller_id, callee_id.into(), OperandSize::S32)?;
        self.masm.trapif(IntCmpKind::Ne, TRAP_BAD_SIGNATURE)?;
        self.context.free_reg(callee_id);
        self.context.free_reg(caller_id);
        wasmtime_environ::error::Ok(())
    }
504
    /// Emits the function epilogue: pops the ABI results (if reachable) or
    /// resets the stack pointer (if not), then frees the locals and emits
    /// the machine epilogue.
    fn emit_end(&mut self) -> Result<()> {
        let base = SPOffset::from_u32(self.context.frame.locals_size);
        self.masm.start_source_loc(Default::default())?;
        if self.context.reachable {
            ControlStackFrame::pop_abi_results_impl(
                &mut self.sig.results,
                &mut self.context,
                self.masm,
                |results, _, _| Ok(results.ret_area().copied()),
            )?;
        } else {
            // Unreachable end: discard any leftover values and restore SP.
            self.context.truncate_stack_to(0)?;
            self.masm.reset_stack_pointer(base)?;
        }
        // The value stack must be fully consumed at function end.
        ensure!(
            self.context.stack.len() == 0,
            CodeGenError::unexpected_value_in_value_stack()
        );
        self.masm.free_stack(self.context.frame.locals_size)?;
        self.masm.epilogue()?;
        self.masm.end_source_loc()?;
        Ok(())
    }
535
    /// Emits code for `local.set`/`local.tee`: pops the value into a
    /// register, stores it into the local's slot, and returns the register
    /// (so `local.tee` can push it back).
    pub fn emit_set_local(&mut self, index: u32) -> Result<TypedReg> {
        // If the value stack still holds a latent reference to this local,
        // spill first so the old value is materialized before overwriting.
        if self.context.stack.contains_latent_local(index) {
            self.context.spill(self.masm)?;
        }
        let src = self.context.pop_to_reg(self.masm, None)?;
        let (ty, addr) = self.context.frame.get_local_address(index, self.masm)?;
        self.masm
            .store(RegImm::reg(src.reg), addr, ty.try_into()?)?;

        Ok(src)
    }
555
    /// Resolves a global's address, returning its type, a base register and
    /// an offset from that base. Imported globals require an indirection
    /// through a pointer stored in the vmctx; local globals are addressed
    /// directly off the vmctx register.
    pub fn emit_get_global_addr(&mut self, index: GlobalIndex) -> Result<(WasmValType, Reg, u32)> {
        let data = self.env.resolve_global(index);

        if data.imported {
            let global_base = self.masm.address_at_reg(vmctx!(M), data.offset)?;
            let dst = self.context.any_gpr(self.masm)?;
            self.masm.load_ptr(global_base, writable!(dst))?;
            // The loaded pointer points directly at the global; offset 0.
            Ok((data.ty, dst, 0))
        } else {
            Ok((data.ty, vmctx!(M), data.offset))
        }
    }
569
    /// Emits a lazily-initialized `table.get` of a funcref element: loads
    /// the element, and if it is still null calls the lazy-init builtin;
    /// otherwise clears the funcref mask bit in place.
    pub fn emit_lazy_init_funcref(&mut self, table_index: TableIndex) -> Result<()> {
        assert!(self.tunables.table_lazy_init, "unsupported eager init");
        let table_data = self.env.resolve_table_data(table_index);
        let ptr_type = self.env.ptr_type();
        let builtin = self.env.builtins.table_get_lazy_init_func_ref::<M::ABI>()?;

        // Spill so the builtin's result register can be claimed; the result
        // must land in that exact register on both paths below.
        self.context.spill(self.masm)?;
        let elem_value: Reg = self.context.reg(
            builtin.sig().results.unwrap_singleton().unwrap_reg(),
            self.masm,
        )?;

        let index = self.context.pop_to_reg(self.masm, None)?;
        let base = self.context.any_gpr(self.masm)?;

        let elem_addr = self.emit_compute_table_elem_addr(index.into(), base, &table_data)?;
        self.masm.load_ptr(elem_addr, writable!(elem_value))?;
        self.context.free_reg(base);

        let (defined, cont) = (self.masm.get_label()?, self.masm.get_label()?);

        // Push the builtin's arguments (table index, element index) before
        // branching, so the call path finds them on the value stack.
        self.context
            .stack
            .extend([table_index.as_u32().try_into().unwrap(), index.into()]);

        // Non-null element: skip the builtin call.
        self.masm.branch(
            IntCmpKind::Ne,
            elem_value,
            elem_value.into(),
            defined,
            ptr_type.try_into()?,
        )?;
        self.context.free_reg(elem_value);
        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(builtin.clone()),
        )?;

        // The builtin's result must be on top of the stack, in elem_value.
        let top = self
            .context
            .stack
            .peek()
            .ok_or_else(|| CodeGenError::missing_values_in_stack())?;
        let top = top.unwrap_reg();
        ensure!(
            top.reg == elem_value,
            CodeGenError::table_element_value_expected()
        );
        self.masm.jmp(cont)?;

        // Already-initialized path: clear the funcref mask bit
        // (FUNCREF_MASK == -2, i.e. all bits set except bit 0).
        self.masm.bind(defined)?;
        assert_eq!(FUNCREF_MASK as isize, -2);
        let imm = RegImm::i64(-2);
        let dst = top.into();
        self.masm
            .and(writable!(dst), dst, imm, top.ty.try_into()?)?;

        self.masm.bind(cont)
    }
649
    /// Pops the memory index operand and computes the effective heap address
    /// for an access of `access_size` at `memarg`, emitting whichever bounds
    /// check the memory configuration requires.
    ///
    /// Returns `None` when the access is statically out of bounds (an
    /// unconditional trap was emitted and the code becomes unreachable);
    /// otherwise returns the register holding the computed address.
    pub fn emit_compute_heap_address(
        &mut self,
        memarg: &MemArg,
        access_size: OperandSize,
    ) -> Result<Option<Reg>> {
        let ptr_size: OperandSize = self.env.ptr_type().try_into()?;
        let enable_spectre_mitigation = self.env.heap_access_spectre_mitigation();
        let add_offset_and_access_size = |offset: ImmOffset, access_size: OperandSize| {
            (access_size.bytes() as u64) + (offset.as_u32() as u64)
        };

        let memory_index = MemoryIndex::from_u32(memarg.memory);
        let heap = self.env.resolve_heap(memory_index);
        let index = Index::from_typed_reg(self.context.pop_to_reg(self.masm, None)?);
        // Normalize index width and fold the static offset where possible.
        let offset = bounds::ensure_index_and_offset(
            self.masm,
            index,
            memarg.offset,
            heap.index_type().try_into()?,
        )?;
        let offset_with_access_size = add_offset_and_access_size(offset, access_size);

        let can_elide_bounds_check = heap
            .memory
            .can_elide_bounds_check(self.tunables, self.env.page_size_log2);

        // Case 1: statically out of bounds — emit an unconditional trap.
        let addr = if offset_with_access_size > heap.memory.maximum_byte_size().unwrap_or(u64::MAX)
            || (!self.tunables.memory_may_move
                && offset_with_access_size > self.tunables.memory_reservation)
        {
            // Flush accumulated fuel before the trap ends this basic block.
            self.emit_fuel_increment()?;
            self.masm.trap(TrapCode::HEAP_OUT_OF_BOUNDS)?;
            self.context.reachable = false;
            None

        // Case 2: the guard pages cover every possible 32-bit access — no
        // runtime bounds check needed.
        } else if can_elide_bounds_check
            && u64::from(u32::MAX)
                <= self.tunables.memory_reservation + self.tunables.memory_guard_size
                    - offset_with_access_size
        {
            assert!(can_elide_bounds_check);
            assert!(heap.index_type() == WasmValType::I32);
            let addr = self.context.any_gpr(self.masm)?;
            bounds::load_heap_addr_unchecked(self.masm, &heap, index, offset, addr, ptr_size)?;
            Some(addr)

        // Case 3: statically-sized heap — compare the index against the
        // adjusted static bound.
        } else if let Some(static_size) = heap.memory.static_heap_size() {
            let bounds = Bounds::from_u64(static_size);
            let addr = bounds::load_heap_addr_checked(
                self.masm,
                &mut self.context,
                ptr_size,
                &heap,
                enable_spectre_mitigation,
                bounds,
                index,
                offset,
                |masm, bounds, index| {
                    // index > bounds - (offset + access_size)  =>  trap
                    let adjusted_bounds = bounds.as_u64() - offset_with_access_size;
                    let index_reg = index.as_typed_reg().reg;
                    masm.cmp(
                        index_reg,
                        RegImm::i64(adjusted_bounds as i64),
                        ptr_size,
                    )?;
                    Ok(IntCmpKind::GtU)
                },
            )?;
            Some(addr)
        // Case 4: dynamic heap — load the current length and compare
        // index + offset + access_size against it at runtime.
        } else {
            let bounds = bounds::load_dynamic_heap_bounds::<_>(
                &mut self.context,
                self.masm,
                &heap,
                ptr_size,
            )?;

            let index_reg = index.as_typed_reg().reg;
            let index_offset_and_access_size = self.context.any_gpr(self.masm)?;

            self.masm.mov(
                writable!(index_offset_and_access_size),
                index_reg.into(),
                heap.index_type().try_into()?,
            )?;
            // Overflow in the addition itself is already out of bounds.
            self.masm.checked_uadd(
                writable!(index_offset_and_access_size),
                index_offset_and_access_size,
                RegImm::i64(offset_with_access_size as i64),
                ptr_size,
                TrapCode::HEAP_OUT_OF_BOUNDS,
            )?;

            let addr = bounds::load_heap_addr_checked(
                self.masm,
                &mut self.context,
                ptr_size,
                &heap,
                enable_spectre_mitigation,
                bounds,
                index,
                offset,
                |masm, bounds, _| {
                    // index + offset + access_size > bounds  =>  trap
                    let bounds_reg = bounds.as_typed_reg().reg;
                    masm.cmp(
                        index_offset_and_access_size,
                        bounds_reg.into(),
                        ptr_size,
                    )?;
                    Ok(IntCmpKind::GtU)
                },
            )?;
            self.context.free_reg(bounds.as_typed_reg().reg);
            self.context.free_reg(index_offset_and_access_size);
            Some(addr)
        };

        self.context.free_reg(index.as_typed_reg().reg);
        Ok(addr)
    }
866
    /// Emits a natural-alignment check for an atomic access of `size`
    /// bytes, trapping with `TRAP_HEAP_MISALIGNED` when
    /// `(addr + offset) & (size - 1) != 0`. Single-byte accesses are
    /// always aligned and emit nothing.
    fn emit_check_align(&mut self, memarg: &MemArg, size: OperandSize) -> Result<()> {
        if size.bytes() > 1 {
            // Peek (don't pop): the address operand is still needed by the
            // subsequent heap-address computation.
            let addr = *self
                .context
                .stack
                .peek()
                .ok_or_else(|| CodeGenError::missing_values_in_stack())?;
            let tmp = self.context.any_gpr(self.masm)?;
            self.context.move_val_to_reg(&addr, tmp, self.masm)?;

            if memarg.offset != 0 {
                self.masm.add(
                    writable!(tmp),
                    tmp,
                    RegImm::Imm(Imm::I64(memarg.offset)),
                    size,
                )?;
            }

            // Mask to the low bits that must be zero for natural alignment.
            self.masm.and(
                writable!(tmp),
                tmp,
                RegImm::Imm(Imm::I32(size.bytes() - 1)),
                size,
            )?;

            self.masm.cmp(tmp, RegImm::Imm(Imm::i64(0)), size)?;
            self.masm.trapif(IntCmpKind::Ne, TRAP_HEAP_MISALIGNED)?;
            self.context.free_reg(tmp);
        }

        Ok(())
    }
902
903 pub fn emit_compute_heap_address_align_checked(
904 &mut self,
905 memarg: &MemArg,
906 access_size: OperandSize,
907 ) -> Result<Option<Reg>> {
908 self.emit_check_align(memarg, access_size)?;
909 self.emit_compute_heap_address(memarg, access_size)
910 }
911
    /// Emits a Wasm load of `kind` from memory at `arg`, pushing the result
    /// of type `target_type` onto the value stack. Vector-lane loads reuse
    /// the popped vector register as the destination; atomic loads add an
    /// alignment check.
    pub fn emit_wasm_load(
        &mut self,
        arg: &MemArg,
        target_type: WasmValType,
        kind: LoadKind,
    ) -> Result<()> {
        // Shared tail: load from [addr], push the result, free addr.
        let emit_load = |this: &mut Self, dst, addr, kind| -> Result<()> {
            let src = this.masm.address_at_reg(addr, 0)?;
            this.masm.wasm_load(src, writable!(dst), kind)?;
            this.context
                .stack
                .push(TypedReg::new(target_type, dst).into());
            this.context.free_reg(addr);
            Ok(())
        };

        match kind {
            // Lane loads also consume the vector operand on the stack; it
            // must be popped *before* the address is computed.
            LoadKind::VectorLane(_) => {
                let dst = self.context.pop_to_reg(self.masm, None)?;
                let addr = self.emit_compute_heap_address(&arg, kind.derive_operand_size())?;
                if let Some(addr) = addr {
                    emit_load(self, dst.reg, addr, kind)?;
                } else {
                    // Statically out of bounds: trap already emitted.
                    self.context.free_reg(dst);
                }
            }
            _ => {
                let maybe_addr = match kind {
                    LoadKind::Atomic(_, _) => self.emit_compute_heap_address_align_checked(
                        &arg,
                        kind.derive_operand_size(),
                    )?,
                    _ => self.emit_compute_heap_address(&arg, kind.derive_operand_size())?,
                };

                if let Some(addr) = maybe_addr {
                    let dst = match target_type {
                        WasmValType::I32 | WasmValType::I64 => self.context.any_gpr(self.masm)?,
                        WasmValType::F32 | WasmValType::F64 => self.context.any_fpr(self.masm)?,
                        WasmValType::V128 => self.context.reg_for_type(target_type, self.masm)?,
                        _ => bail!(CodeGenError::unsupported_wasm_type()),
                    };

                    emit_load(self, dst, addr, kind)?;
                }
            }
        }

        Ok(())
    }
968
    /// Emits a Wasm store of `kind` to memory at `arg`: pops the value to
    /// store, computes (and for atomics align-checks) the heap address, and
    /// performs the store. If the address computation trapped statically,
    /// the operands are simply discarded.
    pub fn emit_wasm_store(&mut self, arg: &MemArg, kind: StoreKind) -> Result<()> {
        let src = self.context.pop_to_reg(self.masm, None)?;

        let maybe_addr = match kind {
            StoreKind::Atomic(size) => self.emit_compute_heap_address_align_checked(&arg, size)?,
            StoreKind::Operand(size) | StoreKind::VectorLane(LaneSelector { size, .. }) => {
                self.emit_compute_heap_address(&arg, size)?
            }
        };

        if let Some(addr) = maybe_addr {
            self.masm
                .wasm_store(src.reg, self.masm.address_at_reg(addr, 0)?, kind)?;

            self.context.free_reg(addr);
        }
        self.context.free_reg(src);

        Ok(())
    }
990
    /// Computes the address of table element `index`, bounds-checking it
    /// against the table's current element count (trapping with
    /// `TRAP_TABLE_OUT_OF_BOUNDS`). `base` is clobbered and ends up holding
    /// the element address. With Spectre mitigation enabled, an
    /// out-of-bounds speculated index is redirected to the table base.
    pub fn emit_compute_table_elem_addr(
        &mut self,
        index: Reg,
        base: Reg,
        table_data: &TableData,
    ) -> Result<M::Address> {
        let bound = self.context.any_gpr(self.masm)?;
        let tmp = self.context.any_gpr(self.masm)?;
        let ptr_size: OperandSize = self.env.ptr_type().try_into()?;

        // base := pointer to the table's definition (imported tables go
        // through an extra indirection in the vmctx).
        if let Some(offset) = table_data.import_from {
            self.masm
                .load_ptr(self.masm.address_at_vmctx(offset)?, writable!(base))?;
        } else {
            self.masm.mov(writable!(base), vmctx!(M).into(), ptr_size)?;
        };

        // Bounds check: index >= current_elements  =>  trap.
        let bound_addr = self
            .masm
            .address_at_reg(base, table_data.current_elems_offset)?;
        let bound_size = table_data.current_elements_size;
        self.masm.load(bound_addr, writable!(bound), bound_size)?;
        self.masm.cmp(index, bound.into(), bound_size)?;
        self.masm
            .trapif(IntCmpKind::GeU, TRAP_TABLE_OUT_OF_BOUNDS)?;

        // base := table_base + index * element_size; tmp keeps the
        // un-offset table base for the Spectre cmov below.
        self.masm.with_scratch::<IntScratch, _>(|masm, scratch| {
            masm.mov(scratch.writable(), index.into(), bound_size)?;
            masm.mul(
                scratch.writable(),
                scratch.inner(),
                RegImm::i32(table_data.element_size.bytes() as i32),
                table_data.element_size,
            )?;
            masm.load_ptr(
                masm.address_at_reg(base, table_data.offset)?,
                writable!(base),
            )?;
            masm.mov(writable!(tmp), base.into(), ptr_size)?;
            masm.add(writable!(base), base, scratch.inner().into(), ptr_size)
        })?;
        if self.env.table_access_spectre_mitigation() {
            // Under speculation, clamp out-of-bounds addresses to the base.
            self.masm.cmp(index, bound.into(), OperandSize::S32)?;
            self.masm
                .cmov(writable!(base), tmp, IntCmpKind::GeU, ptr_size)?;
        }
        self.context.free_reg(bound);
        self.context.free_reg(tmp);
        self.masm.address_at_reg(base, 0)
    }
1058
    /// Emits code for `table.size`: loads the table's current element count
    /// and pushes it on the value stack as an i32.
    pub fn emit_compute_table_size(&mut self, table_data: &TableData) -> Result<()> {
        let size = self.context.any_gpr(self.masm)?;
        let ptr_size: OperandSize = self.env.ptr_type().try_into()?;

        self.masm.with_scratch::<IntScratch, _>(|masm, scratch| {
            // scratch := pointer to the table definition (extra indirection
            // for imported tables).
            if let Some(offset) = table_data.import_from {
                masm.load_ptr(masm.address_at_vmctx(offset)?, scratch.writable())?;
            } else {
                masm.mov(scratch.writable(), vmctx!(M).into(), ptr_size)?;
            };

            let size_addr =
                masm.address_at_reg(scratch.inner(), table_data.current_elems_offset)?;
            masm.load(size_addr, writable!(size), table_data.current_elements_size)
        })?;

        self.context.stack.push(TypedReg::i32(size).into());
        Ok(())
    }
1079
    /// Emits code for `memory.size`: loads the memory's current byte length,
    /// converts it to pages by shifting right by log2(page_size), and pushes
    /// the result typed as the memory's index type.
    pub fn emit_compute_memory_size(&mut self, heap_data: &HeapData) -> Result<()> {
        let size_reg = self.context.any_gpr(self.masm)?;

        self.masm.with_scratch::<IntScratch, _>(|masm, scratch| {
            // Imported memories live behind a pointer in the vmctx.
            let base = if let Some(offset) = heap_data.import_from {
                masm.load_ptr(masm.address_at_vmctx(offset)?, scratch.writable())?;
                scratch.inner()
            } else {
                vmctx!(M)
            };

            let size_addr = masm.address_at_reg(base, heap_data.current_length_offset)?;
            masm.load_ptr(size_addr, writable!(size_reg))
        })?;
        let dst = TypedReg::new(heap_data.index_type(), size_reg);
        // bytes -> pages: logical shift right by log2(page_size).
        let pow = heap_data.memory.page_size_log2;
        self.masm.shift_ir(
            writable!(dst.reg),
            Imm::i32(pow as i32),
            dst.into(),
            ShiftKind::ShrU,
            heap_data.index_type().try_into()?,
        )?;
        self.context.stack.push(dst.into());
        Ok(())
    }
1108
    /// If fuel consumption is enabled, emits a check of the store's
    /// fuel-consumed counter and a call to the `out_of_gas` builtin when
    /// fuel has run out (counter >= 0); otherwise emits nothing.
    pub fn maybe_emit_fuel_check(&mut self) -> Result<()> {
        if !self.tunables.consume_fuel {
            return Ok(());
        }

        // Flush fuel accumulated so far before reading the counter.
        self.emit_fuel_increment()?;
        let out_of_fuel = self.env.builtins.out_of_gas::<M::ABI>()?;
        // Allocate the fuel register outside the builtin's ABI registers so
        // it survives until after the (potential) call setup.
        let fuel_reg = self.context.without::<Result<Reg>, M, _>(
            &out_of_fuel.sig().regs,
            self.masm,
            |cx, masm| cx.any_gpr(masm),
        )??;

        self.emit_load_fuel_consumed(fuel_reg)?;

        let continuation = self.masm.get_label()?;

        self.context.spill(self.masm)?;
        // Still fuel left (counter negative): skip the builtin call.
        self.masm.branch(
            IntCmpKind::LtS,
            fuel_reg,
            RegImm::i64(0),
            continuation,
            OperandSize::S64,
        )?;
        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(out_of_fuel.clone()),
        )?;
        // Discard the builtin's (unused) result.
        self.context.pop_and_free(self.masm)?;

        self.masm.bind(continuation)?;
        self.context.free_reg(fuel_reg);

        Ok(())
    }
1157
    /// Loads the store's 64-bit fuel-consumed counter into `fuel_reg`,
    /// dereferencing vmctx -> store context -> fuel_consumed.
    fn emit_load_fuel_consumed(&mut self, fuel_reg: Reg) -> Result<()> {
        let store_context_offset = self.env.vmoffsets.ptr.vmctx_store_context();
        let fuel_offset = self.env.vmoffsets.ptr.vmstore_context_fuel_consumed();
        // fuel_reg := *(vmctx + store_context_offset)
        self.masm.load_ptr(
            self.masm
                .address_at_vmctx(u32::from(store_context_offset))?,
            writable!(fuel_reg),
        )?;

        // fuel_reg := *(fuel_reg + fuel_offset)
        self.masm.load(
            self.masm.address_at_reg(fuel_reg, u32::from(fuel_offset))?,
            writable!(fuel_reg),
            OperandSize::S64,
        )
    }
1176
    /// If epoch interruption is enabled, emits a comparison of the current
    /// epoch counter against the store's deadline and a call to the
    /// `new_epoch` builtin when the deadline has been reached; otherwise
    /// emits nothing.
    pub fn maybe_emit_epoch_check(&mut self) -> Result<()> {
        if !self.tunables.epoch_interruption {
            return Ok(());
        }

        let cont = self.masm.get_label()?;
        let new_epoch = self.env.builtins.new_epoch::<M::ABI>()?;

        // Allocate both registers outside the builtin's ABI registers so
        // they survive the call setup.
        let (epoch_deadline_reg, epoch_counter_reg) =
            self.context.without::<Result<(Reg, Reg)>, M, _>(
                &new_epoch.sig().regs,
                self.masm,
                |cx, masm| Ok((cx.any_gpr(masm)?, cx.any_gpr(masm)?)),
            )??;

        self.emit_load_epoch_deadline_and_counter(epoch_deadline_reg, epoch_counter_reg)?;

        self.context.spill(self.masm)?;
        // counter < deadline: no interruption needed, skip the call.
        self.masm.branch(
            IntCmpKind::LtU,
            epoch_counter_reg,
            RegImm::reg(epoch_deadline_reg),
            cont,
            OperandSize::S64,
        )?;
        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(new_epoch.clone()),
        )?;
        // Drop the builtin's (unused) result from the value stack.
        self.visit_drop()?;

        self.masm.bind(cont)?;

        self.context.free_reg(epoch_deadline_reg);
        self.context.free_reg(epoch_counter_reg);
        Ok(())
    }
1233
    /// Loads the current 64-bit epoch counter into `epoch_counter_reg` and
    /// the store's epoch deadline into `epoch_deadline_reg`, each via a
    /// double dereference through the vmctx.
    fn emit_load_epoch_deadline_and_counter(
        &mut self,
        epoch_deadline_reg: Reg,
        epoch_counter_reg: Reg,
    ) -> Result<()> {
        let epoch_ptr_offset = self.env.vmoffsets.ptr.vmctx_epoch_ptr();
        let store_context_offset = self.env.vmoffsets.ptr.vmctx_store_context();
        let epoch_deadline_offset = self.env.vmoffsets.ptr.vmstore_context_epoch_deadline();

        // counter := *(vmctx + epoch_ptr_offset)
        self.masm.load_ptr(
            self.masm.address_at_vmctx(u32::from(epoch_ptr_offset))?,
            writable!(epoch_counter_reg),
        )?;

        // counter := *counter
        self.masm.load(
            self.masm.address_at_reg(epoch_counter_reg, 0)?,
            writable!(epoch_counter_reg),
            OperandSize::S64,
        )?;

        // deadline := *(vmctx + store_context_offset)
        self.masm.load_ptr(
            self.masm
                .address_at_vmctx(u32::from(store_context_offset))?,
            writable!(epoch_deadline_reg),
        )?;

        // deadline := *(deadline + epoch_deadline_offset)
        self.masm.load(
            self.masm
                .address_at_reg(epoch_deadline_reg, u32::from(epoch_deadline_offset))?,
            writable!(epoch_deadline_reg),
            OperandSize::S64,
        )
    }
1272
    /// Flushes the fuel accumulated since the last flush into the store's
    /// fuel-consumed counter (load, add, store), resetting the in-compiler
    /// accumulator to zero. Emits nothing when no fuel was accumulated.
    fn emit_fuel_increment(&mut self) -> Result<()> {
        // Take the accumulated fuel, leaving the accumulator at zero.
        let fuel_at_point = std::mem::replace(&mut self.fuel_consumed, 0);
        if fuel_at_point == 0 {
            return Ok(());
        }

        let store_context_offset = self.env.vmoffsets.ptr.vmctx_store_context();
        let fuel_offset = self.env.vmoffsets.ptr.vmstore_context_fuel_consumed();
        let limits_reg = self.context.any_gpr(self.masm)?;

        // limits_reg := *(vmctx + store_context_offset)
        self.masm.load_ptr(
            self.masm
                .address_at_vmctx(u32::from(store_context_offset))?,
            writable!(limits_reg),
        )?;

        // *(limits_reg + fuel_offset) += fuel_at_point
        self.masm.with_scratch::<IntScratch, _>(|masm, scratch| {
            masm.load(
                masm.address_at_reg(limits_reg, u32::from(fuel_offset))?,
                scratch.writable(),
                OperandSize::S64,
            )?;

            masm.add(
                scratch.writable(),
                scratch.inner(),
                RegImm::i64(fuel_at_point),
                OperandSize::S64,
            )?;

            masm.store(
                scratch.inner().into(),
                masm.address_at_reg(limits_reg, u32::from(fuel_offset))?,
                OperandSize::S64,
            )
        })?;

        self.context.free_reg(limits_reg);

        Ok(())
    }
1321
    /// Fuel hook run before each operator: accumulates the operator's cost
    /// and, at basic-block boundaries (branches, calls, returns, ends),
    /// flushes the accumulator into the store so the runtime counter is
    /// accurate whenever control can leave the block.
    fn fuel_before_visit_op(&mut self, op: &Operator) -> Result<()> {
        if !self.context.reachable {
            // Unreachable code must not accumulate fuel.
            ensure!(self.fuel_consumed == 0, CodeGenError::illegal_fuel_state())
        }

        self.fuel_consumed += self.tunables.operator_cost.cost(op);

        match op {
            // Operators that end a basic block or transfer control: flush
            // the accumulated fuel before emitting them.
            Operator::Unreachable
            | Operator::Loop { .. }
            | Operator::If { .. }
            | Operator::Else { .. }
            | Operator::Br { .. }
            | Operator::BrIf { .. }
            | Operator::BrTable { .. }
            | Operator::End
            | Operator::Return
            | Operator::CallIndirect { .. }
            | Operator::Call { .. }
            | Operator::ReturnCall { .. }
            | Operator::ReturnCallIndirect { .. } => self.emit_fuel_increment(),
            _ => Ok(()),
        }
    }
1365
    /// Source-location hook run before each operator: opens a source
    /// location (relative to the function's base) at the operator's byte
    /// offset and records the resulting code offset.
    fn source_location_before_visit_op(&mut self, offset: usize) -> Result<()> {
        let loc = SourceLoc::new(offset as u32);
        let rel = self.source_loc_from(loc);
        self.source_location.current = self.masm.start_source_loc(rel)?;
        Ok(())
    }
1373
    /// Source-location hook run after each operator: closes the currently
    /// open source location, but only if the code offset has not moved
    /// backwards relative to where the location was opened.
    fn source_location_after_visit_op(&mut self) -> Result<()> {
        if self.masm.current_code_offset()? >= self.source_location.current.0 {
            self.masm.end_source_loc()?;
        }

        Ok(())
    }
1388
    /// Emits an atomic read-modify-write (`atomic.rmw.*`): pops the operand,
    /// computes the align-checked heap address, then delegates the RMW
    /// sequence to the macro-assembler. When the address computation trapped
    /// statically, nothing further is emitted.
    pub(crate) fn emit_atomic_rmw(
        &mut self,
        arg: &MemArg,
        op: RmwOp,
        size: OperandSize,
        extend: Option<Extend<Zero>>,
    ) -> Result<()> {
        // Pop the operand first so the address operand is on top for the
        // heap-address computation; push it back for atomic_rmw to consume.
        let operand = self.context.pop_to_reg(self.masm, None)?;
        if let Some(addr) = self.emit_compute_heap_address_align_checked(arg, size)? {
            let src = self.masm.address_at_reg(addr, 0)?;
            self.context.stack.push(operand.into());
            self.masm
                .atomic_rmw(&mut self.context, src, size, op, UNTRUSTED_FLAGS, extend)?;
            self.context.free_reg(addr);
        }

        Ok(())
    }
1410
    /// Emits an atomic compare-and-exchange (`atomic.rmw.cmpxchg`): pops
    /// replacement and expected values, computes the align-checked heap
    /// address, then delegates the CAS sequence to the macro-assembler.
    pub(crate) fn emit_atomic_cmpxchg(
        &mut self,
        arg: &MemArg,
        size: OperandSize,
        extend: Option<Extend<Zero>>,
    ) -> Result<()> {
        // Stack (top first): replacement, expected, address. Pop the first
        // two so the address is on top for the heap-address computation.
        let replacement = self.context.pop_to_reg(self.masm, None)?;
        let expected = self.context.pop_to_reg(self.masm, None)?;

        if let Some(addr) = self.emit_compute_heap_address_align_checked(arg, size)? {
            // Re-push in the order atomic_cas expects to consume them.
            self.context.stack.push(expected.into());
            self.context.stack.push(replacement.into());

            let src = self.masm.address_at_reg(addr, 0)?;
            self.masm
                .atomic_cas(&mut self.context, src, size, UNTRUSTED_FLAGS, extend)?;

            self.context.free_reg(addr);
        }
        Ok(())
    }
1443
    /// Stub for `memory.atomic.wait32`/`memory.atomic.wait64` when the
    /// `threads` feature is disabled: always reports the instruction as
    /// unimplemented.
    #[cfg(not(feature = "threads"))]
    pub fn emit_atomic_wait(&mut self, _arg: &MemArg, _kind: AtomicWaitKind) -> Result<()> {
        Err(CodeGenError::unimplemented_wasm_instruction().into())
    }
1448
    /// Emits a call to the `memory_atomic_wait32`/`memory_atomic_wait64`
    /// builtin, implementing `memory.atomic.wait32`/`memory.atomic.wait64`.
    ///
    /// Pops the timeout, expected value, and address operands, resolves the
    /// builtin for the requested `kind`, folds the static memarg offset into
    /// the address register, and re-pushes the operands as arguments for the
    /// builtin call.
    #[cfg(feature = "threads")]
    pub fn emit_atomic_wait(&mut self, arg: &MemArg, kind: AtomicWaitKind) -> Result<()> {
        // Operands are popped in reverse push order: timeout is on top,
        // then the expected value, then the address.
        let timeout = self.context.pop_to_reg(self.masm, None)?;
        let expected = self.context.pop_to_reg(self.masm, None)?;
        let addr = self.context.pop_to_reg(self.masm, None)?;

        // Capture the stack length *after* the pops: the defined memory
        // index argument is inserted at this position, underneath the
        // operands re-pushed below.
        let stack_len = self.context.stack.len();
        let builtin = match kind {
            AtomicWaitKind::Wait32 => self.env.builtins.memory_atomic_wait32::<M::ABI>()?,
            AtomicWaitKind::Wait64 => self.env.builtins.memory_atomic_wait64::<M::ABI>()?,
        };
        let builtin = self.prepare_builtin_defined_memory_arg(
            MemoryIndex::from_u32(arg.memory),
            stack_len,
            builtin,
        )?;

        // Fold the static memarg offset into the address register.
        // NOTE(review): `arg.offset as i64` assumes the offset fits in i64;
        // confirm offsets are validated upstream.
        if arg.offset != 0 {
            self.masm.add(
                writable!(addr.reg),
                addr.reg,
                RegImm::i64(arg.offset as i64),
                OperandSize::S64,
            )?;
        }

        // Push the remaining builtin arguments in order: address (as i64),
        // expected value, timeout — after the memory index inserted above.
        self.context
            .stack
            .push(TypedReg::new(WasmValType::I64, addr.reg).into());
        self.context.stack.push(expected.into());
        self.context.stack.push(timeout.into());

        FnCall::emit::<M>(&mut self.env, self.masm, &mut self.context, builtin)?;

        Ok(())
    }
1499
    /// Stub for `memory.atomic.notify` when the `threads` feature is
    /// disabled: always reports the instruction as unimplemented.
    #[cfg(not(feature = "threads"))]
    pub fn emit_atomic_notify(&mut self, _arg: &MemArg) -> Result<()> {
        Err(CodeGenError::unimplemented_wasm_instruction().into())
    }
1504
    /// Emits a call to the `memory_atomic_notify` builtin, implementing the
    /// `memory.atomic.notify` instruction.
    ///
    /// Pops the waiter count and address operands, folds the static memarg
    /// offset into the address register, and re-pushes the operands as
    /// arguments for the builtin call.
    #[cfg(feature = "threads")]
    pub fn emit_atomic_notify(&mut self, arg: &MemArg) -> Result<()> {
        // The waiter count is on top of the value stack, then the address.
        let count = self.context.pop_to_reg(self.masm, None)?;
        let addr = self.context.pop_to_reg(self.masm, None)?;

        let builtin = self.env.builtins.memory_atomic_notify::<M::ABI>()?;
        // Capture the stack length *after* the pops: the defined memory
        // index argument is inserted at this position, underneath the
        // operands re-pushed below.
        let stack_len = self.context.stack.len();
        let builtin = self.prepare_builtin_defined_memory_arg(
            MemoryIndex::from_u32(arg.memory),
            stack_len,
            builtin,
        )?;

        // Fold the static memarg offset into the address register.
        // NOTE(review): `arg.offset as i64` assumes the offset fits in i64;
        // confirm offsets are validated upstream.
        if arg.offset != 0 {
            self.masm.add(
                writable!(addr.reg),
                addr.reg,
                RegImm::i64(arg.offset as i64),
                OperandSize::S64,
            )?;
        }

        // Push the remaining builtin arguments in order: address (as i64),
        // then the waiter count — after the memory index inserted above.
        self.context
            .stack
            .push(TypedReg::new(WasmValType::I64, addr.reg).into());
        self.context.stack.push(count.into());

        FnCall::emit::<M>(&mut self.env, self.masm, &mut self.context, builtin)?;

        Ok(())
    }
1549
1550 pub fn prepare_builtin_defined_memory_arg(
1551 &mut self,
1552 mem: MemoryIndex,
1553 defined_index_at: usize,
1554 builtin: BuiltinFunction,
1555 ) -> Result<Callee> {
1556 match self.env.translation.module.defined_memory_index(mem) {
1557 Some(defined) => {
1560 self.context
1561 .stack
1562 .insert_many(defined_index_at, &[defined.as_u32().try_into()?]);
1563 Ok(Callee::Builtin(builtin))
1564 }
1565
1566 None => {
1570 let vmimport = self.env.vmoffsets.vmctx_vmmemory_import(mem);
1571 let vmctx_offset = vmimport + u32::from(self.env.vmoffsets.vmmemory_import_vmctx());
1572 let index_offset = vmimport + u32::from(self.env.vmoffsets.vmmemory_import_index());
1573 let index_addr = self.masm.address_at_vmctx(index_offset)?;
1574 let index_dst = self.context.reg_for_class(RegClass::Int, self.masm)?;
1575 self.masm
1576 .load(index_addr, writable!(index_dst), OperandSize::S32)?;
1577 self.context
1578 .stack
1579 .insert_many(defined_index_at, &[Val::reg(index_dst, WasmValType::I32)]);
1580 Ok(Callee::BuiltinWithDifferentVmctx(builtin, vmctx_offset))
1581 }
1582 }
1583 }
1584
1585 pub fn prepare_builtin_defined_table_arg(
1587 &mut self,
1588 table: TableIndex,
1589 defined_index_at: usize,
1590 builtin: BuiltinFunction,
1591 ) -> Result<Callee> {
1592 match self.env.translation.module.defined_table_index(table) {
1593 Some(defined) => {
1594 self.context
1595 .stack
1596 .insert_many(defined_index_at, &[defined.as_u32().try_into()?]);
1597 Ok(Callee::Builtin(builtin))
1598 }
1599 None => {
1600 let vmimport = self.env.vmoffsets.vmctx_vmtable_import(table);
1601 let vmctx_offset = vmimport + u32::from(self.env.vmoffsets.vmtable_import_vmctx());
1602 let index_offset = vmimport + u32::from(self.env.vmoffsets.vmtable_import_index());
1603 let index_addr = self.masm.address_at_vmctx(index_offset)?;
1604 let index_dst = self.context.reg_for_class(RegClass::Int, self.masm)?;
1605 self.masm
1606 .load(index_addr, writable!(index_dst), OperandSize::S32)?;
1607 self.context
1608 .stack
1609 .insert_many(defined_index_at, &[Val::reg(index_dst, WasmValType::I32)]);
1610 Ok(Callee::BuiltinWithDifferentVmctx(builtin, vmctx_offset))
1611 }
1612 }
1613 }
1614}
1615
1616pub fn control_index(depth: u32, control_length: usize) -> Result<usize> {
1619 (control_length - 1)
1620 .checked_sub(depth as usize)
1621 .ok_or_else(|| format_err!(CodeGenError::control_frame_expected()))
1622}