use crate::{
    abi::{ABIOperand, ABISig, RetArea, vmctx},
    codegen::BlockSig,
    isa::reg::{Reg, writable},
    masm::{
        AtomicWaitKind, Extend, Imm, IntCmpKind, IntScratch, LaneSelector, LoadKind,
        MacroAssembler, OperandSize, RegImm, RmwOp, SPOffset, ShiftKind, StoreKind, TrapCode,
        UNTRUSTED_FLAGS, Zero,
    },
    stack::TypedReg,
};
use anyhow::{Result, anyhow, bail, ensure};
use cranelift_codegen::{
    binemit::CodeOffset,
    ir::{RelSourceLoc, SourceLoc},
};
use smallvec::SmallVec;
use std::marker::PhantomData;
use wasmparser::{
    BinaryReader, FuncValidator, MemArg, Operator, OperatorsReader, ValidatorResources,
    VisitOperator, VisitSimdOperator,
};
use wasmtime_cranelift::{TRAP_BAD_SIGNATURE, TRAP_HEAP_MISALIGNED, TRAP_TABLE_OUT_OF_BOUNDS};
use wasmtime_environ::{
    FUNCREF_MASK, GlobalIndex, MemoryIndex, PtrSize, TableIndex, Tunables, TypeIndex, WasmHeapType,
    WasmValType,
};

mod context;
pub(crate) use context::*;
mod env;
pub use env::*;
mod call;
pub(crate) use call::*;
mod control;
pub(crate) use control::*;
mod builtin;
pub use builtin::*;
pub(crate) mod bounds;

use bounds::{Bounds, ImmOffset, Index};

mod phase;
pub(crate) use phase::*;

mod error;
pub(crate) use error::*;

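/// Branch emission strategy: describes whether the code that follows an
/// emitted branch must be treated as unreachable.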
pub(crate) trait BranchState {
    fn unreachable_state_after_emission() -> bool;
}

pub(crate) struct ConditionalBranch;

impl BranchState for ConditionalBranch {
    fn unreachable_state_after_emission() -> bool {
        false
    }
}

pub(crate) struct UnconditionalBranch;

impl BranchState for UnconditionalBranch {
    fn unreachable_state_after_emission() -> bool {
        true
    }
}

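/// Tracks source location state for the function being compiled: the base
/// [`SourceLoc`] of the function and the most recently emitted
/// code-offset/relative-source-location pair.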
#[derive(Default)]
pub(crate) struct SourceLocation {
    pub base: Option<SourceLoc>,
    pub current: (CodeOffset, RelSourceLoc),
}

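/// The code generation abstraction: bundles the ABI signature, the code
/// generation context (value stack and frame), the function environment, the
/// macro assembler, the control frame stack, source location tracking, the
/// compiler tunables, and the fuel accumulated since the last fuel increment.
/// The `P` parameter tracks whether we are in the prologue or emission phase.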
pub(crate) struct CodeGen<'a, 'translation: 'a, 'data: 'translation, M, P>
where
    M: MacroAssembler,
    P: CodeGenPhase,
{
    pub sig: ABISig,
    pub context: CodeGenContext<'a, P>,
    pub env: FuncEnv<'a, 'translation, 'data, M::Ptr>,
    pub masm: &'a mut M,
    pub control_frames: SmallVec<[ControlStackFrame; 64]>,
    pub source_location: SourceLocation,
    pub tunables: &'a Tunables,
    pub fuel_consumed: i64,
    phase: PhantomData<P>,
}

impl<'a, 'translation, 'data, M> CodeGen<'a, 'translation, 'data, M, Prologue>
where
    M: MacroAssembler,
{
    pub fn new(
        tunables: &'a Tunables,
        masm: &'a mut M,
        context: CodeGenContext<'a, Prologue>,
        env: FuncEnv<'a, 'translation, 'data, M::Ptr>,
        sig: ABISig,
    ) -> CodeGen<'a, 'translation, 'data, M, Prologue> {
        Self {
            sig,
            context,
            masm,
            env,
            tunables,
            source_location: Default::default(),
            control_frames: Default::default(),
            fuel_consumed: 1,
            phase: PhantomData,
        }
    }

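    /// Emits the function prologue: saves the caller-provided vmctx, reserves
    /// and zeroes the stack space for locals, spills register arguments, and
    /// stores the return area pointer when the signature requires one.
    /// Consumes the prologue-phase code generator and returns one in the
    /// emission phase.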
    pub fn emit_prologue(mut self) -> Result<CodeGen<'a, 'translation, 'data, M, Emission>> {
        let vmctx = self
            .sig
            .params()
            .first()
            .ok_or_else(|| anyhow!(CodeGenError::vmcontext_arg_expected()))?
            .unwrap_reg();

        self.masm.start_source_loc(Default::default())?;
        self.masm.prologue(vmctx)?;

        self.masm.mov(
            writable!(vmctx!(M)),
            vmctx.into(),
            self.env.ptr_type().try_into()?,
        )?;

        self.masm.reserve_stack(self.context.frame.locals_size)?;
        self.spill_register_arguments()?;

        let defined_locals_range = &self.context.frame.defined_locals_range;
        self.masm.zero_mem_range(defined_locals_range.as_range())?;

        if self.sig.params.has_retptr() {
            match self.sig.params.unwrap_results_area_operand() {
                ABIOperand::Reg { ty, reg, .. } => {
                    let results_base_slot = self.context.frame.results_base_slot.as_ref().unwrap();
                    ensure!(
                        results_base_slot.addressed_from_sp(),
                        CodeGenError::sp_addressing_expected(),
                    );
                    let addr = self.masm.local_address(results_base_slot)?;
                    self.masm.store((*reg).into(), addr, (*ty).try_into()?)?;
                }
                // Nothing to save if the results area operand is not passed in
                // a register.
                _ => {}
            }
        }

        self.masm.end_source_loc()?;

        Ok(CodeGen {
            sig: self.sig,
            context: self.context.for_emission(),
            masm: self.masm,
            env: self.env,
            tunables: self.tunables,
            source_location: self.source_location,
            control_frames: self.control_frames,
            fuel_consumed: self.fuel_consumed,
            phase: PhantomData,
        })
    }

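    /// Stores each register-passed argument into its local slot in the
    /// function frame so that locals can be uniformly addressed from memory.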
    fn spill_register_arguments(&mut self) -> Result<()> {
        use WasmValType::*;
        for (operand, slot) in self
            .sig
            .params_without_retptr()
            .iter()
            .zip(self.context.frame.locals())
        {
            match (operand, slot) {
                (ABIOperand::Reg { ty, reg, .. }, slot) => {
                    let addr = self.masm.local_address(slot)?;
                    match &ty {
                        I32 | I64 | F32 | F64 | V128 => {
                            self.masm.store((*reg).into(), addr, (*ty).try_into()?)?;
                        }
                        Ref(rt) => match rt.heap_type {
                            WasmHeapType::Func | WasmHeapType::Extern => {
                                self.masm.store_ptr(*reg, addr)?;
                            }
                            _ => bail!(CodeGenError::unsupported_wasm_type()),
                        },
                    }
                }
                // Stack-passed arguments are already in memory; nothing to
                // spill.
                _ => {}
            }
        }
        Ok(())
    }
}

impl<'a, 'translation, 'data, M> CodeGen<'a, 'translation, 'data, M, Emission>
where
    M: MacroAssembler,
{
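    /// Entry point for code generation during the emission phase: emits the
    /// function body and then the function end.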
    pub fn emit(
        &mut self,
        body: BinaryReader<'a>,
        validator: &mut FuncValidator<ValidatorResources>,
    ) -> Result<()> {
        self.emit_body(body, validator)
            .and_then(|_| self.emit_end())?;

        Ok(())
    }

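    /// Pops the innermost control frame, returning an error if the control
    /// frame stack is unexpectedly empty.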
    pub fn pop_control_frame(&mut self) -> Result<ControlStackFrame> {
        self.control_frames
            .pop()
            .ok_or_else(|| anyhow!(CodeGenError::control_frame_expected()))
    }

    pub fn source_loc_from(&mut self, loc: SourceLoc) -> RelSourceLoc {
        if self.source_location.base.is_none() && !loc.is_default() {
            self.source_location.base = Some(loc);
        }

        RelSourceLoc::from_base_offset(self.source_location.base.unwrap_or_default(), loc)
    }

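    /// Handles an `else` operator reached while the current code state is
    /// unreachable: if the enclosing `if` recorded that the alternative
    /// sequence is reachable, reachability is restored and the else branch is
    /// bound.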
    pub fn handle_unreachable_else(&mut self) -> Result<()> {
        let frame = self
            .control_frames
            .last_mut()
            .ok_or_else(|| CodeGenError::control_frame_expected())?;
        ensure!(frame.is_if(), CodeGenError::if_control_frame_expected());
        if frame.is_next_sequence_reachable() {
            self.context.reachable = true;
            frame.ensure_stack_state(self.masm, &mut self.context)?;
            frame.bind_else(self.masm, &mut self.context)?;
        }
        Ok(())
    }

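    /// Handles an `end` operator reached while the current code state is
    /// unreachable, restoring reachability and binding the end of the frame
    /// when the code following it is reachable.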
    pub fn handle_unreachable_end(&mut self) -> Result<()> {
        let mut frame = self.pop_control_frame()?;
        let is_outermost = self.control_frames.len() == 0;

        if frame.is_next_sequence_reachable() {
            self.context.reachable = true;
            frame.ensure_stack_state(self.masm, &mut self.context)?;
            frame.bind_end(self.masm, &mut self.context)
        } else if is_outermost {
            frame.ensure_stack_state(self.masm, &mut self.context)
        } else {
            Ok(())
        }
    }

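    /// Emits the function body: inserts the optional fuel and epoch checks,
    /// pushes the outermost block frame, and then visits every operator,
    /// validating each one before it is lowered.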
    fn emit_body(
        &mut self,
        body: BinaryReader<'a>,
        validator: &mut FuncValidator<ValidatorResources>,
    ) -> Result<()> {
        self.maybe_emit_fuel_check()?;

        self.maybe_emit_epoch_check()?;

        self.control_frames.push(ControlStackFrame::block(
            BlockSig::from_sig(self.sig.clone()),
            self.masm,
            &mut self.context,
        )?);

        if self.sig.params.has_retptr() {
            self.sig
                .results
                .set_ret_area(RetArea::slot(self.context.frame.results_base_slot.unwrap()));
        }

        let mut ops = OperatorsReader::new(body);
        while !ops.eof() {
            let offset = ops.original_position();
            ops.visit_operator(&mut ValidateThenVisit(
                validator.simd_visitor(offset),
                self,
                offset,
            ))??;
        }
        ops.finish()?;
        return Ok(());

        struct ValidateThenVisit<'a, T, U>(T, &'a mut U, usize);

        macro_rules! validate_then_visit {
            ($( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident $ann:tt)*) => {
                $(
                    fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output {
                        self.0.$visit($($($arg.clone()),*)?)?;
                        let op = Operator::$op $({ $($arg: $arg.clone()),* })?;
                        if self.1.visit(&op) {
                            self.1.before_visit_op(&op, self.2)?;
                            let res = self.1.$visit($($($arg),*)?)?;
                            self.1.after_visit_op()?;
                            Ok(res)
                        } else {
                            Ok(())
                        }
                    }
                )*
            };
        }

        fn visit_op_when_unreachable(op: &Operator) -> bool {
            use Operator::*;
            match op {
                If { .. } | Block { .. } | Loop { .. } | Else | End => true,
                _ => false,
            }
        }

        trait VisitorHooks {
            fn before_visit_op(&mut self, operator: &Operator, offset: usize) -> Result<()>;
            fn after_visit_op(&mut self) -> Result<()>;

            fn visit(&self, op: &Operator) -> bool;
        }

        impl<'a, 'translation, 'data, M: MacroAssembler> VisitorHooks
            for CodeGen<'a, 'translation, 'data, M, Emission>
        {
            fn visit(&self, op: &Operator) -> bool {
                self.context.reachable || visit_op_when_unreachable(op)
            }

            fn before_visit_op(&mut self, operator: &Operator, offset: usize) -> Result<()> {
                self.source_location_before_visit_op(offset)?;

                if self.tunables.consume_fuel {
                    self.fuel_before_visit_op(operator)?;
                }
                Ok(())
            }

            fn after_visit_op(&mut self) -> Result<()> {
                self.source_location_after_visit_op()
            }
        }

        impl<'a, T, U> VisitOperator<'a> for ValidateThenVisit<'_, T, U>
        where
            T: VisitSimdOperator<'a, Output = wasmparser::Result<()>>,
            U: VisitSimdOperator<'a, Output = Result<()>> + VisitorHooks,
        {
            type Output = U::Output;

            fn simd_visitor(
                &mut self,
            ) -> Option<&mut dyn VisitSimdOperator<'a, Output = Self::Output>>
            where
                T:,
            {
                Some(self)
            }

            wasmparser::for_each_visit_operator!(validate_then_visit);
        }

        impl<'a, T, U> VisitSimdOperator<'a> for ValidateThenVisit<'_, T, U>
        where
            T: VisitSimdOperator<'a, Output = wasmparser::Result<()>>,
            U: VisitSimdOperator<'a, Output = Result<()>> + VisitorHooks,
        {
            wasmparser::for_each_visit_simd_operator!(validate_then_visit);
        }
    }

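    /// Emits the type check for an indirect call: loads the caller's expected
    /// type id from the vmctx type-ids array, loads the callee's type id from
    /// the funcref, and traps with `TRAP_BAD_SIGNATURE` if they differ.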
    pub fn emit_typecheck_funcref(
        &mut self,
        funcref_ptr: Reg,
        type_index: TypeIndex,
    ) -> Result<()> {
        let ptr_size: OperandSize = self.env.ptr_type().try_into()?;
        let sig_index_bytes = self.env.vmoffsets.size_of_vmshared_type_index();
        let sig_size = OperandSize::from_bytes(sig_index_bytes);
        let sig_index = self.env.translation.module.types[type_index].unwrap_module_type_index();
        let sig_offset = sig_index
            .as_u32()
            .checked_mul(sig_index_bytes.into())
            .unwrap();
        let signatures_base_offset = self.env.vmoffsets.ptr.vmctx_type_ids_array();
        let funcref_sig_offset = self.env.vmoffsets.ptr.vm_func_ref_type_index();
        let caller_id = self.context.any_gpr(self.masm)?;

        self.masm.with_scratch::<IntScratch, _>(|masm, scratch| {
            masm.load(
                masm.address_at_vmctx(signatures_base_offset.into())?,
                scratch.writable(),
                ptr_size,
            )?;

            masm.load(
                masm.address_at_reg(scratch.inner(), sig_offset)?,
                writable!(caller_id),
                sig_size,
            )
        })?;

        let callee_id = self.context.any_gpr(self.masm)?;
        self.masm.load(
            self.masm
                .address_at_reg(funcref_ptr, funcref_sig_offset.into())?,
            writable!(callee_id),
            sig_size,
        )?;

        self.masm
            .cmp(caller_id, callee_id.into(), OperandSize::S32)?;
        self.masm.trapif(IntCmpKind::Ne, TRAP_BAD_SIGNATURE)?;
        self.context.free_reg(callee_id);
        self.context.free_reg(caller_id);
        anyhow::Ok(())
    }

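    /// Emits the function end: handles the remaining ABI results (or resets
    /// the stack if the end is unreachable), asserts that the value stack is
    /// empty, frees the locals area, and emits the epilogue.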
    fn emit_end(&mut self) -> Result<()> {
        let base = SPOffset::from_u32(self.context.frame.locals_size);
        self.masm.start_source_loc(Default::default())?;
        if self.context.reachable {
            ControlStackFrame::pop_abi_results_impl(
                &mut self.sig.results,
                &mut self.context,
                self.masm,
                |results, _, _| Ok(results.ret_area().copied()),
            )?;
        } else {
            self.context.truncate_stack_to(0)?;
            self.masm.reset_stack_pointer(base)?;
        }
        ensure!(
            self.context.stack.len() == 0,
            CodeGenError::unexpected_value_in_value_stack()
        );
        self.masm.free_stack(self.context.frame.locals_size)?;
        self.masm.epilogue()?;
        self.masm.end_source_loc()?;
        Ok(())
    }

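    /// Emits a local set: pops the source value into a register and stores it
    /// into the local's slot, spilling first if the value stack holds a
    /// latent reference to the local. Returns the source register so that
    /// `local.tee` can reuse it.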
    pub fn emit_set_local(&mut self, index: u32) -> Result<TypedReg> {
        if self.context.stack.contains_latent_local(index) {
            self.context.spill(self.masm)?;
        }
        let src = self.context.pop_to_reg(self.masm, None)?;
        let (ty, addr) = self.context.frame.get_local_address(index, self.masm)?;
        self.masm
            .store(RegImm::reg(src.reg), addr, ty.try_into()?)?;

        Ok(src)
    }

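    /// Resolves the address of a global: for imported globals the definition
    /// pointer is loaded into a register, otherwise the global lives at a
    /// fixed offset from the vmctx. Returns the global's type, base register,
    /// and offset.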
    pub fn emit_get_global_addr(&mut self, index: GlobalIndex) -> Result<(WasmValType, Reg, u32)> {
        let data = self.env.resolve_global(index);

        if data.imported {
            let global_base = self.masm.address_at_reg(vmctx!(M), data.offset)?;
            let dst = self.context.any_gpr(self.masm)?;
            self.masm.load_ptr(global_base, writable!(dst))?;
            Ok((data.ty, dst, 0))
        } else {
            Ok((data.ty, vmctx!(M), data.offset))
        }
    }

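    /// Emits a lazily-initialized `table.get` for funcref tables: loads the
    /// table element and, if it is null (uninitialized), calls the
    /// `table_get_lazy_init_func_ref` builtin; otherwise the funcref bits are
    /// masked with `FUNCREF_MASK`.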
    pub fn emit_lazy_init_funcref(&mut self, table_index: TableIndex) -> Result<()> {
        assert!(self.tunables.table_lazy_init, "unsupported eager init");
        let table_data = self.env.resolve_table_data(table_index);
        let ptr_type = self.env.ptr_type();
        let builtin = self
            .env
            .builtins
            .table_get_lazy_init_func_ref::<M::ABI, M::Ptr>()?;

        self.context.spill(self.masm)?;
        let elem_value: Reg = self.context.reg(
            builtin.sig().results.unwrap_singleton().unwrap_reg(),
            self.masm,
        )?;

        let index = self.context.pop_to_reg(self.masm, None)?;
        let base = self.context.any_gpr(self.masm)?;

        let elem_addr = self.emit_compute_table_elem_addr(index.into(), base, &table_data)?;
        self.masm.load_ptr(elem_addr, writable!(elem_value))?;
        self.context.free_reg(base);

        let (defined, cont) = (self.masm.get_label()?, self.masm.get_label()?);

        self.context
            .stack
            .extend([table_index.as_u32().try_into().unwrap(), index.into()]);

        self.masm.branch(
            IntCmpKind::Ne,
            elem_value,
            elem_value.into(),
            defined,
            ptr_type.try_into()?,
        )?;
        self.context.free_reg(elem_value);
        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(builtin.clone()),
        )?;

        let top = self
            .context
            .stack
            .peek()
            .ok_or_else(|| CodeGenError::missing_values_in_stack())?;
        let top = top.unwrap_reg();
        ensure!(
            top.reg == elem_value,
            CodeGenError::table_element_value_expected()
        );
        self.masm.jmp(cont)?;

        self.masm.bind(defined)?;
        assert_eq!(FUNCREF_MASK as isize, -2);
        let imm = RegImm::i64(-2);
        let dst = top.into();
        self.masm
            .and(writable!(dst), dst, imm, top.ty.try_into()?)?;

        self.masm.bind(cont)
    }

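    /// Computes the effective address of a linear memory access, emitting the
    /// bounds check required by the configured memory style. Returns `None`
    /// when the access is statically out of bounds, in which case an
    /// unconditional trap has already been emitted and the current state is
    /// marked unreachable.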
    pub fn emit_compute_heap_address(
        &mut self,
        memarg: &MemArg,
        access_size: OperandSize,
    ) -> Result<Option<Reg>> {
        let ptr_size: OperandSize = self.env.ptr_type().try_into()?;
        let enable_spectre_mitigation = self.env.heap_access_spectre_mitigation();
        let add_offset_and_access_size = |offset: ImmOffset, access_size: OperandSize| {
            (access_size.bytes() as u64) + (offset.as_u32() as u64)
        };

        let memory_index = MemoryIndex::from_u32(memarg.memory);
        let heap = self.env.resolve_heap(memory_index);
        let index = Index::from_typed_reg(self.context.pop_to_reg(self.masm, None)?);
        let offset = bounds::ensure_index_and_offset(
            self.masm,
            index,
            memarg.offset,
            heap.index_type().try_into()?,
        )?;
        let offset_with_access_size = add_offset_and_access_size(offset, access_size);

        let can_elide_bounds_check = heap
            .memory
            .can_elide_bounds_check(self.tunables, self.env.page_size_log2);

        let addr = if offset_with_access_size > heap.memory.maximum_byte_size().unwrap_or(u64::MAX)
        {
            // The access is statically out of bounds: emit an unconditional
            // trap and mark the rest of the code as unreachable.
            self.emit_fuel_increment()?;
            self.masm.trap(TrapCode::HEAP_OUT_OF_BOUNDS)?;
            self.context.reachable = false;
            None
        } else if !can_elide_bounds_check {
            // A dynamic bounds check is required: compare
            // `index + offset + access_size` against the heap's current
            // bounds.
            let bounds = bounds::load_dynamic_heap_bounds::<_>(
                &mut self.context,
                self.masm,
                &heap,
                ptr_size,
            )?;

            let index_reg = index.as_typed_reg().reg;
            let index_offset_and_access_size = self.context.any_gpr(self.masm)?;

            self.masm.mov(
                writable!(index_offset_and_access_size),
                index_reg.into(),
                heap.index_type().try_into()?,
            )?;
            self.masm.checked_uadd(
                writable!(index_offset_and_access_size),
                index_offset_and_access_size,
                RegImm::i64(offset_with_access_size as i64),
                ptr_size,
                TrapCode::HEAP_OUT_OF_BOUNDS,
            )?;

            let addr = bounds::load_heap_addr_checked(
                self.masm,
                &mut self.context,
                ptr_size,
                &heap,
                enable_spectre_mitigation,
                bounds,
                index,
                offset,
                |masm, bounds, _| {
                    let bounds_reg = bounds.as_typed_reg().reg;
                    masm.cmp(
                        index_offset_and_access_size,
                        bounds_reg.into(),
                        ptr_size,
                    )?;
                    Ok(IntCmpKind::GtU)
                },
            )?;
            self.context.free_reg(bounds.as_typed_reg().reg);
            self.context.free_reg(index_offset_and_access_size);
            Some(addr)
        } else if u64::from(u32::MAX)
            <= self.tunables.memory_reservation + self.tunables.memory_guard_size
                - offset_with_access_size
        {
            // The entire 32-bit index space fits within the memory
            // reservation plus guard region, so no explicit check is needed.
            assert!(can_elide_bounds_check);
            assert!(heap.index_type() == WasmValType::I32);
            let addr = self.context.any_gpr(self.masm)?;
            bounds::load_heap_addr_unchecked(self.masm, &heap, index, offset, addr, ptr_size)?;
            Some(addr)
        } else {
            // Static memory with a reservation smaller than the index space:
            // compare the index against the statically-known, adjusted bound.
            assert!(can_elide_bounds_check);
            assert!(heap.index_type() == WasmValType::I32);
            let bounds = Bounds::from_u64(self.tunables.memory_reservation);
            let addr = bounds::load_heap_addr_checked(
                self.masm,
                &mut self.context,
                ptr_size,
                &heap,
                enable_spectre_mitigation,
                bounds,
                index,
                offset,
                |masm, bounds, index| {
                    let adjusted_bounds = bounds.as_u64() - offset_with_access_size;
                    let index_reg = index.as_typed_reg().reg;
                    masm.cmp(
                        index_reg,
                        RegImm::i64(adjusted_bounds as i64),
                        ptr_size,
                    )?;
                    Ok(IntCmpKind::GtU)
                },
            )?;
            Some(addr)
        };

        self.context.free_reg(index.as_typed_reg().reg);
        Ok(addr)
    }

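    /// Emits an alignment check for atomic accesses wider than one byte: the
    /// effective address must be a multiple of the access size, otherwise a
    /// `TRAP_HEAP_MISALIGNED` trap is raised.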
    fn emit_check_align(&mut self, memarg: &MemArg, size: OperandSize) -> Result<()> {
        if size.bytes() > 1 {
            let addr = *self
                .context
                .stack
                .peek()
                .ok_or_else(|| CodeGenError::missing_values_in_stack())?;
            let tmp = self.context.any_gpr(self.masm)?;
            self.context.move_val_to_reg(&addr, tmp, self.masm)?;

            if memarg.offset != 0 {
                self.masm.add(
                    writable!(tmp),
                    tmp,
                    RegImm::Imm(Imm::I64(memarg.offset)),
                    size,
                )?;
            }

            self.masm.and(
                writable!(tmp),
                tmp,
                RegImm::Imm(Imm::I32(size.bytes() - 1)),
                size,
            )?;

            self.masm.cmp(tmp, RegImm::Imm(Imm::i64(0)), size)?;
            self.masm.trapif(IntCmpKind::Ne, TRAP_HEAP_MISALIGNED)?;
            self.context.free_reg(tmp);
        }

        Ok(())
    }

    pub fn emit_compute_heap_address_align_checked(
        &mut self,
        memarg: &MemArg,
        access_size: OperandSize,
    ) -> Result<Option<Reg>> {
        self.emit_check_align(memarg, access_size)?;
        self.emit_compute_heap_address(memarg, access_size)
    }

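    /// Emits a Wasm load: computes the heap address, including the bounds and
    /// alignment checks required by the load kind, loads the value into a
    /// destination register of the target type, and pushes the result onto
    /// the value stack.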
    pub fn emit_wasm_load(
        &mut self,
        arg: &MemArg,
        target_type: WasmValType,
        kind: LoadKind,
    ) -> Result<()> {
        let emit_load = |this: &mut Self, dst, addr, kind| -> Result<()> {
            let src = this.masm.address_at_reg(addr, 0)?;
            this.masm.wasm_load(src, writable!(dst), kind)?;
            this.context
                .stack
                .push(TypedReg::new(target_type, dst).into());
            this.context.free_reg(addr);
            Ok(())
        };

        match kind {
            LoadKind::VectorLane(_) => {
                let dst = self.context.pop_to_reg(self.masm, None)?;
                let addr = self.emit_compute_heap_address(&arg, kind.derive_operand_size())?;
                if let Some(addr) = addr {
                    emit_load(self, dst.reg, addr, kind)?;
                } else {
                    self.context.free_reg(dst);
                }
            }
            _ => {
                let maybe_addr = match kind {
                    LoadKind::Atomic(_, _) => self.emit_compute_heap_address_align_checked(
                        &arg,
                        kind.derive_operand_size(),
                    )?,
                    _ => self.emit_compute_heap_address(&arg, kind.derive_operand_size())?,
                };

                if let Some(addr) = maybe_addr {
                    let dst = match target_type {
                        WasmValType::I32 | WasmValType::I64 => self.context.any_gpr(self.masm)?,
                        WasmValType::F32 | WasmValType::F64 => self.context.any_fpr(self.masm)?,
                        WasmValType::V128 => self.context.reg_for_type(target_type, self.masm)?,
                        _ => bail!(CodeGenError::unsupported_wasm_type()),
                    };

                    emit_load(self, dst, addr, kind)?;
                }
            }
        }

        Ok(())
    }

    pub fn emit_wasm_store(&mut self, arg: &MemArg, kind: StoreKind) -> Result<()> {
        let src = self.context.pop_to_reg(self.masm, None)?;

        let maybe_addr = match kind {
            StoreKind::Atomic(size) => self.emit_compute_heap_address_align_checked(&arg, size)?,
            StoreKind::Operand(size) | StoreKind::VectorLane(LaneSelector { size, .. }) => {
                self.emit_compute_heap_address(&arg, size)?
            }
        };

        if let Some(addr) = maybe_addr {
            self.masm
                .wasm_store(src.reg, self.masm.address_at_reg(addr, 0)?, kind)?;

            self.context.free_reg(addr);
        }
        self.context.free_reg(src);

        Ok(())
    }

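    /// Computes the address of a table element: bounds-checks the index
    /// against the table's current length (trapping with
    /// `TRAP_TABLE_OUT_OF_BOUNDS` on failure), scales it by the element size,
    /// and returns the resulting element address, applying a conditional move
    /// when Spectre mitigation for table accesses is enabled.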
    pub fn emit_compute_table_elem_addr(
        &mut self,
        index: Reg,
        base: Reg,
        table_data: &TableData,
    ) -> Result<M::Address> {
        let bound = self.context.any_gpr(self.masm)?;
        let tmp = self.context.any_gpr(self.masm)?;
        let ptr_size: OperandSize = self.env.ptr_type().try_into()?;

        if let Some(offset) = table_data.import_from {
            self.masm
                .load_ptr(self.masm.address_at_vmctx(offset)?, writable!(base))?;
        } else {
            self.masm.mov(writable!(base), vmctx!(M).into(), ptr_size)?;
        };

        let bound_addr = self
            .masm
            .address_at_reg(base, table_data.current_elems_offset)?;
        let bound_size = table_data.current_elements_size;
        self.masm.load(bound_addr, writable!(bound), bound_size)?;
        self.masm.cmp(index, bound.into(), bound_size)?;
        self.masm
            .trapif(IntCmpKind::GeU, TRAP_TABLE_OUT_OF_BOUNDS)?;

        self.masm.with_scratch::<IntScratch, _>(|masm, scratch| {
            masm.mov(scratch.writable(), index.into(), bound_size)?;
            masm.mul(
                scratch.writable(),
                scratch.inner(),
                RegImm::i32(table_data.element_size.bytes() as i32),
                table_data.element_size,
            )?;
            masm.load_ptr(
                masm.address_at_reg(base, table_data.offset)?,
                writable!(base),
            )?;
            masm.mov(writable!(tmp), base.into(), ptr_size)?;
            masm.add(writable!(base), base, scratch.inner().into(), ptr_size)
        })?;
        if self.env.table_access_spectre_mitigation() {
            self.masm.cmp(index, bound.into(), OperandSize::S32)?;
            self.masm
                .cmov(writable!(base), tmp, IntCmpKind::GeU, ptr_size)?;
        }
        self.context.free_reg(bound);
        self.context.free_reg(tmp);
        self.masm.address_at_reg(base, 0)
    }

    pub fn emit_compute_table_size(&mut self, table_data: &TableData) -> Result<()> {
        let size = self.context.any_gpr(self.masm)?;
        let ptr_size: OperandSize = self.env.ptr_type().try_into()?;

        self.masm.with_scratch::<IntScratch, _>(|masm, scratch| {
            if let Some(offset) = table_data.import_from {
                masm.load_ptr(masm.address_at_vmctx(offset)?, scratch.writable())?;
            } else {
                masm.mov(scratch.writable(), vmctx!(M).into(), ptr_size)?;
            };

            let size_addr =
                masm.address_at_reg(scratch.inner(), table_data.current_elems_offset)?;
            masm.load(size_addr, writable!(size), table_data.current_elements_size)
        })?;

        self.context.stack.push(TypedReg::i32(size).into());
        Ok(())
    }

    pub fn emit_compute_memory_size(&mut self, heap_data: &HeapData) -> Result<()> {
        let size_reg = self.context.any_gpr(self.masm)?;

        self.masm.with_scratch::<IntScratch, _>(|masm, scratch| {
            let base = if let Some(offset) = heap_data.import_from {
                masm.load_ptr(masm.address_at_vmctx(offset)?, scratch.writable())?;
                scratch.inner()
            } else {
                vmctx!(M)
            };

            let size_addr = masm.address_at_reg(base, heap_data.current_length_offset)?;
            masm.load_ptr(size_addr, writable!(size_reg))
        })?;
        let dst = TypedReg::new(heap_data.index_type(), size_reg);
        let pow = heap_data.memory.page_size_log2;
        self.masm.shift_ir(
            writable!(dst.reg),
            Imm::i32(pow as i32),
            dst.into(),
            ShiftKind::ShrU,
            heap_data.index_type().try_into()?,
        )?;
        self.context.stack.push(dst.into());
        Ok(())
    }

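    /// When fuel consumption is enabled, emits a check of the consumed fuel
    /// counter against zero and calls the `out_of_gas` builtin when the store
    /// has run out of fuel.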
    pub fn maybe_emit_fuel_check(&mut self) -> Result<()> {
        if !self.tunables.consume_fuel {
            return Ok(());
        }

        self.emit_fuel_increment()?;
        let out_of_fuel = self.env.builtins.out_of_gas::<M::ABI, M::Ptr>()?;
        let fuel_reg = self.context.without::<Result<Reg>, M, _>(
            &out_of_fuel.sig().regs,
            self.masm,
            |cx, masm| cx.any_gpr(masm),
        )??;

        self.emit_load_fuel_consumed(fuel_reg)?;

        let continuation = self.masm.get_label()?;

        self.context.spill(self.masm)?;
        self.masm.branch(
            IntCmpKind::LtS,
            fuel_reg,
            RegImm::i64(0),
            continuation,
            OperandSize::S64,
        )?;
        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(out_of_fuel.clone()),
        )?;
        self.context.pop_and_free(self.masm)?;

        self.masm.bind(continuation)?;
        self.context.free_reg(fuel_reg);

        Ok(())
    }

    fn emit_load_fuel_consumed(&mut self, fuel_reg: Reg) -> Result<()> {
        let store_context_offset = self.env.vmoffsets.ptr.vmctx_store_context();
        let fuel_offset = self.env.vmoffsets.ptr.vmstore_context_fuel_consumed();
        self.masm.load_ptr(
            self.masm
                .address_at_vmctx(u32::from(store_context_offset))?,
            writable!(fuel_reg),
        )?;

        self.masm.load(
            self.masm.address_at_reg(fuel_reg, u32::from(fuel_offset))?,
            writable!(fuel_reg),
            OperandSize::S64,
        )
    }

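    /// When epoch interruption is enabled, emits a comparison of the current
    /// epoch counter against the store's deadline and calls the `new_epoch`
    /// builtin once the deadline has been reached.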
    pub fn maybe_emit_epoch_check(&mut self) -> Result<()> {
        if !self.tunables.epoch_interruption {
            return Ok(());
        }

        let cont = self.masm.get_label()?;
        let new_epoch = self.env.builtins.new_epoch::<M::ABI, M::Ptr>()?;

        let (epoch_deadline_reg, epoch_counter_reg) =
            self.context.without::<Result<(Reg, Reg)>, M, _>(
                &new_epoch.sig().regs,
                self.masm,
                |cx, masm| Ok((cx.any_gpr(masm)?, cx.any_gpr(masm)?)),
            )??;

        self.emit_load_epoch_deadline_and_counter(epoch_deadline_reg, epoch_counter_reg)?;

        self.context.spill(self.masm)?;
        self.masm.branch(
            IntCmpKind::LtU,
            epoch_counter_reg,
            RegImm::reg(epoch_deadline_reg),
            cont,
            OperandSize::S64,
        )?;
        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(new_epoch.clone()),
        )?;
        self.visit_drop()?;

        self.masm.bind(cont)?;

        self.context.free_reg(epoch_deadline_reg);
        self.context.free_reg(epoch_counter_reg);
        Ok(())
    }

    fn emit_load_epoch_deadline_and_counter(
        &mut self,
        epoch_deadline_reg: Reg,
        epoch_counter_reg: Reg,
    ) -> Result<()> {
        let epoch_ptr_offset = self.env.vmoffsets.ptr.vmctx_epoch_ptr();
        let store_context_offset = self.env.vmoffsets.ptr.vmctx_store_context();
        let epoch_deadline_offset = self.env.vmoffsets.ptr.vmstore_context_epoch_deadline();

        self.masm.load_ptr(
            self.masm.address_at_vmctx(u32::from(epoch_ptr_offset))?,
            writable!(epoch_counter_reg),
        )?;

        self.masm.load(
            self.masm.address_at_reg(epoch_counter_reg, 0)?,
            writable!(epoch_counter_reg),
            OperandSize::S64,
        )?;

        self.masm.load_ptr(
            self.masm
                .address_at_vmctx(u32::from(store_context_offset))?,
            writable!(epoch_deadline_reg),
        )?;

        self.masm.load(
            self.masm
                .address_at_reg(epoch_deadline_reg, u32::from(epoch_deadline_offset))?,
            writable!(epoch_deadline_reg),
            OperandSize::S64,
        )
    }

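    /// Flushes the fuel accumulated since the last increment to the store's
    /// fuel-consumed field in memory; a no-op when no fuel is pending.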
    fn emit_fuel_increment(&mut self) -> Result<()> {
        let fuel_at_point = std::mem::replace(&mut self.fuel_consumed, 0);
        if fuel_at_point == 0 {
            return Ok(());
        }

        let store_context_offset = self.env.vmoffsets.ptr.vmctx_store_context();
        let fuel_offset = self.env.vmoffsets.ptr.vmstore_context_fuel_consumed();
        let limits_reg = self.context.any_gpr(self.masm)?;

        self.masm.load_ptr(
            self.masm
                .address_at_vmctx(u32::from(store_context_offset))?,
            writable!(limits_reg),
        )?;

        self.masm.with_scratch::<IntScratch, _>(|masm, scratch| {
            masm.load(
                masm.address_at_reg(limits_reg, u32::from(fuel_offset))?,
                scratch.writable(),
                OperandSize::S64,
            )?;

            masm.add(
                scratch.writable(),
                scratch.inner(),
                RegImm::i64(fuel_at_point),
                OperandSize::S64,
            )?;

            masm.store(
                scratch.inner().into(),
                masm.address_at_reg(limits_reg, u32::from(fuel_offset))?,
                OperandSize::S64,
            )
        })?;

        self.context.free_reg(limits_reg);

        Ok(())
    }

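    /// Accumulates the fuel cost of the operator about to be visited and
    /// flushes the accumulated fuel to memory before any operator that can
    /// transfer control out of the current sequence.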
    fn fuel_before_visit_op(&mut self, op: &Operator) -> Result<()> {
        if !self.context.reachable {
            ensure!(self.fuel_consumed == 0, CodeGenError::illegal_fuel_state())
        }

        self.fuel_consumed += match op {
            Operator::Nop | Operator::Drop => 0,
            Operator::Block { .. }
            | Operator::Loop { .. }
            | Operator::Unreachable
            | Operator::Return
            | Operator::Else
            | Operator::End => 0,
            _ => 1,
        };

        match op {
            Operator::Unreachable
            | Operator::Loop { .. }
            | Operator::If { .. }
            | Operator::Else { .. }
            | Operator::Br { .. }
            | Operator::BrIf { .. }
            | Operator::BrTable { .. }
            | Operator::End
            | Operator::Return
            | Operator::CallIndirect { .. }
            | Operator::Call { .. }
            | Operator::ReturnCall { .. }
            | Operator::ReturnCallIndirect { .. } => self.emit_fuel_increment(),
            _ => Ok(()),
        }
    }

    fn source_location_before_visit_op(&mut self, offset: usize) -> Result<()> {
        let loc = SourceLoc::new(offset as u32);
        let rel = self.source_loc_from(loc);
        self.source_location.current = self.masm.start_source_loc(rel)?;
        Ok(())
    }

    fn source_location_after_visit_op(&mut self) -> Result<()> {
        if self.masm.current_code_offset()? >= self.source_location.current.0 {
            self.masm.end_source_loc()?;
        }

        Ok(())
    }

    pub(crate) fn emit_atomic_rmw(
        &mut self,
        arg: &MemArg,
        op: RmwOp,
        size: OperandSize,
        extend: Option<Extend<Zero>>,
    ) -> Result<()> {
        let operand = self.context.pop_to_reg(self.masm, None)?;
        if let Some(addr) = self.emit_compute_heap_address_align_checked(arg, size)? {
            let src = self.masm.address_at_reg(addr, 0)?;
            self.context.stack.push(operand.into());
            self.masm
                .atomic_rmw(&mut self.context, src, size, op, UNTRUSTED_FLAGS, extend)?;
            self.context.free_reg(addr);
        }

        Ok(())
    }

    pub(crate) fn emit_atomic_cmpxchg(
        &mut self,
        arg: &MemArg,
        size: OperandSize,
        extend: Option<Extend<Zero>>,
    ) -> Result<()> {
        let replacement = self.context.pop_to_reg(self.masm, None)?;
        let expected = self.context.pop_to_reg(self.masm, None)?;

        if let Some(addr) = self.emit_compute_heap_address_align_checked(arg, size)? {
            self.context.stack.push(expected.into());
            self.context.stack.push(replacement.into());

            let src = self.masm.address_at_reg(addr, 0)?;
            self.masm
                .atomic_cas(&mut self.context, src, size, UNTRUSTED_FLAGS, extend)?;

            self.context.free_reg(addr);
        }
        Ok(())
    }

    #[cfg(not(feature = "threads"))]
    pub fn emit_atomic_wait(&mut self, _arg: &MemArg, _kind: AtomicWaitKind) -> Result<()> {
        Err(CodeGenError::unimplemented_wasm_instruction().into())
    }

    #[cfg(feature = "threads")]
    pub fn emit_atomic_wait(&mut self, arg: &MemArg, kind: AtomicWaitKind) -> Result<()> {
        let timeout = self.context.pop_to_reg(self.masm, None)?;
        let expected = self.context.pop_to_reg(self.masm, None)?;
        let addr = self.context.pop_to_reg(self.masm, None)?;

        self.context
            .stack
            .push(crate::stack::Val::I32(arg.memory as i32));

        if arg.offset != 0 {
            self.masm.add(
                writable!(addr.reg),
                addr.reg,
                RegImm::i64(arg.offset as i64),
                OperandSize::S64,
            )?;
        }

        self.context
            .stack
            .push(TypedReg::new(WasmValType::I64, addr.reg).into());
        self.context.stack.push(expected.into());
        self.context.stack.push(timeout.into());

        let builtin = match kind {
            AtomicWaitKind::Wait32 => self.env.builtins.memory_atomic_wait32::<M::ABI, M::Ptr>()?,
            AtomicWaitKind::Wait64 => self.env.builtins.memory_atomic_wait64::<M::ABI, M::Ptr>()?,
        };

        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(builtin.clone()),
        )?;

        Ok(())
    }

    #[cfg(not(feature = "threads"))]
    pub fn emit_atomic_notify(&mut self, _arg: &MemArg) -> Result<()> {
        Err(CodeGenError::unimplemented_wasm_instruction().into())
    }

    #[cfg(feature = "threads")]
    pub fn emit_atomic_notify(&mut self, arg: &MemArg) -> Result<()> {
        let count = self.context.pop_to_reg(self.masm, None)?;
        let addr = self.context.pop_to_reg(self.masm, None)?;

        self.context
            .stack
            .push(crate::stack::Val::I32(arg.memory as i32));

        if arg.offset != 0 {
            self.masm.add(
                writable!(addr.reg),
                addr.reg,
                RegImm::i64(arg.offset as i64),
                OperandSize::S64,
            )?;
        }

        self.context
            .stack
            .push(TypedReg::new(WasmValType::I64, addr.reg).into());
        self.context.stack.push(count.into());

        let builtin = self.env.builtins.memory_atomic_notify::<M::ABI, M::Ptr>()?;

        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(builtin.clone()),
        )?;

        Ok(())
    }
}

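/// Maps a Wasm relative branch depth onto an index into the control frame
/// stack, where depth 0 refers to the innermost frame.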
pub fn control_index(depth: u32, control_length: usize) -> Result<usize> {
    (control_length - 1)
        .checked_sub(depth as usize)
        .ok_or_else(|| anyhow!(CodeGenError::control_frame_expected()))
}