use crate::{
    abi::{scratch, vmctx, ABIOperand, ABISig, RetArea},
    codegen::BlockSig,
    isa::reg::{writable, Reg},
    masm::{
        Extend, Imm, IntCmpKind, LaneSelector, LoadKind, MacroAssembler, OperandSize, RegImm,
        RmwOp, SPOffset, ShiftKind, StoreKind, TrapCode, Zero, UNTRUSTED_FLAGS,
    },
    stack::TypedReg,
};
use anyhow::{anyhow, bail, ensure, Result};
use cranelift_codegen::{
    binemit::CodeOffset,
    ir::{RelSourceLoc, SourceLoc},
};
use smallvec::SmallVec;
use std::marker::PhantomData;
use wasmparser::{
    BinaryReader, FuncValidator, MemArg, Operator, ValidatorResources, VisitOperator,
    VisitSimdOperator,
};
use wasmtime_cranelift::{TRAP_BAD_SIGNATURE, TRAP_HEAP_MISALIGNED, TRAP_TABLE_OUT_OF_BOUNDS};
use wasmtime_environ::{
    GlobalIndex, MemoryIndex, PtrSize, TableIndex, Tunables, TypeIndex, WasmHeapType, WasmValType,
    FUNCREF_MASK,
};

mod context;
pub(crate) use context::*;
mod env;
pub use env::*;
mod call;
pub(crate) use call::*;
mod control;
pub(crate) use control::*;
mod builtin;
pub use builtin::*;
pub(crate) mod bounds;

use bounds::{Bounds, ImmOffset, Index};

mod phase;
pub(crate) use phase::*;

mod error;
pub(crate) use error::*;

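/// Tracks the source location mapping for the function being compiled: the
/// base [`SourceLoc`] of the function plus the last (code offset, relative
/// source location) pair emitted to the machine code buffer.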
#[derive(Default)]
pub(crate) struct SourceLocation {
    pub base: Option<SourceLoc>,
    pub current: (CodeOffset, RelSourceLoc),
}

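/// The size variant of a `memory.atomic.wait*` instruction.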
#[derive(Debug, Clone, Copy)]
pub(crate) enum AtomicWaitKind {
    Wait32,
    Wait64,
}

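/// Per-function code generation state, parameterized over the target
/// [`MacroAssembler`] implementation and the current [`CodeGenPhase`]
/// (prologue setup vs. body emission).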
pub(crate) struct CodeGen<'a, 'translation: 'a, 'data: 'translation, M, P>
where
    M: MacroAssembler,
    P: CodeGenPhase,
{
    pub sig: ABISig,

    pub context: CodeGenContext<'a, P>,

    pub env: FuncEnv<'a, 'translation, 'data, M::Ptr>,

    pub masm: &'a mut M,

    pub control_frames: SmallVec<[ControlStackFrame; 64]>,

    pub source_location: SourceLocation,

    pub tunables: &'a Tunables,

    pub fuel_consumed: i64,
    phase: PhantomData<P>,
}

impl<'a, 'translation, 'data, M> CodeGen<'a, 'translation, 'data, M, Prologue>
where
    M: MacroAssembler,
{
    pub fn new(
        tunables: &'a Tunables,
        masm: &'a mut M,
        context: CodeGenContext<'a, Prologue>,
        env: FuncEnv<'a, 'translation, 'data, M::Ptr>,
        sig: ABISig,
    ) -> CodeGen<'a, 'translation, 'data, M, Prologue> {
        Self {
            sig,
            context,
            masm,
            env,
            tunables,
            source_location: Default::default(),
            control_frames: Default::default(),
            fuel_consumed: 1,
            phase: PhantomData,
        }
    }

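    /// Emits the function prologue: pins the `vmctx` register, reserves and
    /// zeroes the stack space for the defined locals, spills register
    /// arguments into their slots, and saves the results-area pointer when
    /// the ABI signature carries one. Consumes the prologue-phase generator
    /// and returns it in the emission phase.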
    pub fn emit_prologue(mut self) -> Result<CodeGen<'a, 'translation, 'data, M, Emission>> {
        let vmctx = self
            .sig
            .params()
            .first()
            .ok_or_else(|| anyhow!(CodeGenError::vmcontext_arg_expected()))?
            .unwrap_reg()
            .into();

        self.masm.start_source_loc(Default::default())?;
        self.masm.prologue(vmctx)?;

        self.masm.mov(
            writable!(vmctx!(M)),
            vmctx.into(),
            self.env.ptr_type().try_into()?,
        )?;

        self.masm.reserve_stack(self.context.frame.locals_size)?;
        self.spill_register_arguments()?;

        let defined_locals_range = &self.context.frame.defined_locals_range;
        self.masm.zero_mem_range(defined_locals_range.as_range())?;

        if self.sig.params.has_retptr() {
            match self.sig.params.unwrap_results_area_operand() {
                ABIOperand::Reg { ty, reg, .. } => {
                    let results_base_slot = self.context.frame.results_base_slot.as_ref().unwrap();
                    ensure!(
                        results_base_slot.addressed_from_sp(),
                        CodeGenError::sp_addressing_expected(),
                    );
                    let addr = self.masm.local_address(results_base_slot)?;
                    self.masm.store((*reg).into(), addr, (*ty).try_into()?)?;
                }
                _ => {}
            }
        }

        self.masm.end_source_loc()?;

        Ok(CodeGen {
            sig: self.sig,
            context: self.context.for_emission(),
            masm: self.masm,
            env: self.env,
            tunables: self.tunables,
            source_location: self.source_location,
            control_frames: self.control_frames,
            fuel_consumed: self.fuel_consumed,
            phase: PhantomData,
        })
    }

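    /// Spills each register argument into its corresponding local slot in the
    /// function's frame.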
    fn spill_register_arguments(&mut self) -> Result<()> {
        use WasmValType::*;
        for (operand, slot) in self
            .sig
            .params_without_retptr()
            .iter()
            .zip(self.context.frame.locals())
        {
            match (operand, slot) {
                (ABIOperand::Reg { ty, reg, .. }, slot) => {
                    let addr = self.masm.local_address(slot)?;
                    match &ty {
                        I32 | I64 | F32 | F64 | V128 => {
                            self.masm.store((*reg).into(), addr, (*ty).try_into()?)?;
                        }
                        Ref(rt) => match rt.heap_type {
                            WasmHeapType::Func | WasmHeapType::Extern => {
                                self.masm.store_ptr((*reg).into(), addr)?;
                            }
                            _ => bail!(CodeGenError::unsupported_wasm_type()),
                        },
                    }
                }
                _ => {}
            }
        }
        Ok(())
    }
}

impl<'a, 'translation, 'data, M> CodeGen<'a, 'translation, 'data, M, Emission>
where
    M: MacroAssembler,
{
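    /// Emits the body of the Wasm function: validates and lowers each
    /// operator and then emits the function epilogue.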
    pub fn emit(
        &mut self,
        body: &mut BinaryReader<'a>,
        validator: &mut FuncValidator<ValidatorResources>,
    ) -> Result<()> {
        self.emit_body(body, validator)
            .and_then(|_| self.emit_end())?;

        Ok(())
    }

    pub fn pop_control_frame(&mut self) -> Result<ControlStackFrame> {
        self.control_frames
            .pop()
            .ok_or_else(|| anyhow!(CodeGenError::control_frame_expected()))
    }

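    /// Derives a [`RelSourceLoc`] from the given [`SourceLoc`], recording the
    /// first non-default location seen as the function's base source location.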
    pub fn source_loc_from(&mut self, loc: SourceLoc) -> RelSourceLoc {
        if self.source_location.base.is_none() && !loc.is_default() {
            self.source_location.base = Some(loc);
        }

        RelSourceLoc::from_base_offset(self.source_location.base.unwrap_or_default(), loc)
    }

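    /// Handles an `else` operator reached while in unreachable code. If the
    /// code following the `if` frame is reachable, reachability is restored
    /// and the stack state is reset before binding the else branch.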
    pub fn handle_unreachable_else(&mut self) -> Result<()> {
        let frame = self
            .control_frames
            .last_mut()
            .ok_or_else(|| CodeGenError::control_frame_expected())?;
        ensure!(frame.is_if(), CodeGenError::if_control_frame_expected());
        if frame.is_next_sequence_reachable() {
            self.context.reachable = true;
            frame.ensure_stack_state(self.masm, &mut self.context)?;
            frame.bind_else(self.masm, &mut self.context)?;
        }
        Ok(())
    }

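    /// Handles an `end` operator reached while in unreachable code, restoring
    /// reachability and binding the frame's exit label when the code that
    /// follows the frame is reachable.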
    pub fn handle_unreachable_end(&mut self) -> Result<()> {
        let mut frame = self.pop_control_frame()?;
        let is_outermost = self.control_frames.len() == 0;

        if frame.is_next_sequence_reachable() {
            self.context.reachable = true;
            frame.ensure_stack_state(self.masm, &mut self.context)?;
            frame.bind_end(self.masm, &mut self.context)
        } else if is_outermost {
            frame.ensure_stack_state(self.masm, &mut self.context)
        } else {
            Ok(())
        }
    }

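    /// Lowers the function body: emits the optional fuel and epoch checks,
    /// pushes the outermost block frame, and then visits every operator,
    /// validating each one before lowering it.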
    fn emit_body(
        &mut self,
        body: &mut BinaryReader<'a>,
        validator: &mut FuncValidator<ValidatorResources>,
    ) -> Result<()> {
        self.maybe_emit_fuel_check()?;

        self.maybe_emit_epoch_check()?;

        self.control_frames.push(ControlStackFrame::block(
            BlockSig::from_sig(self.sig.clone()),
            self.masm,
            &mut self.context,
        )?);

        if self.sig.params.has_retptr() {
            self.sig
                .results
                .set_ret_area(RetArea::slot(self.context.frame.results_base_slot.unwrap()));
        }

        while !body.eof() {
            let offset = body.original_position();
            body.visit_operator(&mut ValidateThenVisit(
                validator.simd_visitor(offset),
                self,
                offset,
            ))??;
        }
        validator.finish(body.original_position())?;
        return Ok(());

        struct ValidateThenVisit<'a, T, U>(T, &'a mut U, usize);

        macro_rules! validate_then_visit {
            ($( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident $ann:tt)*) => {
                $(
                    fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output {
                        self.0.$visit($($($arg.clone()),*)?)?;
                        let op = Operator::$op $({ $($arg: $arg.clone()),* })?;
                        if self.1.visit(&op) {
                            self.1.before_visit_op(&op, self.2)?;
                            let res = self.1.$visit($($($arg),*)?)?;
                            self.1.after_visit_op()?;
                            Ok(res)
                        } else {
                            Ok(())
                        }
                    }
                )*
            };
        }

        fn visit_op_when_unreachable(op: &Operator) -> bool {
            use Operator::*;
            match op {
                If { .. } | Block { .. } | Loop { .. } | Else | End => true,
                _ => false,
            }
        }

        trait VisitorHooks {
            fn before_visit_op(&mut self, operator: &Operator, offset: usize) -> Result<()>;
            fn after_visit_op(&mut self) -> Result<()>;

            fn visit(&self, op: &Operator) -> bool;
        }

        impl<'a, 'translation, 'data, M: MacroAssembler> VisitorHooks
            for CodeGen<'a, 'translation, 'data, M, Emission>
        {
            fn visit(&self, op: &Operator) -> bool {
                self.context.reachable || visit_op_when_unreachable(op)
            }

            fn before_visit_op(&mut self, operator: &Operator, offset: usize) -> Result<()> {
                self.source_location_before_visit_op(offset)?;

                if self.tunables.consume_fuel {
                    self.fuel_before_visit_op(operator)?;
                }
                Ok(())
            }

            fn after_visit_op(&mut self) -> Result<()> {
                self.source_location_after_visit_op()
            }
        }

        impl<'a, T, U> VisitOperator<'a> for ValidateThenVisit<'_, T, U>
        where
            T: VisitSimdOperator<'a, Output = wasmparser::Result<()>>,
            U: VisitSimdOperator<'a, Output = Result<()>> + VisitorHooks,
        {
            type Output = U::Output;

            fn simd_visitor(
                &mut self,
            ) -> Option<&mut dyn VisitSimdOperator<'a, Output = Self::Output>>
            where
                T:,
            {
                Some(self)
            }

            wasmparser::for_each_visit_operator!(validate_then_visit);
        }

        impl<'a, T, U> VisitSimdOperator<'a> for ValidateThenVisit<'_, T, U>
        where
            T: VisitSimdOperator<'a, Output = wasmparser::Result<()>>,
            U: VisitSimdOperator<'a, Output = Result<()>> + VisitorHooks,
        {
            wasmparser::for_each_visit_simd_operator!(validate_then_visit);
        }
    }

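    /// Emits the type check for an indirect call: loads the caller's expected
    /// type id and the callee's type id from the `VMFuncRef`, compares them,
    /// and traps with `TRAP_BAD_SIGNATURE` if they differ.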
    pub fn emit_typecheck_funcref(
        &mut self,
        funcref_ptr: Reg,
        type_index: TypeIndex,
    ) -> Result<()> {
        let ptr_size: OperandSize = self.env.ptr_type().try_into()?;
        let sig_index_bytes = self.env.vmoffsets.size_of_vmshared_type_index();
        let sig_size = OperandSize::from_bytes(sig_index_bytes);
        let sig_index = self.env.translation.module.types[type_index].unwrap_module_type_index();
        let sig_offset = sig_index
            .as_u32()
            .checked_mul(sig_index_bytes.into())
            .unwrap();
        let signatures_base_offset = self.env.vmoffsets.ptr.vmctx_type_ids_array();
        let scratch = scratch!(M);
        let funcref_sig_offset = self.env.vmoffsets.ptr.vm_func_ref_type_index();

        self.masm.load(
            self.masm.address_at_vmctx(signatures_base_offset.into())?,
            writable!(scratch),
            ptr_size,
        )?;

        let caller_id = self.context.any_gpr(self.masm)?;
        self.masm.load(
            self.masm.address_at_reg(scratch, sig_offset)?,
            writable!(caller_id),
            sig_size,
        )?;

        let callee_id = self.context.any_gpr(self.masm)?;
        self.masm.load(
            self.masm
                .address_at_reg(funcref_ptr, funcref_sig_offset.into())?,
            writable!(callee_id),
            sig_size,
        )?;

        self.masm
            .cmp(caller_id, callee_id.into(), OperandSize::S32)?;
        self.masm.trapif(IntCmpKind::Ne, TRAP_BAD_SIGNATURE)?;
        self.context.free_reg(callee_id);
        self.context.free_reg(caller_id);
        Ok(())
    }

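    /// Emits the function epilogue: pops the ABI results (or resets the stack
    /// pointer if the exit is unreachable), frees the locals' stack space, and
    /// emits the return sequence.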
    fn emit_end(&mut self) -> Result<()> {
        let base = SPOffset::from_u32(self.context.frame.locals_size);
        self.masm.start_source_loc(Default::default())?;
        if self.context.reachable {
            ControlStackFrame::pop_abi_results_impl(
                &mut self.sig.results,
                &mut self.context,
                self.masm,
                |results, _, _| Ok(results.ret_area().copied()),
            )?;
        } else {
            self.context.truncate_stack_to(0)?;
            self.masm.reset_stack_pointer(base)?;
        }
        ensure!(
            self.context.stack.len() == 0,
            CodeGenError::unexpected_value_in_value_stack()
        );
        self.masm.free_stack(self.context.frame.locals_size)?;
        self.masm.epilogue()?;
        self.masm.end_source_loc()?;
        Ok(())
    }

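    /// Emits the code for setting a local: spills any latent reads of the
    /// local first, pops the value into a register, stores it into the
    /// local's slot, and returns the register holding the value.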
    pub fn emit_set_local(&mut self, index: u32) -> Result<TypedReg> {
        if self.context.stack.contains_latent_local(index) {
            self.context.spill(self.masm)?;
        }
        let src = self.context.pop_to_reg(self.masm, None)?;
        let (ty, addr) = self.context.frame.get_local_address(index, self.masm)?;
        self.masm
            .store(RegImm::reg(src.reg), addr, ty.try_into()?)?;

        Ok(src)
    }

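    /// Resolves the address of the given global: returns its type, the base
    /// register holding the address (a loaded pointer for imported globals,
    /// `vmctx` otherwise), and the offset from that base.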
    pub fn emit_get_global_addr(&mut self, index: GlobalIndex) -> Result<(WasmValType, Reg, u32)> {
        let data = self.env.resolve_global(index);

        if data.imported {
            let global_base = self.masm.address_at_reg(vmctx!(M), data.offset)?;
            let dst = self.context.any_gpr(self.masm)?;
            self.masm.load_ptr(global_base, writable!(dst))?;
            Ok((data.ty, dst, 0))
        } else {
            Ok((data.ty, vmctx!(M), data.offset))
        }
    }

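    /// Emits a lazily-initialized `funcref` table element access: if the
    /// element is already initialized its `FUNCREF_MASK` bit is cleared,
    /// otherwise the `table_get_lazy_init_func_ref` builtin is called to
    /// initialize it.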
    pub fn emit_lazy_init_funcref(&mut self, table_index: TableIndex) -> Result<()> {
        assert!(self.tunables.table_lazy_init, "unsupported eager init");
        let table_data = self.env.resolve_table_data(table_index);
        let ptr_type = self.env.ptr_type();
        let builtin = self
            .env
            .builtins
            .table_get_lazy_init_func_ref::<M::ABI, M::Ptr>()?;

        self.context.spill(self.masm)?;
        let elem_value: Reg = self
            .context
            .reg(
                builtin.sig().results.unwrap_singleton().unwrap_reg(),
                self.masm,
            )?
            .into();

        let index = self.context.pop_to_reg(self.masm, None)?;
        let base = self.context.any_gpr(self.masm)?;

        let elem_addr = self.emit_compute_table_elem_addr(index.into(), base, &table_data)?;
        self.masm.load_ptr(elem_addr, writable!(elem_value))?;
        self.context.free_reg(base);

        let (defined, cont) = (self.masm.get_label()?, self.masm.get_label()?);

        self.context
            .stack
            .extend([table_index.as_u32().try_into().unwrap(), index.into()]);

        self.masm.branch(
            IntCmpKind::Ne,
            elem_value,
            elem_value.into(),
            defined,
            ptr_type.try_into()?,
        )?;
        self.context.free_reg(elem_value);
        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(builtin.clone()),
        )?;

        let top = self
            .context
            .stack
            .peek()
            .ok_or_else(|| CodeGenError::missing_values_in_stack())?;
        let top = top.unwrap_reg();
        ensure!(
            top.reg == elem_value,
            CodeGenError::table_element_value_expected()
        );
        self.masm.jmp(cont)?;

        self.masm.bind(defined)?;
        assert_eq!(FUNCREF_MASK as isize, -2);
        let imm = RegImm::i64(-2);
        let dst = top.into();
        self.masm
            .and(writable!(dst), dst, imm, top.ty.try_into()?)?;

        self.masm.bind(cont)
    }

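    /// Computes the effective heap address for the given `MemArg` and access
    /// size, emitting the bounds check required by the heap's configuration.
    /// Returns `None` when the access is statically out of bounds, in which
    /// case an unconditional trap has been emitted and the code that follows
    /// is unreachable.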
    pub fn emit_compute_heap_address(
        &mut self,
        memarg: &MemArg,
        access_size: OperandSize,
    ) -> Result<Option<Reg>> {
        let ptr_size: OperandSize = self.env.ptr_type().try_into()?;
        let enable_spectre_mitigation = self.env.heap_access_spectre_mitigation();
        let add_offset_and_access_size = |offset: ImmOffset, access_size: OperandSize| {
            (access_size.bytes() as u64) + (offset.as_u32() as u64)
        };

        let memory_index = MemoryIndex::from_u32(memarg.memory);
        let heap = self.env.resolve_heap(memory_index);
        let index = Index::from_typed_reg(self.context.pop_to_reg(self.masm, None)?);
        let offset = bounds::ensure_index_and_offset(
            self.masm,
            index,
            memarg.offset,
            heap.index_type().try_into()?,
        )?;
        let offset_with_access_size = add_offset_and_access_size(offset, access_size);

        let can_elide_bounds_check = heap
            .memory
            .can_elide_bounds_check(self.tunables, self.env.page_size_log2);

        let addr = if offset_with_access_size > heap.memory.maximum_byte_size().unwrap_or(u64::MAX)
        {
            // The access is statically out of bounds: emit an unconditional
            // trap and mark the rest of the code as unreachable.
            self.emit_fuel_increment()?;
            self.masm.trap(TrapCode::HEAP_OUT_OF_BOUNDS)?;
            self.context.reachable = false;
            None
        } else if !can_elide_bounds_check {
            // Dynamic bounds check: compare `index + offset + access_size`
            // against the heap's current byte length.
            let bounds = bounds::load_dynamic_heap_bounds::<_>(
                &mut self.context,
                self.masm,
                &heap,
                ptr_size,
            )?;

            let index_reg = index.as_typed_reg().reg;
            let index_offset_and_access_size = self.context.any_gpr(self.masm)?;

            self.masm.mov(
                writable!(index_offset_and_access_size),
                index_reg.into(),
                heap.index_type().try_into()?,
            )?;
            self.masm.checked_uadd(
                writable!(index_offset_and_access_size),
                index_offset_and_access_size,
                RegImm::i64(offset_with_access_size as i64),
                ptr_size,
                TrapCode::HEAP_OUT_OF_BOUNDS,
            )?;

            let addr = bounds::load_heap_addr_checked(
                self.masm,
                &mut self.context,
                ptr_size,
                &heap,
                enable_spectre_mitigation,
                bounds,
                index,
                offset,
                |masm, bounds, _| {
                    let bounds_reg = bounds.as_typed_reg().reg;
                    masm.cmp(
                        index_offset_and_access_size.into(),
                        bounds_reg.into(),
                        ptr_size,
                    )?;
                    Ok(IntCmpKind::GtU)
                },
            )?;
            self.context.free_reg(bounds.as_typed_reg().reg);
            self.context.free_reg(index_offset_and_access_size);
            Some(addr)
        } else if u64::from(u32::MAX)
            <= self.tunables.memory_reservation + self.tunables.memory_guard_size
                - offset_with_access_size
        {
            // The reservation plus guard region covers every possible 32-bit
            // index plus the static offset, so no check is needed.
            assert!(can_elide_bounds_check);
            assert!(heap.index_type() == WasmValType::I32);
            let addr = self.context.any_gpr(self.masm)?;
            bounds::load_heap_addr_unchecked(self.masm, &heap, index, offset, addr, ptr_size)?;
            Some(addr)
        } else {
            // Known static reservation: compare the index against the
            // reservation adjusted by the offset and access size.
            assert!(can_elide_bounds_check);
            assert!(heap.index_type() == WasmValType::I32);
            let bounds = Bounds::from_u64(self.tunables.memory_reservation);
            let addr = bounds::load_heap_addr_checked(
                self.masm,
                &mut self.context,
                ptr_size,
                &heap,
                enable_spectre_mitigation,
                bounds,
                index,
                offset,
                |masm, bounds, index| {
                    let adjusted_bounds = bounds.as_u64() - offset_with_access_size;
                    let index_reg = index.as_typed_reg().reg;
                    masm.cmp(
                        index_reg,
                        RegImm::i64(adjusted_bounds as i64),
                        ptr_size,
                    )?;
                    Ok(IntCmpKind::GtU)
                },
            )?;
            Some(addr)
        };

        self.context.free_reg(index.as_typed_reg().reg);
        Ok(addr)
    }

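    /// Emits an alignment check for the address at the top of the value
    /// stack, trapping with `TRAP_HEAP_MISALIGNED` when the address plus the
    /// static offset is not aligned to the access size.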
    fn emit_check_align(&mut self, memarg: &MemArg, size: OperandSize) -> Result<()> {
        if size.bytes() > 1 {
            let addr = *self
                .context
                .stack
                .peek()
                .ok_or_else(|| CodeGenError::missing_values_in_stack())?;
            let tmp = self.context.any_gpr(self.masm)?;
            self.context.move_val_to_reg(&addr, tmp, self.masm)?;

            if memarg.offset != 0 {
                self.masm.add(
                    writable!(tmp),
                    tmp,
                    RegImm::Imm(Imm::I64(memarg.offset)),
                    size,
                )?;
            }

            self.masm.and(
                writable!(tmp),
                tmp,
                RegImm::Imm(Imm::I32(size.bytes() - 1)),
                size,
            )?;

            self.masm.cmp(tmp, RegImm::Imm(Imm::i64(0)), size)?;
            self.masm.trapif(IntCmpKind::Ne, TRAP_HEAP_MISALIGNED)?;
            self.context.free_reg(tmp);
        }

        Ok(())
    }

    pub fn emit_compute_heap_address_align_checked(
        &mut self,
        memarg: &MemArg,
        access_size: OperandSize,
    ) -> Result<Option<Reg>> {
        self.emit_check_align(memarg, access_size)?;
        self.emit_compute_heap_address(memarg, access_size)
    }

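    /// Emits a Wasm load: computes the heap address (with an alignment check
    /// for atomic loads), loads the value into a destination register of the
    /// target type, and pushes the result onto the value stack.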
    pub fn emit_wasm_load(
        &mut self,
        arg: &MemArg,
        target_type: WasmValType,
        kind: LoadKind,
    ) -> Result<()> {
        let emit_load = |this: &mut Self, dst, addr, kind| -> Result<()> {
            let src = this.masm.address_at_reg(addr, 0)?;
            this.masm.wasm_load(src, writable!(dst), kind)?;
            this.context
                .stack
                .push(TypedReg::new(target_type, dst).into());
            this.context.free_reg(addr);
            Ok(())
        };

        match kind {
            LoadKind::VectorLane(_) => {
                let dst = self.context.pop_to_reg(self.masm, None)?;
                let addr = self.emit_compute_heap_address(&arg, kind.derive_operand_size())?;
                if let Some(addr) = addr {
                    emit_load(self, dst.reg, addr, kind)?;
                }
            }
            _ => {
                let maybe_addr = match kind {
                    LoadKind::Atomic(_, _) => self.emit_compute_heap_address_align_checked(
                        &arg,
                        kind.derive_operand_size(),
                    )?,
                    _ => self.emit_compute_heap_address(&arg, kind.derive_operand_size())?,
                };

                if let Some(addr) = maybe_addr {
                    let dst = match target_type {
                        WasmValType::I32 | WasmValType::I64 => self.context.any_gpr(self.masm)?,
                        WasmValType::F32 | WasmValType::F64 => self.context.any_fpr(self.masm)?,
                        WasmValType::V128 => self.context.reg_for_type(target_type, self.masm)?,
                        _ => bail!(CodeGenError::unsupported_wasm_type()),
                    };

                    emit_load(self, dst, addr, kind)?;
                }
            }
        }

        Ok(())
    }

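    /// Emits a Wasm store: pops the value to store, computes the heap address
    /// (with an alignment check for atomic stores), and stores the value.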
    pub fn emit_wasm_store(&mut self, arg: &MemArg, kind: StoreKind) -> Result<()> {
        let src = self.context.pop_to_reg(self.masm, None)?;

        let maybe_addr = match kind {
            StoreKind::Atomic(size) => self.emit_compute_heap_address_align_checked(&arg, size)?,
            StoreKind::Operand(size) | StoreKind::VectorLane(LaneSelector { size, .. }) => {
                self.emit_compute_heap_address(&arg, size)?
            }
        };

        if let Some(addr) = maybe_addr {
            self.masm
                .wasm_store(src.reg.into(), self.masm.address_at_reg(addr, 0)?, kind)?;

            self.context.free_reg(addr);
        }
        self.context.free_reg(src);

        Ok(())
    }

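    /// Computes the address of a table element: bounds checks the index
    /// against the table's current length, trapping with
    /// `TRAP_TABLE_OUT_OF_BOUNDS` on failure, and returns the element
    /// address, applying a conditional move when Spectre mitigation for table
    /// accesses is enabled.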
    pub fn emit_compute_table_elem_addr(
        &mut self,
        index: Reg,
        base: Reg,
        table_data: &TableData,
    ) -> Result<M::Address> {
        let scratch = scratch!(M);
        let bound = self.context.any_gpr(self.masm)?;
        let tmp = self.context.any_gpr(self.masm)?;
        let ptr_size: OperandSize = self.env.ptr_type().try_into()?;

        if let Some(offset) = table_data.import_from {
            self.masm
                .load_ptr(self.masm.address_at_vmctx(offset)?, writable!(base))?;
        } else {
            self.masm.mov(writable!(base), vmctx!(M).into(), ptr_size)?;
        };

        let bound_addr = self
            .masm
            .address_at_reg(base, table_data.current_elems_offset)?;
        let bound_size = table_data.current_elements_size;
        self.masm
            .load(bound_addr, writable!(bound), bound_size.into())?;
        self.masm.cmp(index, bound.into(), bound_size)?;
        self.masm
            .trapif(IntCmpKind::GeU, TRAP_TABLE_OUT_OF_BOUNDS)?;

        self.masm
            .mov(writable!(scratch), index.into(), bound_size)?;
        self.masm.mul(
            writable!(scratch),
            scratch,
            RegImm::i32(table_data.element_size.bytes() as i32),
            table_data.element_size,
        )?;
        self.masm.load_ptr(
            self.masm.address_at_reg(base, table_data.offset)?,
            writable!(base),
        )?;
        self.masm.mov(writable!(tmp), base.into(), ptr_size)?;
        self.masm
            .add(writable!(base), base, scratch.into(), ptr_size)?;
        if self.env.table_access_spectre_mitigation() {
            self.masm.cmp(index, bound.into(), OperandSize::S32)?;
            self.masm
                .cmov(writable!(base), tmp, IntCmpKind::GeU, ptr_size)?;
        }
        self.context.free_reg(bound);
        self.context.free_reg(tmp);
        self.masm.address_at_reg(base, 0)
    }

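    /// Loads the current number of elements of the given table and pushes it
    /// onto the value stack as an `i32`.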
    pub fn emit_compute_table_size(&mut self, table_data: &TableData) -> Result<()> {
        let scratch = scratch!(M);
        let size = self.context.any_gpr(self.masm)?;
        let ptr_size: OperandSize = self.env.ptr_type().try_into()?;

        if let Some(offset) = table_data.import_from {
            self.masm
                .load_ptr(self.masm.address_at_vmctx(offset)?, writable!(scratch))?;
        } else {
            self.masm
                .mov(writable!(scratch), vmctx!(M).into(), ptr_size)?;
        };

        let size_addr = self
            .masm
            .address_at_reg(scratch, table_data.current_elems_offset)?;
        self.masm.load(
            size_addr,
            writable!(size),
            table_data.current_elements_size.into(),
        )?;

        self.context.stack.push(TypedReg::i32(size).into());
        Ok(())
    }

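    /// Loads the current byte length of the given memory, converts it to a
    /// page count by shifting right by the memory's page size, and pushes the
    /// result onto the value stack.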
    pub fn emit_compute_memory_size(&mut self, heap_data: &HeapData) -> Result<()> {
        let size_reg = self.context.any_gpr(self.masm)?;
        let scratch = scratch!(M);

        let base = if let Some(offset) = heap_data.import_from {
            self.masm
                .load_ptr(self.masm.address_at_vmctx(offset)?, writable!(scratch))?;
            scratch
        } else {
            vmctx!(M)
        };

        let size_addr = self
            .masm
            .address_at_reg(base, heap_data.current_length_offset)?;
        self.masm.load_ptr(size_addr, writable!(size_reg))?;
        let dst = TypedReg::new(heap_data.index_type(), size_reg);
        let pow = heap_data.memory.page_size_log2;
        self.masm.shift_ir(
            writable!(dst.reg),
            pow as u64,
            dst.into(),
            ShiftKind::ShrU,
            heap_data.index_type().try_into()?,
        )?;
        self.context.stack.push(dst.into());
        Ok(())
    }

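    /// Emits a fuel check when fuel consumption is enabled: flushes the fuel
    /// consumed so far, loads the current `fuel_consumed` value from the
    /// store context, and calls the `out_of_gas` builtin unless that value is
    /// still negative.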
    pub fn maybe_emit_fuel_check(&mut self) -> Result<()> {
        if !self.tunables.consume_fuel {
            return Ok(());
        }

        self.emit_fuel_increment()?;
        let out_of_fuel = self.env.builtins.out_of_gas::<M::ABI, M::Ptr>()?;
        let fuel_reg = self.context.without::<Result<Reg>, M, _>(
            &out_of_fuel.sig().regs,
            self.masm,
            |cx, masm| cx.any_gpr(masm),
        )??;

        self.emit_load_fuel_consumed(fuel_reg)?;

        let continuation = self.masm.get_label()?;

        self.context.spill(self.masm)?;
        self.masm.branch(
            IntCmpKind::LtS,
            fuel_reg,
            RegImm::i64(0),
            continuation,
            OperandSize::S64,
        )?;
        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(out_of_fuel.clone()),
        )?;
        self.context.pop_and_free(self.masm)?;

        self.masm.bind(continuation)?;
        self.context.free_reg(fuel_reg);

        Ok(())
    }

    fn emit_load_fuel_consumed(&mut self, fuel_reg: Reg) -> Result<()> {
        let limits_offset = self.env.vmoffsets.ptr.vmctx_runtime_limits();
        let fuel_offset = self.env.vmoffsets.ptr.vmstore_context_fuel_consumed();
        self.masm.load_ptr(
            self.masm.address_at_vmctx(u32::from(limits_offset))?,
            writable!(fuel_reg),
        )?;

        self.masm.load(
            self.masm.address_at_reg(fuel_reg, u32::from(fuel_offset))?,
            writable!(fuel_reg),
            OperandSize::S64,
        )
    }

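    /// Emits an epoch interruption check when epoch interruption is enabled:
    /// loads the current epoch counter and deadline and calls the `new_epoch`
    /// builtin when the counter has reached the deadline.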
    pub fn maybe_emit_epoch_check(&mut self) -> Result<()> {
        if !self.tunables.epoch_interruption {
            return Ok(());
        }

        let cont = self.masm.get_label()?;
        let new_epoch = self.env.builtins.new_epoch::<M::ABI, M::Ptr>()?;

        let (epoch_deadline_reg, epoch_counter_reg) =
            self.context.without::<Result<(Reg, Reg)>, M, _>(
                &new_epoch.sig().regs,
                self.masm,
                |cx, masm| Ok((cx.any_gpr(masm)?, cx.any_gpr(masm)?)),
            )??;

        self.emit_load_epoch_deadline_and_counter(epoch_deadline_reg, epoch_counter_reg)?;

        self.context.spill(self.masm)?;
        self.masm.branch(
            IntCmpKind::LtU,
            epoch_counter_reg,
            RegImm::reg(epoch_deadline_reg),
            cont,
            OperandSize::S64,
        )?;
        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(new_epoch.clone()),
        )?;
        self.visit_drop()?;

        self.masm.bind(cont)?;

        self.context.free_reg(epoch_deadline_reg);
        self.context.free_reg(epoch_counter_reg);
        Ok(())
    }

    fn emit_load_epoch_deadline_and_counter(
        &mut self,
        epoch_deadline_reg: Reg,
        epoch_counter_reg: Reg,
    ) -> Result<()> {
        let epoch_ptr_offset = self.env.vmoffsets.ptr.vmctx_epoch_ptr();
        let runtime_limits_offset = self.env.vmoffsets.ptr.vmctx_runtime_limits();
        let epoch_deadline_offset = self.env.vmoffsets.ptr.vmstore_context_epoch_deadline();

        self.masm.load_ptr(
            self.masm.address_at_vmctx(u32::from(epoch_ptr_offset))?,
            writable!(epoch_counter_reg),
        )?;

        self.masm.load(
            self.masm.address_at_reg(epoch_counter_reg, 0)?,
            writable!(epoch_counter_reg),
            OperandSize::S64,
        )?;

        self.masm.load_ptr(
            self.masm
                .address_at_vmctx(u32::from(runtime_limits_offset))?,
            writable!(epoch_deadline_reg),
        )?;

        self.masm.load(
            self.masm
                .address_at_reg(epoch_deadline_reg, u32::from(epoch_deadline_offset))?,
            writable!(epoch_deadline_reg),
            OperandSize::S64,
        )
    }

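    /// Adds the fuel accumulated since the last increment to the
    /// `fuel_consumed` field in the store context and resets the in-memory
    /// accumulator to zero.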
    fn emit_fuel_increment(&mut self) -> Result<()> {
        let fuel_at_point = std::mem::replace(&mut self.fuel_consumed, 0);
        if fuel_at_point == 0 {
            return Ok(());
        }

        let limits_offset = self.env.vmoffsets.ptr.vmctx_runtime_limits();
        let fuel_offset = self.env.vmoffsets.ptr.vmstore_context_fuel_consumed();
        let limits_reg = self.context.any_gpr(self.masm)?;

        self.masm.load_ptr(
            self.masm.address_at_vmctx(u32::from(limits_offset))?,
            writable!(limits_reg),
        )?;

        self.masm.load(
            self.masm
                .address_at_reg(limits_reg, u32::from(fuel_offset))?,
            writable!(scratch!(M)),
            OperandSize::S64,
        )?;

        self.masm.add(
            writable!(scratch!(M)),
            scratch!(M),
            RegImm::i64(fuel_at_point),
            OperandSize::S64,
        )?;

        self.masm.store(
            scratch!(M).into(),
            self.masm
                .address_at_reg(limits_reg, u32::from(fuel_offset))?,
            OperandSize::S64,
        )?;

        self.context.free_reg(limits_reg);

        Ok(())
    }

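    /// Updates the fuel accumulator for the operator about to be visited and,
    /// for control-flow and call operators, flushes the accumulated fuel to
    /// memory before the operator is lowered.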
    fn fuel_before_visit_op(&mut self, op: &Operator) -> Result<()> {
        if !self.context.reachable {
            ensure!(self.fuel_consumed == 0, CodeGenError::illegal_fuel_state())
        }

        self.fuel_consumed += match op {
            Operator::Nop | Operator::Drop => 0,
            Operator::Block { .. }
            | Operator::Loop { .. }
            | Operator::Unreachable
            | Operator::Return
            | Operator::Else
            | Operator::End => 0,
            _ => 1,
        };

        match op {
            Operator::Unreachable
            | Operator::Loop { .. }
            | Operator::If { .. }
            | Operator::Else { .. }
            | Operator::Br { .. }
            | Operator::BrIf { .. }
            | Operator::BrTable { .. }
            | Operator::End
            | Operator::Return
            | Operator::CallIndirect { .. }
            | Operator::Call { .. }
            | Operator::ReturnCall { .. }
            | Operator::ReturnCallIndirect { .. } => self.emit_fuel_increment(),
            _ => Ok(()),
        }
    }

    fn source_location_before_visit_op(&mut self, offset: usize) -> Result<()> {
        let loc = SourceLoc::new(offset as u32);
        let rel = self.source_loc_from(loc);
        self.source_location.current = self.masm.start_source_loc(rel)?;
        Ok(())
    }

    fn source_location_after_visit_op(&mut self) -> Result<()> {
        if self.masm.current_code_offset()? >= self.source_location.current.0 {
            self.masm.end_source_loc()?;
        }

        Ok(())
    }

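    /// Emits an atomic read-modify-write operation: computes the
    /// alignment-checked heap address and delegates the RMW sequence to the
    /// macro-assembler.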
    pub(crate) fn emit_atomic_rmw(
        &mut self,
        arg: &MemArg,
        op: RmwOp,
        size: OperandSize,
        extend: Option<Extend<Zero>>,
    ) -> Result<()> {
        let operand = self.context.pop_to_reg(self.masm, None)?;
        if let Some(addr) = self.emit_compute_heap_address_align_checked(arg, size)? {
            let src = self.masm.address_at_reg(addr, 0)?;
            self.context.stack.push(operand.into());
            self.masm
                .atomic_rmw(&mut self.context, src, size, op, UNTRUSTED_FLAGS, extend)?;
            self.context.free_reg(addr);
        }

        Ok(())
    }

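    /// Emits an atomic compare-and-exchange: pops the replacement and expected
    /// values, computes the alignment-checked heap address, and delegates the
    /// compare-and-swap sequence to the macro-assembler.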
    pub(crate) fn emit_atomic_cmpxchg(
        &mut self,
        arg: &MemArg,
        size: OperandSize,
        extend: Option<Extend<Zero>>,
    ) -> Result<()> {
        let replacement = self.context.pop_to_reg(self.masm, None)?;
        let expected = self.context.pop_to_reg(self.masm, None)?;

        if let Some(addr) = self.emit_compute_heap_address_align_checked(arg, size)? {
            self.context.stack.push(expected.into());
            self.context.stack.push(replacement.into());

            let src = self.masm.address_at_reg(addr, 0)?;
            self.masm
                .atomic_cas(&mut self.context, src, size, UNTRUSTED_FLAGS, extend)?;

            self.context.free_reg(addr);
        }
        Ok(())
    }

    #[cfg(not(feature = "threads"))]
    pub fn emit_atomic_wait(&mut self, _arg: &MemArg, _kind: AtomicWaitKind) -> Result<()> {
        Err(CodeGenError::unimplemented_wasm_instruction().into())
    }

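    /// Emits a `memory.atomic.wait32`/`wait64`: adjusts the address by the
    /// static offset, arranges the builtin's arguments on the value stack,
    /// and calls the corresponding `memory_atomic_wait*` builtin.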
    #[cfg(feature = "threads")]
    pub fn emit_atomic_wait(&mut self, arg: &MemArg, kind: AtomicWaitKind) -> Result<()> {
        let timeout = self.context.pop_to_reg(self.masm, None)?;
        let expected = self.context.pop_to_reg(self.masm, None)?;
        let addr = self.context.pop_to_reg(self.masm, None)?;

        self.context
            .stack
            .push(crate::stack::Val::I32(arg.memory as i32));

        if arg.offset != 0 {
            self.masm.add(
                writable!(addr.reg),
                addr.reg,
                RegImm::i64(arg.offset as i64),
                OperandSize::S64,
            )?;
        }

        self.context
            .stack
            .push(TypedReg::new(WasmValType::I64, addr.reg).into());
        self.context.stack.push(expected.into());
        self.context.stack.push(timeout.into());

        let builtin = match kind {
            AtomicWaitKind::Wait32 => self.env.builtins.memory_atomic_wait32::<M::ABI, M::Ptr>()?,
            AtomicWaitKind::Wait64 => self.env.builtins.memory_atomic_wait64::<M::ABI, M::Ptr>()?,
        };

        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(builtin.clone()),
        )?;

        Ok(())
    }

    #[cfg(not(feature = "threads"))]
    pub fn emit_atomic_notify(&mut self, _arg: &MemArg) -> Result<()> {
        Err(CodeGenError::unimplemented_wasm_instruction().into())
    }

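    /// Emits a `memory.atomic.notify`: adjusts the address by the static
    /// offset, arranges the builtin's arguments on the value stack, and calls
    /// the `memory_atomic_notify` builtin.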
    #[cfg(feature = "threads")]
    pub fn emit_atomic_notify(&mut self, arg: &MemArg) -> Result<()> {
        let count = self.context.pop_to_reg(self.masm, None)?;
        let addr = self.context.pop_to_reg(self.masm, None)?;

        self.context
            .stack
            .push(crate::stack::Val::I32(arg.memory as i32));

        if arg.offset != 0 {
            self.masm.add(
                writable!(addr.reg),
                addr.reg,
                RegImm::i64(arg.offset as i64),
                OperandSize::S64,
            )?;
        }

        self.context
            .stack
            .push(TypedReg::new(WasmValType::I64, addr.reg).into());
        self.context.stack.push(count.into());

        let builtin = self.env.builtins.memory_atomic_notify::<M::ABI, M::Ptr>()?;

        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(builtin.clone()),
        )?;

        Ok(())
    }
}

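/// Returns the index into the control stack for the frame targeted by a
/// branch with the given relative depth, failing if the depth exceeds the
/// number of live control frames.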
pub fn control_index(depth: u32, control_length: usize) -> Result<usize> {
    (control_length - 1)
        .checked_sub(depth as usize)
        .ok_or_else(|| anyhow!(CodeGenError::control_frame_expected()))
}