//! Heap (linear memory) bounds-check helpers for WebAssembly memory accesses.
use super::env::HeapData;
use crate::{
    abi::vmctx,
    codegen::{CodeGenContext, Emission},
    isa::reg::{Reg, writable},
    masm::{IntCmpKind, IntScratch, MacroAssembler, OperandSize, RegImm, TrapCode},
    stack::TypedReg,
};
use anyhow::Result;
use wasmtime_environ::Signed;

/// A newtype representing the immediate offset argument of a heap access.
#[derive(Debug, Copy, Clone)]
pub(crate) struct ImmOffset(u32);

impl ImmOffset {
    /// Construct an [ImmOffset] from a u32.
    pub fn from_u32(raw: u32) -> Self {
        Self(raw)
    }

    /// Return the underlying u32 value.
    pub fn as_u32(&self) -> u32 {
        self.0
    }
}

/// The bounds of a heap: either known at compile time or loaded at runtime.
#[derive(Debug, Copy, Clone)]
pub(crate) enum Bounds {
    /// Static bounds, known ahead of time.
    Static(u64),
    /// Dynamic bounds, loaded into a register at runtime.
    Dynamic(TypedReg),
}

impl Bounds {
    /// Construct dynamic [Bounds] from a [TypedReg].
    pub fn from_typed_reg(tr: TypedReg) -> Self {
        Self::Dynamic(tr)
    }

    /// Construct static [Bounds] from a u64.
    pub fn from_u64(raw: u64) -> Self {
        Self::Static(raw)
    }

    /// Return the underlying [TypedReg]; panics if the bounds are not dynamic.
    pub fn as_typed_reg(&self) -> TypedReg {
        match self {
            Self::Dynamic(tr) => *tr,
            _ => panic!(),
        }
    }

    /// Return the underlying u64 value; panics if the bounds are not static.
    pub fn as_u64(&self) -> u64 {
        match self {
            Self::Static(v) => *v,
            _ => panic!(),
        }
    }
}

/// A heap access index, held in a [TypedReg].
#[derive(Debug, Copy, Clone)]
pub(crate) struct Index(TypedReg);

impl Index {
    /// Construct an [Index] from a [TypedReg].
    pub fn from_typed_reg(tr: TypedReg) -> Self {
        Self(tr)
    }

    /// Return the underlying [TypedReg].
    pub fn as_typed_reg(&self) -> TypedReg {
        self.0
    }
}

/// Loads the bounds of a dynamic heap into a register and returns them as
/// [Bounds::Dynamic].
///
/// If the heap has a static size, the bound is materialized as an immediate;
/// otherwise the current length is loaded from the heap definition, which is
/// reached either directly through the vmctx or through an import.
pub(crate) fn load_dynamic_heap_bounds<M>(
    context: &mut CodeGenContext<Emission>,
    masm: &mut M,
    heap: &HeapData,
    ptr_size: OperandSize,
) -> Result<Bounds>
where
    M: MacroAssembler,
{
    let dst = context.any_gpr(masm)?;
    match heap.memory.static_heap_size() {
        // The heap size is a compile-time constant: no load needed.
        Some(size) => masm.mov(writable!(dst), RegImm::i64(size.signed()), ptr_size)?,

        None => {
            masm.with_scratch::<IntScratch, _>(|masm, scratch| {
                // For imported memories, load the address of the heap
                // definition into the scratch register; memories defined in
                // the current module are reachable directly from the vmctx.
                let base = if let Some(offset) = heap.import_from {
                    let addr = masm.address_at_vmctx(offset)?;
                    masm.load_ptr(addr, scratch.writable())?;
                    scratch.inner()
                } else {
                    vmctx!(M)
                };
                // Load the heap's current length.
                let addr = masm.address_at_reg(base, heap.current_length_offset)?;
                masm.load_ptr(addr, writable!(dst))
            })?;
        }
    }

    Ok(Bounds::from_typed_reg(TypedReg::new(
        heap.index_type(),
        dst,
    )))
}

/// Ensures that the immediate offset of a heap access is representable as a
/// u32.
///
/// If the offset fits in a u32, it is returned unchanged as an [ImmOffset].
/// Otherwise the offset is folded into the index register via a checked,
/// unsigned addition that traps on overflow, and an offset of 0 is returned,
/// keeping the subsequent effective-address computation overflow safe.
#[inline]
pub(crate) fn ensure_index_and_offset<M: MacroAssembler>(
    masm: &mut M,
    index: Index,
    offset: u64,
    heap_ty_size: OperandSize,
) -> Result<ImmOffset> {
    match u32::try_from(offset) {
        // The offset fits in a u32: return it as-is.
        Ok(offs) => Ok(ImmOffset::from_u32(offs)),
        // The offset doesn't fit in a u32: fold it into the index with an
        // overflow check (index = index + offset) and return 0 as the offset.
        Err(_) => {
            masm.checked_uadd(
                writable!(index.as_typed_reg().into()),
                index.as_typed_reg().into(),
                RegImm::i64(offset as i64),
                heap_ty_size,
                TrapCode::HEAP_OUT_OF_BOUNDS,
            )?;

            Ok(ImmOffset::from_u32(0))
        }
    }
}

/// Performs the out-of-bounds check and returns a register holding the heap
/// address if the access is in bounds.
///
/// The `emit_check_condition` closure must emit the bounds comparison and
/// return the condition under which the access is out of bounds; that
/// condition is used both to trap and, when Spectre mitigation is enabled, to
/// conditionally zero the computed address.
pub(crate) fn load_heap_addr_checked<M, F>(
    masm: &mut M,
    context: &mut CodeGenContext<Emission>,
    ptr_size: OperandSize,
    heap: &HeapData,
    enable_spectre_mitigation: bool,
    bounds: Bounds,
    index: Index,
    offset: ImmOffset,
    mut emit_check_condition: F,
) -> Result<Reg>
where
    M: MacroAssembler,
    F: FnMut(&mut M, Bounds, Index) -> Result<IntCmpKind>,
{
    let cmp_kind = emit_check_condition(masm, bounds, index)?;

    masm.trapif(cmp_kind, TrapCode::HEAP_OUT_OF_BOUNDS)?;
    let addr = context.any_gpr(masm)?;

    load_heap_addr_unchecked(masm, heap, index, offset, addr, ptr_size)?;
    if !enable_spectre_mitigation {
        Ok(addr)
    } else {
        // Spectre mitigation: conditionally replace the computed address with
        // 0 when the out-of-bounds condition holds, so that speculative
        // execution past the bounds check cannot use an out-of-bounds address.
        let tmp = context.any_gpr(masm)?;
        masm.mov(writable!(tmp), RegImm::i64(0), ptr_size)?;
        let cmp_kind = emit_check_condition(masm, bounds, index)?;
        masm.cmov(writable!(addr), tmp, cmp_kind, ptr_size)?;
        context.free_reg(tmp);
        Ok(addr)
    }
}

/// Loads the requested heap address into the destination register.
///
/// This function performs no bounds checks and assumes the caller has already
/// performed the appropriate checks.
pub(crate) fn load_heap_addr_unchecked<M>(
    masm: &mut M,
    heap: &HeapData,
    index: Index,
    offset: ImmOffset,
    dst: Reg,
    ptr_size: OperandSize,
) -> Result<()>
where
    M: MacroAssembler,
{
    masm.with_scratch::<IntScratch, _>(|masm, scratch| {
        let base = if let Some(offset) = heap.import_from {
            // The memory is imported: load its definition address into the
            // scratch register.
            masm.load_ptr(masm.address_at_vmctx(offset)?, scratch.writable())?;
            scratch.inner()
        } else {
            // The memory is defined in the current module: use the vmctx
            // register directly.
            vmctx!(M)
        };

        // Load the heap's base address into the destination register.
        masm.load_ptr(masm.address_at_reg(base, heap.offset)?, writable!(dst))
    })?;

    // Add the index to the heap base address.
    let index_reg = index.as_typed_reg().reg;
    masm.add(writable!(dst), dst, index_reg.into(), ptr_size)?;

    // If the immediate offset is non-zero, add it to the result as well.
    if offset.as_u32() > 0 {
        masm.add(
            writable!(dst),
            dst,
            RegImm::i64(offset.as_u32() as i64),
            ptr_size,
        )?;
    }
    Ok(())
}
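
// NOTE: The function below is an illustrative sketch, not part of Winch's
// actual lowering code. It shows one plausible way a caller could compose the
// helpers above when emitting a bounds-checked heap access: fold the offset,
// load the bound, then compute the checked address. The function name, its
// parameter list, and the choice to always enable Spectre mitigation are
// assumptions made for the example only.
#[allow(dead_code)]
fn emit_checked_heap_access_sketch<M, F>(
    context: &mut CodeGenContext<Emission>,
    masm: &mut M,
    heap: &HeapData,
    index: Index,
    offset: u64,
    index_size: OperandSize,
    ptr_size: OperandSize,
    emit_check_condition: F,
) -> Result<Reg>
where
    M: MacroAssembler,
    F: FnMut(&mut M, Bounds, Index) -> Result<IntCmpKind>,
{
    // Fold the static offset into the index register when it doesn't fit in a
    // u32; otherwise keep it as an immediate.
    let imm_offset = ensure_index_and_offset(masm, index, offset, index_size)?;
    // Materialize the heap bound (a constant or a load from the heap
    // definition).
    let bounds = load_dynamic_heap_bounds(context, masm, heap, ptr_size)?;
    // Trap on out-of-bounds accesses and compute the native address, zeroing
    // it under misspeculation when the check fails.
    let addr = load_heap_addr_checked(
        masm,
        context,
        ptr_size,
        heap,
        true, // enable_spectre_mitigation: assumed always on in this sketch.
        bounds,
        index,
        imm_offset,
        emit_check_condition,
    )?;
    // The bound register is no longer needed once the address is computed;
    // the caller still owns the index register.
    context.free_reg(bounds.as_typed_reg().reg);
    Ok(addr)
}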