use super::env::HeapData;
use crate::{
    abi::vmctx,
    codegen::{CodeGenContext, Emission},
    isa::reg::{Reg, writable},
    masm::{IntCmpKind, IntScratch, MacroAssembler, OperandSize, RegImm, TrapCode},
    stack::TypedReg,
};
use anyhow::Result;

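/// A newtype to represent the immediate offset of a heap access.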
#[derive(Debug, Copy, Clone)]
pub(crate) struct ImmOffset(u32);

impl ImmOffset {
    pub fn from_u32(raw: u32) -> Self {
        Self(raw)
    }

    pub fn as_u32(&self) -> u32 {
        self.0
    }
}

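/// The bounds of a heap used when performing a bounds check: either a
/// statically known size or a dynamic bound held in a register.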
#[derive(Debug, Copy, Clone)]
pub(crate) enum Bounds {
    /// Static, known-ahead-of-time bounds.
    Static(u64),
    /// Dynamic bounds held in a register.
    Dynamic(TypedReg),
}

impl Bounds {
    pub fn from_typed_reg(tr: TypedReg) -> Self {
        Self::Dynamic(tr)
    }

    pub fn from_u64(raw: u64) -> Self {
        Self::Static(raw)
    }

    /// Returns the underlying register; panics if the bounds are static.
    pub fn as_typed_reg(&self) -> TypedReg {
        match self {
            Self::Dynamic(tr) => *tr,
            _ => panic!(),
        }
    }

    /// Returns the underlying static value; panics if the bounds are dynamic.
    pub fn as_u64(&self) -> u64 {
        match self {
            Self::Static(v) => *v,
            _ => panic!(),
        }
    }
}

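/// A newtype to represent the register holding the index of a heap access.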
#[derive(Debug, Copy, Clone)]
pub(crate) struct Index(TypedReg);

impl Index {
    pub fn from_typed_reg(tr: TypedReg) -> Self {
        Self(tr)
    }

    pub fn as_typed_reg(&self) -> TypedReg {
        self.0
    }
}

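/// Loads the bounds of the heap into a newly allocated general-purpose
/// register: the statically known heap size when available, or the current
/// heap length loaded through the vmctx otherwise.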
pub(crate) fn load_dynamic_heap_bounds<M>(
    context: &mut CodeGenContext<Emission>,
    masm: &mut M,
    heap: &HeapData,
    ptr_size: OperandSize,
) -> Result<Bounds>
where
    M: MacroAssembler,
{
    let dst = context.any_gpr(masm)?;
    match heap.memory.static_heap_size() {
        // The heap size is known statically: materialize it as an immediate.
        Some(size) => masm.mov(writable!(dst), RegImm::i64(size.cast_signed()), ptr_size)?,

        // Otherwise, load the current heap length through the vmctx, adding
        // an extra indirection if the memory is imported.
        None => {
            masm.with_scratch::<IntScratch, _>(|masm, scratch| {
                let base = if let Some(offset) = heap.import_from {
                    let addr = masm.address_at_vmctx(offset)?;
                    masm.load_ptr(addr, scratch.writable())?;
                    scratch.inner()
                } else {
                    vmctx!(M)
                };
                let addr = masm.address_at_reg(base, heap.current_length_offset)?;
                masm.load_ptr(addr, writable!(dst))
            })?;
        }
    }

    Ok(Bounds::from_typed_reg(TypedReg::new(
        heap.index_type(),
        dst,
    )))
}

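/// Ensures that the memory access offset can be emitted as a `u32` immediate.
///
/// If the offset fits in a `u32`, it is returned unchanged; otherwise it is
/// added to the index register via a checked addition that traps on overflow,
/// and an immediate offset of 0 is returned.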
#[inline]
pub(crate) fn ensure_index_and_offset<M: MacroAssembler>(
    masm: &mut M,
    index: Index,
    offset: u64,
    heap_ty_size: OperandSize,
) -> Result<ImmOffset> {
    match u32::try_from(offset) {
        Ok(offs) => Ok(ImmOffset::from_u32(offs)),
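        // The offset doesn't fit in a u32 immediate: fold it into the index
        // register with a checked, unsigned addition that traps on overflow.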
        Err(_) => {
            masm.checked_uadd(
                writable!(index.as_typed_reg().into()),
                index.as_typed_reg().into(),
                RegImm::i64(offset as i64),
                heap_ty_size,
                TrapCode::HEAP_OUT_OF_BOUNDS,
            )?;

            Ok(ImmOffset::from_u32(0))
        }
    }
}

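/// Emits a bounds-checked heap access and returns the register holding the
/// effective address.
///
/// The caller-provided `emit_check_condition` emits the comparison; if the
/// condition holds, execution traps with `TrapCode::HEAP_OUT_OF_BOUNDS`. When
/// Spectre mitigation is enabled, the comparison is re-emitted and the
/// resulting address is conditionally replaced with 0 via `cmov`, so that
/// speculatively executed out-of-bounds accesses read from address 0.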
pub(crate) fn load_heap_addr_checked<M, F>(
    masm: &mut M,
    context: &mut CodeGenContext<Emission>,
    ptr_size: OperandSize,
    heap: &HeapData,
    enable_spectre_mitigation: bool,
    bounds: Bounds,
    index: Index,
    offset: ImmOffset,
    mut emit_check_condition: F,
) -> Result<Reg>
where
    M: MacroAssembler,
    F: FnMut(&mut M, Bounds, Index) -> Result<IntCmpKind>,
{
    let cmp_kind = emit_check_condition(masm, bounds, index)?;

    masm.trapif(cmp_kind, TrapCode::HEAP_OUT_OF_BOUNDS)?;
    let addr = context.any_gpr(masm)?;

    load_heap_addr_unchecked(masm, heap, index, offset, addr, ptr_size)?;
    if !enable_spectre_mitigation {
        Ok(addr)
    } else {
        // Spectre mitigation: re-emit the bounds comparison and conditionally
        // replace the computed address with 0, so that a speculatively
        // executed out-of-bounds access reads from address 0 instead of an
        // out-of-bounds location.
        let tmp = context.any_gpr(masm)?;
        masm.mov(writable!(tmp), RegImm::i64(0), ptr_size)?;
        let cmp_kind = emit_check_condition(masm, bounds, index)?;
        masm.cmov(writable!(addr), tmp, cmp_kind, ptr_size)?;
        context.free_reg(tmp);
        Ok(addr)
    }
}

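/// Computes the effective address of the heap access into `dst` without
/// emitting any bounds checks: the heap base is loaded from the vmctx
/// (following the import indirection if needed), then the index register and
/// the immediate offset are added to it.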
pub(crate) fn load_heap_addr_unchecked<M>(
    masm: &mut M,
    heap: &HeapData,
    index: Index,
    offset: ImmOffset,
    dst: Reg,
    ptr_size: OperandSize,
) -> Result<()>
where
    M: MacroAssembler,
{
    masm.with_scratch::<IntScratch, _>(|masm, scratch| {
        let base = if let Some(offset) = heap.import_from {
            // For an imported memory, resolve the imported definition first.
            masm.load_ptr(masm.address_at_vmctx(offset)?, scratch.writable())?;
            scratch.inner()
        } else {
            // For a locally defined memory, the definition is reachable
            // directly from the vmctx.
            vmctx!(M)
        };

        // Load the heap's base address into the destination register.
        masm.load_ptr(masm.address_at_reg(base, heap.offset)?, writable!(dst))
    })?;

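    // Add the index to the heap's base address.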
    let index_reg = index.as_typed_reg().reg;
    masm.add(writable!(dst), dst, index_reg.into(), ptr_size)?;

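    // If the immediate offset is non-zero, add it on top of the base + index.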
    if offset.as_u32() > 0 {
        masm.add(
            writable!(dst),
            dst,
            RegImm::i64(offset.as_u32() as i64),
            ptr_size,
        )?;
    }
    Ok(())
}