// wasmtime_environ/component/vmcomponent_offsets.rs

use crate::PtrSize;
use crate::component::*;

/// Magic value stored at the start of the component context this file
/// computes offsets for; the little-endian bytes spell `"comp"`.
pub const VMCOMPONENT_MAGIC: u32 = u32::from_le_bytes(*b"comp");

/// Bit 0 of a component instance's flags global: the "may leave" flag.
pub const FLAG_MAY_LEAVE: i32 = 1 << 0;
/// Offset information for a component's runtime context (see
/// [`VMComponentOffsets::new`] and `size_of_vmctx`), parameterized over a
/// pointer-size abstraction `P`.
///
/// The public `num_*` fields record how many entries each array-shaped area
/// of the context holds; the private fields record the byte offset of each
/// area, filled in by `new`.
#[derive(Debug, Clone, Copy)]
pub struct VMComponentOffsets<P> {
    /// The host pointer size abstraction used for all pointer-sized fields.
    pub ptr: P,

    /// Number of lowered functions.
    pub num_lowerings: u32,
    /// Number of runtime memories.
    pub num_runtime_memories: u32,
    /// Number of runtime tables.
    pub num_runtime_tables: u32,
    /// Number of runtime `realloc` functions.
    pub num_runtime_reallocs: u32,
    /// Number of runtime `callback` functions.
    pub num_runtime_callbacks: u32,
    /// Number of runtime `post-return` functions.
    pub num_runtime_post_returns: u32,
    /// Number of runtime component instances.
    pub num_runtime_component_instances: u32,
    /// Number of trampolines.
    pub num_trampolines: u32,
    /// Number of unsafe-intrinsic slots: the index of the last populated
    /// intrinsic plus one (trailing empty entries get no slots; see `new`).
    pub num_unsafe_intrinsics: u32,
    /// Number of resources.
    pub num_resources: u32,

    // Byte offsets of each field/area within the context, computed by `new`
    // in declaration order; `size` is the total context size in bytes.
    magic: u32,
    builtins: u32,
    vm_store_context: u32,
    flags: u32,
    task_may_block: u32,
    trampoline_func_refs: u32,
    intrinsic_func_refs: u32,
    lowerings: u32,
    memories: u32,
    tables: u32,
    reallocs: u32,
    callbacks: u32,
    post_returns: u32,
    resource_destructors: u32,
    size: u32,
}
78
/// Rounds `offset` up to the nearest multiple of `align`.
///
/// # Panics
///
/// Panics if `align` is not a power of two, or if the rounded result would
/// overflow `u32`. The overflow check matches the checked arithmetic used
/// everywhere else in this file (`cmul`, the `fields!` macro); the previous
/// unchecked `offset + (align - 1)` would silently wrap in release builds.
#[inline]
fn align(offset: u32, align: u32) -> u32 {
    assert!(align.is_power_of_two());
    offset
        .checked_add(align - 1)
        .expect("offset overflow while aligning")
        & !(align - 1)
}
84
impl<P: PtrSize> VMComponentOffsets<P> {
    /// Returns a new set of offsets for `component`, using `ptr` for the
    /// size of all pointer-typed fields.
    ///
    /// Counts are copied out of `component`, then field offsets are computed
    /// by laying out each area in declaration order with explicit alignment
    /// directives. All arithmetic is overflow-checked so a pathological
    /// component panics rather than silently wrapping.
    pub fn new(ptr: P, component: &Component) -> Self {
        let mut ret = Self {
            ptr,
            num_lowerings: component.num_lowerings,
            num_runtime_memories: component.num_runtime_memories,
            num_runtime_tables: component.num_runtime_tables,
            num_runtime_reallocs: component.num_runtime_reallocs,
            num_runtime_callbacks: component.num_runtime_callbacks,
            num_runtime_post_returns: component.num_runtime_post_returns,
            num_runtime_component_instances: component.num_runtime_component_instances,
            num_trampolines: component.trampolines.len().try_into().unwrap(),
            // Size the intrinsic table by the last populated entry plus one;
            // trailing `None` entries don't need slots.
            num_unsafe_intrinsics: if let Some(i) = component
                .unsafe_intrinsics
                .iter()
                .rposition(|x| x.is_some())
            {
                u32::try_from(i + 1).unwrap()
            } else {
                0
            },
            num_resources: component.num_resources,
            // All offsets below are filled in by the `fields!` invocation
            // further down.
            magic: 0,
            builtins: 0,
            vm_store_context: 0,
            flags: 0,
            task_may_block: 0,
            trampoline_func_refs: 0,
            intrinsic_func_refs: 0,
            lowerings: 0,
            memories: 0,
            tables: 0,
            reallocs: 0,
            callbacks: 0,
            post_returns: 0,
            resource_destructors: 0,
            size: 0,
        };

        // Overflow-checked `count * size` for array-shaped areas.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        let mut next_field_offset = 0;

        // Sequential layout DSL: `size(f) = n` records the current offset
        // into `ret.f` then advances by `n` bytes (checked); `align(n)`
        // rounds the running offset up to an `n`-byte boundary.
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = next_field_offset.checked_add(u32::from($size)).unwrap();
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        fields! {
            size(magic) = 4u32,
            align(u32::from(ret.ptr.size())),
            size(builtins) = ret.ptr.size(),
            size(vm_store_context) = ret.ptr.size(),
            // NOTE(review): the 16-byte alignment here presumably matches the
            // alignment requirement of `VMGlobalDefinition` — confirm against
            // its definition before changing.
            align(16),
            size(flags) = cmul(ret.num_runtime_component_instances, ret.ptr.size_of_vmglobal_definition()),
            size(task_may_block) = ret.ptr.size_of_vmglobal_definition(),
            align(u32::from(ret.ptr.size())),
            size(trampoline_func_refs) = cmul(ret.num_trampolines, ret.ptr.size_of_vm_func_ref()),
            size(intrinsic_func_refs) = cmul(ret.num_unsafe_intrinsics, ret.ptr.size_of_vm_func_ref()),
            // Two pointers per lowering: callee then data (see
            // `lowering_callee_offset` / `lowering_data_offset`).
            size(lowerings) = cmul(ret.num_lowerings, ret.ptr.size() * 2),
            size(memories) = cmul(ret.num_runtime_memories, ret.ptr.size()),
            size(tables) = cmul(ret.num_runtime_tables, ret.size_of_vmtable_import()),
            size(reallocs) = cmul(ret.num_runtime_reallocs, ret.ptr.size()),
            size(callbacks) = cmul(ret.num_runtime_callbacks, ret.ptr.size()),
            size(post_returns) = cmul(ret.num_runtime_post_returns, ret.ptr.size()),
            size(resource_destructors) = cmul(ret.num_resources, ret.ptr.size()),
        }

        ret.size = next_field_offset;

        // Invariant: the magic value must live at offset 0 of the context
        // (it's declared first with no preceding alignment padding).
        assert_eq!(ret.magic, 0);

        return ret;
    }

    /// The size, in bytes, of the target's pointer type.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// The offset of the magic field (always 0; asserted in `new`).
    #[inline]
    pub fn magic(&self) -> u32 {
        self.magic
    }

    /// The offset of the builtins pointer field.
    #[inline]
    pub fn builtins(&self) -> u32 {
        self.builtins
    }

    /// The offset of the flags global for the component instance `index`.
    ///
    /// Panics if `index` is out of range.
    #[inline]
    pub fn instance_flags(&self, index: RuntimeComponentInstanceIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_component_instances);
        self.flags + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// The offset of the `task_may_block` global.
    pub fn task_may_block(&self) -> u32 {
        self.task_may_block
    }

    /// The offset of the store-context pointer field.
    #[inline]
    pub fn vm_store_context(&self) -> u32 {
        self.vm_store_context
    }

    /// The offset of the start of the trampoline func-ref array.
    #[inline]
    pub fn trampoline_func_refs(&self) -> u32 {
        self.trampoline_func_refs
    }

    /// The offset of the func-ref for trampoline `index`.
    ///
    /// Panics if `index` is out of range.
    #[inline]
    pub fn trampoline_func_ref(&self, index: TrampolineIndex) -> u32 {
        assert!(index.as_u32() < self.num_trampolines);
        self.trampoline_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the start of the unsafe-intrinsic func-ref array.
    #[inline]
    pub fn unsafe_intrinsic_func_refs(&self) -> u32 {
        self.intrinsic_func_refs
    }

    /// The offset of the func-ref for the given unsafe intrinsic.
    ///
    /// Panics if the intrinsic's index is beyond the allocated slots.
    #[inline]
    pub fn unsafe_intrinsic_func_ref(&self, intrinsic: UnsafeIntrinsic) -> u32 {
        assert!(intrinsic.index() < self.num_unsafe_intrinsics);
        self.unsafe_intrinsic_func_refs()
            + intrinsic.index() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the start of the lowerings array.
    #[inline]
    pub fn lowerings(&self) -> u32 {
        self.lowerings
    }

    /// The offset of the lowering entry (callee + data pointer pair) for
    /// `index`. Panics if `index` is out of range.
    #[inline]
    pub fn lowering(&self, index: LoweredIndex) -> u32 {
        assert!(index.as_u32() < self.num_lowerings);
        self.lowerings() + index.as_u32() * u32::from(2 * self.ptr.size())
    }

    /// The offset of the callee pointer within the lowering entry `index`.
    #[inline]
    pub fn lowering_callee(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_callee_offset()
    }

    /// The offset of the data pointer within the lowering entry `index`.
    #[inline]
    pub fn lowering_data(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_data_offset()
    }

    /// The size, in bytes, of one lowering entry (two pointers).
    #[inline]
    pub fn lowering_size(&self) -> u8 {
        2 * self.ptr.size()
    }

    /// The offset of the callee pointer relative to a lowering entry.
    #[inline]
    pub fn lowering_callee_offset(&self) -> u32 {
        0
    }

    /// The offset of the data pointer relative to a lowering entry.
    #[inline]
    pub fn lowering_data_offset(&self) -> u32 {
        u32::from(self.ptr.size())
    }

    /// The offset of the start of the runtime-memories pointer array.
    #[inline]
    pub fn runtime_memories(&self) -> u32 {
        self.memories
    }

    /// The offset of the memory pointer for `index`.
    ///
    /// Panics if `index` is out of range.
    #[inline]
    pub fn runtime_memory(&self, index: RuntimeMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_memories);
        self.runtime_memories() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the start of the runtime-tables array.
    #[inline]
    pub fn runtime_tables(&self) -> u32 {
        self.tables
    }

    /// The offset of the table entry for `index`.
    ///
    /// Panics if `index` is out of range.
    #[inline]
    pub fn runtime_table(&self, index: RuntimeTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_tables);
        self.runtime_tables() + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// The size, in bytes, of one table entry (three pointers).
    #[inline]
    pub fn size_of_vmtable_import(&self) -> u8 {
        3 * self.pointer_size()
    }

    /// The offset of the start of the runtime-reallocs pointer array.
    #[inline]
    pub fn runtime_reallocs(&self) -> u32 {
        self.reallocs
    }

    /// The offset of the `realloc` pointer for `index`.
    ///
    /// Panics if `index` is out of range.
    #[inline]
    pub fn runtime_realloc(&self, index: RuntimeReallocIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_reallocs);
        self.runtime_reallocs() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the start of the runtime-callbacks pointer array.
    #[inline]
    pub fn runtime_callbacks(&self) -> u32 {
        self.callbacks
    }

    /// The offset of the callback pointer for `index`.
    ///
    /// Panics if `index` is out of range.
    #[inline]
    pub fn runtime_callback(&self, index: RuntimeCallbackIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_callbacks);
        self.runtime_callbacks() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the start of the post-returns pointer array.
    #[inline]
    pub fn runtime_post_returns(&self) -> u32 {
        self.post_returns
    }

    /// The offset of the `post-return` pointer for `index`.
    ///
    /// Panics if `index` is out of range.
    #[inline]
    pub fn runtime_post_return(&self, index: RuntimePostReturnIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_post_returns);
        self.runtime_post_returns() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the start of the resource-destructors pointer array.
    #[inline]
    pub fn resource_destructors(&self) -> u32 {
        self.resource_destructors
    }

    /// The offset of the destructor pointer for resource `index`.
    ///
    /// Panics if `index` is out of range.
    #[inline]
    pub fn resource_destructor(&self, index: ResourceIndex) -> u32 {
        assert!(index.as_u32() < self.num_resources);
        self.resource_destructors() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The total size, in bytes, of the laid-out context.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }
}