// wasmtime_environ/component/vmcomponent_offsets.rs

use crate::PtrSize;
use crate::component::*;

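/// Magic value stored at the start of every `VMComponentContext`, the
/// component analogue of `VMCONTEXT_MAGIC`, used to sanity-check that a raw
/// pointer actually refers to a component context.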
pub const VMCOMPONENT_MAGIC: u32 = u32::from_le_bytes(*b"comp");

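/// Flag bit in each component instance's flags global corresponding to the
/// canonical ABI `may_leave` state.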
pub const FLAG_MAY_LEAVE: i32 = 1 << 0;

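/// Flag bit set when a lifted function returns, indicating that its
/// `post_return` function, if any, still needs to be called.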
pub const FLAG_NEEDS_POST_RETURN: i32 = 1 << 2;

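/// Byte offsets of fields within a `VMComponentContext`, parameterized over
/// the target's pointer size `P`.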
#[derive(Debug, Clone, Copy)]
pub struct VMComponentOffsets<P> {
    /// The target pointer size, used to compute the offsets below.
    pub ptr: P,

    // Counts of the various runtime items in the component; these determine
    // how large each of the arrays in the context is.
    pub num_lowerings: u32,
    pub num_runtime_memories: u32,
    pub num_runtime_tables: u32,
    pub num_runtime_reallocs: u32,
    pub num_runtime_callbacks: u32,
    pub num_runtime_post_returns: u32,
    pub num_runtime_component_instances: u32,
    pub num_trampolines: u32,
    pub num_unsafe_intrinsics: u32,
    pub num_resources: u32,

    // Precomputed byte offsets of each field within the `VMComponentContext`,
    // filled in by `VMComponentOffsets::new` below.
    magic: u32,
    builtins: u32,
    vm_store_context: u32,
    flags: u32,
    task_may_block: u32,
    trampoline_func_refs: u32,
    intrinsic_func_refs: u32,
    lowerings: u32,
    memories: u32,
    tables: u32,
    reallocs: u32,
    callbacks: u32,
    post_returns: u32,
    resource_destructors: u32,
    size: u32,
}

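/// Rounds `offset` up to the nearest multiple of `align`, which must be a
/// power of two.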
#[inline]
fn align(offset: u32, align: u32) -> u32 {
    assert!(align.is_power_of_two());
    (offset + (align - 1)) & !(align - 1)
}

impl<P: PtrSize> VMComponentOffsets<P> {
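    /// Creates the offset table for `component` when compiled to a target
    /// whose pointer size is described by `ptr`.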
    pub fn new(ptr: P, component: &Component) -> Self {
        let mut ret = Self {
            ptr,
            num_lowerings: component.num_lowerings,
            num_runtime_memories: component.num_runtime_memories,
            num_runtime_tables: component.num_runtime_tables,
            num_runtime_reallocs: component.num_runtime_reallocs,
            num_runtime_callbacks: component.num_runtime_callbacks,
            num_runtime_post_returns: component.num_runtime_post_returns,
            num_runtime_component_instances: component.num_runtime_component_instances,
            num_trampolines: component.trampolines.len().try_into().unwrap(),
            // Only reserve slots up through the last intrinsic that is
            // actually present; a trailing run of absent intrinsics needs no
            // space.
            num_unsafe_intrinsics: if let Some(i) = component
                .unsafe_intrinsics
                .iter()
                .rposition(|x| x.is_some())
            {
                u32::try_from(i + 1).unwrap()
            } else {
                0
            },
            num_resources: component.num_resources,
            magic: 0,
            builtins: 0,
            vm_store_context: 0,
            flags: 0,
            task_may_block: 0,
            trampoline_func_refs: 0,
            intrinsic_func_refs: 0,
            lowerings: 0,
            memories: 0,
            tables: 0,
            reallocs: 0,
            callbacks: 0,
            post_returns: 0,
            resource_destructors: 0,
            size: 0,
        };

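        // Checked multiply of an item count by an item size, panicking on
        // overflow.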
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        let mut next_field_offset = 0;

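        // Lays fields out in declaration order: `size(f) = n` records the
        // current offset in `ret.f` and then advances by `n` bytes, while
        // `align(n)` rounds the running offset up to an `n`-byte boundary.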
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = next_field_offset.checked_add(u32::from($size)).unwrap();
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        fields! {
            size(magic) = 4u32,
            align(u32::from(ret.ptr.size())),
            size(builtins) = ret.ptr.size(),
            size(vm_store_context) = ret.ptr.size(),
            align(16),
            size(flags) = cmul(ret.num_runtime_component_instances, ret.ptr.size_of_vmglobal_definition()),
            size(task_may_block) = ret.ptr.size_of_vmglobal_definition(),
            align(u32::from(ret.ptr.size())),
            size(trampoline_func_refs) = cmul(ret.num_trampolines, ret.ptr.size_of_vm_func_ref()),
            size(intrinsic_func_refs) = cmul(ret.num_unsafe_intrinsics, ret.ptr.size_of_vm_func_ref()),
            size(lowerings) = cmul(ret.num_lowerings, ret.ptr.size() * 2),
            size(memories) = cmul(ret.num_runtime_memories, ret.ptr.size()),
            size(tables) = cmul(ret.num_runtime_tables, ret.size_of_vmtable_import()),
            size(reallocs) = cmul(ret.num_runtime_reallocs, ret.ptr.size()),
            size(callbacks) = cmul(ret.num_runtime_callbacks, ret.ptr.size()),
            size(post_returns) = cmul(ret.num_runtime_post_returns, ret.ptr.size()),
            size(resource_destructors) = cmul(ret.num_resources, ret.ptr.size()),
        }

        ret.size = next_field_offset;

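        // Double-check that `magic` is the first field: runtime validation of
        // raw `VMComponentContext` pointers reads the magic at offset zero.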
        assert_eq!(ret.magic, 0);

        return ret;
    }

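    /// Returns the size, in bytes, of pointers for the target.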
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

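    /// The offset of the `magic` field.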
    #[inline]
    pub fn magic(&self) -> u32 {
        self.magic
    }

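    /// The offset of the `builtins` field.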
    #[inline]
    pub fn builtins(&self) -> u32 {
        self.builtins
    }

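    /// The offset of the flags global for the component instance at `index`.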
    #[inline]
    pub fn instance_flags(&self, index: RuntimeComponentInstanceIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_component_instances);
        self.flags + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

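    /// The offset of the `task_may_block` field.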
    pub fn task_may_block(&self) -> u32 {
        self.task_may_block
    }

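    /// The offset of the `vm_store_context` field.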
    #[inline]
    pub fn vm_store_context(&self) -> u32 {
        self.vm_store_context
    }

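    /// The offset of the base of the trampoline `VMFuncRef` array.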
    #[inline]
    pub fn trampoline_func_refs(&self) -> u32 {
        self.trampoline_func_refs
    }

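    /// The offset of the `VMFuncRef` for the trampoline at `index`.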
    #[inline]
    pub fn trampoline_func_ref(&self, index: TrampolineIndex) -> u32 {
        assert!(index.as_u32() < self.num_trampolines);
        self.trampoline_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

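    /// The offset of the base of the unsafe-intrinsic `VMFuncRef` array.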
    #[inline]
    pub fn unsafe_intrinsic_func_refs(&self) -> u32 {
        self.intrinsic_func_refs
    }

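    /// The offset of the `VMFuncRef` for the given unsafe intrinsic.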
    #[inline]
    pub fn unsafe_intrinsic_func_ref(&self, intrinsic: UnsafeIntrinsic) -> u32 {
        assert!(intrinsic.index() < self.num_unsafe_intrinsics);
        self.unsafe_intrinsic_func_refs()
            + intrinsic.index() * u32::from(self.ptr.size_of_vm_func_ref())
    }

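    /// The offset of the base of the lowerings array.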
    #[inline]
    pub fn lowerings(&self) -> u32 {
        self.lowerings
    }

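    /// The offset of the lowering record for `index`; each record is a
    /// callee pointer followed by a data pointer.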
    #[inline]
    pub fn lowering(&self, index: LoweredIndex) -> u32 {
        assert!(index.as_u32() < self.num_lowerings);
        self.lowerings() + index.as_u32() * u32::from(2 * self.ptr.size())
    }

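    /// The offset of the callee pointer for the lowering at `index`.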
    #[inline]
    pub fn lowering_callee(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_callee_offset()
    }

    #[inline]
    pub fn lowering_data(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_data_offset()
    }

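    /// The size, in bytes, of one lowering record: two pointers.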
    #[inline]
    pub fn lowering_size(&self) -> u8 {
        2 * self.ptr.size()
    }

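    /// The offset of the callee pointer within a lowering record.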
    #[inline]
    pub fn lowering_callee_offset(&self) -> u32 {
        0
    }

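    /// The offset of the data pointer within a lowering record.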
    #[inline]
    pub fn lowering_data_offset(&self) -> u32 {
        u32::from(self.ptr.size())
    }

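    /// The offset of the base of the runtime memories array.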
    #[inline]
    pub fn runtime_memories(&self) -> u32 {
        self.memories
    }

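    /// The offset of the pointer to the runtime memory at `index`.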
    #[inline]
    pub fn runtime_memory(&self, index: RuntimeMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_memories);
        self.runtime_memories() + index.as_u32() * u32::from(self.ptr.size())
    }

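    /// The offset of the base of the runtime tables array.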
    #[inline]
    pub fn runtime_tables(&self) -> u32 {
        self.tables
    }

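    /// The offset of the table import for the runtime table at `index`.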
    #[inline]
    pub fn runtime_table(&self, index: RuntimeTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_tables);
        self.runtime_tables() + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

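    /// The size, in bytes, of a `VMTableImport`: three pointers.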
    #[inline]
    pub fn size_of_vmtable_import(&self) -> u8 {
        3 * self.pointer_size()
    }

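    /// The offset of the base of the runtime reallocs array.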
    #[inline]
    pub fn runtime_reallocs(&self) -> u32 {
        self.reallocs
    }

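    /// The offset of the pointer to the `realloc` function at `index`.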
    #[inline]
    pub fn runtime_realloc(&self, index: RuntimeReallocIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_reallocs);
        self.runtime_reallocs() + index.as_u32() * u32::from(self.ptr.size())
    }

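    /// The offset of the base of the runtime callbacks array.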
    #[inline]
    pub fn runtime_callbacks(&self) -> u32 {
        self.callbacks
    }

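    /// The offset of the pointer to the callback at `index`.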
    #[inline]
    pub fn runtime_callback(&self, index: RuntimeCallbackIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_callbacks);
        self.runtime_callbacks() + index.as_u32() * u32::from(self.ptr.size())
    }

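    /// The offset of the base of the runtime post-returns array.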
    #[inline]
    pub fn runtime_post_returns(&self) -> u32 {
        self.post_returns
    }

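    /// The offset of the pointer to the `post_return` function at `index`.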
    #[inline]
    pub fn runtime_post_return(&self, index: RuntimePostReturnIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_post_returns);
        self.runtime_post_returns() + index.as_u32() * u32::from(self.ptr.size())
    }

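    /// The offset of the base of the resource destructors array.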
    #[inline]
    pub fn resource_destructors(&self) -> u32 {
        self.resource_destructors
    }

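    /// The offset of the pointer to the destructor for the resource at
    /// `index`.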
    #[inline]
    pub fn resource_destructor(&self, index: ResourceIndex) -> u32 {
        assert!(index.as_u32() < self.num_resources);
        self.resource_destructors() + index.as_u32() * u32::from(self.ptr.size())
    }

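    /// The total size, in bytes, of the `VMComponentContext` described by
    /// these offsets.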
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }
}