wasmtime_environ/component/vmcomponent_offsets.rs

use crate::PtrSize;
use crate::component::*;

/// Magic value stored at the start of every `VMComponentContext`; the
/// component analogue of `VMCONTEXT_MAGIC`.
pub const VMCOMPONENT_MAGIC: u32 = u32::from_le_bytes(*b"comp");

/// Bit of a component instance's `flags` field corresponding to the
/// canonical ABI `may_leave` flag.
pub const FLAG_MAY_LEAVE: i32 = 1 << 0;

/// Bit of a component instance's `flags` field corresponding to the
/// canonical ABI `may_enter` flag.
pub const FLAG_MAY_ENTER: i32 = 1 << 1;

/// Bit of a component instance's `flags` field indicating that
/// `post_return` must be called before the instance may be entered again.
pub const FLAG_NEEDS_POST_RETURN: i32 = 1 << 2;
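
// For illustration, a runtime that has loaded an instance's flags into an
// `i32` (hypothetical `flags` variable; this module only defines the bit
// values) would test them as:
//
//     let may_enter = flags & FLAG_MAY_ENTER != 0;
//     let needs_post_return = flags & FLAG_NEEDS_POST_RETURN != 0;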

/// Byte offsets of the fields of a `VMComponentContext` for one particular
/// component.
#[derive(Debug, Clone, Copy)]
pub struct VMComponentOffsets<P> {
    /// The host pointer size.
    pub ptr: P,

    // Counts of the runtime entities in the component; these determine the
    // lengths of the arrays laid out in the context.
    pub num_lowerings: u32,
    pub num_runtime_memories: u32,
    pub num_runtime_tables: u32,
    pub num_runtime_reallocs: u32,
    pub num_runtime_callbacks: u32,
    pub num_runtime_post_returns: u32,
    pub num_runtime_component_instances: u32,
    pub num_trampolines: u32,
    pub num_unsafe_intrinsics: u32,
    pub num_resources: u32,

    // Precomputed byte offsets of each field, filled in by `new` below;
    // `size` is the total size of the context.
    magic: u32,
    builtins: u32,
    vm_store_context: u32,
    flags: u32,
    trampoline_func_refs: u32,
    intrinsic_func_refs: u32,
    lowerings: u32,
    memories: u32,
    tables: u32,
    reallocs: u32,
    callbacks: u32,
    post_returns: u32,
    resource_destructors: u32,
    size: u32,
}
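
// A sketch of the layout these offsets describe; element types are
// illustrative (each entry is pointer-sized unless noted), and the exact
// order and alignment are established by `new` below:
//
//     struct VMComponentContext {
//         magic: u32,
//         builtins: <pointer>,            // pointer-aligned
//         vm_store_context: <pointer>,
//         flags: [VMGlobalDefinition; num_runtime_component_instances], // 16-byte aligned
//         trampoline_func_refs: [VMFuncRef; num_trampolines],
//         intrinsic_func_refs: [VMFuncRef; num_unsafe_intrinsics],
//         lowerings: [<two pointers>; num_lowerings],
//         memories: [<pointer>; num_runtime_memories],
//         tables: [<three pointers>; num_runtime_tables],
//         reallocs: [<pointer>; num_runtime_reallocs],
//         callbacks: [<pointer>; num_runtime_callbacks],
//         post_returns: [<pointer>; num_runtime_post_returns],
//         resource_destructors: [<pointer>; num_resources],
//     }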

/// Rounds `offset` up to the nearest multiple of `align`, which must be a
/// power of two.
#[inline]
fn align(offset: u32, align: u32) -> u32 {
    assert!(align.is_power_of_two());
    (offset + (align - 1)) & !(align - 1)
}
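
// For example: align(5, 8) == 8, align(8, 8) == 8, and align(9, 8) == 16.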

impl<P: PtrSize> VMComponentOffsets<P> {
    /// Returns a new set of offsets for `component`, where `ptr` supplies
    /// the host pointer size.
    pub fn new(ptr: P, component: &Component) -> Self {
        let mut ret = Self {
            ptr,
            num_lowerings: component.num_lowerings,
            num_runtime_memories: component.num_runtime_memories,
            num_runtime_tables: component.num_runtime_tables,
            num_runtime_reallocs: component.num_runtime_reallocs,
            num_runtime_callbacks: component.num_runtime_callbacks,
            num_runtime_post_returns: component.num_runtime_post_returns,
            num_runtime_component_instances: component.num_runtime_component_instances,
            num_trampolines: component.trampolines.len().try_into().unwrap(),
            // One past the index of the last intrinsic that's present, or
            // zero if none are present.
            num_unsafe_intrinsics: if let Some(i) = component
                .unsafe_intrinsics
                .iter()
                .rposition(|x| x.is_some())
            {
                u32::try_from(i + 1).unwrap()
            } else {
                0
            },
            num_resources: component.num_resources,
            // All offsets start at zero and are assigned below.
            magic: 0,
            builtins: 0,
            vm_store_context: 0,
            flags: 0,
            trampoline_func_refs: 0,
            intrinsic_func_refs: 0,
            lowerings: 0,
            memories: 0,
            tables: 0,
            reallocs: 0,
            callbacks: 0,
            post_returns: 0,
            resource_destructors: 0,
            size: 0,
        };
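
        // With the counts recorded, the remainder of this function computes
        // each field's offset by accumulating sizes in layout order.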

        // Multiplies `count` by `size` (a byte count), panicking on
        // overflow.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        let mut next_field_offset = 0;

        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = next_field_offset.checked_add(u32::from($size)).unwrap();
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }
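
        // In the invocation below, `size(field) = n` records the running
        // offset in `ret.field` and then advances it by `n` bytes, while
        // `align(n)` pads the running offset up to a multiple of `n`.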

        fields! {
            size(magic) = 4u32,
            align(u32::from(ret.ptr.size())),
            size(builtins) = ret.ptr.size(),
            size(vm_store_context) = ret.ptr.size(),
            align(16),
            size(flags) = cmul(ret.num_runtime_component_instances, ret.ptr.size_of_vmglobal_definition()),
            align(u32::from(ret.ptr.size())),
            size(trampoline_func_refs) = cmul(ret.num_trampolines, ret.ptr.size_of_vm_func_ref()),
            size(intrinsic_func_refs) = cmul(ret.num_unsafe_intrinsics, ret.ptr.size_of_vm_func_ref()),
            size(lowerings) = cmul(ret.num_lowerings, ret.ptr.size() * 2),
            size(memories) = cmul(ret.num_runtime_memories, ret.ptr.size()),
            size(tables) = cmul(ret.num_runtime_tables, ret.size_of_vmtable_import()),
            size(reallocs) = cmul(ret.num_runtime_reallocs, ret.ptr.size()),
            size(callbacks) = cmul(ret.num_runtime_callbacks, ret.ptr.size()),
            size(post_returns) = cmul(ret.num_runtime_post_returns, ret.ptr.size()),
            size(resource_destructors) = cmul(ret.num_resources, ret.ptr.size()),
        }

        ret.size = next_field_offset;

        // The `magic` field must be at offset zero so it can be checked
        // before anything else about the context's layout is known.
        assert_eq!(ret.magic, 0);

        return ret;
    }

    /// The size, in bytes, of a host pointer.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// The offset of the `magic` field.
    #[inline]
    pub fn magic(&self) -> u32 {
        self.magic
    }

    /// The offset of the `builtins` field.
    #[inline]
    pub fn builtins(&self) -> u32 {
        self.builtins
    }
214
215 #[inline]
217 pub fn instance_flags(&self, index: RuntimeComponentInstanceIndex) -> u32 {
218 assert!(index.as_u32() < self.num_runtime_component_instances);
219 self.flags + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
220 }
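
    // For example, if `size_of_vmglobal_definition()` returned 16, the
    // flags for instance 2 would live at byte offset `self.flags + 32`.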

    /// The offset of the `vm_store_context` field.
    #[inline]
    pub fn vm_store_context(&self) -> u32 {
        self.vm_store_context
    }

    /// The offset of the start of the `trampoline_func_refs` array.
    #[inline]
    pub fn trampoline_func_refs(&self) -> u32 {
        self.trampoline_func_refs
    }

    /// The offset of the `VMFuncRef` for the trampoline at `index`.
    #[inline]
    pub fn trampoline_func_ref(&self, index: TrampolineIndex) -> u32 {
        assert!(index.as_u32() < self.num_trampolines);
        self.trampoline_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the start of the `intrinsic_func_refs` array.
    #[inline]
    pub fn unsafe_intrinsic_func_refs(&self) -> u32 {
        self.intrinsic_func_refs
    }

    /// The offset of the `VMFuncRef` for `intrinsic`.
    #[inline]
    pub fn unsafe_intrinsic_func_ref(&self, intrinsic: UnsafeIntrinsic) -> u32 {
        assert!(intrinsic.index() < self.num_unsafe_intrinsics);
        self.unsafe_intrinsic_func_refs()
            + intrinsic.index() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the start of the `lowerings` array.
    #[inline]
    pub fn lowerings(&self) -> u32 {
        self.lowerings
    }

    /// The offset of the lowering at `index`.
    #[inline]
    pub fn lowering(&self, index: LoweredIndex) -> u32 {
        assert!(index.as_u32() < self.num_lowerings);
        self.lowerings() + index.as_u32() * u32::from(2 * self.ptr.size())
    }

    /// The offset of the callee pointer of the lowering at `index`.
    #[inline]
    pub fn lowering_callee(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_callee_offset()
    }

    /// The offset of the data pointer of the lowering at `index`.
    #[inline]
    pub fn lowering_data(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_data_offset()
    }

    /// The size, in bytes, of one lowering: a callee/data pointer pair.
    #[inline]
    pub fn lowering_size(&self) -> u8 {
        2 * self.ptr.size()
    }

    /// The offset of the callee pointer within one lowering.
    #[inline]
    pub fn lowering_callee_offset(&self) -> u32 {
        0
    }

    /// The offset of the data pointer within one lowering.
    #[inline]
    pub fn lowering_data_offset(&self) -> u32 {
        u32::from(self.ptr.size())
    }

    /// The offset of the start of the `memories` array.
    #[inline]
    pub fn runtime_memories(&self) -> u32 {
        self.memories
    }

    /// The offset of the memory pointer for the runtime memory at `index`.
    #[inline]
    pub fn runtime_memory(&self, index: RuntimeMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_memories);
        self.runtime_memories() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the start of the `tables` array.
    #[inline]
    pub fn runtime_tables(&self) -> u32 {
        self.tables
    }

    /// The offset of the table import for the runtime table at `index`.
    #[inline]
    pub fn runtime_table(&self, index: RuntimeTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_tables);
        self.runtime_tables() + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// The size, in bytes, of one element of the `tables` array: three
    /// pointer-sized fields.
    #[inline]
    pub fn size_of_vmtable_import(&self) -> u8 {
        3 * self.pointer_size()
    }

    /// The offset of the start of the `reallocs` array.
    #[inline]
    pub fn runtime_reallocs(&self) -> u32 {
        self.reallocs
    }

    /// The offset of the realloc pointer for the runtime realloc at `index`.
    #[inline]
    pub fn runtime_realloc(&self, index: RuntimeReallocIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_reallocs);
        self.runtime_reallocs() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the start of the `callbacks` array.
    #[inline]
    pub fn runtime_callbacks(&self) -> u32 {
        self.callbacks
    }

    /// The offset of the callback pointer for the runtime callback at `index`.
    #[inline]
    pub fn runtime_callback(&self, index: RuntimeCallbackIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_callbacks);
        self.runtime_callbacks() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the start of the `post_returns` array.
    #[inline]
    pub fn runtime_post_returns(&self) -> u32 {
        self.post_returns
    }

    /// The offset of the post-return pointer for the runtime post-return at
    /// `index`.
    #[inline]
    pub fn runtime_post_return(&self, index: RuntimePostReturnIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_post_returns);
        self.runtime_post_returns() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the start of the `resource_destructors` array.
    #[inline]
    pub fn resource_destructors(&self) -> u32 {
        self.resource_destructors
    }

    /// The offset of the destructor pointer for the resource at `index`.
    #[inline]
    pub fn resource_destructor(&self, index: ResourceIndex) -> u32 {
        assert!(index.as_u32() < self.num_resources);
        self.resource_destructors() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The total size, in bytes, of the `VMComponentContext` for this
    /// component.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }
}
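
// Illustrative use of these offsets (hypothetical `ptr_size` and `component`
// values; this module only computes offsets and performs no memory access):
//
//     let offsets = VMComponentOffsets::new(ptr_size, &component);
//     // A code generator reaches the `idx`th runtime memory pointer with a
//     // load at `vmctx + offsets.runtime_memory(idx)`.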