// wasmtime_environ/component/vmcomponent_offsets.rs

use crate::component::*;
use crate::PtrSize;
/// Magic value stored in each component context: the bytes `b"comp"`
/// interpreted as a little-endian `u32`.
pub const VMCOMPONENT_MAGIC: u32 = u32::from_le_bytes(*b"comp");

// NOTE(review): the three flag bits below mirror the component-model
// canonical ABI instance flags ("may_leave", "may_enter",
// "needs_post_return") — the runtime code that reads them is not visible
// in this file; confirm semantics against the consumers of
// `instance_flags` before relying on these descriptions.

/// Flag bit 0 of a component instance's flags word.
pub const FLAG_MAY_LEAVE: i32 = 1 << 0;

/// Flag bit 1 of a component instance's flags word.
pub const FLAG_MAY_ENTER: i32 = 1 << 1;

/// Flag bit 2 of a component instance's flags word.
pub const FLAG_NEEDS_POST_RETURN: i32 = 1 << 2;
/// Byte offsets of the fields within the runtime VM component context
/// structure, computed for one component at one pointer size `P`.
///
/// Instances are built by [`VMComponentOffsets::new`], which lays out each
/// region in declaration order. The private `u32` fields hold the computed
/// byte offset of each region and are exposed through the accessor methods
/// on the `impl` block.
#[derive(Debug, Clone, Copy)]
pub struct VMComponentOffsets<P> {
    /// The pointer-size abstraction (via `PtrSize`) used to size all
    /// pointer-valued fields below.
    pub ptr: P,

    /// Number of lowered functions (copied from `Component::num_lowerings`).
    pub num_lowerings: u32,
    /// Number of runtime memories in the component.
    pub num_runtime_memories: u32,
    /// Number of runtime tables in the component.
    pub num_runtime_tables: u32,
    /// Number of runtime `realloc` functions in the component.
    pub num_runtime_reallocs: u32,
    /// Number of runtime callback functions in the component.
    pub num_runtime_callbacks: u32,
    /// Number of runtime `post-return` functions in the component.
    pub num_runtime_post_returns: u32,
    /// Number of runtime component instances.
    pub num_runtime_component_instances: u32,
    /// Number of trampolines (from `Component::trampolines.len()`).
    pub num_trampolines: u32,
    /// Number of resources (copied from `Component::num_resources`).
    pub num_resources: u32,

    // Computed byte offsets of each region, filled in by `new`. The order
    // here matches the layout order used by the `fields!` invocation.
    magic: u32,
    builtins: u32,
    vm_store_context: u32,
    flags: u32,
    trampoline_func_refs: u32,
    lowerings: u32,
    memories: u32,
    tables: u32,
    reallocs: u32,
    callbacks: u32,
    post_returns: u32,
    resource_destructors: u32,
    // Total size in bytes of the whole structure.
    size: u32,
}
/// Rounds `offset` up to the nearest multiple of `align`.
///
/// # Panics
///
/// Panics if `align` is not a power of two, or if the rounded-up value
/// would overflow `u32`. The overflow check uses `checked_add` so that it
/// fires in release builds too, consistent with the `checked_add` /
/// `checked_mul` discipline used by `VMComponentOffsets::new` (the
/// original `offset + (align - 1)` would wrap silently in release).
#[inline]
fn align(offset: u32, align: u32) -> u32 {
    assert!(align.is_power_of_two());
    // For a power of two, `align - 1` is a mask of the low bits: adding it
    // and then clearing those bits rounds `offset` up to the boundary.
    offset.checked_add(align - 1).unwrap() & !(align - 1)
}
impl<P: PtrSize> VMComponentOffsets<P> {
    /// Returns a new `VMComponentOffsets` for the given pointer size and
    /// `component`, computing the byte offset of every region.
    ///
    /// Layout is performed in the order of the `fields!` invocation below;
    /// all arithmetic is checked, so an oversized component panics rather
    /// than silently wrapping.
    pub fn new(ptr: P, component: &Component) -> Self {
        // Copy the counts out of `component`, narrowing each to `u32` with
        // `try_into().unwrap()` so an out-of-range count panics instead of
        // truncating. All offset fields start at zero and are filled in by
        // the `fields!` invocation below.
        let mut ret = Self {
            ptr,
            num_lowerings: component.num_lowerings,
            num_runtime_memories: component.num_runtime_memories.try_into().unwrap(),
            num_runtime_tables: component.num_runtime_tables.try_into().unwrap(),
            num_runtime_reallocs: component.num_runtime_reallocs.try_into().unwrap(),
            num_runtime_callbacks: component.num_runtime_callbacks.try_into().unwrap(),
            num_runtime_post_returns: component.num_runtime_post_returns.try_into().unwrap(),
            num_runtime_component_instances: component
                .num_runtime_component_instances
                .try_into()
                .unwrap(),
            num_trampolines: component.trampolines.len().try_into().unwrap(),
            num_resources: component.num_resources,
            magic: 0,
            builtins: 0,
            vm_store_context: 0,
            flags: 0,
            trampoline_func_refs: 0,
            lowerings: 0,
            memories: 0,
            tables: 0,
            reallocs: 0,
            callbacks: 0,
            post_returns: 0,
            resource_destructors: 0,
            size: 0,
        };

        /// Returns `count * size` in bytes, panicking on `u32` overflow.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        let mut next_field_offset = 0;

        // Tiny layout DSL: `size(field) = n` records the running offset
        // into `ret.field` and then advances it by `n` bytes (checked);
        // `align(n)` rounds the running offset up to an `n`-byte boundary.
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = next_field_offset.checked_add(u32::from($size)).unwrap();
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        fields! {
            size(magic) = 4u32,
            align(u32::from(ret.ptr.size())),
            size(builtins) = ret.ptr.size(),
            size(vm_store_context) = ret.ptr.size(),
            // The flags region is 16-byte aligned; each component instance
            // gets one `VMGlobalDefinition`-sized slot.
            align(16),
            size(flags) = cmul(ret.num_runtime_component_instances, ret.ptr.size_of_vmglobal_definition()),
            align(u32::from(ret.ptr.size())),
            size(trampoline_func_refs) = cmul(ret.num_trampolines, ret.ptr.size_of_vm_func_ref()),
            // Each lowering is two pointers wide (callee + data); see
            // `lowering_callee_offset` / `lowering_data_offset` below.
            size(lowerings) = cmul(ret.num_lowerings, ret.ptr.size() * 2),
            size(memories) = cmul(ret.num_runtime_memories, ret.ptr.size()),
            size(tables) = cmul(ret.num_runtime_tables, ret.size_of_vmtable()),
            size(reallocs) = cmul(ret.num_runtime_reallocs, ret.ptr.size()),
            size(callbacks) = cmul(ret.num_runtime_callbacks, ret.ptr.size()),
            size(post_returns) = cmul(ret.num_runtime_post_returns, ret.ptr.size()),
            size(resource_destructors) = cmul(ret.num_resources, ret.ptr.size()),
        }

        ret.size = next_field_offset;

        // Sanity check of the invariant that `magic` is the first field,
        // at offset zero.
        assert_eq!(ret.magic, 0);

        return ret;
    }

    /// Returns the pointer size, in bytes, configured by `P`.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// Byte offset of the magic value (always zero; asserted in `new`).
    #[inline]
    pub fn magic(&self) -> u32 {
        self.magic
    }

    /// Byte offset of the pointer-sized `builtins` field.
    #[inline]
    pub fn builtins(&self) -> u32 {
        self.builtins
    }

    /// Byte offset of the flags slot for the component instance `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn instance_flags(&self, index: RuntimeComponentInstanceIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_component_instances);
        self.flags + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// Byte offset of the pointer-sized `vm_store_context` field.
    #[inline]
    pub fn vm_store_context(&self) -> u32 {
        self.vm_store_context
    }

    /// Byte offset of the start of the trampoline func-ref array.
    #[inline]
    pub fn trampoline_func_refs(&self) -> u32 {
        self.trampoline_func_refs
    }

    /// Byte offset of the func-ref for trampoline `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn trampoline_func_ref(&self, index: TrampolineIndex) -> u32 {
        assert!(index.as_u32() < self.num_trampolines);
        self.trampoline_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// Byte offset of the start of the lowerings array.
    #[inline]
    pub fn lowerings(&self) -> u32 {
        self.lowerings
    }

    /// Byte offset of the lowering entry for `index` (each entry is two
    /// pointers wide; see `lowering_size`).
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn lowering(&self, index: LoweredIndex) -> u32 {
        assert!(index.as_u32() < self.num_lowerings);
        self.lowerings() + index.as_u32() * u32::from(2 * self.ptr.size())
    }

    /// Byte offset of the callee pointer within lowering `index`.
    #[inline]
    pub fn lowering_callee(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_callee_offset()
    }

    /// Byte offset of the data pointer within lowering `index`.
    #[inline]
    pub fn lowering_data(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_data_offset()
    }

    /// Size in bytes of one lowering entry: two pointers.
    #[inline]
    pub fn lowering_size(&self) -> u8 {
        2 * self.ptr.size()
    }

    /// Offset of the callee pointer relative to the start of a lowering
    /// entry (it's the first of the two pointers).
    #[inline]
    pub fn lowering_callee_offset(&self) -> u32 {
        0
    }

    /// Offset of the data pointer relative to the start of a lowering
    /// entry (it's the second of the two pointers).
    #[inline]
    pub fn lowering_data_offset(&self) -> u32 {
        u32::from(self.ptr.size())
    }

    /// Byte offset of the start of the runtime-memories pointer array.
    #[inline]
    pub fn runtime_memories(&self) -> u32 {
        self.memories
    }

    /// Byte offset of the pointer for runtime memory `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn runtime_memory(&self, index: RuntimeMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_memories);
        self.runtime_memories() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Byte offset of the start of the runtime-tables array.
    #[inline]
    pub fn runtime_tables(&self) -> u32 {
        self.tables
    }

    /// Byte offset of the entry for runtime table `index` (each entry is
    /// `size_of_vmtable` bytes).
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn runtime_table(&self, index: RuntimeTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_tables);
        self.runtime_tables() + index.as_u32() * u32::from(self.size_of_vmtable())
    }

    /// Size in bytes of one runtime-table entry: two pointers.
    #[inline]
    pub fn size_of_vmtable(&self) -> u8 {
        2 * self.pointer_size()
    }

    /// Byte offset of the start of the runtime-reallocs pointer array.
    #[inline]
    pub fn runtime_reallocs(&self) -> u32 {
        self.reallocs
    }

    /// Byte offset of the pointer for runtime realloc `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn runtime_realloc(&self, index: RuntimeReallocIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_reallocs);
        self.runtime_reallocs() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Byte offset of the start of the runtime-callbacks pointer array.
    #[inline]
    pub fn runtime_callbacks(&self) -> u32 {
        self.callbacks
    }

    /// Byte offset of the pointer for runtime callback `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn runtime_callback(&self, index: RuntimeCallbackIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_callbacks);
        self.runtime_callbacks() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Byte offset of the start of the runtime-post-returns pointer array.
    #[inline]
    pub fn runtime_post_returns(&self) -> u32 {
        self.post_returns
    }

    /// Byte offset of the pointer for runtime post-return `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn runtime_post_return(&self, index: RuntimePostReturnIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_post_returns);
        self.runtime_post_returns() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Byte offset of the start of the resource-destructors pointer array.
    #[inline]
    pub fn resource_destructors(&self) -> u32 {
        self.resource_destructors
    }

    /// Byte offset of the destructor pointer for resource `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn resource_destructor(&self, index: ResourceIndex) -> u32 {
        assert!(index.as_u32() < self.num_resources);
        self.resource_destructors() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Total size in bytes of the laid-out structure.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }
}