// wasmtime_environ/component/vmcomponent_offsets.rs

use crate::PtrSize;
use crate::component::*;

/// Magic value stored at the start of a component's `VMComponentContext`,
/// used to double-check at runtime that memory is laid out as expected.
pub const VMCOMPONENT_MAGIC: u32 = u32::from_le_bytes(*b"comp");

/// Flag bit for a component instance's canonical-ABI `may_leave` state.
pub const FLAG_MAY_LEAVE: i32 = 1 << 0;

/// Flag bit for a component instance's canonical-ABI `may_enter` state.
pub const FLAG_MAY_ENTER: i32 = 1 << 1;

/// Flag bit indicating that a `post-return` call is still required for the
/// previous call into a component instance.
pub const FLAG_NEEDS_POST_RETURN: i32 = 1 << 2;
/// Runtime offsets, in bytes, of the fields of a `VMComponentContext` for a
/// specific component.
#[derive(Debug, Clone, Copy)]
pub struct VMComponentOffsets<P> {
    /// The host pointer size.
    pub ptr: P,

    /// The number of lowered functions in this component.
    pub num_lowerings: u32,
    /// The number of runtime memories recorded for this component.
    pub num_runtime_memories: u32,
    /// The number of runtime tables recorded for this component.
    pub num_runtime_tables: u32,
    /// The number of runtime `realloc` functions recorded for this component.
    pub num_runtime_reallocs: u32,
    /// The number of runtime callback functions recorded for this component.
    pub num_runtime_callbacks: u32,
    /// The number of runtime `post-return` functions recorded for this component.
    pub num_runtime_post_returns: u32,
    /// The number of component instances within this component.
    pub num_runtime_component_instances: u32,
    /// The number of trampolines in this component.
    pub num_trampolines: u32,
    /// The number of resources in this component.
    pub num_resources: u32,

    // Precomputed byte offsets of each field within the `VMComponentContext`,
    // filled in by `VMComponentOffsets::new`.
    magic: u32,
    builtins: u32,
    vm_store_context: u32,
    flags: u32,
    trampoline_func_refs: u32,
    lowerings: u32,
    memories: u32,
    tables: u32,
    reallocs: u32,
    callbacks: u32,
    post_returns: u32,
    resource_destructors: u32,
    size: u32,
}

/// Rounds `offset` up to the nearest multiple of `align`, which must be a
/// power of two.
#[inline]
fn align(offset: u32, align: u32) -> u32 {
    assert!(align.is_power_of_two());
    (offset + (align - 1)) & !(align - 1)
}

impl<P: PtrSize> VMComponentOffsets<P> {
    /// Creates a new set of offsets for the `component` provided, using the
    /// host pointer size described by `ptr`.
    pub fn new(ptr: P, component: &Component) -> Self {
        let mut ret = Self {
            ptr,
            num_lowerings: component.num_lowerings,
            num_runtime_memories: component.num_runtime_memories,
            num_runtime_tables: component.num_runtime_tables,
            num_runtime_reallocs: component.num_runtime_reallocs,
            num_runtime_callbacks: component.num_runtime_callbacks,
            num_runtime_post_returns: component.num_runtime_post_returns,
            num_runtime_component_instances: component.num_runtime_component_instances,
            num_trampolines: component.trampolines.len().try_into().unwrap(),
            num_resources: component.num_resources,
            magic: 0,
            builtins: 0,
            vm_store_context: 0,
            flags: 0,
            trampoline_func_refs: 0,
            lowerings: 0,
            memories: 0,
            tables: 0,
            reallocs: 0,
            callbacks: 0,
            post_returns: 0,
            resource_destructors: 0,
            size: 0,
        };

        // Checked multiplication of an element count by an element size.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        let mut next_field_offset = 0;

        // Lays the fields out in order: `size(field) = n` records the current
        // offset for `field` and advances by `n` bytes, while `align(n)` pads
        // the running offset up to an `n`-byte boundary.
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = next_field_offset.checked_add(u32::from($size)).unwrap();
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

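        // For example, `size(magic) = 4u32,` in the invocation below expands to:
        //
        //     ret.magic = next_field_offset;
        //     next_field_offset = next_field_offset.checked_add(u32::from(4u32)).unwrap();
        //
        // so each `size(..)` entry records a field's offset before advancing,
        // and each `align(..)` entry only inserts padding.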
        fields! {
            size(magic) = 4u32,
            align(u32::from(ret.ptr.size())),
            size(builtins) = ret.ptr.size(),
            size(vm_store_context) = ret.ptr.size(),
            align(16),
            size(flags) = cmul(ret.num_runtime_component_instances, ret.ptr.size_of_vmglobal_definition()),
            align(u32::from(ret.ptr.size())),
            size(trampoline_func_refs) = cmul(ret.num_trampolines, ret.ptr.size_of_vm_func_ref()),
            size(lowerings) = cmul(ret.num_lowerings, ret.ptr.size() * 2),
            size(memories) = cmul(ret.num_runtime_memories, ret.ptr.size()),
            size(tables) = cmul(ret.num_runtime_tables, ret.size_of_vmtable_import()),
            size(reallocs) = cmul(ret.num_runtime_reallocs, ret.ptr.size()),
            size(callbacks) = cmul(ret.num_runtime_callbacks, ret.ptr.size()),
            size(post_returns) = cmul(ret.num_runtime_post_returns, ret.ptr.size()),
            size(resource_destructors) = cmul(ret.num_resources, ret.ptr.size()),
        }

        ret.size = next_field_offset;

        // Sanity check: the `magic` field is laid out first, at offset zero.
        assert_eq!(ret.magic, 0);

        return ret;
    }
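    // Illustrative usage sketch; `HostPtr` (this crate's host pointer-size
    // type) and the `component` variable are assumptions for the example:
    //
    //     let offsets = VMComponentOffsets::new(HostPtr, &component);
    //     let flags = offsets.instance_flags(RuntimeComponentInstanceIndex::from_u32(0));
    //
    // Every accessor below returns a byte offset into the component's
    // `VMComponentContext`.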

    /// Returns the size, in bytes, of host pointers.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// The offset of the `magic` field.
    #[inline]
    pub fn magic(&self) -> u32 {
        self.magic
    }

    /// The offset of the `builtins` field.
    #[inline]
    pub fn builtins(&self) -> u32 {
        self.builtins
    }

    /// The offset of the flags for the component instance at `index`.
    #[inline]
    pub fn instance_flags(&self, index: RuntimeComponentInstanceIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_component_instances);
        self.flags + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// The offset of the `vm_store_context` field.
    #[inline]
    pub fn vm_store_context(&self) -> u32 {
        self.vm_store_context
    }

    /// The offset of the base of the trampoline `VMFuncRef` array.
    #[inline]
    pub fn trampoline_func_refs(&self) -> u32 {
        self.trampoline_func_refs
    }

    /// The offset of the `VMFuncRef` for the trampoline at `index`.
    #[inline]
    pub fn trampoline_func_ref(&self, index: TrampolineIndex) -> u32 {
        assert!(index.as_u32() < self.num_trampolines);
        self.trampoline_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the base of the lowerings array.
    #[inline]
    pub fn lowerings(&self) -> u32 {
        self.lowerings
    }

    /// The offset of the callee/data pair for the lowered function at `index`.
    #[inline]
    pub fn lowering(&self, index: LoweredIndex) -> u32 {
        assert!(index.as_u32() < self.num_lowerings);
        self.lowerings() + index.as_u32() * u32::from(2 * self.ptr.size())
    }

    /// The offset of the callee pointer for the lowering at `index`.
    #[inline]
    pub fn lowering_callee(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_callee_offset()
    }

    /// The offset of the data pointer for the lowering at `index`.
    #[inline]
    pub fn lowering_data(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_data_offset()
    }

    /// The size, in bytes, of each lowering entry (two host pointers).
    #[inline]
    pub fn lowering_size(&self) -> u8 {
        2 * self.ptr.size()
    }

    /// The offset of the callee pointer within a single lowering entry.
    #[inline]
    pub fn lowering_callee_offset(&self) -> u32 {
        0
    }

    /// The offset of the data pointer within a single lowering entry.
    #[inline]
    pub fn lowering_data_offset(&self) -> u32 {
        u32::from(self.ptr.size())
    }

    /// The offset of the base of the runtime memories array.
    #[inline]
    pub fn runtime_memories(&self) -> u32 {
        self.memories
    }

    /// The offset of the memory pointer for the runtime memory at `index`.
    #[inline]
    pub fn runtime_memory(&self, index: RuntimeMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_memories);
        self.runtime_memories() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the runtime tables array.
    #[inline]
    pub fn runtime_tables(&self) -> u32 {
        self.tables
    }

    /// The offset of the table import for the runtime table at `index`.
    #[inline]
    pub fn runtime_table(&self, index: RuntimeTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_tables);
        self.runtime_tables() + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// The size, in bytes, of each table import entry (three host pointers).
    #[inline]
    pub fn size_of_vmtable_import(&self) -> u8 {
        3 * self.pointer_size()
    }

    /// The offset of the base of the runtime `realloc` pointers array.
    #[inline]
    pub fn runtime_reallocs(&self) -> u32 {
        self.reallocs
    }

    /// The offset of the `realloc` pointer at `index`.
    #[inline]
    pub fn runtime_realloc(&self, index: RuntimeReallocIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_reallocs);
        self.runtime_reallocs() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the runtime callback pointers array.
    #[inline]
    pub fn runtime_callbacks(&self) -> u32 {
        self.callbacks
    }

    /// The offset of the callback pointer at `index`.
    #[inline]
    pub fn runtime_callback(&self, index: RuntimeCallbackIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_callbacks);
        self.runtime_callbacks() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the runtime `post-return` pointers array.
    #[inline]
    pub fn runtime_post_returns(&self) -> u32 {
        self.post_returns
    }

    /// The offset of the `post-return` pointer at `index`.
    #[inline]
    pub fn runtime_post_return(&self, index: RuntimePostReturnIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_post_returns);
        self.runtime_post_returns() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the resource destructors array.
    #[inline]
    pub fn resource_destructors(&self) -> u32 {
        self.resource_destructors
    }

    /// The offset of the destructor pointer for the resource at `index`.
    #[inline]
    pub fn resource_destructor(&self, index: ResourceIndex) -> u32 {
        assert!(index.as_u32() < self.num_resources);
        self.resource_destructors() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The total size, in bytes, of the `VMComponentContext` for this component.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }
}
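
// Minimal illustrative tests for the private `align` helper above (module and
// test names here are illustrative); exercising the offset accessors would
// additionally require constructing a `Component`.
#[cfg(test)]
mod align_tests {
    use super::align;

    #[test]
    fn rounds_up_to_power_of_two_multiples() {
        // Already-aligned offsets are returned unchanged.
        assert_eq!(align(0, 8), 0);
        assert_eq!(align(16, 16), 16);
        // Unaligned offsets round up to the next multiple of `align`.
        assert_eq!(align(1, 8), 8);
        assert_eq!(align(13, 8), 16);
        assert_eq!(align(17, 16), 32);
    }
}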