// wasmtime_environ/component/vmcomponent_offsets.rs

use crate::component::*;
use crate::PtrSize;
/// Magic value stored in the first field (offset 0) of a
/// `VMComponentContext`, used as a sanity check that a raw pointer actually
/// refers to component-context memory. The four bytes read back as `b"comp"`
/// in little-endian order.
pub const VMCOMPONENT_MAGIC: u32 = u32::from_le_bytes(*b"comp");

/// Bit 0 of a component instance's flags global.
// NOTE(review): named after the canonical ABI's `may_leave` flag —
// confirm exact semantics against the code that reads these flags.
pub const FLAG_MAY_LEAVE: i32 = 1 << 0;

/// Bit 1 of a component instance's flags global.
// NOTE(review): presumably guards re-entrance per the canonical ABI's
// `may_enter`; verify against the readers of this flag.
pub const FLAG_MAY_ENTER: i32 = 1 << 1;

/// Bit 2 of a component instance's flags global.
// NOTE(review): name suggests a `post-return` call is still outstanding;
// verify against the readers of this flag.
pub const FLAG_NEEDS_POST_RETURN: i32 = 1 << 2;
/// Byte-offset table for the fields of a `VMComponentContext`, computed for
/// a particular component shape and target pointer size.
///
/// All offsets are relative to the start of the context allocation and are
/// filled in by [`VMComponentOffsets::new`].
#[derive(Debug, Clone, Copy)]
pub struct VMComponentOffsets<P> {
    /// The pointer-size abstraction (see `PtrSize`) this layout was
    /// computed for.
    pub ptr: P,

    /// Number of lowered-function entries (callee/data pointer pairs)
    /// stored in the context.
    pub num_lowerings: u32,
    /// Number of runtime memory pointers stored in the context.
    pub num_runtime_memories: u32,
    /// Number of runtime `realloc` function pointers stored in the context.
    pub num_runtime_reallocs: u32,
    /// Number of runtime callback pointers stored in the context.
    pub num_runtime_callbacks: u32,
    /// Number of runtime `post-return` function pointers stored in the
    /// context.
    pub num_runtime_post_returns: u32,
    /// Number of component instances whose flags globals live in the
    /// context.
    pub num_runtime_component_instances: u32,
    /// Number of trampoline `VMFuncRef` entries stored in the context.
    pub num_trampolines: u32,
    /// Number of resource-destructor pointers stored in the context.
    pub num_resources: u32,

    // Byte offsets of each region within the context, laid out in this
    // order by `new`.
    /// Offset of the 32-bit magic value (always 0; asserted in `new`).
    magic: u32,
    /// Offset of the pointer-sized builtins field.
    builtins: u32,
    /// Offset of the pointer-sized limits field.
    limits: u32,
    /// Offset of the array of per-instance flags globals.
    flags: u32,
    /// Offset of the array of trampoline `VMFuncRef`s.
    trampoline_func_refs: u32,
    /// Offset of the array of lowering entries (two pointers each).
    lowerings: u32,
    /// Offset of the array of runtime memory pointers.
    memories: u32,
    /// Offset of the array of runtime `realloc` pointers.
    reallocs: u32,
    /// Offset of the array of runtime callback pointers.
    callbacks: u32,
    /// Offset of the array of runtime `post-return` pointers.
    post_returns: u32,
    /// Offset of the array of resource-destructor pointers.
    resource_destructors: u32,
    /// Total size, in bytes, of the whole context.
    size: u32,
}
75
/// Rounds `offset` up to the next multiple of `align`.
///
/// # Panics
///
/// Panics if `align` is not a power of two, or if the rounded-up value
/// would overflow `u32`.
#[inline]
fn align(offset: u32, align: u32) -> u32 {
    assert!(align.is_power_of_two());
    // Checked addition keeps this consistent with the overflow-checked
    // arithmetic used throughout `VMComponentOffsets::new`
    // (`checked_add`/`checked_mul`): a near-`u32::MAX` offset panics
    // instead of silently wrapping in release builds.
    offset.checked_add(align - 1).unwrap() & !(align - 1)
}
81
impl<P: PtrSize> VMComponentOffsets<P> {
    /// Creates a new offset table for `component` with pointer size `ptr`.
    ///
    /// Field counts are copied out of `component`, then each region's byte
    /// offset is assigned by laying the regions out in declaration order
    /// with alignment padding inserted where required. All arithmetic is
    /// overflow-checked and panics on overflow.
    pub fn new(ptr: P, component: &Component) -> Self {
        // Start with all offsets zeroed; the `fields!` invocation below
        // fills them in.
        let mut ret = Self {
            ptr,
            num_lowerings: component.num_lowerings,
            num_runtime_memories: component.num_runtime_memories.try_into().unwrap(),
            num_runtime_reallocs: component.num_runtime_reallocs.try_into().unwrap(),
            num_runtime_callbacks: component.num_runtime_callbacks.try_into().unwrap(),
            num_runtime_post_returns: component.num_runtime_post_returns.try_into().unwrap(),
            num_runtime_component_instances: component
                .num_runtime_component_instances
                .try_into()
                .unwrap(),
            num_trampolines: component.trampolines.len().try_into().unwrap(),
            num_resources: component.num_resources,
            magic: 0,
            builtins: 0,
            limits: 0,
            flags: 0,
            trampoline_func_refs: 0,
            lowerings: 0,
            memories: 0,
            reallocs: 0,
            callbacks: 0,
            post_returns: 0,
            resource_destructors: 0,
            size: 0,
        };

        // `count * size`, panicking on overflow rather than silently
        // wrapping.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        let mut next_field_offset = 0;

        // Tiny layout DSL: `size(field) = n` records the current offset
        // into `ret.field` and advances the cursor by `n` bytes;
        // `align(n)` rounds the cursor up to an `n`-byte boundary.
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = next_field_offset.checked_add(u32::from($size)).unwrap();
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        fields! {
            size(magic) = 4u32,
            align(u32::from(ret.ptr.size())),
            size(builtins) = ret.ptr.size(),
            size(limits) = ret.ptr.size(),
            // NOTE(review): 16-byte alignment before the flags area —
            // presumably required by `VMGlobalDefinition`'s alignment;
            // confirm against that type's definition.
            align(16),
            size(flags) = cmul(ret.num_runtime_component_instances, ret.ptr.size_of_vmglobal_definition()),
            align(u32::from(ret.ptr.size())),
            size(trampoline_func_refs) = cmul(ret.num_trampolines, ret.ptr.size_of_vm_func_ref()),
            // Each lowering entry is two pointers: callee then data (see
            // `lowering_callee_offset` / `lowering_data_offset`).
            size(lowerings) = cmul(ret.num_lowerings, ret.ptr.size() * 2),
            size(memories) = cmul(ret.num_runtime_memories, ret.ptr.size()),
            size(reallocs) = cmul(ret.num_runtime_reallocs, ret.ptr.size()),
            size(callbacks) = cmul(ret.num_runtime_callbacks, ret.ptr.size()),
            size(post_returns) = cmul(ret.num_runtime_post_returns, ret.ptr.size()),
            size(resource_destructors) = cmul(ret.num_resources, ret.ptr.size()),
        }

        ret.size = next_field_offset;

        // The magic value must be the very first field so a raw context
        // pointer can be validated without knowing the rest of the layout.
        assert_eq!(ret.magic, 0);

        return ret;
    }

    /// Returns the pointer size, in bytes, this layout was computed for.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// Byte offset of the 32-bit magic value (always 0).
    #[inline]
    pub fn magic(&self) -> u32 {
        self.magic
    }

    /// Byte offset of the pointer-sized builtins field.
    #[inline]
    pub fn builtins(&self) -> u32 {
        self.builtins
    }

    /// Byte offset of the flags global for component instance `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn instance_flags(&self, index: RuntimeComponentInstanceIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_component_instances);
        self.flags + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// Byte offset of the pointer-sized limits field.
    #[inline]
    pub fn limits(&self) -> u32 {
        self.limits
    }

    /// Byte offset of the start of the trampoline `VMFuncRef` array.
    #[inline]
    pub fn trampoline_func_refs(&self) -> u32 {
        self.trampoline_func_refs
    }

    /// Byte offset of the `VMFuncRef` for trampoline `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn trampoline_func_ref(&self, index: TrampolineIndex) -> u32 {
        assert!(index.as_u32() < self.num_trampolines);
        self.trampoline_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// Byte offset of the start of the lowerings array.
    #[inline]
    pub fn lowerings(&self) -> u32 {
        self.lowerings
    }

    /// Byte offset of the two-pointer entry for lowering `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn lowering(&self, index: LoweredIndex) -> u32 {
        assert!(index.as_u32() < self.num_lowerings);
        self.lowerings() + index.as_u32() * u32::from(2 * self.ptr.size())
    }

    /// Byte offset of the callee pointer within lowering `index`.
    #[inline]
    pub fn lowering_callee(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_callee_offset()
    }

    /// Byte offset of the data pointer within lowering `index`.
    #[inline]
    pub fn lowering_data(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_data_offset()
    }

    /// Size, in bytes, of one lowering entry (two pointers).
    #[inline]
    pub fn lowering_size(&self) -> u8 {
        2 * self.ptr.size()
    }

    /// Offset of the callee pointer relative to the start of a lowering
    /// entry (it comes first).
    #[inline]
    pub fn lowering_callee_offset(&self) -> u32 {
        0
    }

    /// Offset of the data pointer relative to the start of a lowering
    /// entry (it follows the callee pointer).
    #[inline]
    pub fn lowering_data_offset(&self) -> u32 {
        u32::from(self.ptr.size())
    }

    /// Byte offset of the start of the runtime-memory pointer array.
    #[inline]
    pub fn runtime_memories(&self) -> u32 {
        self.memories
    }

    /// Byte offset of the memory pointer for `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn runtime_memory(&self, index: RuntimeMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_memories);
        self.runtime_memories() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Byte offset of the start of the `realloc` pointer array.
    #[inline]
    pub fn runtime_reallocs(&self) -> u32 {
        self.reallocs
    }

    /// Byte offset of the `realloc` pointer for `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn runtime_realloc(&self, index: RuntimeReallocIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_reallocs);
        self.runtime_reallocs() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Byte offset of the start of the callback pointer array.
    #[inline]
    pub fn runtime_callbacks(&self) -> u32 {
        self.callbacks
    }

    /// Byte offset of the callback pointer for `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn runtime_callback(&self, index: RuntimeCallbackIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_callbacks);
        self.runtime_callbacks() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Byte offset of the start of the `post-return` pointer array.
    #[inline]
    pub fn runtime_post_returns(&self) -> u32 {
        self.post_returns
    }

    /// Byte offset of the `post-return` pointer for `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn runtime_post_return(&self, index: RuntimePostReturnIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_post_returns);
        self.runtime_post_returns() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Byte offset of the start of the resource-destructor pointer array.
    #[inline]
    pub fn resource_destructors(&self) -> u32 {
        self.resource_destructors
    }

    /// Byte offset of the destructor pointer for resource `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn resource_destructor(&self, index: ResourceIndex) -> u32 {
        assert!(index.as_u32() < self.num_resources);
        self.resource_destructors() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Total size, in bytes, of the whole `VMComponentContext`.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }
}