
wasmtime_environ/component/vmcomponent_offsets.rs

// Currently the `VMComponentContext` allocation by field looks like this:
//
// struct VMComponentContext {
//      magic: u32,
//      builtins: &'static VMComponentBuiltins,
//      vm_store_context: *const VMStoreContext,
//      flags: [VMGlobalDefinition; component.num_runtime_component_instances],
//      task_may_block: u32,
//      trampoline_func_refs: [VMFuncRef; component.num_trampolines],
//      unsafe_intrinsics: [VMFuncRef; component.num_unsafe_intrinsics],
//      lowerings: [VMLowering; component.num_lowerings],
//      memories: [*mut VMMemoryDefinition; component.num_runtime_memories],
//      tables: [VMTable; component.num_runtime_tables],
//      reallocs: [*mut VMFuncRef; component.num_runtime_reallocs],
//      callbacks: [*mut VMFuncRef; component.num_runtime_callbacks],
//      post_returns: [*mut VMFuncRef; component.num_runtime_post_returns],
//      resource_destructors: [*mut VMFuncRef; component.num_resources],
// }

use crate::PtrSize;
use crate::component::*;

/// Equivalent of `VMCONTEXT_MAGIC` except for components.
///
/// This is stored at the start of all `VMComponentContext` structures and
/// double-checked on `VMComponentContext::from_opaque`.
pub const VMCOMPONENT_MAGIC: u32 = u32::from_le_bytes(*b"comp");
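// For reference, the constant above works out to `0x706d_6f63`: the byte
// string `b"comp"` is interpreted as little-endian, so the resulting `u32`
// value is the same on every host.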

/// Flag for the `VMComponentContext::flags` field which corresponds to the
/// canonical ABI flag `may_leave`.
pub const FLAG_MAY_LEAVE: i32 = 1 << 0;
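
// A minimal sketch of how generated code might consult this flag, assuming
// the per-instance `VMGlobalDefinition` at `instance_flags(i)` holds an
// `i32` (`load_i32` is a hypothetical helper, not part of this module):
//
//     let flags = load_i32(vmctx, offsets.instance_flags(i));
//     if flags & FLAG_MAY_LEAVE == 0 {
//         // per the canonical ABI, leaving the instance while `may_leave`
//         // is unset is a trap
//     }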

/// Runtime offsets within a `VMComponentContext` for a specific component.
#[derive(Debug, Clone, Copy)]
pub struct VMComponentOffsets<P> {
    /// The host pointer size
    pub ptr: P,

    /// The number of lowered functions this component will be creating.
    pub num_lowerings: u32,
    /// The number of memories which are recorded in this component for options.
    pub num_runtime_memories: u32,
    /// The number of tables which are recorded in this component for options.
    pub num_runtime_tables: u32,
    /// The number of reallocs which are recorded in this component for options.
    pub num_runtime_reallocs: u32,
    /// The number of callbacks which are recorded in this component for options.
    pub num_runtime_callbacks: u32,
    /// The number of post-returns which are recorded in this component for options.
    pub num_runtime_post_returns: u32,
    /// Number of component instances internally in the component (always at
    /// least 1).
    pub num_runtime_component_instances: u32,
    /// Number of cranelift-compiled trampolines required for this component.
    pub num_trampolines: u32,
    /// Number of `VMFuncRef`s for unsafe intrinsics within this component's
    /// context.
    pub num_unsafe_intrinsics: u32,
    /// Number of resources within a component which need destructors stored.
    pub num_resources: u32,

    // precalculated offsets of various member fields
    magic: u32,
    builtins: u32,
    vm_store_context: u32,
    flags: u32,
    task_may_block: u32,
    trampoline_func_refs: u32,
    intrinsic_func_refs: u32,
    lowerings: u32,
    memories: u32,
    tables: u32,
    reallocs: u32,
    callbacks: u32,
    post_returns: u32,
    resource_destructors: u32,
    size: u32,
}
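
// A hedged usage sketch: one `VMComponentOffsets` is computed per compiled
// component and then queried while emitting code that indexes into the
// vmctx. `HostPtr` is this crate's host-native `PtrSize` implementation;
// `component`, `vmctx`, and `load_ptr` are hypothetical stand-ins:
//
//     let offsets = VMComponentOffsets::new(HostPtr, &component);
//     let memory = load_ptr(vmctx, offsets.runtime_memory(index));
//     let vmctx_size = offsets.size_of_vmctx();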

#[inline]
fn align(offset: u32, align: u32) -> u32 {
    assert!(align.is_power_of_two());
    (offset + (align - 1)) & !(align - 1)
}
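
// Worked examples of the rounding above (the mask trick rounds `offset` up
// to the next multiple of `align`):
//
//     assert_eq!(align(0, 8), 0);
//     assert_eq!(align(1, 8), 8);
//     assert_eq!(align(9, 8), 16);
//     assert_eq!(align(12, 16), 16);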

impl<P: PtrSize> VMComponentOffsets<P> {
    /// Creates a new set of offsets for the `component` specified, configured
    /// additionally for the `ptr` size specified.
    pub fn new(ptr: P, component: &Component) -> Self {
        let mut ret = Self {
            ptr,
            num_lowerings: component.num_lowerings,
            num_runtime_memories: component.num_runtime_memories,
            num_runtime_tables: component.num_runtime_tables,
            num_runtime_reallocs: component.num_runtime_reallocs,
            num_runtime_callbacks: component.num_runtime_callbacks,
            num_runtime_post_returns: component.num_runtime_post_returns,
            num_runtime_component_instances: component.num_runtime_component_instances,
            num_trampolines: component.trampolines.len().try_into().unwrap(),
            num_unsafe_intrinsics: if let Some(i) = component
                .unsafe_intrinsics
                .iter()
                .rposition(|x| x.is_some())
            {
                // Note: We do not currently have an indirection between "the
                // `i`th unsafe intrinsic in the vmctx" and
                // `UnsafeIntrinsic::from_u32(i)`, so if we are compiling in
                // *any* intrinsics, we need to include space for all of them
                // up to the max `i` that is used.
                //
                // We _could_ introduce such an indirection via a map in
                // `Component` like `PrimaryMap<UnsafeIntrinsicIndex,
                // UnsafeIntrinsic>`, and that would allow us to densely pack
                // intrinsics in the vmctx. However we do not do that today
                // because there are very few unsafe intrinsics, and we do not
                // see that changing anytime soon, so we aren't wasting much
                // space.
                u32::try_from(i + 1).unwrap()
            } else {
                0
            },
            num_resources: component.num_resources,
            magic: 0,
            builtins: 0,
            vm_store_context: 0,
            flags: 0,
            task_may_block: 0,
            trampoline_func_refs: 0,
            intrinsic_func_refs: 0,
            lowerings: 0,
            memories: 0,
            tables: 0,
            reallocs: 0,
            callbacks: 0,
            post_returns: 0,
            resource_destructors: 0,
            size: 0,
        };

        // Convenience function for checked multiplication. As a side effect
        // this reduces binary size by using only a single `#[track_caller]`
        // location for the function instead of one for each individual
        // invocation.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        let mut next_field_offset = 0;

        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = next_field_offset.checked_add(u32::from($size)).unwrap();
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }
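
        // For example, a single `size(...)` arm such as
        // `size(builtins) = ret.ptr.size()` expands to roughly:
        //
        //     ret.builtins = next_field_offset;
        //     next_field_offset = next_field_offset
        //         .checked_add(u32::from(ret.ptr.size()))
        //         .unwrap();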

        fields! {
            size(magic) = 4u32,
            align(u32::from(ret.ptr.size())),
            size(builtins) = ret.ptr.size(),
            size(vm_store_context) = ret.ptr.size(),
            align(16),
            size(flags) = cmul(ret.num_runtime_component_instances, ret.ptr.size_of_vmglobal_definition()),
            size(task_may_block) = ret.ptr.size_of_vmglobal_definition(),
            align(u32::from(ret.ptr.size())),
            size(trampoline_func_refs) = cmul(ret.num_trampolines, ret.ptr.size_of_vm_func_ref()),
            size(intrinsic_func_refs) = cmul(ret.num_unsafe_intrinsics, ret.ptr.size_of_vm_func_ref()),
            size(lowerings) = cmul(ret.num_lowerings, ret.ptr.size() * 2),
            size(memories) = cmul(ret.num_runtime_memories, ret.ptr.size()),
            size(tables) = cmul(ret.num_runtime_tables, ret.size_of_vmtable_import()),
            size(reallocs) = cmul(ret.num_runtime_reallocs, ret.ptr.size()),
            size(callbacks) = cmul(ret.num_runtime_callbacks, ret.ptr.size()),
            size(post_returns) = cmul(ret.num_runtime_post_returns, ret.ptr.size()),
            size(resource_destructors) = cmul(ret.num_resources, ret.ptr.size()),
        }
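
        // A worked example of the offsets computed above, assuming an 8-byte
        // pointer, a 16-byte `VMGlobalDefinition`, one runtime component
        // instance, and zero of every other item:
        //
        //     magic            at offset  0 (4 bytes)
        //     builtins         at offset  8 (after aligning to 8)
        //     vm_store_context at offset 16
        //     flags            at offset 32 (after aligning to 16), 16 bytes
        //     task_may_block   at offset 48, 16 bytes
        //     all empty arrays at offset 64, so `size` is 64 bytes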

        ret.size = next_field_offset;

        // This is required by the implementation of
        // `VMComponentContext::from_opaque`. If this value changes then this
        // location needs to be updated.
        assert_eq!(ret.magic, 0);

        return ret;
    }

    /// The size, in bytes, of the host pointer.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// The offset of the `magic` field.
    #[inline]
    pub fn magic(&self) -> u32 {
        self.magic
    }

    /// The offset of the `builtins` field.
    #[inline]
    pub fn builtins(&self) -> u32 {
        self.builtins
    }

    /// The offset of the `flags` field.
    #[inline]
    pub fn instance_flags(&self, index: RuntimeComponentInstanceIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_component_instances);
        self.flags + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// The offset of the `task_may_block` field.
    pub fn task_may_block(&self) -> u32 {
        self.task_may_block
    }

    /// The offset of the `vm_store_context` field.
    #[inline]
    pub fn vm_store_context(&self) -> u32 {
        self.vm_store_context
    }

    /// The offset of the `trampoline_func_refs` field.
    #[inline]
    pub fn trampoline_func_refs(&self) -> u32 {
        self.trampoline_func_refs
    }

    /// The offset of the `VMFuncRef` for the `index` specified.
    #[inline]
    pub fn trampoline_func_ref(&self, index: TrampolineIndex) -> u32 {
        assert!(index.as_u32() < self.num_trampolines);
        self.trampoline_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }
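
    // Note: this accessor and the per-index accessors below all follow the
    // same pattern: assert the index is in bounds, then return
    // `base_of_array + index * size_of_element`.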

    /// The offset of the `unsafe_intrinsics` field.
    #[inline]
    pub fn unsafe_intrinsic_func_refs(&self) -> u32 {
        self.intrinsic_func_refs
    }

    /// The offset of the `VMFuncRef` for the `intrinsic` specified.
    #[inline]
    pub fn unsafe_intrinsic_func_ref(&self, intrinsic: UnsafeIntrinsic) -> u32 {
        assert!(intrinsic.index() < self.num_unsafe_intrinsics);
        self.unsafe_intrinsic_func_refs()
            + intrinsic.index() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the `lowerings` field.
    #[inline]
    pub fn lowerings(&self) -> u32 {
        self.lowerings
    }

    /// The offset of the `VMLowering` for the `index` specified.
    #[inline]
    pub fn lowering(&self, index: LoweredIndex) -> u32 {
        assert!(index.as_u32() < self.num_lowerings);
        self.lowerings() + index.as_u32() * u32::from(2 * self.ptr.size())
    }

    /// The offset of the `callee` for the `index` specified.
    #[inline]
    pub fn lowering_callee(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_callee_offset()
    }

    /// The offset of the `data` for the `index` specified.
    #[inline]
    pub fn lowering_data(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_data_offset()
    }

    /// The size of the `VMLowering` type.
    #[inline]
    pub fn lowering_size(&self) -> u8 {
        2 * self.ptr.size()
    }

    /// The offset of the `callee` field within the `VMLowering` type.
    #[inline]
    pub fn lowering_callee_offset(&self) -> u32 {
        0
    }

    /// The offset of the `data` field within the `VMLowering` type.
    #[inline]
    pub fn lowering_data_offset(&self) -> u32 {
        u32::from(self.ptr.size())
    }
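
    // The `VMLowering` layout implied by the three accessors above, shown
    // for an 8-byte pointer size:
    //
    //     offset 0: `callee` (a pointer)
    //     offset 8: `data`   (a pointer)
    //     total:    16 bytes (`lowering_size`)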

    /// The offset of the base of the `runtime_memories` field.
    #[inline]
    pub fn runtime_memories(&self) -> u32 {
        self.memories
    }

    /// The offset of the `*mut VMMemoryDefinition` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_memory(&self, index: RuntimeMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_memories);
        self.runtime_memories() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_tables` field.
    #[inline]
    pub fn runtime_tables(&self) -> u32 {
        self.tables
    }

    /// The offset of the table for the runtime index provided.
    #[inline]
    pub fn runtime_table(&self, index: RuntimeTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_tables);
        self.runtime_tables() + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// Return the size of `VMTableImport`, used here to hold the pointers to
    /// the `VMTableDefinition` and the owning `VMContext`, plus the table's
    /// index within that instance (padded to pointer size).
    #[inline]
    pub fn size_of_vmtable_import(&self) -> u8 {
        3 * self.pointer_size()
    }

    /// The offset of the base of the `runtime_reallocs` field.
    #[inline]
    pub fn runtime_reallocs(&self) -> u32 {
        self.reallocs
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_realloc(&self, index: RuntimeReallocIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_reallocs);
        self.runtime_reallocs() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_callbacks` field.
    #[inline]
    pub fn runtime_callbacks(&self) -> u32 {
        self.callbacks
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_callback(&self, index: RuntimeCallbackIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_callbacks);
        self.runtime_callbacks() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_post_returns` field.
    #[inline]
    pub fn runtime_post_returns(&self) -> u32 {
        self.post_returns
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_post_return(&self, index: RuntimePostReturnIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_post_returns);
        self.runtime_post_returns() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `resource_destructors` field.
    #[inline]
    pub fn resource_destructors(&self) -> u32 {
        self.resource_destructors
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn resource_destructor(&self, index: ResourceIndex) -> u32 {
        assert!(index.as_u32() < self.num_resources);
        self.resource_destructors() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Return the size of the `VMComponentContext` allocation.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }
}
394}