wasmtime_environ/component/vmcomponent_offsets.rs

// Currently the `VMComponentContext` allocation by field looks like this:
//
// struct VMComponentContext {
//      magic: u32,
//      builtins: &'static VMComponentBuiltins,
//      vm_store_context: *mut VMStoreContext,
//      flags: [VMGlobalDefinition; component.num_runtime_component_instances],
//      trampoline_func_refs: [VMFuncRef; component.num_trampolines],
//      unsafe_intrinsics: [VMFuncRef; component.num_unsafe_intrinsics],
//      lowerings: [VMLowering; component.num_lowerings],
//      memories: [*mut VMMemoryDefinition; component.num_runtime_memories],
//      tables: [VMTableImport; component.num_runtime_tables],
//      reallocs: [*mut VMFuncRef; component.num_runtime_reallocs],
//      callbacks: [*mut VMFuncRef; component.num_runtime_callbacks],
//      post_returns: [*mut VMFuncRef; component.num_runtime_post_returns],
//      resource_destructors: [*mut VMFuncRef; component.num_resources],
// }

use crate::PtrSize;
use crate::component::*;

/// Equivalent of `VMCONTEXT_MAGIC` except for components.
///
/// This is stored at the start of all `VMComponentContext` structures and
/// double-checked on `VMComponentContext::from_opaque`.
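///
/// Its value is the bytes `b"comp"` interpreted as a little-endian
/// integer, i.e. `0x706d_6f63`.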
pub const VMCOMPONENT_MAGIC: u32 = u32::from_le_bytes(*b"comp");

/// Flag for the `VMComponentContext::flags` field which corresponds to the
/// canonical ABI flag `may_leave`
pub const FLAG_MAY_LEAVE: i32 = 1 << 0;

/// Flag for the `VMComponentContext::flags` field which corresponds to the
/// canonical ABI flag `may_enter`
pub const FLAG_MAY_ENTER: i32 = 1 << 1;

/// Flag for the `VMComponentContext::flags` field which is set whenever a
/// function is called to indicate that `post_return` must be called next.
pub const FLAG_NEEDS_POST_RETURN: i32 = 1 << 2;
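
// Note that these flags are or'd together into the `VMGlobalDefinition` that
// each runtime component instance owns in the `flags` array (see
// `instance_flags` below). For example a flags value of
// `FLAG_MAY_LEAVE | FLAG_MAY_ENTER` (0b011) describes an instance that may
// currently be entered and left, with no `post_return` pending.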

/// Runtime offsets within a `VMComponentContext` for a specific component.
#[derive(Debug, Clone, Copy)]
pub struct VMComponentOffsets<P> {
    /// The host pointer size
    pub ptr: P,

    /// The number of lowered functions this component will be creating.
    pub num_lowerings: u32,
    /// The number of memories which are recorded in this component for options.
    pub num_runtime_memories: u32,
    /// The number of tables which are recorded in this component for options.
    pub num_runtime_tables: u32,
    /// The number of reallocs which are recorded in this component for options.
    pub num_runtime_reallocs: u32,
    /// The number of callbacks which are recorded in this component for options.
    pub num_runtime_callbacks: u32,
    /// The number of post-returns which are recorded in this component for options.
    pub num_runtime_post_returns: u32,
    /// Number of component instances internally in the component (always at
    /// least 1).
    pub num_runtime_component_instances: u32,
    /// Number of cranelift-compiled trampolines required for this component.
    pub num_trampolines: u32,
    /// Number of `VMFuncRef`s for unsafe intrinsics within this component's
    /// context.
    pub num_unsafe_intrinsics: u32,
    /// Number of resources within a component which need destructors stored.
    pub num_resources: u32,

    // precalculated offsets of various member fields
    magic: u32,
    builtins: u32,
    vm_store_context: u32,
    flags: u32,
    trampoline_func_refs: u32,
    intrinsic_func_refs: u32,
    lowerings: u32,
    memories: u32,
    tables: u32,
    reallocs: u32,
    callbacks: u32,
    post_returns: u32,
    resource_destructors: u32,
    size: u32,
}

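/// Rounds `offset` up to the nearest multiple of `align`, which must be a
/// power of two; e.g. `align(9, 8)` and `align(16, 8)` both return 16.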
#[inline]
fn align(offset: u32, align: u32) -> u32 {
    assert!(align.is_power_of_two());
    (offset + (align - 1)) & !(align - 1)
}

impl<P: PtrSize> VMComponentOffsets<P> {
    /// Creates a new set of offsets for the `component` specified,
    /// configured for the `ptr` size given.
    pub fn new(ptr: P, component: &Component) -> Self {
        let mut ret = Self {
            ptr,
            num_lowerings: component.num_lowerings,
            num_runtime_memories: component.num_runtime_memories,
            num_runtime_tables: component.num_runtime_tables,
            num_runtime_reallocs: component.num_runtime_reallocs,
            num_runtime_callbacks: component.num_runtime_callbacks,
            num_runtime_post_returns: component.num_runtime_post_returns,
            num_runtime_component_instances: component.num_runtime_component_instances,
            num_trampolines: component.trampolines.len().try_into().unwrap(),
            num_unsafe_intrinsics: if let Some(i) = component
                .unsafe_intrinsics
                .iter()
                .rposition(|x| x.is_some())
            {
                // Note: We do not currently have an indirection between "the
                // `i`th unsafe intrinsic in the vmctx" and
                // `UnsafeIntrinsic::from_u32(i)`, so if we are compiling in
                // *any* intrinsics, we need to include space for all of them
                // up to the max `i` that is used.
                //
                // We _could_ introduce such an indirection via a map in
                // `Component` like `PrimaryMap<UnsafeIntrinsicIndex,
                // UnsafeIntrinsic>`, and that would allow us to densely pack
                // intrinsics in the vmctx. However we do not do that today
                // because there are very few unsafe intrinsics, and we do not
                // see that changing anytime soon, so we aren't wasting much
                // space.
                u32::try_from(i + 1).unwrap()
            } else {
                0
            },
            num_resources: component.num_resources,
            magic: 0,
            builtins: 0,
            vm_store_context: 0,
            flags: 0,
            trampoline_func_refs: 0,
            intrinsic_func_refs: 0,
            lowerings: 0,
            memories: 0,
            tables: 0,
            reallocs: 0,
            callbacks: 0,
            post_returns: 0,
            resource_destructors: 0,
            size: 0,
        };

        // Convenience function for checked multiplication. As a side effect
        // this reduces binary size by using only a single `#[track_caller]`
        // location for the function instead of one for each individual
        // invocation.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        let mut next_field_offset = 0;

        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = next_field_offset.checked_add(u32::from($size)).unwrap();
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

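        // For illustration, the first few entries of the `fields!` invocation
        // below expand to roughly:
        //
        //     ret.magic = next_field_offset;
        //     next_field_offset = next_field_offset.checked_add(u32::from(4u32)).unwrap();
        //     next_field_offset = align(next_field_offset, u32::from(ret.ptr.size()));
        //     ret.builtins = next_field_offset;
        //     ...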
        fields! {
            size(magic) = 4u32,
            align(u32::from(ret.ptr.size())),
            size(builtins) = ret.ptr.size(),
            size(vm_store_context) = ret.ptr.size(),
            align(16),
            size(flags) = cmul(ret.num_runtime_component_instances, ret.ptr.size_of_vmglobal_definition()),
            align(u32::from(ret.ptr.size())),
            size(trampoline_func_refs) = cmul(ret.num_trampolines, ret.ptr.size_of_vm_func_ref()),
            size(intrinsic_func_refs) = cmul(ret.num_unsafe_intrinsics, ret.ptr.size_of_vm_func_ref()),
            size(lowerings) = cmul(ret.num_lowerings, ret.ptr.size() * 2),
            size(memories) = cmul(ret.num_runtime_memories, ret.ptr.size()),
            size(tables) = cmul(ret.num_runtime_tables, ret.size_of_vmtable_import()),
            size(reallocs) = cmul(ret.num_runtime_reallocs, ret.ptr.size()),
            size(callbacks) = cmul(ret.num_runtime_callbacks, ret.ptr.size()),
            size(post_returns) = cmul(ret.num_runtime_post_returns, ret.ptr.size()),
            size(resource_destructors) = cmul(ret.num_resources, ret.ptr.size()),
        }

        ret.size = next_field_offset;
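
        // As a worked example: on a 64-bit host (`ptr.size() == 8`) with one
        // runtime component instance and nothing else, `magic` lands at 0,
        // `builtins` at 8, `vm_store_context` at 16, `flags` at 32 (due to
        // the 16-byte alignment), and `size` comes out to 48, assuming a
        // 16-byte `VMGlobalDefinition`.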

        // This is required by the implementation of
        // `VMComponentContext::from_opaque`. If this value changes then this
        // location needs to be updated.
        assert_eq!(ret.magic, 0);

        return ret;
    }
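
    // A minimal usage sketch, assuming this crate's `HostPtr` (the
    // host-pointer-size impl of `PtrSize`) and an already-translated
    // `component`:
    //
    //     let offsets = VMComponentOffsets::new(HostPtr, &component);
    //     let vmctx_size = offsets.size_of_vmctx();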

    /// The size, in bytes, of the host pointer.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// The offset of the `magic` field.
    #[inline]
    pub fn magic(&self) -> u32 {
        self.magic
    }

    /// The offset of the `builtins` field.
    #[inline]
    pub fn builtins(&self) -> u32 {
        self.builtins
    }

    /// The offset of the `flags` field for the runtime component instance
    /// `index` specified.
    #[inline]
    pub fn instance_flags(&self, index: RuntimeComponentInstanceIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_component_instances);
        self.flags + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// The offset of the `vm_store_context` field.
    #[inline]
    pub fn vm_store_context(&self) -> u32 {
        self.vm_store_context
    }

    /// The offset of the `trampoline_func_refs` field.
    #[inline]
    pub fn trampoline_func_refs(&self) -> u32 {
        self.trampoline_func_refs
    }

    /// The offset of the `VMFuncRef` for the `index` specified.
    #[inline]
    pub fn trampoline_func_ref(&self, index: TrampolineIndex) -> u32 {
        assert!(index.as_u32() < self.num_trampolines);
        self.trampoline_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the `unsafe_intrinsic_func_refs` field.
    #[inline]
    pub fn unsafe_intrinsic_func_refs(&self) -> u32 {
        self.intrinsic_func_refs
    }

    /// The offset of the `VMFuncRef` for the `intrinsic` specified.
    #[inline]
    pub fn unsafe_intrinsic_func_ref(&self, intrinsic: UnsafeIntrinsic) -> u32 {
        assert!(intrinsic.index() < self.num_unsafe_intrinsics);
        self.unsafe_intrinsic_func_refs()
            + intrinsic.index() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the `lowerings` field.
    #[inline]
    pub fn lowerings(&self) -> u32 {
        self.lowerings
    }

    /// The offset of the `VMLowering` for the `index` specified.
    #[inline]
    pub fn lowering(&self, index: LoweredIndex) -> u32 {
        assert!(index.as_u32() < self.num_lowerings);
        self.lowerings() + index.as_u32() * u32::from(2 * self.ptr.size())
    }

    /// The offset of the `callee` for the `index` specified.
    #[inline]
    pub fn lowering_callee(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_callee_offset()
    }

    /// The offset of the `data` for the `index` specified.
    #[inline]
    pub fn lowering_data(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_data_offset()
    }

    /// The size of the `VMLowering` type
    #[inline]
    pub fn lowering_size(&self) -> u8 {
        2 * self.ptr.size()
    }

    /// The offset of the `callee` field within the `VMLowering` type.
    #[inline]
    pub fn lowering_callee_offset(&self) -> u32 {
        0
    }

    /// The offset of the `data` field within the `VMLowering` type.
    #[inline]
    pub fn lowering_data_offset(&self) -> u32 {
        u32::from(self.ptr.size())
    }
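
    // A sketch of the `VMLowering` layout the accessors above assume: two
    // pointer-sized fields back-to-back (field types illustrative):
    //
    //     #[repr(C)]
    //     struct VMLowering {
    //         callee: *mut u8, // at `lowering_callee_offset()`, i.e. 0
    //         data: *mut u8,   // at `lowering_data_offset()`, i.e. `ptr.size()`
    //     }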

    /// The offset of the base of the `runtime_memories` field
    #[inline]
    pub fn runtime_memories(&self) -> u32 {
        self.memories
    }

    /// The offset of the `*mut VMMemoryDefinition` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_memory(&self, index: RuntimeMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_memories);
        self.runtime_memories() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_tables` field
    #[inline]
    pub fn runtime_tables(&self) -> u32 {
        self.tables
    }

    /// The offset of the table for the runtime index provided.
    #[inline]
    pub fn runtime_table(&self, index: RuntimeTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_tables);
        self.runtime_tables() + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// Return the size of `VMTableImport`, used here to hold the pointers to
    /// the `VMTableDefinition` and `VMContext` plus one additional
    /// pointer-sized field.
    #[inline]
    pub fn size_of_vmtable_import(&self) -> u8 {
        3 * self.pointer_size()
    }

    /// The offset of the base of the `runtime_reallocs` field
    #[inline]
    pub fn runtime_reallocs(&self) -> u32 {
        self.reallocs
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_realloc(&self, index: RuntimeReallocIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_reallocs);
        self.runtime_reallocs() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_callbacks` field
    #[inline]
    pub fn runtime_callbacks(&self) -> u32 {
        self.callbacks
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_callback(&self, index: RuntimeCallbackIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_callbacks);
        self.runtime_callbacks() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_post_returns` field
    #[inline]
    pub fn runtime_post_returns(&self) -> u32 {
        self.post_returns
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_post_return(&self, index: RuntimePostReturnIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_post_returns);
        self.runtime_post_returns() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `resource_destructors` field
    #[inline]
    pub fn resource_destructors(&self) -> u32 {
        self.resource_destructors
    }

    /// The offset of the `*mut VMFuncRef` for the resource index
    /// provided.
    #[inline]
    pub fn resource_destructor(&self, index: ResourceIndex) -> u32 {
        assert!(index.as_u32() < self.num_resources);
        self.resource_destructors() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Return the size of the `VMComponentContext` allocation.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }
}