wasmtime_environ/component/vmcomponent_offsets.rs

// Currently the `VMComponentContext` allocation by field looks like this:
//
// struct VMComponentContext {
//      magic: u32,
//      builtins: &'static VMComponentBuiltins,
//      vm_store_context: *const VMStoreContext,
//      flags: [VMGlobalDefinition; component.num_runtime_component_instances],
//      task_may_block: VMGlobalDefinition,
//      trampoline_func_refs: [VMFuncRef; component.num_trampolines],
//      unsafe_intrinsics: [VMFuncRef; component.num_unsafe_intrinsics],
//      lowerings: [VMLowering; component.num_lowerings],
//      memories: [*mut VMMemoryDefinition; component.num_runtime_memories],
//      tables: [VMTableImport; component.num_runtime_tables],
//      reallocs: [*mut VMFuncRef; component.num_runtime_reallocs],
//      callbacks: [*mut VMFuncRef; component.num_runtime_callbacks],
//      post_returns: [*mut VMFuncRef; component.num_runtime_post_returns],
//      resource_destructors: [*mut VMFuncRef; component.num_resources],
// }

use crate::PtrSize;
use crate::component::*;

/// Equivalent of `VMCONTEXT_MAGIC` except for components.
///
/// This is stored at the start of all `VMComponentContext` structures and
/// double-checked on `VMComponentContext::from_opaque`.
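///
/// Interpreting the bytes of `b"comp"` in little-endian order, this constant
/// has the numeric value `0x706d_6f63`.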
pub const VMCOMPONENT_MAGIC: u32 = u32::from_le_bytes(*b"comp");

/// Flag for the `VMComponentContext::flags` field which corresponds to the
/// canonical ABI flag `may_leave`
pub const FLAG_MAY_LEAVE: i32 = 1 << 0;

/// Flag for the `VMComponentContext::flags` field which is set whenever a
/// function is called to indicate that `post_return` must be called next.
pub const FLAG_NEEDS_POST_RETURN: i32 = 1 << 2;
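
// Note (an illustrative aside, not upstream text): each runtime component
// instance owns one such flags word, stored as a `VMGlobalDefinition` in the
// `flags` array of the `VMComponentContext`, and a flag is tested with a
// bitwise `and`, e.g. `(flags & FLAG_MAY_LEAVE) != 0`.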

/// Runtime offsets within a `VMComponentContext` for a specific component.
#[derive(Debug, Clone, Copy)]
pub struct VMComponentOffsets<P> {
    /// The host pointer size
    pub ptr: P,

    /// The number of lowered functions this component will be creating.
    pub num_lowerings: u32,
    /// The number of memories which are recorded in this component for options.
    pub num_runtime_memories: u32,
    /// The number of tables which are recorded in this component for options.
    pub num_runtime_tables: u32,
    /// The number of reallocs which are recorded in this component for options.
    pub num_runtime_reallocs: u32,
    /// The number of callbacks which are recorded in this component for options.
    pub num_runtime_callbacks: u32,
    /// The number of post-returns which are recorded in this component for options.
    pub num_runtime_post_returns: u32,
    /// Number of component instances internally in the component (always at
    /// least 1).
    pub num_runtime_component_instances: u32,
    /// Number of cranelift-compiled trampolines required for this component.
    pub num_trampolines: u32,
    /// Number of `VMFuncRef`s for unsafe intrinsics within this component's
    /// context.
    pub num_unsafe_intrinsics: u32,
    /// Number of resources within a component which need destructors stored.
    pub num_resources: u32,

    // precalculated offsets of various member fields
    magic: u32,
    builtins: u32,
    vm_store_context: u32,
    flags: u32,
    task_may_block: u32,
    trampoline_func_refs: u32,
    intrinsic_func_refs: u32,
    lowerings: u32,
    memories: u32,
    tables: u32,
    reallocs: u32,
    callbacks: u32,
    post_returns: u32,
    resource_destructors: u32,
    size: u32,
}

#[inline]
fn align(offset: u32, align: u32) -> u32 {
    assert!(align.is_power_of_two());
    (offset + (align - 1)) & !(align - 1)
}
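
// A worked illustration of `align` (not part of the upstream source):
// `align(5, 8)` computes `(5 + 7) & !7 == 12 & !7 == 8`, while `align(16, 8)`
// leaves 16 unchanged. Adding `align - 1` pushes any non-multiple past the
// next boundary, and the mask then clears the low bits back down onto it.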

impl<P: PtrSize> VMComponentOffsets<P> {
    /// Creates a new set of offsets for the `component` specified,
    /// additionally configured for the `ptr` size specified.
    pub fn new(ptr: P, component: &Component) -> Self {
        let mut ret = Self {
            ptr,
            num_lowerings: component.num_lowerings,
            num_runtime_memories: component.num_runtime_memories,
            num_runtime_tables: component.num_runtime_tables,
            num_runtime_reallocs: component.num_runtime_reallocs,
            num_runtime_callbacks: component.num_runtime_callbacks,
            num_runtime_post_returns: component.num_runtime_post_returns,
            num_runtime_component_instances: component.num_runtime_component_instances,
            num_trampolines: component.trampolines.len().try_into().unwrap(),
            num_unsafe_intrinsics: if let Some(i) = component
                .unsafe_intrinsics
                .iter()
                .rposition(|x| x.is_some())
            {
                // Note: We do not currently have an indirection between "the
                // `i`th unsafe intrinsic in the vmctx" and
                // `UnsafeIntrinsic::from_u32(i)`, so if we are compiling in
                // *any* intrinsics, we need to include space for all of them
                // up to the max `i` that is used.
                //
                // We _could_ introduce such an indirection via a map in
                // `Component` like `PrimaryMap<UnsafeIntrinsicIndex,
                // UnsafeIntrinsic>`, and that would allow us to densely pack
                // intrinsics in the vmctx. However we do not do that today
                // because there are very few unsafe intrinsics, and we do not
                // see that changing anytime soon, so we aren't wasting much
                // space.
                u32::try_from(i + 1).unwrap()
            } else {
                0
            },
            num_resources: component.num_resources,
            magic: 0,
            builtins: 0,
            vm_store_context: 0,
            flags: 0,
            task_may_block: 0,
            trampoline_func_refs: 0,
            intrinsic_func_refs: 0,
            lowerings: 0,
            memories: 0,
            tables: 0,
            reallocs: 0,
            callbacks: 0,
            post_returns: 0,
            resource_destructors: 0,
            size: 0,
        };

        // Convenience function for checked multiplication. As a side effect
        // this reduces binary size by using only a single `#[track_caller]`
        // location for the function instead of one for each individual
        // invocation.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        let mut next_field_offset = 0;

        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = next_field_offset.checked_add(u32::from($size)).unwrap();
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }
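
        // As an illustration (an expansion sketch, not upstream text), the
        // first `size(...)` arm of the invocation below expands roughly to:
        //
        //     ret.magic = next_field_offset; // next_field_offset == 0 here
        //     next_field_offset = next_field_offset.checked_add(4).unwrap();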

        fields! {
            size(magic) = 4u32,
            align(u32::from(ret.ptr.size())),
            size(builtins) = ret.ptr.size(),
            size(vm_store_context) = ret.ptr.size(),
            align(16),
            size(flags) = cmul(ret.num_runtime_component_instances, ret.ptr.size_of_vmglobal_definition()),
            size(task_may_block) = ret.ptr.size_of_vmglobal_definition(),
            align(u32::from(ret.ptr.size())),
            size(trampoline_func_refs) = cmul(ret.num_trampolines, ret.ptr.size_of_vm_func_ref()),
            size(intrinsic_func_refs) = cmul(ret.num_unsafe_intrinsics, ret.ptr.size_of_vm_func_ref()),
            size(lowerings) = cmul(ret.num_lowerings, ret.ptr.size() * 2),
            size(memories) = cmul(ret.num_runtime_memories, ret.ptr.size()),
            size(tables) = cmul(ret.num_runtime_tables, ret.size_of_vmtable_import()),
            size(reallocs) = cmul(ret.num_runtime_reallocs, ret.ptr.size()),
            size(callbacks) = cmul(ret.num_runtime_callbacks, ret.ptr.size()),
            size(post_returns) = cmul(ret.num_runtime_post_returns, ret.ptr.size()),
            size(resource_destructors) = cmul(ret.num_resources, ret.ptr.size()),
        }

        ret.size = next_field_offset;

        // This is required by the implementation of
        // `VMComponentContext::from_opaque`. If this value changes then this
        // location needs to be updated.
        assert_eq!(ret.magic, 0);

        return ret;
    }

    /// The size, in bytes, of the host pointer.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// The offset of the `magic` field.
    #[inline]
    pub fn magic(&self) -> u32 {
        self.magic
    }

    /// The offset of the `builtins` field.
    #[inline]
    pub fn builtins(&self) -> u32 {
        self.builtins
    }

    /// The offset of the `flags` field.
    #[inline]
    pub fn instance_flags(&self, index: RuntimeComponentInstanceIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_component_instances);
        self.flags + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// The offset of the `task_may_block` field.
    #[inline]
    pub fn task_may_block(&self) -> u32 {
        self.task_may_block
    }

    /// The offset of the `vm_store_context` field.
    #[inline]
    pub fn vm_store_context(&self) -> u32 {
        self.vm_store_context
    }

    /// The offset of the `trampoline_func_refs` field.
    #[inline]
    pub fn trampoline_func_refs(&self) -> u32 {
        self.trampoline_func_refs
    }

    /// The offset of the `VMFuncRef` for the `index` specified.
    #[inline]
    pub fn trampoline_func_ref(&self, index: TrampolineIndex) -> u32 {
        assert!(index.as_u32() < self.num_trampolines);
        self.trampoline_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the `unsafe_intrinsic_func_refs` field.
    #[inline]
    pub fn unsafe_intrinsic_func_refs(&self) -> u32 {
        self.intrinsic_func_refs
    }

    /// The offset of the `VMFuncRef` for the `intrinsic` specified.
    #[inline]
    pub fn unsafe_intrinsic_func_ref(&self, intrinsic: UnsafeIntrinsic) -> u32 {
        assert!(intrinsic.index() < self.num_unsafe_intrinsics);
        self.unsafe_intrinsic_func_refs()
            + intrinsic.index() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the `lowerings` field.
    #[inline]
    pub fn lowerings(&self) -> u32 {
        self.lowerings
    }

    /// The offset of the `VMLowering` for the `index` specified.
    #[inline]
    pub fn lowering(&self, index: LoweredIndex) -> u32 {
        assert!(index.as_u32() < self.num_lowerings);
        self.lowerings() + index.as_u32() * u32::from(2 * self.ptr.size())
    }

    /// The offset of the `callee` for the `index` specified.
    #[inline]
    pub fn lowering_callee(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_callee_offset()
    }

    /// The offset of the `data` for the `index` specified.
    #[inline]
    pub fn lowering_data(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_data_offset()
    }

    /// The size of the `VMLowering` type.
    #[inline]
    pub fn lowering_size(&self) -> u8 {
        2 * self.ptr.size()
    }

    /// The offset of the `callee` field within the `VMLowering` type.
    #[inline]
    pub fn lowering_callee_offset(&self) -> u32 {
        0
    }

    /// The offset of the `data` field within the `VMLowering` type.
    #[inline]
    pub fn lowering_data_offset(&self) -> u32 {
        u32::from(self.ptr.size())
    }
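
    // Conceptually the three accessors above describe a two-pointer layout.
    // As an illustrative sketch only (the runtime's actual `VMLowering`
    // definition lives elsewhere; `*mut u8` stands in for the real field
    // types):
    //
    //     #[repr(C)]
    //     struct VMLowering {
    //         callee: *mut u8, // at `lowering_callee_offset()`, i.e. 0
    //         data: *mut u8,   // at `lowering_data_offset()`, one pointer in
    //     }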

    /// The offset of the base of the `runtime_memories` field
    #[inline]
    pub fn runtime_memories(&self) -> u32 {
        self.memories
    }

    /// The offset of the `*mut VMMemoryDefinition` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_memory(&self, index: RuntimeMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_memories);
        self.runtime_memories() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_tables` field
    #[inline]
    pub fn runtime_tables(&self) -> u32 {
        self.tables
    }

    /// The offset of the table for the runtime index provided.
    #[inline]
    pub fn runtime_table(&self, index: RuntimeTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_tables);
        self.runtime_tables() + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// Return the size of `VMTableImport`, used here to hold the pointers to
    /// the `VMTableDefinition` and `VMContext`.
    #[inline]
    pub fn size_of_vmtable_import(&self) -> u8 {
        3 * self.pointer_size()
    }
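
    // Consistent with `3 * pointer_size` above, each runtime table entry
    // occupies three pointer-sized slots. As an illustrative sketch only (the
    // runtime's actual `VMTableImport` definition lives elsewhere; the field
    // names below are assumptions, not quotations):
    //
    //     #[repr(C)]
    //     struct VMTableImport {
    //         from: *mut VMTableDefinition, // the table's definition
    //         vmctx: *mut VMContext,        // the instance that owns it
    //         index: usize,                 // pointer-size-padded index slot
    //     }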

    /// The offset of the base of the `runtime_reallocs` field
    #[inline]
    pub fn runtime_reallocs(&self) -> u32 {
        self.reallocs
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_realloc(&self, index: RuntimeReallocIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_reallocs);
        self.runtime_reallocs() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_callbacks` field
    #[inline]
    pub fn runtime_callbacks(&self) -> u32 {
        self.callbacks
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_callback(&self, index: RuntimeCallbackIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_callbacks);
        self.runtime_callbacks() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_post_returns` field
    #[inline]
    pub fn runtime_post_returns(&self) -> u32 {
        self.post_returns
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_post_return(&self, index: RuntimePostReturnIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_post_returns);
        self.runtime_post_returns() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `resource_destructors` field
    #[inline]
    pub fn resource_destructors(&self) -> u32 {
        self.resource_destructors
    }

    /// The offset of the `*mut VMFuncRef` for the resource index
    /// provided.
    #[inline]
    pub fn resource_destructor(&self, index: ResourceIndex) -> u32 {
        assert!(index.as_u32() < self.num_resources);
        self.resource_destructors() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Return the size of the `VMComponentContext` allocation.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }
}
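
#[cfg(test)]
mod tests {
    use super::align;

    // A minimal sanity-check sketch for the private `align` helper above;
    // this test module is illustrative and not part of the upstream source.
    #[test]
    fn align_rounds_up_to_the_next_multiple() {
        assert_eq!(align(0, 8), 0); // already-aligned values are unchanged
        assert_eq!(align(1, 8), 8); // everything else rounds up
        assert_eq!(align(5, 8), 8);
        assert_eq!(align(16, 8), 16);
        assert_eq!(align(17, 16), 32);
    }

    #[test]
    #[should_panic]
    fn align_requires_a_power_of_two() {
        // `align` asserts that its alignment argument is a power of two.
        align(10, 3);
    }
}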