1use crate::{
38 DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, DefinedTagIndex, FuncIndex,
39 FuncRefIndex, GlobalIndex, MemoryIndex, Module, OwnedMemoryIndex, TableIndex, TagIndex,
40};
41use cranelift_entity::packed_option::ReservedValue;
42
/// Convert a `usize` into a `u32`, panicking on overflow.
///
/// On 32-bit targets `usize` and `u32` have the same width, so the
/// conversion is lossless and a plain cast suffices.
#[cfg(target_pointer_width = "32")]
fn cast_to_u32(sz: usize) -> u32 {
    // `usize` is exactly 32 bits wide here, so this cast cannot truncate.
    sz as u32
}
/// Convert a `usize` into a `u32`, panicking on overflow.
///
/// On 64-bit targets the value may not fit, so an explicit checked
/// conversion is performed.
#[cfg(target_pointer_width = "64")]
fn cast_to_u32(sz: usize) -> u32 {
    u32::try_from(sz).expect("overflow in cast from usize to u32")
}
51
/// Round `offset` up to the nearest multiple of `width`.
///
/// `width` must be non-zero; an `offset` close to `u32::MAX` may overflow
/// the intermediate addition.
#[inline]
fn align(offset: u32, width: u32) -> u32 {
    // Bump past any partial slot, then truncate down via integer division.
    let bumped = offset + (width - 1);
    let units = bumped / width;
    units * width
}
57
/// Offsets of fields within the `VMContext` structure (and related runtime
/// structures) for a particular module, parameterized over a pointer size
/// `P` so offsets can be computed for a target different from the host.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsets<P> {
    /// The pointer-size representation for the target.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of imported tags in the module.
    pub num_imported_tags: u32,
    /// The number of defined tables in the module.
    pub num_defined_tables: u32,
    /// The number of defined memories in the module.
    pub num_defined_memories: u32,
    /// The number of memories owned by this instance (defined, non-shared).
    pub num_owned_memories: u32,
    /// The number of defined globals in the module.
    pub num_defined_globals: u32,
    /// The number of defined tags in the module.
    pub num_defined_tags: u32,
    /// The number of escaped functions (those needing a `VMFuncRef`).
    pub num_escaped_funcs: u32,

    // Precalculated byte offsets, from the start of the `VMContext`, of the
    // variable-length regions that follow the static vmctx data. Filled in
    // by the `From<VMOffsetsFields>` conversion.
    imported_functions: u32,
    imported_tables: u32,
    imported_memories: u32,
    imported_globals: u32,
    imported_tags: u32,
    defined_tables: u32,
    defined_memories: u32,
    owned_memories: u32,
    defined_globals: u32,
    defined_tags: u32,
    defined_func_refs: u32,
    // Total size, in bytes, of the entire `VMContext` allocation.
    size: u32,
}
102
/// Trait abstracting over the target's pointer size so that structure
/// offsets can be computed either for the host (`HostPtr`) or for an
/// arbitrary width (`u8`).
pub trait PtrSize {
    /// Returns the pointer size, in bytes, of the target.
    fn size(&self) -> u8;

    /// Offset of the store-context pointer within a `VMContext`: the first
    /// pointer-aligned slot after the leading 32-bit magic.
    fn vmcontext_store_context(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the builtin-functions pointer: one pointer past the store
    /// context.
    fn vmcontext_builtin_functions(&self) -> u8 {
        self.vmcontext_store_context() + self.size()
    }

    /// Offset of the `array_call` field within a `VMFuncRef` (field 0).
    #[inline]
    fn vm_func_ref_array_call(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `wasm_call` field within a `VMFuncRef` (field 1).
    #[inline]
    fn vm_func_ref_wasm_call(&self) -> u8 {
        1 * self.size()
    }

    /// Offset of the `type_index` field within a `VMFuncRef` (field 2).
    #[inline]
    fn vm_func_ref_type_index(&self) -> u8 {
        2 * self.size()
    }

    /// Offset of the `vmctx` field within a `VMFuncRef` (field 3).
    #[inline]
    fn vm_func_ref_vmctx(&self) -> u8 {
        3 * self.size()
    }

    /// Total size of a `VMFuncRef`: four pointer-sized fields.
    #[inline]
    fn size_of_vm_func_ref(&self) -> u8 {
        4 * self.size()
    }

    /// Size of a `VMGlobalDefinition`; 16 bytes regardless of pointer size
    /// (large enough for a v128 value).
    #[inline]
    fn size_of_vmglobal_definition(&self) -> u8 {
        16
    }

    /// Size of a `VMTagDefinition`: a single 32-bit value.
    #[inline]
    fn size_of_vmtag_definition(&self) -> u8 {
        4
    }

    /// Offset of the fuel-consumed counter within the store context
    /// (first field, 8 bytes).
    #[inline]
    fn vmstore_context_fuel_consumed(&self) -> u8 {
        0
    }

    /// Offset of the epoch deadline (8 bytes, after fuel).
    #[inline]
    fn vmstore_context_epoch_deadline(&self) -> u8 {
        self.vmstore_context_fuel_consumed() + 8
    }

    /// Offset of the stack limit (pointer-sized, after the epoch deadline).
    #[inline]
    fn vmstore_context_stack_limit(&self) -> u8 {
        self.vmstore_context_epoch_deadline() + 8
    }

    /// Offset of the inline GC-heap `VMMemoryDefinition` within the store
    /// context.
    fn vmstore_context_gc_heap(&self) -> u8 {
        self.vmstore_context_stack_limit() + self.size()
    }

    /// Offset of the GC heap's base pointer (the `base` field of the inline
    /// memory definition).
    fn vmstore_context_gc_heap_base(&self) -> u8 {
        let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_base();
        // Sanity check: must still lie inside the GC-heap sub-structure.
        debug_assert!(offset < self.vmstore_context_last_wasm_exit_fp());
        offset
    }

    /// Offset of the GC heap's current length (the `current_length` field of
    /// the inline memory definition).
    fn vmstore_context_gc_heap_current_length(&self) -> u8 {
        let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_current_length();
        // Sanity check: must still lie inside the GC-heap sub-structure.
        debug_assert!(offset < self.vmstore_context_last_wasm_exit_fp());
        offset
    }

    /// Offset of the last-exit frame pointer, immediately after the inline
    /// GC-heap memory definition.
    fn vmstore_context_last_wasm_exit_fp(&self) -> u8 {
        self.vmstore_context_gc_heap() + self.size_of_vmmemory_definition()
    }

    /// Offset of the last-exit program counter (one pointer later).
    fn vmstore_context_last_wasm_exit_pc(&self) -> u8 {
        self.vmstore_context_last_wasm_exit_fp() + self.size()
    }

    /// Offset of the last-entry frame pointer (one pointer later).
    fn vmstore_context_last_wasm_entry_fp(&self) -> u8 {
        self.vmstore_context_last_wasm_exit_pc() + self.size()
    }

    /// Offset of the stack-chain field (one pointer later).
    fn vmstore_context_stack_chain(&self) -> u8 {
        self.vmstore_context_last_wasm_entry_fp() + self.size()
    }

    /// Offset of the `base` field within a `VMMemoryDefinition` (field 0).
    #[inline]
    fn vmmemory_definition_base(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `current_length` field within a `VMMemoryDefinition`
    /// (field 1).
    #[inline]
    fn vmmemory_definition_current_length(&self) -> u8 {
        1 * self.size()
    }

    /// Size of a `VMMemoryDefinition`: two pointer-sized fields.
    #[inline]
    fn size_of_vmmemory_definition(&self) -> u8 {
        2 * self.size()
    }

    /// Size of a pointer to a `VMMemoryDefinition`.
    #[inline]
    fn size_of_vmmemory_pointer(&self) -> u8 {
        self.size()
    }

    /// Offset of the func-ref field within an array-call host function
    /// context: first pointer-aligned slot after the 32-bit magic.
    fn vmarray_call_host_func_context_func_ref(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Size of a stack-chain value: two pointer-sized words.
    fn size_of_vmstack_chain(&self) -> u8 {
        2 * self.size()
    }

    /// Offset of the stack limit within a stack-limits structure (field 0).
    fn vmstack_limits_stack_limit(&self) -> u8 {
        0
    }

    /// Offset of the last-entry frame pointer within a stack-limits
    /// structure (one pointer in).
    fn vmstack_limits_last_wasm_entry_fp(&self) -> u8 {
        self.size()
    }

    /// Offset of the `length` field within a `VMHostArray` (u32 at 0).
    fn vmhostarray_length(&self) -> u8 {
        0
    }

    /// Offset of the `capacity` field within a `VMHostArray` (u32 at 4).
    fn vmhostarray_capacity(&self) -> u8 {
        4
    }

    /// Offset of the data pointer within a `VMHostArray`.
    fn vmhostarray_data(&self) -> u8 {
        8
    }

    /// Total size of a `VMHostArray`: two u32 fields plus a pointer.
    fn size_of_vmhostarray(&self) -> u8 {
        8 + self.size()
    }

    /// Offset of the limits within common stack information (field 0).
    fn vmcommon_stack_information_limits(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the state field, after the two-pointer limits structure.
    fn vmcommon_stack_information_state(&self) -> u8 {
        2 * self.size()
    }

    /// Offset of the handlers array: 4 bytes past the state, rounded up to
    /// pointer alignment.
    fn vmcommon_stack_information_handlers(&self) -> u8 {
        u8::try_from(align(
            self.vmcommon_stack_information_state() as u32 + 4,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the first-switch-handler index, after the handlers array.
    fn vmcommon_stack_information_first_switch_handler_index(&self) -> u8 {
        self.vmcommon_stack_information_handlers() + self.size_of_vmhostarray()
    }

    /// Total size of the common stack information, padded to pointer
    /// alignment.
    fn size_of_vmcommon_stack_information(&self) -> u8 {
        u8::try_from(align(
            self.vmcommon_stack_information_first_switch_handler_index() as u32 + 4,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the common stack information within a continuation
    /// reference (field 0).
    fn vmcontref_common_stack_information(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the parent chain: after the common stack information,
    /// rounded up to pointer alignment.
    fn vmcontref_parent_chain(&self) -> u8 {
        u8::try_from(align(
            (self.vmcontref_common_stack_information() + self.size_of_vmcommon_stack_information())
                as u32,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the last-ancestor pointer, after the two-word parent chain.
    fn vmcontref_last_ancestor(&self) -> u8 {
        self.vmcontref_parent_chain() + 2 * self.size()
    }

    /// Offset of the 64-bit revision counter.
    fn vmcontref_revision(&self) -> u8 {
        self.vmcontref_last_ancestor() + self.size()
    }

    /// Offset of the stack field, after the 8-byte revision.
    fn vmcontref_stack(&self) -> u8 {
        self.vmcontref_revision() + 8
    }

    /// Offset of the args array, after the three-word stack field.
    fn vmcontref_args(&self) -> u8 {
        self.vmcontref_stack() + 3 * self.size()
    }

    /// Offset of the values array, after the args `VMHostArray`.
    fn vmcontref_values(&self) -> u8 {
        self.vmcontref_args() + self.size_of_vmhostarray()
    }

    /// Offset of the magic value at the very start of a `VMContext`.
    #[inline]
    fn vmctx_magic(&self) -> u8 {
        0
    }

    /// Offset of the store-context pointer within a `VMContext`.
    /// NOTE(review): computed as magic + pointer size, i.e. the magic slot
    /// is padded out to a full pointer.
    #[inline]
    fn vmctx_store_context(&self) -> u8 {
        self.vmctx_magic() + self.size()
    }

    /// Offset of the builtin-functions pointer within a `VMContext`.
    #[inline]
    fn vmctx_builtin_functions(&self) -> u8 {
        self.vmctx_store_context() + self.size()
    }

    /// Offset of the callee pointer within a `VMContext`.
    #[inline]
    fn vmctx_callee(&self) -> u8 {
        self.vmctx_builtin_functions() + self.size()
    }

    /// Offset of the epoch pointer within a `VMContext`.
    #[inline]
    fn vmctx_epoch_ptr(&self) -> u8 {
        self.vmctx_callee() + self.size()
    }

    /// Offset of the GC heap data pointer within a `VMContext`.
    #[inline]
    fn vmctx_gc_heap_data(&self) -> u8 {
        self.vmctx_epoch_ptr() + self.size()
    }

    /// Offset of the type-ids array pointer within a `VMContext`.
    #[inline]
    fn vmctx_type_ids_array(&self) -> u8 {
        self.vmctx_gc_heap_data() + self.size()
    }

    /// Offset, within a `VMContext`, where the per-module dynamic data
    /// (imports, definitions, func refs) begins.
    #[inline]
    fn vmctx_dynamic_data_start(&self) -> u8 {
        self.vmctx_type_ids_array() + self.size()
    }
}
435
/// A `PtrSize` marker type representing the pointer size of the host
/// platform.
///
/// Derives `Debug` for consistency with `VMOffsets` and `VMOffsetsFields`,
/// which both derive it.
#[derive(Debug, Clone, Copy)]
pub struct HostPtr;
439
440impl PtrSize for HostPtr {
441 #[inline]
442 fn size(&self) -> u8 {
443 core::mem::size_of::<usize>() as u8
444 }
445}
446
/// `PtrSize` for an explicit byte width, allowing offsets to be computed
/// for a target whose pointer size differs from the host's.
impl PtrSize for u8 {
    #[inline]
    fn size(&self) -> u8 {
        *self
    }
}
453
/// The input counts used to construct a `VMOffsets`: the public, per-module
/// quantities without any of the derived offsets.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsetsFields<P> {
    /// The pointer-size representation for the target.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of imported tags in the module.
    pub num_imported_tags: u32,
    /// The number of defined tables in the module.
    pub num_defined_tables: u32,
    /// The number of defined memories in the module.
    pub num_defined_memories: u32,
    /// The number of memories owned by this instance (defined, non-shared).
    pub num_owned_memories: u32,
    /// The number of defined globals in the module.
    pub num_defined_globals: u32,
    /// The number of defined tags in the module.
    pub num_defined_tags: u32,
    /// The number of escaped functions (those needing a `VMFuncRef`).
    pub num_escaped_funcs: u32,
}
483
impl<P: PtrSize> VMOffsets<P> {
    /// Return a new `VMOffsets` for the given pointer size and module.
    pub fn new(ptr: P, module: &Module) -> Self {
        // Owned memories are defined, non-shared memories; shared memories
        // are excluded by the filter below.
        let num_owned_memories = module
            .memories
            .iter()
            .skip(module.num_imported_memories)
            .filter(|p| !p.1.shared)
            .count()
            .try_into()
            .unwrap();
        VMOffsets::from(VMOffsetsFields {
            ptr,
            num_imported_functions: cast_to_u32(module.num_imported_funcs),
            num_imported_tables: cast_to_u32(module.num_imported_tables),
            num_imported_memories: cast_to_u32(module.num_imported_memories),
            num_imported_globals: cast_to_u32(module.num_imported_globals),
            num_imported_tags: cast_to_u32(module.num_imported_tags),
            num_defined_tables: cast_to_u32(module.num_defined_tables()),
            num_defined_memories: cast_to_u32(module.num_defined_memories()),
            num_owned_memories,
            num_defined_globals: cast_to_u32(module.globals.len() - module.num_imported_globals),
            num_defined_tags: cast_to_u32(module.tags.len() - module.num_imported_tags),
            num_escaped_funcs: cast_to_u32(module.num_escaped_funcs),
        })
    }

    /// Returns the size, in bytes, of the target pointer type.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// Returns an iterator of `(description, byte size)` pairs, one per
    /// contiguous region of the `VMContext`, useful for diagnostics.
    pub fn region_sizes(&self) -> impl Iterator<Item = (&str, u32)> {
        macro_rules! calculate_sizes {
            ($($name:ident: $desc:tt,)*) => {{
                let VMOffsets {
                    // Metadata fields: counts, not offsets of regions.
                    ptr: _,
                    num_imported_functions: _,
                    num_imported_tables: _,
                    num_imported_memories: _,
                    num_imported_globals: _,
                    num_imported_tags: _,
                    num_defined_tables: _,
                    num_defined_globals: _,
                    num_defined_memories: _,
                    num_defined_tags: _,
                    num_owned_memories: _,
                    num_escaped_funcs: _,

                    // Total size of the whole `VMContext` allocation.
                    size,

                    // Region start offsets, listed by the caller in reverse
                    // order of their position within the structure.
                    $($name,)*
                } = *self;

                // Walk backwards from the end: each region's size is the
                // distance from its start to the start of the previously
                // visited (higher) region.
                let mut last = size;
                $(
                    assert!($name <= last);
                    let tmp = $name;
                    let $name = last - $name;
                    last = tmp;
                )*
                // Whatever precedes the first dynamic region is the fixed
                // static portion of the `VMContext`.
                assert_ne!(last, 0);
                IntoIterator::into_iter([
                    $(($desc, $name),)*
                    ("static vmctx data", last),
                ])
            }};
        }

        // NOTE: this list must stay in reverse order of the layout built in
        // the `From<VMOffsetsFields>` conversion below.
        calculate_sizes! {
            defined_func_refs: "module functions",
            defined_tags: "defined tags",
            defined_globals: "defined globals",
            defined_tables: "defined tables",
            imported_tags: "imported tags",
            imported_globals: "imported globals",
            imported_tables: "imported tables",
            imported_functions: "imported functions",
            owned_memories: "owned memories",
            defined_memories: "defined memories",
            imported_memories: "imported memories",
        }
    }
}
581
impl<P: PtrSize> From<VMOffsetsFields<P>> for VMOffsets<P> {
    /// Compute all region offsets from the raw counts: regions are laid out
    /// one after another, starting at the end of the static vmctx data.
    fn from(fields: VMOffsetsFields<P>) -> VMOffsets<P> {
        // Start with all derived offsets zeroed; they are filled in by the
        // `fields!` macro invocation below.
        let mut ret = Self {
            ptr: fields.ptr,
            num_imported_functions: fields.num_imported_functions,
            num_imported_tables: fields.num_imported_tables,
            num_imported_memories: fields.num_imported_memories,
            num_imported_globals: fields.num_imported_globals,
            num_imported_tags: fields.num_imported_tags,
            num_defined_tables: fields.num_defined_tables,
            num_defined_memories: fields.num_defined_memories,
            num_owned_memories: fields.num_owned_memories,
            num_defined_globals: fields.num_defined_globals,
            num_defined_tags: fields.num_defined_tags,
            num_escaped_funcs: fields.num_escaped_funcs,
            imported_functions: 0,
            imported_tables: 0,
            imported_memories: 0,
            imported_globals: 0,
            imported_tags: 0,
            defined_tables: 0,
            defined_memories: 0,
            owned_memories: 0,
            defined_globals: 0,
            defined_tags: 0,
            defined_func_refs: 0,
            size: 0,
        };

        // Checked add: offsets must never wrap a `u32`.
        #[inline]
        fn cadd(count: u32, size: u32) -> u32 {
            count.checked_add(size).unwrap()
        }

        // Checked multiply of an element count by an element size.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        // Dynamic regions begin right after the fixed vmctx fields.
        let mut next_field_offset = u32::from(ret.ptr.vmctx_dynamic_data_start());

        // `size(field) = expr` records the current offset as the start of
        // `field` and advances by `expr` bytes; `align(n)` rounds the
        // running offset up to an `n`-byte boundary.
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = cadd(next_field_offset, u32::from($size));
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        fields! {
            size(imported_memories)
                = cmul(ret.num_imported_memories, ret.size_of_vmmemory_import()),
            size(defined_memories)
                = cmul(ret.num_defined_memories, ret.ptr.size_of_vmmemory_pointer()),
            size(owned_memories)
                = cmul(ret.num_owned_memories, ret.ptr.size_of_vmmemory_definition()),
            size(imported_functions)
                = cmul(ret.num_imported_functions, ret.size_of_vmfunction_import()),
            size(imported_tables)
                = cmul(ret.num_imported_tables, ret.size_of_vmtable_import()),
            size(imported_globals)
                = cmul(ret.num_imported_globals, ret.size_of_vmglobal_import()),
            size(imported_tags)
                = cmul(ret.num_imported_tags, ret.size_of_vmtag_import()),
            size(defined_tables)
                = cmul(ret.num_defined_tables, ret.size_of_vmtable_definition()),
            // Global definitions are 16 bytes each (see
            // `size_of_vmglobal_definition`), so align to 16 first.
            align(16),
            size(defined_globals)
                = cmul(ret.num_defined_globals, ret.ptr.size_of_vmglobal_definition()),
            size(defined_tags)
                = cmul(ret.num_defined_tags, ret.ptr.size_of_vmtag_definition()),
            size(defined_func_refs) = cmul(
                ret.num_escaped_funcs,
                ret.ptr.size_of_vm_func_ref(),
            ),
        }

        // The final running offset is the total size of the `VMContext`.
        ret.size = next_field_offset;

        return ret;
    }
}
673
674impl<P: PtrSize> VMOffsets<P> {
675 #[inline]
677 pub fn vmfunction_import_wasm_call(&self) -> u8 {
678 0 * self.pointer_size()
679 }
680
681 #[inline]
683 pub fn vmfunction_import_array_call(&self) -> u8 {
684 1 * self.pointer_size()
685 }
686
687 #[inline]
689 pub fn vmfunction_import_vmctx(&self) -> u8 {
690 2 * self.pointer_size()
691 }
692
693 #[inline]
695 pub fn size_of_vmfunction_import(&self) -> u8 {
696 3 * self.pointer_size()
697 }
698}
699
700impl<P: PtrSize> VMOffsets<P> {
702 pub fn size_of_vmfunction_body_ptr(&self) -> u8 {
704 1 * self.pointer_size()
705 }
706}
707
708impl<P: PtrSize> VMOffsets<P> {
710 #[inline]
712 pub fn vmtable_import_from(&self) -> u8 {
713 0 * self.pointer_size()
714 }
715
716 #[inline]
718 pub fn size_of_vmtable_import(&self) -> u8 {
719 3 * self.pointer_size()
720 }
721}
722
723impl<P: PtrSize> VMOffsets<P> {
725 #[inline]
727 pub fn vmtable_definition_base(&self) -> u8 {
728 0 * self.pointer_size()
729 }
730
731 pub fn vmtable_definition_current_elements(&self) -> u8 {
733 1 * self.pointer_size()
734 }
735
736 #[inline]
738 pub fn size_of_vmtable_definition_current_elements(&self) -> u8 {
739 self.pointer_size()
740 }
741
742 #[inline]
744 pub fn size_of_vmtable_definition(&self) -> u8 {
745 2 * self.pointer_size()
746 }
747}
748
749impl<P: PtrSize> VMOffsets<P> {
751 #[inline]
753 pub fn vmmemory_import_from(&self) -> u8 {
754 0 * self.pointer_size()
755 }
756
757 #[inline]
759 pub fn size_of_vmmemory_import(&self) -> u8 {
760 3 * self.pointer_size()
761 }
762}
763
764impl<P: PtrSize> VMOffsets<P> {
766 #[inline]
768 pub fn vmglobal_import_from(&self) -> u8 {
769 0 * self.pointer_size()
770 }
771
772 #[inline]
774 pub fn size_of_vmglobal_import(&self) -> u8 {
775 2 * self.pointer_size() + 8
777 }
778}
779
/// Offsets for shared type indices.
impl<P: PtrSize> VMOffsets<P> {
    /// Return the size of a shared type index: a 32-bit value.
    #[inline]
    pub fn size_of_vmshared_type_index(&self) -> u8 {
        4
    }
}
788
789impl<P: PtrSize> VMOffsets<P> {
791 #[inline]
793 pub fn vmtag_import_from(&self) -> u8 {
794 0 * self.pointer_size()
795 }
796
797 #[inline]
799 pub fn size_of_vmtag_import(&self) -> u8 {
800 3 * self.pointer_size()
801 }
802}
803
/// Offsets into a `VMContext` for this module's dynamic regions and the
/// individual entries within them. Indexed accessors assert that the index
/// is within the module's declared counts.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset of the imported-functions array.
    #[inline]
    pub fn vmctx_imported_functions_begin(&self) -> u32 {
        self.imported_functions
    }

    /// The offset of the imported-tables array.
    #[inline]
    pub fn vmctx_imported_tables_begin(&self) -> u32 {
        self.imported_tables
    }

    /// The offset of the imported-memories array.
    #[inline]
    pub fn vmctx_imported_memories_begin(&self) -> u32 {
        self.imported_memories
    }

    /// The offset of the imported-globals array.
    #[inline]
    pub fn vmctx_imported_globals_begin(&self) -> u32 {
        self.imported_globals
    }

    /// The offset of the imported-tags array.
    #[inline]
    pub fn vmctx_imported_tags_begin(&self) -> u32 {
        self.imported_tags
    }

    /// The offset of the defined-tables array.
    #[inline]
    pub fn vmctx_tables_begin(&self) -> u32 {
        self.defined_tables
    }

    /// The offset of the defined-memories (pointer) array.
    #[inline]
    pub fn vmctx_memories_begin(&self) -> u32 {
        self.defined_memories
    }

    /// The offset of the owned-memories (definition) array.
    #[inline]
    pub fn vmctx_owned_memories_begin(&self) -> u32 {
        self.owned_memories
    }

    /// The offset of the defined-globals array.
    #[inline]
    pub fn vmctx_globals_begin(&self) -> u32 {
        self.defined_globals
    }

    /// The offset of the defined-tags array.
    #[inline]
    pub fn vmctx_tags_begin(&self) -> u32 {
        self.defined_tags
    }

    /// The offset of the func-refs array for escaped functions.
    #[inline]
    pub fn vmctx_func_refs_begin(&self) -> u32 {
        self.defined_func_refs
    }

    /// The total size, in bytes, of the `VMContext` allocation.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }

    /// The offset of the `VMFunctionImport` for imported function `index`.
    #[inline]
    pub fn vmctx_vmfunction_import(&self, index: FuncIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_functions);
        self.vmctx_imported_functions_begin()
            + index.as_u32() * u32::from(self.size_of_vmfunction_import())
    }

    /// The offset of the `VMTableImport` for imported table `index`.
    #[inline]
    pub fn vmctx_vmtable_import(&self, index: TableIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tables);
        self.vmctx_imported_tables_begin()
            + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// The offset of the `VMMemoryImport` for imported memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_import(&self, index: MemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_memories);
        self.vmctx_imported_memories_begin()
            + index.as_u32() * u32::from(self.size_of_vmmemory_import())
    }

    /// The offset of the `VMGlobalImport` for imported global `index`.
    #[inline]
    pub fn vmctx_vmglobal_import(&self, index: GlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_globals);
        self.vmctx_imported_globals_begin()
            + index.as_u32() * u32::from(self.size_of_vmglobal_import())
    }

    /// The offset of the `VMTagImport` for imported tag `index`.
    #[inline]
    pub fn vmctx_vmtag_import(&self, index: TagIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tags);
        self.vmctx_imported_tags_begin() + index.as_u32() * u32::from(self.size_of_vmtag_import())
    }

    /// The offset of the `VMTableDefinition` for defined table `index`.
    #[inline]
    pub fn vmctx_vmtable_definition(&self, index: DefinedTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tables);
        self.vmctx_tables_begin() + index.as_u32() * u32::from(self.size_of_vmtable_definition())
    }

    /// The offset of the memory-definition pointer for defined memory
    /// `index`.
    #[inline]
    pub fn vmctx_vmmemory_pointer(&self, index: DefinedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_memories);
        self.vmctx_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_pointer())
    }

    /// The offset of the `VMMemoryDefinition` for owned memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition(&self, index: OwnedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_owned_memories);
        self.vmctx_owned_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_definition())
    }

    /// The offset of the `VMGlobalDefinition` for defined global `index`.
    #[inline]
    pub fn vmctx_vmglobal_definition(&self, index: DefinedGlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_globals);
        self.vmctx_globals_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// The offset of the `VMTagDefinition` for defined tag `index`.
    #[inline]
    pub fn vmctx_vmtag_definition(&self, index: DefinedTagIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tags);
        self.vmctx_tags_begin() + index.as_u32() * u32::from(self.ptr.size_of_vmtag_definition())
    }

    /// The offset of the `VMFuncRef` for escaped function `index`.
    /// The index must not be the reserved (sentinel) value.
    #[inline]
    pub fn vmctx_func_ref(&self, index: FuncRefIndex) -> u32 {
        assert!(!index.is_reserved_value());
        assert!(index.as_u32() < self.num_escaped_funcs);
        self.vmctx_func_refs_begin() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the `wasm_call` field of imported function `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_wasm_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_wasm_call())
    }

    /// The offset of the `array_call` field of imported function `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_array_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_array_call())
    }

    /// The offset of the `vmctx` field of imported function `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_vmctx(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_vmctx())
    }

    /// The offset of the `from` field of imported table `index`.
    #[inline]
    pub fn vmctx_vmtable_from(&self, index: TableIndex) -> u32 {
        self.vmctx_vmtable_import(index) + u32::from(self.vmtable_import_from())
    }

    /// The offset of the `base` field of defined table `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_base(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_base())
    }

    /// The offset of the `current_elements` field of defined table `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_current_elements(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_current_elements())
    }

    /// The offset of the `from` field of imported memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_import_from(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_from())
    }

    /// The offset of the `base` field of owned memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_base(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index) + u32::from(self.ptr.vmmemory_definition_base())
    }

    /// The offset of the `current_length` field of owned memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_current_length(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index)
            + u32::from(self.ptr.vmmemory_definition_current_length())
    }

    /// The offset of the `from` field of imported global `index`.
    #[inline]
    pub fn vmctx_vmglobal_import_from(&self, index: GlobalIndex) -> u32 {
        self.vmctx_vmglobal_import(index) + u32::from(self.vmglobal_import_from())
    }

    /// The offset of the `from` field of imported tag `index`.
    #[inline]
    pub fn vmctx_vmtag_import_from(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_from())
    }
}
1032
/// Offsets within a deferred-reference-counting (DRC) GC header.
impl<P: PtrSize> VMOffsets<P> {
    /// The byte offset of the reference-count field.
    // NOTE(review): the 8-byte displacement presumably skips a common GC
    // object header preceding the count — confirm against the GC header
    // definition.
    #[inline]
    pub fn vm_drc_header_ref_count(&self) -> u32 {
        8
    }
}
1043
1044impl<P: PtrSize> VMOffsets<P> {
1048 #[inline]
1050 pub fn vm_gc_ref_activation_table_next(&self) -> u32 {
1051 0
1052 }
1053
1054 #[inline]
1056 pub fn vm_gc_ref_activation_table_end(&self) -> u32 {
1057 self.pointer_size().into()
1058 }
1059}
1060
/// Magic value stored at the start of a core Wasm `VMContext`: the
/// little-endian bytes of `"core"`. Used as a sanity check that a raw
/// context pointer actually points at a `VMContext`.
pub const VMCONTEXT_MAGIC: u32 = u32::from_le_bytes(*b"core");

/// Magic value for array-call host function contexts: the little-endian
/// bytes of `"ACHF"`.
pub const VM_ARRAY_CALL_HOST_FUNC_MAGIC: u32 = u32::from_le_bytes(*b"ACHF");
1071
#[cfg(test)]
mod tests {
    use crate::vmoffsets::align;

    #[test]
    fn alignment() {
        // Check exact rounded-up values rather than merely asserting the
        // result is a multiple of 16: the old `is_aligned` predicate would
        // also have accepted a broken `align` that returned 0 (or any
        // constant multiple of 16) for every input.
        assert_eq!(align(0, 16), 0);
        assert_eq!(align(1, 16), 16);
        assert_eq!(align(31, 16), 32);
        assert_eq!(align(32, 16), 32);
        assert_eq!(align(33, 16), 48);
        // Non-power-of-two widths are rounded up with integer division,
        // not bit masking.
        assert_eq!(align(7, 3), 9);
    }
}
1086}