use crate::{
    DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, DefinedTagIndex, FuncIndex,
    FuncRefIndex, GlobalIndex, MemoryIndex, Module, OwnedMemoryIndex, TableIndex, TagIndex,
};
use cranelift_entity::packed_option::ReservedValue;

#[cfg(target_pointer_width = "32")]
fn cast_to_u32(sz: usize) -> u32 {
    u32::try_from(sz).unwrap()
}
#[cfg(target_pointer_width = "64")]
fn cast_to_u32(sz: usize) -> u32 {
    u32::try_from(sz).expect("overflow in cast from usize to u32")
}

/// Align `offset` up to the nearest multiple of `width`.
#[inline]
fn align(offset: u32, width: u32) -> u32 {
    (offset + (width - 1)) / width * width
}

/// Offsets of the fields within a `VMContext` for a given module,
/// parameterized over the pointer size `P`.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsets<P> {
    /// The pointer size, abstracted via `PtrSize`.
    pub ptr: P,
    pub num_imported_functions: u32,
    pub num_imported_tables: u32,
    pub num_imported_memories: u32,
    pub num_imported_globals: u32,
    pub num_imported_tags: u32,
    pub num_defined_tables: u32,
    pub num_defined_memories: u32,
    pub num_owned_memories: u32,
    pub num_defined_globals: u32,
    pub num_defined_tags: u32,
    pub num_escaped_funcs: u32,

    // Precomputed offsets of the dynamically-sized regions below, plus the
    // total size of the `VMContext`.
    imported_functions: u32,
    imported_tables: u32,
    imported_memories: u32,
    imported_globals: u32,
    imported_tags: u32,
    defined_tables: u32,
    defined_memories: u32,
    owned_memories: u32,
    defined_globals: u32,
    defined_tags: u32,
    defined_func_refs: u32,
    size: u32,
}

/// Trait used to abstract over the target's pointer size when computing
/// offsets.
pub trait PtrSize {
    /// The size in bytes of a pointer on the target.
    fn size(&self) -> u8;

    fn vmcontext_store_context(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    fn vmcontext_builtin_functions(&self) -> u8 {
        self.vmcontext_store_context() + self.size()
    }

    #[inline]
    fn vm_func_ref_array_call(&self) -> u8 {
        0 * self.size()
    }

    #[inline]
    fn vm_func_ref_wasm_call(&self) -> u8 {
        1 * self.size()
    }

    #[inline]
    fn vm_func_ref_type_index(&self) -> u8 {
        2 * self.size()
    }

    #[inline]
    fn vm_func_ref_vmctx(&self) -> u8 {
        3 * self.size()
    }

    #[inline]
    fn size_of_vm_func_ref(&self) -> u8 {
        4 * self.size()
    }

    #[inline]
    fn size_of_vmglobal_definition(&self) -> u8 {
        16
    }

    #[inline]
    fn size_of_vmtag_definition(&self) -> u8 {
        4
    }

    #[inline]
    fn maximum_value_size(&self) -> u8 {
        self.size_of_vmglobal_definition()
    }

    #[inline]
    fn vmstore_context_fuel_consumed(&self) -> u8 {
        0
    }

    #[inline]
    fn vmstore_context_epoch_deadline(&self) -> u8 {
        self.vmstore_context_fuel_consumed() + 8
    }

    #[inline]
    fn vmstore_context_execution_version(&self) -> u8 {
        self.vmstore_context_epoch_deadline() + 8
    }

    #[inline]
    fn vmstore_context_stack_limit(&self) -> u8 {
        self.vmstore_context_execution_version() + 8
    }

    #[inline]
    fn vmstore_context_gc_heap(&self) -> u8 {
        self.vmstore_context_stack_limit() + self.size()
    }

    fn vmstore_context_gc_heap_base(&self) -> u8 {
        let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_base();
        debug_assert!(offset < self.vmstore_context_last_wasm_exit_trampoline_fp());
        offset
    }

    fn vmstore_context_gc_heap_current_length(&self) -> u8 {
        let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_current_length();
        debug_assert!(offset < self.vmstore_context_last_wasm_exit_trampoline_fp());
        offset
    }

    fn vmstore_context_last_wasm_exit_trampoline_fp(&self) -> u8 {
        self.vmstore_context_gc_heap() + self.size_of_vmmemory_definition()
    }

    fn vmstore_context_last_wasm_exit_pc(&self) -> u8 {
        self.vmstore_context_last_wasm_exit_trampoline_fp() + self.size()
    }

    fn vmstore_context_last_wasm_entry_sp(&self) -> u8 {
        self.vmstore_context_last_wasm_exit_pc() + self.size()
    }

    fn vmstore_context_last_wasm_entry_fp(&self) -> u8 {
        self.vmstore_context_last_wasm_entry_sp() + self.size()
    }

    fn vmstore_context_last_wasm_entry_trap_handler(&self) -> u8 {
        self.vmstore_context_last_wasm_entry_fp() + self.size()
    }

    fn vmstore_context_stack_chain(&self) -> u8 {
        self.vmstore_context_last_wasm_entry_trap_handler() + self.size()
    }

    fn vmstore_context_store_data(&self) -> u8 {
        self.vmstore_context_stack_chain() + self.size_of_vmstack_chain()
    }

    #[inline]
    fn vmmemory_definition_base(&self) -> u8 {
        0 * self.size()
    }

    #[inline]
    fn vmmemory_definition_current_length(&self) -> u8 {
        1 * self.size()
    }

    #[inline]
    fn size_of_vmmemory_definition(&self) -> u8 {
        2 * self.size()
    }

    #[inline]
    fn size_of_vmmemory_pointer(&self) -> u8 {
        self.size()
    }

    fn vmarray_call_host_func_context_func_ref(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    fn size_of_vmstack_chain(&self) -> u8 {
        2 * self.size()
    }

    fn vmstack_limits_stack_limit(&self) -> u8 {
        0
    }

    fn vmstack_limits_last_wasm_entry_fp(&self) -> u8 {
        self.size()
    }

    fn vmhostarray_length(&self) -> u8 {
        0
    }

    fn vmhostarray_capacity(&self) -> u8 {
        4
    }

    fn vmhostarray_data(&self) -> u8 {
        8
    }

    fn size_of_vmhostarray(&self) -> u8 {
        8 + self.size()
    }

    fn vmcommon_stack_information_limits(&self) -> u8 {
        0 * self.size()
    }

    fn vmcommon_stack_information_state(&self) -> u8 {
        2 * self.size()
    }

    fn vmcommon_stack_information_handlers(&self) -> u8 {
        u8::try_from(align(
            self.vmcommon_stack_information_state() as u32 + 4,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    fn vmcommon_stack_information_first_switch_handler_index(&self) -> u8 {
        self.vmcommon_stack_information_handlers() + self.size_of_vmhostarray()
    }

    fn size_of_vmcommon_stack_information(&self) -> u8 {
        u8::try_from(align(
            self.vmcommon_stack_information_first_switch_handler_index() as u32 + 4,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    fn vmcontobj_contref(&self) -> u8 {
        0
    }

    fn vmcontobj_revision(&self) -> u8 {
        self.size()
    }

    fn size_of_vmcontobj(&self) -> u8 {
        u8::try_from(align(
            u32::from(self.vmcontobj_revision())
                + u32::try_from(core::mem::size_of::<usize>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    fn vmcontref_common_stack_information(&self) -> u8 {
        0 * self.size()
    }

    fn vmcontref_parent_chain(&self) -> u8 {
        u8::try_from(align(
            (self.vmcontref_common_stack_information() + self.size_of_vmcommon_stack_information())
                as u32,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    fn vmcontref_last_ancestor(&self) -> u8 {
        self.vmcontref_parent_chain() + 2 * self.size()
    }

    fn vmcontref_revision(&self) -> u8 {
        self.vmcontref_last_ancestor() + self.size()
    }

    fn vmcontref_stack(&self) -> u8 {
        self.vmcontref_revision() + self.size()
    }

    fn vmcontref_args(&self) -> u8 {
        self.vmcontref_stack() + 3 * self.size()
    }

    fn vmcontref_values(&self) -> u8 {
        self.vmcontref_args() + self.size_of_vmhostarray()
    }

    #[inline]
    fn vmctx_magic(&self) -> u8 {
        0
    }

    #[inline]
    fn vmctx_store_context(&self) -> u8 {
        self.vmctx_magic() + self.size()
    }

    #[inline]
    fn vmctx_builtin_functions(&self) -> u8 {
        self.vmctx_store_context() + self.size()
    }

    #[inline]
    fn vmctx_epoch_ptr(&self) -> u8 {
        self.vmctx_builtin_functions() + self.size()
    }

    #[inline]
    fn vmctx_gc_heap_data(&self) -> u8 {
        self.vmctx_epoch_ptr() + self.size()
    }

    #[inline]
    fn vmctx_type_ids_array(&self) -> u8 {
        self.vmctx_gc_heap_data() + self.size()
    }

    #[inline]
    fn vmctx_dynamic_data_start(&self) -> u8 {
        self.vmctx_type_ids_array() + self.size()
    }
}

/// A `PtrSize` implementation that uses the pointer size of the host.
#[derive(Clone, Copy)]
pub struct HostPtr;

impl PtrSize for HostPtr {
    #[inline]
    fn size(&self) -> u8 {
        core::mem::size_of::<usize>() as u8
    }
}

impl PtrSize for u8 {
    #[inline]
    fn size(&self) -> u8 {
        *self
    }
}
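
// Illustrative note (assumption, not from the original source): `HostPtr`
// reads the pointer size from the host, while the `u8` impl lets offsets be
// computed for an arbitrary target pointer width, e.g. when cross-compiling
// from a 64-bit host to a 32-bit target:
//
//     let target: u8 = 4; // hypothetical 32-bit target
//     assert_eq!(target.size_of_vmmemory_definition(), 8); // two pointers
//     assert_eq!(HostPtr.size() as usize, core::mem::size_of::<usize>());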

/// The raw inputs from which a `VMOffsets` is computed (via its `From` impl).
#[derive(Debug, Clone, Copy)]
pub struct VMOffsetsFields<P> {
    pub ptr: P,
    pub num_imported_functions: u32,
    pub num_imported_tables: u32,
    pub num_imported_memories: u32,
    pub num_imported_globals: u32,
    pub num_imported_tags: u32,
    pub num_defined_tables: u32,
    pub num_defined_memories: u32,
    pub num_owned_memories: u32,
    pub num_defined_globals: u32,
    pub num_defined_tags: u32,
    pub num_escaped_funcs: u32,
}

impl<P: PtrSize> VMOffsets<P> {
    /// Compute the offsets for `module` with the given pointer size.
    pub fn new(ptr: P, module: &Module) -> Self {
        let num_owned_memories = module
            .memories
            .iter()
            .skip(module.num_imported_memories)
            .filter(|p| !p.1.shared)
            .count()
            .try_into()
            .unwrap();
        VMOffsets::from(VMOffsetsFields {
            ptr,
            num_imported_functions: cast_to_u32(module.num_imported_funcs),
            num_imported_tables: cast_to_u32(module.num_imported_tables),
            num_imported_memories: cast_to_u32(module.num_imported_memories),
            num_imported_globals: cast_to_u32(module.num_imported_globals),
            num_imported_tags: cast_to_u32(module.num_imported_tags),
            num_defined_tables: cast_to_u32(module.num_defined_tables()),
            num_defined_memories: cast_to_u32(module.num_defined_memories()),
            num_owned_memories,
            num_defined_globals: cast_to_u32(module.globals.len() - module.num_imported_globals),
            num_defined_tags: cast_to_u32(module.tags.len() - module.num_imported_tags),
            num_escaped_funcs: cast_to_u32(module.num_escaped_funcs),
        })
    }

    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// Returns an iterator of `(description, size-in-bytes)` pairs, one per
    /// region of the `VMContext`, from the end of the context back to its
    /// start.
    pub fn region_sizes(&self) -> impl Iterator<Item = (&str, u32)> {
        macro_rules! calculate_sizes {
            ($($name:ident: $desc:tt,)*) => {{
                let VMOffsets {
                    ptr: _,
                    num_imported_functions: _,
                    num_imported_tables: _,
                    num_imported_memories: _,
                    num_imported_globals: _,
                    num_imported_tags: _,
                    num_defined_tables: _,
                    num_defined_globals: _,
                    num_defined_memories: _,
                    num_defined_tags: _,
                    num_owned_memories: _,
                    num_escaped_funcs: _,

                    size,

                    $($name,)*
                } = *self;

                // Each region's size is the distance from its start offset to
                // the start of the next region, working backwards from `size`.
                let mut last = size;
                $(
                    assert!($name <= last);
                    let tmp = $name;
                    let $name = last - $name;
                    last = tmp;
                )*
                assert_ne!(last, 0);
                IntoIterator::into_iter([
                    $(($desc, $name),)*
                    ("static vmctx data", last),
                ])
            }};
        }

        calculate_sizes! {
            defined_func_refs: "module functions",
            defined_tags: "defined tags",
            defined_globals: "defined globals",
            defined_tables: "defined tables",
            imported_tags: "imported tags",
            imported_globals: "imported globals",
            imported_tables: "imported tables",
            imported_functions: "imported functions",
            owned_memories: "owned memories",
            defined_memories: "defined memories",
            imported_memories: "imported memories",
        }
    }
}
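
// Usage sketch (illustrative, not part of the original source): the per-region
// sizes reported by `region_sizes` telescope over the precomputed offsets, so
// they are expected to sum to the total `VMContext` size:
//
//     let total: u32 = offsets.region_sizes().map(|(_desc, size)| size).sum();
//     debug_assert_eq!(total, offsets.size_of_vmctx());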

impl<P: PtrSize> From<VMOffsetsFields<P>> for VMOffsets<P> {
    fn from(fields: VMOffsetsFields<P>) -> VMOffsets<P> {
        let mut ret = Self {
            ptr: fields.ptr,
            num_imported_functions: fields.num_imported_functions,
            num_imported_tables: fields.num_imported_tables,
            num_imported_memories: fields.num_imported_memories,
            num_imported_globals: fields.num_imported_globals,
            num_imported_tags: fields.num_imported_tags,
            num_defined_tables: fields.num_defined_tables,
            num_defined_memories: fields.num_defined_memories,
            num_owned_memories: fields.num_owned_memories,
            num_defined_globals: fields.num_defined_globals,
            num_defined_tags: fields.num_defined_tags,
            num_escaped_funcs: fields.num_escaped_funcs,
            imported_functions: 0,
            imported_tables: 0,
            imported_memories: 0,
            imported_globals: 0,
            imported_tags: 0,
            defined_tables: 0,
            defined_memories: 0,
            owned_memories: 0,
            defined_globals: 0,
            defined_tags: 0,
            defined_func_refs: 0,
            size: 0,
        };

        // Checked add/mul helpers so that overflow in the offset computation
        // panics rather than silently wrapping.
        #[inline]
        fn cadd(count: u32, size: u32) -> u32 {
            count.checked_add(size).unwrap()
        }

        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        let mut next_field_offset = u32::from(ret.ptr.vmctx_dynamic_data_start());

        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = cadd(next_field_offset, u32::from($size));
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        fields! {
            size(imported_memories)
                = cmul(ret.num_imported_memories, ret.size_of_vmmemory_import()),
            size(defined_memories)
                = cmul(ret.num_defined_memories, ret.ptr.size_of_vmmemory_pointer()),
            size(owned_memories)
                = cmul(ret.num_owned_memories, ret.ptr.size_of_vmmemory_definition()),
            size(imported_functions)
                = cmul(ret.num_imported_functions, ret.size_of_vmfunction_import()),
            size(imported_tables)
                = cmul(ret.num_imported_tables, ret.size_of_vmtable_import()),
            size(imported_globals)
                = cmul(ret.num_imported_globals, ret.size_of_vmglobal_import()),
            size(imported_tags)
                = cmul(ret.num_imported_tags, ret.size_of_vmtag_import()),
            size(defined_tables)
                = cmul(ret.num_defined_tables, ret.size_of_vmtable_definition()),
            align(16),
            size(defined_globals)
                = cmul(ret.num_defined_globals, ret.ptr.size_of_vmglobal_definition()),
            size(defined_tags)
                = cmul(ret.num_defined_tags, ret.ptr.size_of_vmtag_definition()),
            size(defined_func_refs) = cmul(
                ret.num_escaped_funcs,
                ret.ptr.size_of_vm_func_ref(),
            ),
        }

        ret.size = next_field_offset;

        return ret;
    }
}
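
// Resulting layout (a sketch derived from the `fields!` invocation above, for
// orientation only): the dynamic portion of the vmctx begins at
// `vmctx_dynamic_data_start()` and is laid out as
//
//     imported_memories | defined_memories | owned_memories
//     | imported_functions | imported_tables | imported_globals | imported_tags
//     | defined_tables | <align to 16> | defined_globals | defined_tags
//     | defined_func_refs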

impl<P: PtrSize> VMOffsets<P> {
    #[inline]
    pub fn vmfunction_import_wasm_call(&self) -> u8 {
        0 * self.pointer_size()
    }

    #[inline]
    pub fn vmfunction_import_array_call(&self) -> u8 {
        1 * self.pointer_size()
    }

    #[inline]
    pub fn vmfunction_import_vmctx(&self) -> u8 {
        2 * self.pointer_size()
    }

    #[inline]
    pub fn size_of_vmfunction_import(&self) -> u8 {
        3 * self.pointer_size()
    }
}

impl<P: PtrSize> VMOffsets<P> {
    pub fn size_of_vmfunction_body_ptr(&self) -> u8 {
        1 * self.pointer_size()
    }
}

impl<P: PtrSize> VMOffsets<P> {
    #[inline]
    pub fn vmtable_import_from(&self) -> u8 {
        0 * self.pointer_size()
    }

    #[inline]
    pub fn vmtable_import_vmctx(&self) -> u8 {
        1 * self.pointer_size()
    }

    #[inline]
    pub fn vmtable_import_index(&self) -> u8 {
        2 * self.pointer_size()
    }

    #[inline]
    pub fn size_of_vmtable_import(&self) -> u8 {
        3 * self.pointer_size()
    }
}

impl<P: PtrSize> VMOffsets<P> {
    #[inline]
    pub fn vmtable_definition_base(&self) -> u8 {
        0 * self.pointer_size()
    }

    pub fn vmtable_definition_current_elements(&self) -> u8 {
        1 * self.pointer_size()
    }

    #[inline]
    pub fn size_of_vmtable_definition_current_elements(&self) -> u8 {
        self.pointer_size()
    }

    #[inline]
    pub fn size_of_vmtable_definition(&self) -> u8 {
        2 * self.pointer_size()
    }
}

impl<P: PtrSize> VMOffsets<P> {
    #[inline]
    pub fn vmmemory_import_from(&self) -> u8 {
        0 * self.pointer_size()
    }

    #[inline]
    pub fn vmmemory_import_vmctx(&self) -> u8 {
        1 * self.pointer_size()
    }

    #[inline]
    pub fn vmmemory_import_index(&self) -> u8 {
        2 * self.pointer_size()
    }

    #[inline]
    pub fn size_of_vmmemory_import(&self) -> u8 {
        3 * self.pointer_size()
    }
}

impl<P: PtrSize> VMOffsets<P> {
    #[inline]
    pub fn vmglobal_import_from(&self) -> u8 {
        0 * self.pointer_size()
    }

    #[inline]
    pub fn size_of_vmglobal_import(&self) -> u8 {
        2 * self.pointer_size() + 8
    }
}

impl<P: PtrSize> VMOffsets<P> {
    #[inline]
    pub fn size_of_vmshared_type_index(&self) -> u8 {
        4
    }
}

impl<P: PtrSize> VMOffsets<P> {
    #[inline]
    pub fn vmtag_import_from(&self) -> u8 {
        0 * self.pointer_size()
    }

    #[inline]
    pub fn vmtag_import_vmctx(&self) -> u8 {
        1 * self.pointer_size()
    }

    #[inline]
    pub fn vmtag_import_index(&self) -> u8 {
        2 * self.pointer_size()
    }

    #[inline]
    pub fn size_of_vmtag_import(&self) -> u8 {
        3 * self.pointer_size()
    }
}

impl<P: PtrSize> VMOffsets<P> {
    #[inline]
    pub fn vmctx_imported_functions_begin(&self) -> u32 {
        self.imported_functions
    }

    #[inline]
    pub fn vmctx_imported_tables_begin(&self) -> u32 {
        self.imported_tables
    }

    #[inline]
    pub fn vmctx_imported_memories_begin(&self) -> u32 {
        self.imported_memories
    }

    #[inline]
    pub fn vmctx_imported_globals_begin(&self) -> u32 {
        self.imported_globals
    }

    #[inline]
    pub fn vmctx_imported_tags_begin(&self) -> u32 {
        self.imported_tags
    }

    #[inline]
    pub fn vmctx_tables_begin(&self) -> u32 {
        self.defined_tables
    }

    #[inline]
    pub fn vmctx_memories_begin(&self) -> u32 {
        self.defined_memories
    }

    #[inline]
    pub fn vmctx_owned_memories_begin(&self) -> u32 {
        self.owned_memories
    }

    #[inline]
    pub fn vmctx_globals_begin(&self) -> u32 {
        self.defined_globals
    }

    #[inline]
    pub fn vmctx_tags_begin(&self) -> u32 {
        self.defined_tags
    }

    #[inline]
    pub fn vmctx_func_refs_begin(&self) -> u32 {
        self.defined_func_refs
    }

    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }

    #[inline]
    pub fn vmctx_vmfunction_import(&self, index: FuncIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_functions);
        self.vmctx_imported_functions_begin()
            + index.as_u32() * u32::from(self.size_of_vmfunction_import())
    }

    #[inline]
    pub fn vmctx_vmtable_import(&self, index: TableIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tables);
        self.vmctx_imported_tables_begin()
            + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    #[inline]
    pub fn vmctx_vmmemory_import(&self, index: MemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_memories);
        self.vmctx_imported_memories_begin()
            + index.as_u32() * u32::from(self.size_of_vmmemory_import())
    }

    #[inline]
    pub fn vmctx_vmglobal_import(&self, index: GlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_globals);
        self.vmctx_imported_globals_begin()
            + index.as_u32() * u32::from(self.size_of_vmglobal_import())
    }

    #[inline]
    pub fn vmctx_vmtag_import(&self, index: TagIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tags);
        self.vmctx_imported_tags_begin() + index.as_u32() * u32::from(self.size_of_vmtag_import())
    }

    #[inline]
    pub fn vmctx_vmtable_definition(&self, index: DefinedTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tables);
        self.vmctx_tables_begin() + index.as_u32() * u32::from(self.size_of_vmtable_definition())
    }

    #[inline]
    pub fn vmctx_vmmemory_pointer(&self, index: DefinedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_memories);
        self.vmctx_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_pointer())
    }

    #[inline]
    pub fn vmctx_vmmemory_definition(&self, index: OwnedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_owned_memories);
        self.vmctx_owned_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_definition())
    }

    #[inline]
    pub fn vmctx_vmglobal_definition(&self, index: DefinedGlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_globals);
        self.vmctx_globals_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    #[inline]
    pub fn vmctx_vmtag_definition(&self, index: DefinedTagIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tags);
        self.vmctx_tags_begin() + index.as_u32() * u32::from(self.ptr.size_of_vmtag_definition())
    }

    #[inline]
    pub fn vmctx_func_ref(&self, index: FuncRefIndex) -> u32 {
        assert!(!index.is_reserved_value());
        assert!(index.as_u32() < self.num_escaped_funcs);
        self.vmctx_func_refs_begin() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    #[inline]
    pub fn vmctx_vmfunction_import_wasm_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_wasm_call())
    }

    #[inline]
    pub fn vmctx_vmfunction_import_array_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_array_call())
    }

    #[inline]
    pub fn vmctx_vmfunction_import_vmctx(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_vmctx())
    }

    #[inline]
    pub fn vmctx_vmtable_from(&self, index: TableIndex) -> u32 {
        self.vmctx_vmtable_import(index) + u32::from(self.vmtable_import_from())
    }

    #[inline]
    pub fn vmctx_vmtable_definition_base(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_base())
    }

    #[inline]
    pub fn vmctx_vmtable_definition_current_elements(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index)
            + u32::from(self.vmtable_definition_current_elements())
    }

    #[inline]
    pub fn vmctx_vmmemory_import_from(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_from())
    }

    #[inline]
    pub fn vmctx_vmmemory_definition_base(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index) + u32::from(self.ptr.vmmemory_definition_base())
    }

    #[inline]
    pub fn vmctx_vmmemory_definition_current_length(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index)
            + u32::from(self.ptr.vmmemory_definition_current_length())
    }

    #[inline]
    pub fn vmctx_vmglobal_import_from(&self, index: GlobalIndex) -> u32 {
        self.vmctx_vmglobal_import(index) + u32::from(self.vmglobal_import_from())
    }

    #[inline]
    pub fn vmctx_vmtag_import_from(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_from())
    }

    #[inline]
    pub fn vmctx_vmtag_import_vmctx(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_vmctx())
    }

    #[inline]
    pub fn vmctx_vmtag_import_index(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_index())
    }
}
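
// Usage sketch (illustrative, not part of the original source): the composed
// accessors above add a region base, a per-entry stride, and a field offset.
// For an imported function index `i` this means:
//
//     offsets.vmctx_vmfunction_import_wasm_call(i)
//         == offsets.vmctx_imported_functions_begin()
//             + i.as_u32() * u32::from(offsets.size_of_vmfunction_import())
//             + u32::from(offsets.vmfunction_import_wasm_call())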

impl<P: PtrSize> VMOffsets<P> {
    #[inline]
    pub fn vm_gc_header_kind(&self) -> u32 {
        0
    }

    #[inline]
    pub fn vm_gc_header_reserved_bits(&self) -> u32 {
        self.vm_gc_header_kind()
    }

    #[inline]
    pub fn vm_gc_header_ty(&self) -> u32 {
        self.vm_gc_header_kind() + 4
    }
}

impl<P: PtrSize> VMOffsets<P> {
    #[inline]
    pub fn vm_drc_header_ref_count(&self) -> u32 {
        8
    }

    #[inline]
    pub fn vm_drc_header_next_over_approximated_stack_root(&self) -> u32 {
        self.vm_drc_header_ref_count() + 8
    }
}

/// Magic value identifying a `VMContext`: the bytes `b"core"` interpreted as a
/// little-endian `u32`.
pub const VMCONTEXT_MAGIC: u32 = u32::from_le_bytes(*b"core");

/// Magic value identifying an array-call host function context: the bytes
/// `b"ACHF"` interpreted as a little-endian `u32`.
pub const VM_ARRAY_CALL_HOST_FUNC_MAGIC: u32 = u32::from_le_bytes(*b"ACHF");

#[cfg(test)]
mod tests {
    use crate::vmoffsets::align;

    #[test]
    fn alignment() {
        fn is_aligned(x: u32) -> bool {
            x % 16 == 0
        }
        assert!(is_aligned(align(0, 16)));
        assert!(is_aligned(align(32, 16)));
        assert!(is_aligned(align(33, 16)));
        assert!(is_aligned(align(31, 16)));
    }
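
    // A small sketch of how `PtrSize` offsets compose, using `u8` as the
    // pointer size for a hypothetical 8-byte target (an illustrative check,
    // not taken from the original test suite).
    #[test]
    fn ptr_size_offsets() {
        use crate::vmoffsets::PtrSize;
        let ptr: u8 = 8;
        // A `VMFuncRef` is four pointer-sized fields.
        assert_eq!(ptr.vm_func_ref_wasm_call(), 8);
        assert_eq!(ptr.size_of_vm_func_ref(), 32);
        // The static vmctx header occupies six pointer-sized slots before the
        // dynamic data begins.
        assert_eq!(ptr.vmctx_dynamic_data_start(), 6 * ptr.size());
    }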
}