use crate::{
    DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, DefinedTagIndex, FuncIndex,
    FuncRefIndex, GlobalIndex, MemoryIndex, Module, OwnedMemoryIndex, TableIndex, TagIndex,
};
use cranelift_entity::packed_option::ReservedValue;

/// Cast a `usize` to a `u32`; on 32-bit hosts this can never fail.
#[cfg(target_pointer_width = "32")]
fn cast_to_u32(sz: usize) -> u32 {
    u32::try_from(sz).unwrap()
}
/// Cast a `usize` to a `u32`, panicking if the value does not fit.
#[cfg(target_pointer_width = "64")]
fn cast_to_u32(sz: usize) -> u32 {
    u32::try_from(sz).expect("overflow in cast from usize to u32")
}

/// Align an offset used in this module to a specific byte-width by rounding up.
#[inline]
fn align(offset: u32, width: u32) -> u32 {
    (offset + (width - 1)) / width * width
}

/// This type computes offsets to fields within `VMContext` and other
/// related structs that JIT code accesses directly.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsets<P> {
    /// The size in bytes of a pointer on the target.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of imported tags in the module.
    pub num_imported_tags: u32,
    /// The number of defined tables in the module.
    pub num_defined_tables: u32,
    /// The number of defined memories in the module.
    pub num_defined_memories: u32,
    /// The number of memories owned (not shared) by the module.
    pub num_owned_memories: u32,
    /// The number of defined globals in the module.
    pub num_defined_globals: u32,
    /// The number of defined tags in the module.
    pub num_defined_tags: u32,
    /// The number of escaped functions in the module, i.e. the size of the
    /// `VMFuncRef` array.
    pub num_escaped_funcs: u32,

    // Precalculated offsets of the dynamic regions within the `VMContext`.
    imported_functions: u32,
    imported_tables: u32,
    imported_memories: u32,
    imported_globals: u32,
    imported_tags: u32,
    defined_tables: u32,
    defined_memories: u32,
    owned_memories: u32,
    defined_globals: u32,
    defined_tags: u32,
    defined_func_refs: u32,
    size: u32,
}

/// Trait used for the `ptr` field of `VMOffsets`, abstracting over the
/// target's pointer size.
pub trait PtrSize {
    /// Returns the pointer size, in bytes, for the target.
    fn size(&self) -> u8;

    /// The offset of the `store_context` field within `VMContext`, just past
    /// the 32-bit magic value aligned up to the pointer size.
    fn vmcontext_store_context(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// The offset of the `builtin_functions` field within `VMContext`.
    fn vmcontext_builtin_functions(&self) -> u8 {
        self.vmcontext_store_context() + self.size()
    }

    // Offsets within `VMFuncRef`.

    /// The offset of the `array_call` field.
    #[inline]
    fn vm_func_ref_array_call(&self) -> u8 {
        0 * self.size()
    }

    /// The offset of the `wasm_call` field.
    #[inline]
    fn vm_func_ref_wasm_call(&self) -> u8 {
        1 * self.size()
    }

    /// The offset of the `type_index` field.
    #[inline]
    fn vm_func_ref_type_index(&self) -> u8 {
        2 * self.size()
    }

    /// The offset of the `vmctx` field.
    #[inline]
    fn vm_func_ref_vmctx(&self) -> u8 {
        3 * self.size()
    }

    /// The size of a `VMFuncRef`.
    #[inline]
    fn size_of_vm_func_ref(&self) -> u8 {
        4 * self.size()
    }

    /// The size of a `VMGlobalDefinition`, the size of the largest global
    /// value type.
    #[inline]
    fn size_of_vmglobal_definition(&self) -> u8 {
        16
    }

    /// The size of a `VMTagDefinition`.
    #[inline]
    fn size_of_vmtag_definition(&self) -> u8 {
        4
    }

    /// The maximum size, in bytes, of a value stored in a global.
    #[inline]
    fn maximum_value_size(&self) -> u8 {
        self.size_of_vmglobal_definition()
    }

    // Offsets within `VMStoreContext`.

    /// The offset of the `fuel_consumed` field.
    #[inline]
    fn vmstore_context_fuel_consumed(&self) -> u8 {
        0
    }

    /// The offset of the `epoch_deadline` field.
    #[inline]
    fn vmstore_context_epoch_deadline(&self) -> u8 {
        self.vmstore_context_fuel_consumed() + 8
    }

    /// The offset of the `stack_limit` field.
    #[inline]
    fn vmstore_context_stack_limit(&self) -> u8 {
        self.vmstore_context_epoch_deadline() + 8
    }

    /// The offset of the `gc_heap` field, a `VMMemoryDefinition`.
    #[inline]
    fn vmstore_context_gc_heap(&self) -> u8 {
        self.vmstore_context_stack_limit() + self.size()
    }

    /// The offset of the GC heap's base pointer within the `gc_heap` field.
    fn vmstore_context_gc_heap_base(&self) -> u8 {
        let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_base();
        debug_assert!(offset < self.vmstore_context_last_wasm_exit_trampoline_fp());
        offset
    }

    /// The offset of the GC heap's current length within the `gc_heap` field.
    fn vmstore_context_gc_heap_current_length(&self) -> u8 {
        let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_current_length();
        debug_assert!(offset < self.vmstore_context_last_wasm_exit_trampoline_fp());
        offset
    }

    /// The offset of the `last_wasm_exit_trampoline_fp` field.
    fn vmstore_context_last_wasm_exit_trampoline_fp(&self) -> u8 {
        self.vmstore_context_gc_heap() + self.size_of_vmmemory_definition()
    }

    /// The offset of the `last_wasm_exit_pc` field.
    fn vmstore_context_last_wasm_exit_pc(&self) -> u8 {
        self.vmstore_context_last_wasm_exit_trampoline_fp() + self.size()
    }

    /// The offset of the `last_wasm_entry_sp` field.
    fn vmstore_context_last_wasm_entry_sp(&self) -> u8 {
        self.vmstore_context_last_wasm_exit_pc() + self.size()
    }

    /// The offset of the `last_wasm_entry_fp` field.
    fn vmstore_context_last_wasm_entry_fp(&self) -> u8 {
        self.vmstore_context_last_wasm_entry_sp() + self.size()
    }

    /// The offset of the `last_wasm_entry_trap_handler` field.
    fn vmstore_context_last_wasm_entry_trap_handler(&self) -> u8 {
        self.vmstore_context_last_wasm_entry_fp() + self.size()
    }

    /// The offset of the `stack_chain` field.
    fn vmstore_context_stack_chain(&self) -> u8 {
        self.vmstore_context_last_wasm_entry_trap_handler() + self.size()
    }

    /// The offset of the `store_data` field.
    fn vmstore_context_store_data(&self) -> u8 {
        self.vmstore_context_stack_chain() + self.size_of_vmstack_chain()
    }

    // Offsets within `VMMemoryDefinition`.

    /// The offset of the `base` field.
    #[inline]
    fn vmmemory_definition_base(&self) -> u8 {
        0 * self.size()
    }

    /// The offset of the `current_length` field.
    #[inline]
    fn vmmemory_definition_current_length(&self) -> u8 {
        1 * self.size()
    }

    /// The size of a `VMMemoryDefinition`.
    #[inline]
    fn size_of_vmmemory_definition(&self) -> u8 {
        2 * self.size()
    }

    /// The size of a pointer to a `VMMemoryDefinition`.
    #[inline]
    fn size_of_vmmemory_pointer(&self) -> u8 {
        self.size()
    }

    /// The offset of the `func_ref` field in `VMArrayCallHostFuncContext`,
    /// just past the 32-bit magic value aligned up to the pointer size.
    fn vmarray_call_host_func_context_func_ref(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// The size of a stack chain value (two pointers).
    fn size_of_vmstack_chain(&self) -> u8 {
        2 * self.size()
    }

    // Offsets within `VMStackLimits`.

    /// The offset of the `stack_limit` field.
    fn vmstack_limits_stack_limit(&self) -> u8 {
        0
    }

    /// The offset of the `last_wasm_entry_fp` field.
    fn vmstack_limits_last_wasm_entry_fp(&self) -> u8 {
        self.size()
    }

    // Offsets within `VMHostArray`.

    /// The offset of the `length` field.
    fn vmhostarray_length(&self) -> u8 {
        0
    }

    /// The offset of the `capacity` field.
    fn vmhostarray_capacity(&self) -> u8 {
        4
    }

    /// The offset of the `data` pointer.
    fn vmhostarray_data(&self) -> u8 {
        8
    }

    /// The size of a `VMHostArray`.
    fn size_of_vmhostarray(&self) -> u8 {
        8 + self.size()
    }

    // Offsets within `VMCommonStackInformation`.

    /// The offset of the `limits` field.
    fn vmcommon_stack_information_limits(&self) -> u8 {
        0 * self.size()
    }

    /// The offset of the `state` field.
    fn vmcommon_stack_information_state(&self) -> u8 {
        2 * self.size()
    }

    /// The offset of the `handlers` field, aligned up to the pointer size.
    fn vmcommon_stack_information_handlers(&self) -> u8 {
        u8::try_from(align(
            self.vmcommon_stack_information_state() as u32 + 4,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// The offset of the `first_switch_handler_index` field.
    fn vmcommon_stack_information_first_switch_handler_index(&self) -> u8 {
        self.vmcommon_stack_information_handlers() + self.size_of_vmhostarray()
    }

    /// The size of a `VMCommonStackInformation`, aligned up to the pointer size.
    fn size_of_vmcommon_stack_information(&self) -> u8 {
        u8::try_from(align(
            self.vmcommon_stack_information_first_switch_handler_index() as u32 + 4,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    // Offsets within `VMContObj`.

    /// The offset of the `contref` pointer.
    fn vmcontobj_contref(&self) -> u8 {
        0
    }

    /// The offset of the `revision` field.
    fn vmcontobj_revision(&self) -> u8 {
        self.size()
    }

    /// The size of a `VMContObj`, aligned up to the pointer size.
    fn size_of_vmcontobj(&self) -> u8 {
        u8::try_from(align(
            u32::from(self.vmcontobj_revision())
                + u32::try_from(core::mem::size_of::<usize>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    // Offsets within `VMContRef`.

    /// The offset of the `common_stack_information` field.
    fn vmcontref_common_stack_information(&self) -> u8 {
        0 * self.size()
    }

    /// The offset of the `parent_chain` field, aligned up to the pointer size.
    fn vmcontref_parent_chain(&self) -> u8 {
        u8::try_from(align(
            (self.vmcontref_common_stack_information() + self.size_of_vmcommon_stack_information())
                as u32,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// The offset of the `last_ancestor` field.
    fn vmcontref_last_ancestor(&self) -> u8 {
        self.vmcontref_parent_chain() + 2 * self.size()
    }

    /// The offset of the `revision` field.
    fn vmcontref_revision(&self) -> u8 {
        self.vmcontref_last_ancestor() + self.size()
    }

    /// The offset of the `stack` field.
    fn vmcontref_stack(&self) -> u8 {
        self.vmcontref_revision() + self.size()
    }

    /// The offset of the `args` field.
    fn vmcontref_args(&self) -> u8 {
        self.vmcontref_stack() + 3 * self.size()
    }

    /// The offset of the `values` field.
    fn vmcontref_values(&self) -> u8 {
        self.vmcontref_args() + self.size_of_vmhostarray()
    }

    // Offsets of the static fields at the start of `VMContext`.

    /// The offset of the 32-bit magic value, always 0.
    #[inline]
    fn vmctx_magic(&self) -> u8 {
        0
    }

    /// The offset of the store context pointer.
    #[inline]
    fn vmctx_store_context(&self) -> u8 {
        self.vmctx_magic() + self.size()
    }

    /// The offset of the builtin functions pointer.
    #[inline]
    fn vmctx_builtin_functions(&self) -> u8 {
        self.vmctx_store_context() + self.size()
    }

    /// The offset of the epoch counter pointer.
    #[inline]
    fn vmctx_epoch_ptr(&self) -> u8 {
        self.vmctx_builtin_functions() + self.size()
    }

    /// The offset of the GC heap data pointer.
    #[inline]
    fn vmctx_gc_heap_data(&self) -> u8 {
        self.vmctx_epoch_ptr() + self.size()
    }

    /// The offset of the type IDs array pointer.
    #[inline]
    fn vmctx_type_ids_array(&self) -> u8 {
        self.vmctx_gc_heap_data() + self.size()
    }

    /// The offset at which the per-module dynamic data begins.
    #[inline]
    fn vmctx_dynamic_data_start(&self) -> u8 {
        self.vmctx_type_ids_array() + self.size()
    }
}

/// Type representing the size of a pointer for the current host.
#[derive(Clone, Copy)]
pub struct HostPtr;

impl PtrSize for HostPtr {
    #[inline]
    fn size(&self) -> u8 {
        core::mem::size_of::<usize>() as u8
    }
}

impl PtrSize for u8 {
    #[inline]
    fn size(&self) -> u8 {
        *self
    }
}

/// Used to construct a `VMOffsets`.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsetsFields<P> {
    /// The size in bytes of a pointer on the target.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of imported tags in the module.
    pub num_imported_tags: u32,
    /// The number of defined tables in the module.
    pub num_defined_tables: u32,
    /// The number of defined memories in the module.
    pub num_defined_memories: u32,
    /// The number of memories owned (not shared) by the module.
    pub num_owned_memories: u32,
    /// The number of defined globals in the module.
    pub num_defined_globals: u32,
    /// The number of defined tags in the module.
    pub num_defined_tags: u32,
    /// The number of escaped functions in the module, i.e. the size of the
    /// `VMFuncRef` array.
    pub num_escaped_funcs: u32,
}

impl<P: PtrSize> VMOffsets<P> {
    /// Return a new `VMOffsets` instance for the given pointer size and
    /// `Module`.
    pub fn new(ptr: P, module: &Module) -> Self {
        let num_owned_memories = module
            .memories
            .iter()
            .skip(module.num_imported_memories)
            .filter(|p| !p.1.shared)
            .count()
            .try_into()
            .unwrap();
        VMOffsets::from(VMOffsetsFields {
            ptr,
            num_imported_functions: cast_to_u32(module.num_imported_funcs),
            num_imported_tables: cast_to_u32(module.num_imported_tables),
            num_imported_memories: cast_to_u32(module.num_imported_memories),
            num_imported_globals: cast_to_u32(module.num_imported_globals),
            num_imported_tags: cast_to_u32(module.num_imported_tags),
            num_defined_tables: cast_to_u32(module.num_defined_tables()),
            num_defined_memories: cast_to_u32(module.num_defined_memories()),
            num_owned_memories,
            num_defined_globals: cast_to_u32(module.globals.len() - module.num_imported_globals),
            num_defined_tags: cast_to_u32(module.tags.len() - module.num_imported_tags),
            num_escaped_funcs: cast_to_u32(module.num_escaped_funcs),
        })
    }

    /// Returns the pointer size, in bytes, for the target.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// Returns an iterator of `(description, size)` pairs accounting for every
    /// byte of the `VMContext` allocation described by this `VMOffsets`.
    pub fn region_sizes(&self) -> impl Iterator<Item = (&str, u32)> {
        macro_rules! calculate_sizes {
            ($($name:ident: $desc:tt,)*) => {{
                let VMOffsets {
                    // These fields are counts, not offsets into the `VMContext`.
                    ptr: _,
                    num_imported_functions: _,
                    num_imported_tables: _,
                    num_imported_memories: _,
                    num_imported_globals: _,
                    num_imported_tags: _,
                    num_defined_tables: _,
                    num_defined_globals: _,
                    num_defined_memories: _,
                    num_defined_tags: _,
                    num_owned_memories: _,
                    num_escaped_funcs: _,

                    // The total size, used as the starting point below.
                    size,

                    // The remaining offset fields, matched exhaustively
                    // against the macro input.
                    $($name,)*
                } = *self;

                // Compute the size of each region as the difference between
                // consecutive offsets, walking backwards from the total size.
                let mut last = size;
                $(
                    assert!($name <= last);
                    let tmp = $name;
                    let $name = last - $name;
                    last = tmp;
                )*
                assert_ne!(last, 0);
                IntoIterator::into_iter([
                    $(($desc, $name),)*
                    ("static vmctx data", last),
                ])
            }};
        }

        calculate_sizes! {
            defined_func_refs: "module functions",
            defined_tags: "defined tags",
            defined_globals: "defined globals",
            defined_tables: "defined tables",
            imported_tags: "imported tags",
            imported_globals: "imported globals",
            imported_tables: "imported tables",
            imported_functions: "imported functions",
            owned_memories: "owned memories",
            defined_memories: "defined memories",
            imported_memories: "imported memories",
        }
    }
}

impl<P: PtrSize> From<VMOffsetsFields<P>> for VMOffsets<P> {
    fn from(fields: VMOffsetsFields<P>) -> VMOffsets<P> {
        let mut ret = Self {
            ptr: fields.ptr,
            num_imported_functions: fields.num_imported_functions,
            num_imported_tables: fields.num_imported_tables,
            num_imported_memories: fields.num_imported_memories,
            num_imported_globals: fields.num_imported_globals,
            num_imported_tags: fields.num_imported_tags,
            num_defined_tables: fields.num_defined_tables,
            num_defined_memories: fields.num_defined_memories,
            num_owned_memories: fields.num_owned_memories,
            num_defined_globals: fields.num_defined_globals,
            num_defined_tags: fields.num_defined_tags,
            num_escaped_funcs: fields.num_escaped_funcs,
            imported_functions: 0,
            imported_tables: 0,
            imported_memories: 0,
            imported_globals: 0,
            imported_tags: 0,
            defined_tables: 0,
            defined_memories: 0,
            owned_memories: 0,
            defined_globals: 0,
            defined_tags: 0,
            defined_func_refs: 0,
            size: 0,
        };

        // Checked addition, panicking on overflow.
        #[inline]
        fn cadd(count: u32, size: u32) -> u32 {
            count.checked_add(size).unwrap()
        }

        // Checked multiplication, panicking on overflow.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        // Dynamic data begins right after the static `VMContext` fields.
        let mut next_field_offset = u32::from(ret.ptr.vmctx_dynamic_data_start());

        // Lay out each dynamic region in declaration order, recording its
        // starting offset and advancing by its total size.
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = cadd(next_field_offset, u32::from($size));
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        fields! {
            size(imported_memories)
                = cmul(ret.num_imported_memories, ret.size_of_vmmemory_import()),
            size(defined_memories)
                = cmul(ret.num_defined_memories, ret.ptr.size_of_vmmemory_pointer()),
            size(owned_memories)
                = cmul(ret.num_owned_memories, ret.ptr.size_of_vmmemory_definition()),
            size(imported_functions)
                = cmul(ret.num_imported_functions, ret.size_of_vmfunction_import()),
            size(imported_tables)
                = cmul(ret.num_imported_tables, ret.size_of_vmtable_import()),
            size(imported_globals)
                = cmul(ret.num_imported_globals, ret.size_of_vmglobal_import()),
            size(imported_tags)
                = cmul(ret.num_imported_tags, ret.size_of_vmtag_import()),
            size(defined_tables)
                = cmul(ret.num_defined_tables, ret.size_of_vmtable_definition()),
            align(16),
            size(defined_globals)
                = cmul(ret.num_defined_globals, ret.ptr.size_of_vmglobal_definition()),
            size(defined_tags)
                = cmul(ret.num_defined_tags, ret.ptr.size_of_vmtag_definition()),
            size(defined_func_refs) = cmul(
                ret.num_escaped_funcs,
                ret.ptr.size_of_vm_func_ref(),
            ),
        }
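
        // At this point the dynamic portion of the `VMContext` has been laid
        // out as: imported memories, defined memory pointers, owned memory
        // definitions, imported functions/tables/globals/tags, defined
        // tables, then (16-byte aligned) defined globals, defined tags, and
        // finally the `VMFuncRef`s for escaped functions.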

        ret.size = next_field_offset;

        return ret;
    }
}

/// Offsets for `VMFunctionImport`.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset of the `wasm_call` field.
    #[inline]
    pub fn vmfunction_import_wasm_call(&self) -> u8 {
        0 * self.pointer_size()
    }

    /// The offset of the `array_call` field.
    #[inline]
    pub fn vmfunction_import_array_call(&self) -> u8 {
        1 * self.pointer_size()
    }

    /// The offset of the `vmctx` field.
    #[inline]
    pub fn vmfunction_import_vmctx(&self) -> u8 {
        2 * self.pointer_size()
    }

    /// Return the size of `VMFunctionImport`.
    #[inline]
    pub fn size_of_vmfunction_import(&self) -> u8 {
        3 * self.pointer_size()
    }
}

/// Offsets for function body pointers.
impl<P: PtrSize> VMOffsets<P> {
    /// The size of a pointer to a function body.
    pub fn size_of_vmfunction_body_ptr(&self) -> u8 {
        1 * self.pointer_size()
    }
}

/// Offsets for `VMTableImport`.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset of the `from` field.
    #[inline]
    pub fn vmtable_import_from(&self) -> u8 {
        0 * self.pointer_size()
    }

    /// The offset of the `vmctx` field.
    #[inline]
    pub fn vmtable_import_vmctx(&self) -> u8 {
        1 * self.pointer_size()
    }

    /// The offset of the `index` field.
    #[inline]
    pub fn vmtable_import_index(&self) -> u8 {
        2 * self.pointer_size()
    }

    /// Return the size of `VMTableImport`.
    #[inline]
    pub fn size_of_vmtable_import(&self) -> u8 {
        3 * self.pointer_size()
    }
}

/// Offsets for `VMTableDefinition`.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset of the `base` field.
    #[inline]
    pub fn vmtable_definition_base(&self) -> u8 {
        0 * self.pointer_size()
    }

    /// The offset of the `current_elements` field.
    pub fn vmtable_definition_current_elements(&self) -> u8 {
        1 * self.pointer_size()
    }

    /// The size of the `current_elements` field.
    #[inline]
    pub fn size_of_vmtable_definition_current_elements(&self) -> u8 {
        self.pointer_size()
    }

    /// Return the size of `VMTableDefinition`.
    #[inline]
    pub fn size_of_vmtable_definition(&self) -> u8 {
        2 * self.pointer_size()
    }
}

/// Offsets for `VMMemoryImport`.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset of the `from` field.
    #[inline]
    pub fn vmmemory_import_from(&self) -> u8 {
        0 * self.pointer_size()
    }

    /// The offset of the `vmctx` field.
    #[inline]
    pub fn vmmemory_import_vmctx(&self) -> u8 {
        1 * self.pointer_size()
    }

    /// The offset of the `index` field.
    #[inline]
    pub fn vmmemory_import_index(&self) -> u8 {
        2 * self.pointer_size()
    }

    /// Return the size of `VMMemoryImport`.
    #[inline]
    pub fn size_of_vmmemory_import(&self) -> u8 {
        3 * self.pointer_size()
    }
}

/// Offsets for `VMGlobalImport`.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset of the `from` field.
    #[inline]
    pub fn vmglobal_import_from(&self) -> u8 {
        0 * self.pointer_size()
    }

    /// Return the size of `VMGlobalImport`.
    #[inline]
    pub fn size_of_vmglobal_import(&self) -> u8 {
        2 * self.pointer_size() + 8
    }
}

/// Offsets for `VMSharedTypeIndex`.
impl<P: PtrSize> VMOffsets<P> {
    /// Return the size of `VMSharedTypeIndex`.
    #[inline]
    pub fn size_of_vmshared_type_index(&self) -> u8 {
        4
    }
}

/// Offsets for `VMTagImport`.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset of the `from` field.
    #[inline]
    pub fn vmtag_import_from(&self) -> u8 {
        0 * self.pointer_size()
    }

    /// The offset of the `vmctx` field.
    #[inline]
    pub fn vmtag_import_vmctx(&self) -> u8 {
        1 * self.pointer_size()
    }

    /// The offset of the `index` field.
    #[inline]
    pub fn vmtag_import_index(&self) -> u8 {
        2 * self.pointer_size()
    }

    /// Return the size of `VMTagImport`.
    #[inline]
    pub fn size_of_vmtag_import(&self) -> u8 {
        3 * self.pointer_size()
    }
}

/// Offsets for `VMContext`.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset of the imported functions array.
    #[inline]
    pub fn vmctx_imported_functions_begin(&self) -> u32 {
        self.imported_functions
    }

    /// The offset of the imported tables array.
    #[inline]
    pub fn vmctx_imported_tables_begin(&self) -> u32 {
        self.imported_tables
    }

    /// The offset of the imported memories array.
    #[inline]
    pub fn vmctx_imported_memories_begin(&self) -> u32 {
        self.imported_memories
    }

    /// The offset of the imported globals array.
    #[inline]
    pub fn vmctx_imported_globals_begin(&self) -> u32 {
        self.imported_globals
    }

    /// The offset of the imported tags array.
    #[inline]
    pub fn vmctx_imported_tags_begin(&self) -> u32 {
        self.imported_tags
    }

    /// The offset of the defined tables array.
    #[inline]
    pub fn vmctx_tables_begin(&self) -> u32 {
        self.defined_tables
    }

    /// The offset of the defined memories array.
    #[inline]
    pub fn vmctx_memories_begin(&self) -> u32 {
        self.defined_memories
    }

    /// The offset of the owned memories array.
    #[inline]
    pub fn vmctx_owned_memories_begin(&self) -> u32 {
        self.owned_memories
    }

    /// The offset of the defined globals array.
    #[inline]
    pub fn vmctx_globals_begin(&self) -> u32 {
        self.defined_globals
    }

    /// The offset of the defined tags array.
    #[inline]
    pub fn vmctx_tags_begin(&self) -> u32 {
        self.defined_tags
    }

    /// The offset of the `VMFuncRef` array.
    #[inline]
    pub fn vmctx_func_refs_begin(&self) -> u32 {
        self.defined_func_refs
    }

    /// Return the total size of the `VMContext` allocation.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }

    /// Return the offset of the `VMFunctionImport` for the given function index.
    #[inline]
    pub fn vmctx_vmfunction_import(&self, index: FuncIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_functions);
        self.vmctx_imported_functions_begin()
            + index.as_u32() * u32::from(self.size_of_vmfunction_import())
    }

    /// Return the offset of the `VMTableImport` for the given table index.
    #[inline]
    pub fn vmctx_vmtable_import(&self, index: TableIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tables);
        self.vmctx_imported_tables_begin()
            + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// Return the offset of the `VMMemoryImport` for the given memory index.
    #[inline]
    pub fn vmctx_vmmemory_import(&self, index: MemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_memories);
        self.vmctx_imported_memories_begin()
            + index.as_u32() * u32::from(self.size_of_vmmemory_import())
    }

    /// Return the offset of the `VMGlobalImport` for the given global index.
    #[inline]
    pub fn vmctx_vmglobal_import(&self, index: GlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_globals);
        self.vmctx_imported_globals_begin()
            + index.as_u32() * u32::from(self.size_of_vmglobal_import())
    }

    /// Return the offset of the `VMTagImport` for the given tag index.
    #[inline]
    pub fn vmctx_vmtag_import(&self, index: TagIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tags);
        self.vmctx_imported_tags_begin() + index.as_u32() * u32::from(self.size_of_vmtag_import())
    }

    /// Return the offset of the `VMTableDefinition` for the given defined table index.
    #[inline]
    pub fn vmctx_vmtable_definition(&self, index: DefinedTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tables);
        self.vmctx_tables_begin() + index.as_u32() * u32::from(self.size_of_vmtable_definition())
    }

    /// Return the offset of the memory definition pointer for the given defined
    /// memory index.
    #[inline]
    pub fn vmctx_vmmemory_pointer(&self, index: DefinedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_memories);
        self.vmctx_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_pointer())
    }

    /// Return the offset of the `VMMemoryDefinition` for the given owned memory index.
    #[inline]
    pub fn vmctx_vmmemory_definition(&self, index: OwnedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_owned_memories);
        self.vmctx_owned_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_definition())
    }

    /// Return the offset of the `VMGlobalDefinition` for the given defined global index.
    #[inline]
    pub fn vmctx_vmglobal_definition(&self, index: DefinedGlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_globals);
        self.vmctx_globals_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// Return the offset of the `VMTagDefinition` for the given defined tag index.
    #[inline]
    pub fn vmctx_vmtag_definition(&self, index: DefinedTagIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tags);
        self.vmctx_tags_begin() + index.as_u32() * u32::from(self.ptr.size_of_vmtag_definition())
    }

    /// Return the offset of the `VMFuncRef` for the given escaped function index.
    #[inline]
    pub fn vmctx_func_ref(&self, index: FuncRefIndex) -> u32 {
        assert!(!index.is_reserved_value());
        assert!(index.as_u32() < self.num_escaped_funcs);
        self.vmctx_func_refs_begin() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// Return the offset of the `wasm_call` field in the `VMFunctionImport` for
    /// the given function index.
    #[inline]
    pub fn vmctx_vmfunction_import_wasm_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_wasm_call())
    }

    /// Return the offset of the `array_call` field in the `VMFunctionImport` for
    /// the given function index.
    #[inline]
    pub fn vmctx_vmfunction_import_array_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_array_call())
    }

    /// Return the offset of the `vmctx` field in the `VMFunctionImport` for the
    /// given function index.
    #[inline]
    pub fn vmctx_vmfunction_import_vmctx(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_vmctx())
    }

    /// Return the offset of the `from` field in the `VMTableImport` for the
    /// given table index.
    #[inline]
    pub fn vmctx_vmtable_from(&self, index: TableIndex) -> u32 {
        self.vmctx_vmtable_import(index) + u32::from(self.vmtable_import_from())
    }

    /// Return the offset of the `base` field in the `VMTableDefinition` for the
    /// given defined table index.
    #[inline]
    pub fn vmctx_vmtable_definition_base(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_base())
    }

    /// Return the offset of the `current_elements` field in the
    /// `VMTableDefinition` for the given defined table index.
    #[inline]
    pub fn vmctx_vmtable_definition_current_elements(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_current_elements())
    }

    /// Return the offset of the `from` field in the `VMMemoryImport` for the
    /// given memory index.
    #[inline]
    pub fn vmctx_vmmemory_import_from(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_from())
    }

    /// Return the offset of the `base` field in the `VMMemoryDefinition` for the
    /// given owned memory index.
    #[inline]
    pub fn vmctx_vmmemory_definition_base(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index) + u32::from(self.ptr.vmmemory_definition_base())
    }

    /// Return the offset of the `current_length` field in the
    /// `VMMemoryDefinition` for the given owned memory index.
    #[inline]
    pub fn vmctx_vmmemory_definition_current_length(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index)
            + u32::from(self.ptr.vmmemory_definition_current_length())
    }

    /// Return the offset of the `from` field in the `VMGlobalImport` for the
    /// given global index.
    #[inline]
    pub fn vmctx_vmglobal_import_from(&self, index: GlobalIndex) -> u32 {
        self.vmctx_vmglobal_import(index) + u32::from(self.vmglobal_import_from())
    }

    /// Return the offset of the `from` field in the `VMTagImport` for the given
    /// tag index.
    #[inline]
    pub fn vmctx_vmtag_import_from(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_from())
    }

    /// Return the offset of the `vmctx` field in the `VMTagImport` for the given
    /// tag index.
    #[inline]
    pub fn vmctx_vmtag_import_vmctx(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_vmctx())
    }

    /// Return the offset of the `index` field in the `VMTagImport` for the given
    /// tag index.
    #[inline]
    pub fn vmctx_vmtag_import_index(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_index())
    }
}

/// Offsets for the common GC object header.
impl<P: PtrSize> VMOffsets<P> {
    /// Return the offset of the kind bits within the header.
    #[inline]
    pub fn vm_gc_header_kind(&self) -> u32 {
        0
    }

    /// Return the offset of the reserved bits within the header; they share a
    /// `u32` with the kind bits.
    #[inline]
    pub fn vm_gc_header_reserved_bits(&self) -> u32 {
        self.vm_gc_header_kind()
    }

    /// Return the offset of the type index within the header.
    #[inline]
    pub fn vm_gc_header_ty(&self) -> u32 {
        self.vm_gc_header_kind() + 4
    }
}

/// Offsets for the deferred reference-counting (DRC) GC header.
impl<P: PtrSize> VMOffsets<P> {
    /// Return the offset of the reference count field, just past the common
    /// GC header.
    #[inline]
    pub fn vm_drc_header_ref_count(&self) -> u32 {
        8
    }

    /// Return the offset of the `next_over_approximated_stack_root` field.
    #[inline]
    pub fn vm_drc_header_next_over_approximated_stack_root(&self) -> u32 {
        self.vm_drc_header_ref_count() + 8
    }
}

/// Magic value stored at the start of every core Wasm `VMContext`: the bytes
/// `"core"` in little-endian order.
pub const VMCONTEXT_MAGIC: u32 = u32::from_le_bytes(*b"core");

/// Magic value stored at the start of a `VMArrayCallHostFuncContext`: the
/// bytes `"ACHF"` in little-endian order.
pub const VM_ARRAY_CALL_HOST_FUNC_MAGIC: u32 = u32::from_le_bytes(*b"ACHF");

#[cfg(test)]
mod tests {
    use crate::vmoffsets::align;

    #[test]
    fn alignment() {
        fn is_aligned(x: u32) -> bool {
            x % 16 == 0
        }
        assert!(is_aligned(align(0, 16)));
        assert!(is_aligned(align(32, 16)));
        assert!(is_aligned(align(33, 16)));
        assert!(is_aligned(align(31, 16)));
    }
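
    // A minimal sketch exercising the `VMOffsets` layout: it builds offsets
    // directly from hypothetical `VMOffsetsFields` counts with an assumed
    // 8-byte pointer size and checks a few invariants implied by the
    // `fields!` layout above.
    #[test]
    fn vmoffsets_layout_sketch() {
        use crate::vmoffsets::{PtrSize, VMOffsets, VMOffsetsFields};

        let offsets = VMOffsets::from(VMOffsetsFields {
            ptr: 8u8, // `u8` implements `PtrSize`
            num_imported_functions: 1,
            num_imported_tables: 0,
            num_imported_memories: 0,
            num_imported_globals: 0,
            num_imported_tags: 0,
            num_defined_tables: 1,
            num_defined_memories: 1,
            num_owned_memories: 1,
            num_defined_globals: 2,
            num_defined_tags: 0,
            num_escaped_funcs: 1,
        });

        // Dynamic data starts where the static `VMContext` fields end.
        assert_eq!(
            offsets.vmctx_imported_memories_begin(),
            u32::from(offsets.ptr.vmctx_dynamic_data_start()),
        );
        // Every region lies within the total `VMContext` size.
        assert!(offsets.vmctx_func_refs_begin() < offsets.size_of_vmctx());
        // Defined globals respect the `align(16)` directive in the layout.
        assert_eq!(offsets.vmctx_globals_begin() % 16, 0);
    }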
}