1use crate::{
37 DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, DefinedTagIndex, FuncIndex,
38 FuncRefIndex, GlobalIndex, MemoryIndex, Module, OwnedMemoryIndex, TableIndex, TagIndex,
39};
40use cranelift_entity::packed_option::ReservedValue;
41
/// Convert a `usize` byte count into a `u32`.
///
/// On 32-bit targets `usize` always fits in `u32`, so the conversion is
/// infallible in practice.
#[cfg(target_pointer_width = "32")]
fn cast_to_u32(sz: usize) -> u32 {
    u32::try_from(sz).unwrap()
}

/// Convert a `usize` byte count into a `u32`.
///
/// On 64-bit targets the value may exceed `u32::MAX`; panic with a clear
/// message rather than truncating.
#[cfg(target_pointer_width = "64")]
fn cast_to_u32(sz: usize) -> u32 {
    let converted = u32::try_from(sz);
    converted.expect("overflow in cast from usize to u32")
}
50
/// Round `offset` up to the nearest multiple of `width`.
///
/// `width` must be non-zero. Uses checked addition so that an `offset`
/// near `u32::MAX` panics instead of silently wrapping in release builds
/// (the unchecked form `offset + (width - 1)` would wrap and return a
/// too-small, misaligned-looking result).
#[inline]
fn align(offset: u32, width: u32) -> u32 {
    offset
        .checked_add(width - 1)
        .expect("offset overflow while aligning")
        / width
        * width
}
56
/// Byte offsets of the regions and fields inside a `VMContext` for a
/// particular module, parameterized over the target pointer size `P`.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsets<P> {
    /// The pointer size (in bytes) of the compilation target.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of imported tags in the module.
    pub num_imported_tags: u32,
    /// The number of locally-defined tables in the module.
    pub num_defined_tables: u32,
    /// The number of locally-defined memories in the module.
    pub num_defined_memories: u32,
    /// The number of memories owned by this instance (defined, non-shared).
    pub num_owned_memories: u32,
    /// The number of locally-defined globals in the module.
    pub num_defined_globals: u32,
    /// The number of locally-defined tags in the module.
    pub num_defined_tags: u32,
    /// The number of escaped functions, i.e. functions that get a
    /// `VMFuncRef` entry in the `defined_func_refs` region.
    pub num_escaped_funcs: u32,

    // Precomputed byte offsets (from the start of the `VMContext`) of each
    // dynamically-sized region. Filled in by `From<VMOffsetsFields>`.
    imported_functions: u32,
    imported_tables: u32,
    imported_memories: u32,
    imported_globals: u32,
    imported_tags: u32,
    defined_tables: u32,
    defined_memories: u32,
    owned_memories: u32,
    defined_globals: u32,
    defined_tags: u32,
    defined_func_refs: u32,
    // Total size in bytes of the `VMContext`, including all regions above.
    size: u32,
}
101
/// Abstraction over the pointer width of the target, used as the `ptr`
/// field of `VMOffsets`. All default methods below compute byte offsets
/// of fields within various runtime structures in terms of `self.size()`.
pub trait PtrSize {
    /// The size of a pointer on the target, in bytes.
    fn size(&self) -> u8;

    /// Offset of the store-context pointer: the 4-byte magic value rounded
    /// up to pointer alignment.
    fn vmcontext_store_context(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the builtin-functions pointer: one pointer past the store
    /// context.
    fn vmcontext_builtin_functions(&self) -> u8 {
        self.vmcontext_store_context() + self.size()
    }

    /// Offset of the `array_call` field within a `VMFuncRef` (first field).
    #[inline]
    fn vm_func_ref_array_call(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `wasm_call` field within a `VMFuncRef`.
    #[inline]
    fn vm_func_ref_wasm_call(&self) -> u8 {
        1 * self.size()
    }

    /// Offset of the `type_index` field within a `VMFuncRef`.
    #[inline]
    fn vm_func_ref_type_index(&self) -> u8 {
        2 * self.size()
    }

    /// Offset of the `vmctx` field within a `VMFuncRef`.
    #[inline]
    fn vm_func_ref_vmctx(&self) -> u8 {
        3 * self.size()
    }

    /// Byte size of a `VMFuncRef`: four pointer-sized fields.
    #[inline]
    fn size_of_vm_func_ref(&self) -> u8 {
        4 * self.size()
    }

    /// Byte size of a `VMGlobalDefinition`: a fixed 16 bytes regardless of
    /// pointer width.
    #[inline]
    fn size_of_vmglobal_definition(&self) -> u8 {
        16
    }

    /// Byte size of a `VMTagDefinition`: a fixed 4 bytes.
    #[inline]
    fn size_of_vmtag_definition(&self) -> u8 {
        4
    }

    // --- Offsets within the store context structure ---

    /// Offset of the fuel-consumed counter (8-byte field at the start).
    #[inline]
    fn vmstore_context_fuel_consumed(&self) -> u8 {
        0
    }

    /// Offset of the epoch deadline, after the 8-byte fuel counter.
    #[inline]
    fn vmstore_context_epoch_deadline(&self) -> u8 {
        self.vmstore_context_fuel_consumed() + 8
    }

    /// Offset of the stack limit, after the 8-byte epoch deadline.
    #[inline]
    fn vmstore_context_stack_limit(&self) -> u8 {
        self.vmstore_context_epoch_deadline() + 8
    }

    /// Offset of the inline GC-heap `VMMemoryDefinition`, one pointer past
    /// the stack limit.
    #[inline]
    fn vmstore_context_gc_heap(&self) -> u8 {
        self.vmstore_context_stack_limit() + self.size()
    }

    /// Offset of the GC heap's `base` pointer within the store context.
    fn vmstore_context_gc_heap_base(&self) -> u8 {
        let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_base();
        debug_assert!(offset < self.vmstore_context_last_wasm_exit_fp());
        offset
    }

    /// Offset of the GC heap's `current_length` within the store context.
    fn vmstore_context_gc_heap_current_length(&self) -> u8 {
        let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_current_length();
        debug_assert!(offset < self.vmstore_context_last_wasm_exit_fp());
        offset
    }

    /// Offset of the last-wasm-exit frame pointer, past the embedded GC-heap
    /// memory definition.
    fn vmstore_context_last_wasm_exit_fp(&self) -> u8 {
        self.vmstore_context_gc_heap() + self.size_of_vmmemory_definition()
    }

    /// Offset of the last-wasm-exit program counter.
    fn vmstore_context_last_wasm_exit_pc(&self) -> u8 {
        self.vmstore_context_last_wasm_exit_fp() + self.size()
    }

    /// Offset of the last-wasm-entry frame pointer.
    fn vmstore_context_last_wasm_entry_fp(&self) -> u8 {
        self.vmstore_context_last_wasm_exit_pc() + self.size()
    }

    /// Offset of the stack-chain field.
    fn vmstore_context_stack_chain(&self) -> u8 {
        self.vmstore_context_last_wasm_entry_fp() + self.size()
    }

    // --- Offsets within a `VMMemoryDefinition` ---

    /// Offset of the `base` pointer (first field).
    #[inline]
    fn vmmemory_definition_base(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `current_length` field, one pointer in.
    #[inline]
    fn vmmemory_definition_current_length(&self) -> u8 {
        1 * self.size()
    }

    /// Byte size of a `VMMemoryDefinition`: two pointer-sized fields.
    #[inline]
    fn size_of_vmmemory_definition(&self) -> u8 {
        2 * self.size()
    }

    /// Byte size of a pointer to a `VMMemoryDefinition`.
    #[inline]
    fn size_of_vmmemory_pointer(&self) -> u8 {
        self.size()
    }

    /// Offset of the func-ref field in an array-call host function context:
    /// the 4-byte magic value rounded up to pointer alignment.
    fn vmarray_call_host_func_context_func_ref(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Byte size of a stack-chain value: two pointer-sized words.
    fn size_of_vmstack_chain(&self) -> u8 {
        2 * self.size()
    }

    // --- Offsets within stack-limits data ---

    /// Offset of the `stack_limit` field (first field).
    fn vmstack_limits_stack_limit(&self) -> u8 {
        0
    }

    /// Offset of the `last_wasm_entry_fp` field, one pointer in.
    fn vmstack_limits_last_wasm_entry_fp(&self) -> u8 {
        self.size()
    }

    // --- Offsets within a host array structure ---

    /// Offset of the `length` field (4-byte field at the start).
    fn vmhostarray_length(&self) -> u8 {
        0
    }

    /// Offset of the `capacity` field, after the 4-byte length.
    fn vmhostarray_capacity(&self) -> u8 {
        4
    }

    /// Offset of the data pointer, after the two 4-byte fields.
    fn vmhostarray_data(&self) -> u8 {
        8
    }

    /// Byte size of the host array header: two `u32`s plus a pointer.
    fn size_of_vmhostarray(&self) -> u8 {
        8 + self.size()
    }

    // --- Offsets within common stack information ---

    /// Offset of the embedded stack limits (first field, two pointers wide).
    fn vmcommon_stack_information_limits(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `state` field, past the two-pointer stack limits.
    fn vmcommon_stack_information_state(&self) -> u8 {
        2 * self.size()
    }

    /// Offset of the `handlers` array: the 4-byte `state` field rounded up
    /// to pointer alignment.
    fn vmcommon_stack_information_handlers(&self) -> u8 {
        u8::try_from(align(
            self.vmcommon_stack_information_state() as u32 + 4,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the first-switch-handler index, past the handlers array.
    fn vmcommon_stack_information_first_switch_handler_index(&self) -> u8 {
        self.vmcommon_stack_information_handlers() + self.size_of_vmhostarray()
    }

    /// Total byte size of the common stack information, rounded up to
    /// pointer alignment past the final 4-byte index field.
    fn size_of_vmcommon_stack_information(&self) -> u8 {
        u8::try_from(align(
            self.vmcommon_stack_information_first_switch_handler_index() as u32 + 4,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    // --- Offsets within a continuation reference ---

    /// Offset of the embedded common stack information (first field).
    fn vmcontref_common_stack_information(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `parent_chain` field, pointer-aligned past the common
    /// stack information.
    fn vmcontref_parent_chain(&self) -> u8 {
        u8::try_from(align(
            (self.vmcontref_common_stack_information() + self.size_of_vmcommon_stack_information())
                as u32,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the `last_ancestor` field, past the two-pointer parent
    /// chain.
    fn vmcontref_last_ancestor(&self) -> u8 {
        self.vmcontref_parent_chain() + 2 * self.size()
    }

    /// Offset of the `revision` counter, one pointer past `last_ancestor`.
    fn vmcontref_revision(&self) -> u8 {
        self.vmcontref_last_ancestor() + self.size()
    }

    /// Offset of the `stack` field, past the 8-byte revision counter.
    fn vmcontref_stack(&self) -> u8 {
        self.vmcontref_revision() + 8
    }

    /// Offset of the `args` array, past the three-pointer stack field.
    fn vmcontref_args(&self) -> u8 {
        self.vmcontref_stack() + 3 * self.size()
    }

    /// Offset of the `values` array, past the `args` host array.
    fn vmcontref_values(&self) -> u8 {
        self.vmcontref_args() + self.size_of_vmhostarray()
    }

    // --- Offsets of the fixed leading fields of a `VMContext` ---

    /// Offset of the magic value (very first field).
    #[inline]
    fn vmctx_magic(&self) -> u8 {
        0
    }

    /// Offset of the store-context pointer, one pointer past the magic.
    #[inline]
    fn vmctx_store_context(&self) -> u8 {
        self.vmctx_magic() + self.size()
    }

    /// Offset of the builtin-functions pointer.
    #[inline]
    fn vmctx_builtin_functions(&self) -> u8 {
        self.vmctx_store_context() + self.size()
    }

    /// Offset of the epoch-counter pointer.
    #[inline]
    fn vmctx_epoch_ptr(&self) -> u8 {
        self.vmctx_builtin_functions() + self.size()
    }

    /// Offset of the GC-heap data pointer.
    #[inline]
    fn vmctx_gc_heap_data(&self) -> u8 {
        self.vmctx_epoch_ptr() + self.size()
    }

    /// Offset of the type-IDs array pointer.
    #[inline]
    fn vmctx_type_ids_array(&self) -> u8 {
        self.vmctx_gc_heap_data() + self.size()
    }

    /// Offset at which the per-module dynamic regions (imports, tables,
    /// memories, globals, func refs, ...) begin, after all fixed fields.
    #[inline]
    fn vmctx_dynamic_data_start(&self) -> u8 {
        self.vmctx_type_ids_array() + self.size()
    }
}
428
/// A `PtrSize` implementation that reports the pointer size of the host
/// platform itself.
#[derive(Clone, Copy)]
pub struct HostPtr;
432
433impl PtrSize for HostPtr {
434 #[inline]
435 fn size(&self) -> u8 {
436 core::mem::size_of::<usize>() as u8
437 }
438}
439
440impl PtrSize for u8 {
441 #[inline]
442 fn size(&self) -> u8 {
443 *self
444 }
445}
446
/// The input counts used to construct a `VMOffsets` via `From`; holds the
/// same public counts as `VMOffsets` without the computed offsets.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsetsFields<P> {
    /// The pointer size (in bytes) of the compilation target.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of imported tags in the module.
    pub num_imported_tags: u32,
    /// The number of locally-defined tables in the module.
    pub num_defined_tables: u32,
    /// The number of locally-defined memories in the module.
    pub num_defined_memories: u32,
    /// The number of memories owned by this instance (defined, non-shared).
    pub num_owned_memories: u32,
    /// The number of locally-defined globals in the module.
    pub num_defined_globals: u32,
    /// The number of locally-defined tags in the module.
    pub num_defined_tags: u32,
    /// The number of escaped functions needing a `VMFuncRef`.
    pub num_escaped_funcs: u32,
}
476
impl<P: PtrSize> VMOffsets<P> {
    /// Return a new `VMOffsets` for the given pointer size and `module`.
    pub fn new(ptr: P, module: &Module) -> Self {
        // "Owned" memories are the defined (non-imported) memories that are
        // not shared; only those get a full definition stored inline.
        let num_owned_memories = module
            .memories
            .iter()
            .skip(module.num_imported_memories)
            .filter(|p| !p.1.shared)
            .count()
            .try_into()
            .unwrap();
        VMOffsets::from(VMOffsetsFields {
            ptr,
            num_imported_functions: cast_to_u32(module.num_imported_funcs),
            num_imported_tables: cast_to_u32(module.num_imported_tables),
            num_imported_memories: cast_to_u32(module.num_imported_memories),
            num_imported_globals: cast_to_u32(module.num_imported_globals),
            num_imported_tags: cast_to_u32(module.num_imported_tags),
            num_defined_tables: cast_to_u32(module.num_defined_tables()),
            num_defined_memories: cast_to_u32(module.num_defined_memories()),
            num_owned_memories,
            num_defined_globals: cast_to_u32(module.globals.len() - module.num_imported_globals),
            num_defined_tags: cast_to_u32(module.tags.len() - module.num_imported_tags),
            num_escaped_funcs: cast_to_u32(module.num_escaped_funcs),
        })
    }

    /// Returns the size, in bytes, of the target pointer type.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// Returns an iterator over `(description, size-in-bytes)` pairs for
    /// every region of the `VMContext`, for diagnostics.
    pub fn region_sizes(&self) -> impl Iterator<Item = (&str, u32)> {
        macro_rules! calculate_sizes {
            ($($name:ident: $desc:tt,)*) => {{
                // Exhaustively destructure `self` so that adding a field to
                // `VMOffsets` forces this listing to be updated.
                let VMOffsets {
                    ptr: _,
                    num_imported_functions: _,
                    num_imported_tables: _,
                    num_imported_memories: _,
                    num_imported_globals: _,
                    num_imported_tags: _,
                    num_defined_tables: _,
                    num_defined_globals: _,
                    num_defined_memories: _,
                    num_defined_tags: _,
                    num_owned_memories: _,
                    num_escaped_funcs: _,

                    size,

                    $($name,)*
                } = *self;

                // The regions below are listed from highest start offset to
                // lowest; each region's size is the distance from its start
                // to the start of the region after it (`last`).
                let mut last = size;
                $(
                    assert!($name <= last);
                    let tmp = $name;
                    let $name = last - $name;
                    last = tmp;
                )*
                assert_ne!(last, 0);
                IntoIterator::into_iter([
                    $(($desc, $name),)*
                    ("static vmctx data", last),
                ])
            }};
        }

        calculate_sizes! {
            defined_func_refs: "module functions",
            defined_tags: "defined tags",
            defined_globals: "defined globals",
            defined_tables: "defined tables",
            imported_tags: "imported tags",
            imported_globals: "imported globals",
            imported_tables: "imported tables",
            imported_functions: "imported functions",
            owned_memories: "owned memories",
            defined_memories: "defined memories",
            imported_memories: "imported memories",
        }
    }
}
574
impl<P: PtrSize> From<VMOffsetsFields<P>> for VMOffsets<P> {
    /// Compute the layout of every dynamic region in the `VMContext` from
    /// the per-module counts in `fields`.
    fn from(fields: VMOffsetsFields<P>) -> VMOffsets<P> {
        // Start with all region offsets zeroed; they're filled in below.
        let mut ret = Self {
            ptr: fields.ptr,
            num_imported_functions: fields.num_imported_functions,
            num_imported_tables: fields.num_imported_tables,
            num_imported_memories: fields.num_imported_memories,
            num_imported_globals: fields.num_imported_globals,
            num_imported_tags: fields.num_imported_tags,
            num_defined_tables: fields.num_defined_tables,
            num_defined_memories: fields.num_defined_memories,
            num_owned_memories: fields.num_owned_memories,
            num_defined_globals: fields.num_defined_globals,
            num_defined_tags: fields.num_defined_tags,
            num_escaped_funcs: fields.num_escaped_funcs,
            imported_functions: 0,
            imported_tables: 0,
            imported_memories: 0,
            imported_globals: 0,
            imported_tags: 0,
            defined_tables: 0,
            defined_memories: 0,
            owned_memories: 0,
            defined_globals: 0,
            defined_tags: 0,
            defined_func_refs: 0,
            size: 0,
        };

        // Checked add: panic on `u32` overflow rather than silently wrap.
        #[inline]
        fn cadd(count: u32, size: u32) -> u32 {
            count.checked_add(size).unwrap()
        }

        // Checked multiply of an item count by a per-item byte size.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        // Dynamic regions start right after the fixed `VMContext` fields.
        let mut next_field_offset = u32::from(ret.ptr.vmctx_dynamic_data_start());

        // For each region: record its start offset, then advance the cursor
        // by the region's total size; `align` entries insert padding.
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = cadd(next_field_offset, u32::from($size));
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        fields! {
            size(imported_memories)
                = cmul(ret.num_imported_memories, ret.size_of_vmmemory_import()),
            size(defined_memories)
                = cmul(ret.num_defined_memories, ret.ptr.size_of_vmmemory_pointer()),
            size(owned_memories)
                = cmul(ret.num_owned_memories, ret.ptr.size_of_vmmemory_definition()),
            size(imported_functions)
                = cmul(ret.num_imported_functions, ret.size_of_vmfunction_import()),
            size(imported_tables)
                = cmul(ret.num_imported_tables, ret.size_of_vmtable_import()),
            size(imported_globals)
                = cmul(ret.num_imported_globals, ret.size_of_vmglobal_import()),
            size(imported_tags)
                = cmul(ret.num_imported_tags, ret.size_of_vmtag_import()),
            size(defined_tables)
                = cmul(ret.num_defined_tables, ret.size_of_vmtable_definition()),
            // 16-byte align the defined-globals region (each
            // `VMGlobalDefinition` is 16 bytes).
            align(16),
            size(defined_globals)
                = cmul(ret.num_defined_globals, ret.ptr.size_of_vmglobal_definition()),
            size(defined_tags)
                = cmul(ret.num_defined_tags, ret.ptr.size_of_vmtag_definition()),
            size(defined_func_refs) = cmul(
                ret.num_escaped_funcs,
                ret.ptr.size_of_vm_func_ref(),
            ),
        }

        // The cursor now sits at the end of the last region: the total size.
        ret.size = next_field_offset;

        return ret;
    }
}
666
667impl<P: PtrSize> VMOffsets<P> {
668 #[inline]
670 pub fn vmfunction_import_wasm_call(&self) -> u8 {
671 0 * self.pointer_size()
672 }
673
674 #[inline]
676 pub fn vmfunction_import_array_call(&self) -> u8 {
677 1 * self.pointer_size()
678 }
679
680 #[inline]
682 pub fn vmfunction_import_vmctx(&self) -> u8 {
683 2 * self.pointer_size()
684 }
685
686 #[inline]
688 pub fn size_of_vmfunction_import(&self) -> u8 {
689 3 * self.pointer_size()
690 }
691}
692
693impl<P: PtrSize> VMOffsets<P> {
695 pub fn size_of_vmfunction_body_ptr(&self) -> u8 {
697 1 * self.pointer_size()
698 }
699}
700
701impl<P: PtrSize> VMOffsets<P> {
703 #[inline]
705 pub fn vmtable_import_from(&self) -> u8 {
706 0 * self.pointer_size()
707 }
708
709 #[inline]
711 pub fn size_of_vmtable_import(&self) -> u8 {
712 3 * self.pointer_size()
713 }
714}
715
716impl<P: PtrSize> VMOffsets<P> {
718 #[inline]
720 pub fn vmtable_definition_base(&self) -> u8 {
721 0 * self.pointer_size()
722 }
723
724 pub fn vmtable_definition_current_elements(&self) -> u8 {
726 1 * self.pointer_size()
727 }
728
729 #[inline]
731 pub fn size_of_vmtable_definition_current_elements(&self) -> u8 {
732 self.pointer_size()
733 }
734
735 #[inline]
737 pub fn size_of_vmtable_definition(&self) -> u8 {
738 2 * self.pointer_size()
739 }
740}
741
742impl<P: PtrSize> VMOffsets<P> {
744 #[inline]
746 pub fn vmmemory_import_from(&self) -> u8 {
747 0 * self.pointer_size()
748 }
749
750 #[inline]
752 pub fn size_of_vmmemory_import(&self) -> u8 {
753 3 * self.pointer_size()
754 }
755}
756
757impl<P: PtrSize> VMOffsets<P> {
759 #[inline]
761 pub fn vmglobal_import_from(&self) -> u8 {
762 0 * self.pointer_size()
763 }
764
765 #[inline]
767 pub fn size_of_vmglobal_import(&self) -> u8 {
768 2 * self.pointer_size() + 8
770 }
771}
772
impl<P: PtrSize> VMOffsets<P> {
    /// The byte size of a shared type index: a fixed 4 bytes.
    #[inline]
    pub fn size_of_vmshared_type_index(&self) -> u8 {
        4
    }
}
781
782impl<P: PtrSize> VMOffsets<P> {
784 #[inline]
786 pub fn vmtag_import_from(&self) -> u8 {
787 0 * self.pointer_size()
788 }
789
790 #[inline]
792 pub fn size_of_vmtag_import(&self) -> u8 {
793 3 * self.pointer_size()
794 }
795}
796
/// Accessors for the offsets of the dynamic regions and their elements
/// within the `VMContext`.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset where the imported-functions region begins.
    #[inline]
    pub fn vmctx_imported_functions_begin(&self) -> u32 {
        self.imported_functions
    }

    /// The offset where the imported-tables region begins.
    #[inline]
    pub fn vmctx_imported_tables_begin(&self) -> u32 {
        self.imported_tables
    }

    /// The offset where the imported-memories region begins.
    #[inline]
    pub fn vmctx_imported_memories_begin(&self) -> u32 {
        self.imported_memories
    }

    /// The offset where the imported-globals region begins.
    #[inline]
    pub fn vmctx_imported_globals_begin(&self) -> u32 {
        self.imported_globals
    }

    /// The offset where the imported-tags region begins.
    #[inline]
    pub fn vmctx_imported_tags_begin(&self) -> u32 {
        self.imported_tags
    }

    /// The offset where the defined-tables region begins.
    #[inline]
    pub fn vmctx_tables_begin(&self) -> u32 {
        self.defined_tables
    }

    /// The offset where the defined-memories region begins.
    #[inline]
    pub fn vmctx_memories_begin(&self) -> u32 {
        self.defined_memories
    }

    /// The offset where the owned-memories region begins.
    #[inline]
    pub fn vmctx_owned_memories_begin(&self) -> u32 {
        self.owned_memories
    }

    /// The offset where the defined-globals region begins.
    #[inline]
    pub fn vmctx_globals_begin(&self) -> u32 {
        self.defined_globals
    }

    /// The offset where the defined-tags region begins.
    #[inline]
    pub fn vmctx_tags_begin(&self) -> u32 {
        self.defined_tags
    }

    /// The offset where the `VMFuncRef` region begins.
    #[inline]
    pub fn vmctx_func_refs_begin(&self) -> u32 {
        self.defined_func_refs
    }

    /// The total byte size of the `VMContext`.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }

    /// The offset of the `VMFunctionImport` for imported function `index`.
    /// Panics if `index` is out of range.
    #[inline]
    pub fn vmctx_vmfunction_import(&self, index: FuncIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_functions);
        self.vmctx_imported_functions_begin()
            + index.as_u32() * u32::from(self.size_of_vmfunction_import())
    }

    /// The offset of the `VMTableImport` for imported table `index`.
    /// Panics if `index` is out of range.
    #[inline]
    pub fn vmctx_vmtable_import(&self, index: TableIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tables);
        self.vmctx_imported_tables_begin()
            + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// The offset of the `VMMemoryImport` for imported memory `index`.
    /// Panics if `index` is out of range.
    #[inline]
    pub fn vmctx_vmmemory_import(&self, index: MemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_memories);
        self.vmctx_imported_memories_begin()
            + index.as_u32() * u32::from(self.size_of_vmmemory_import())
    }

    /// The offset of the `VMGlobalImport` for imported global `index`.
    /// Panics if `index` is out of range.
    #[inline]
    pub fn vmctx_vmglobal_import(&self, index: GlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_globals);
        self.vmctx_imported_globals_begin()
            + index.as_u32() * u32::from(self.size_of_vmglobal_import())
    }

    /// The offset of the `VMTagImport` for imported tag `index`.
    /// Panics if `index` is out of range.
    #[inline]
    pub fn vmctx_vmtag_import(&self, index: TagIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tags);
        self.vmctx_imported_tags_begin() + index.as_u32() * u32::from(self.size_of_vmtag_import())
    }

    /// The offset of the `VMTableDefinition` for defined table `index`.
    /// Panics if `index` is out of range.
    #[inline]
    pub fn vmctx_vmtable_definition(&self, index: DefinedTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tables);
        self.vmctx_tables_begin() + index.as_u32() * u32::from(self.size_of_vmtable_definition())
    }

    /// The offset of the memory-definition pointer for defined memory
    /// `index`. Panics if `index` is out of range.
    #[inline]
    pub fn vmctx_vmmemory_pointer(&self, index: DefinedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_memories);
        self.vmctx_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_pointer())
    }

    /// The offset of the `VMMemoryDefinition` for owned memory `index`.
    /// Panics if `index` is out of range.
    #[inline]
    pub fn vmctx_vmmemory_definition(&self, index: OwnedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_owned_memories);
        self.vmctx_owned_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_definition())
    }

    /// The offset of the `VMGlobalDefinition` for defined global `index`.
    /// Panics if `index` is out of range.
    #[inline]
    pub fn vmctx_vmglobal_definition(&self, index: DefinedGlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_globals);
        self.vmctx_globals_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// The offset of the `VMTagDefinition` for defined tag `index`.
    /// Panics if `index` is out of range.
    #[inline]
    pub fn vmctx_vmtag_definition(&self, index: DefinedTagIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tags);
        self.vmctx_tags_begin() + index.as_u32() * u32::from(self.ptr.size_of_vmtag_definition())
    }

    /// The offset of the `VMFuncRef` for escaped function `index`.
    /// Panics if `index` is reserved or out of range.
    #[inline]
    pub fn vmctx_func_ref(&self, index: FuncRefIndex) -> u32 {
        assert!(!index.is_reserved_value());
        assert!(index.as_u32() < self.num_escaped_funcs);
        self.vmctx_func_refs_begin() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the `wasm_call` field within the `VMFunctionImport`
    /// for `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_wasm_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_wasm_call())
    }

    /// The offset of the `array_call` field within the `VMFunctionImport`
    /// for `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_array_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_array_call())
    }

    /// The offset of the `vmctx` field within the `VMFunctionImport` for
    /// `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_vmctx(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_vmctx())
    }

    /// The offset of the `from` field within the `VMTableImport` for
    /// `index`.
    #[inline]
    pub fn vmctx_vmtable_from(&self, index: TableIndex) -> u32 {
        self.vmctx_vmtable_import(index) + u32::from(self.vmtable_import_from())
    }

    /// The offset of the `base` field within the `VMTableDefinition` for
    /// `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_base(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_base())
    }

    /// The offset of the `current_elements` field within the
    /// `VMTableDefinition` for `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_current_elements(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_current_elements())
    }

    /// The offset of the `from` field within the `VMMemoryImport` for
    /// `index`.
    #[inline]
    pub fn vmctx_vmmemory_import_from(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_from())
    }

    /// The offset of the `base` field within the `VMMemoryDefinition` for
    /// owned memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_base(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index) + u32::from(self.ptr.vmmemory_definition_base())
    }

    /// The offset of the `current_length` field within the
    /// `VMMemoryDefinition` for owned memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_current_length(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index)
            + u32::from(self.ptr.vmmemory_definition_current_length())
    }

    /// The offset of the `from` field within the `VMGlobalImport` for
    /// `index`.
    #[inline]
    pub fn vmctx_vmglobal_import_from(&self, index: GlobalIndex) -> u32 {
        self.vmctx_vmglobal_import(index) + u32::from(self.vmglobal_import_from())
    }

    /// The offset of the `from` field within the `VMTagImport` for `index`.
    #[inline]
    pub fn vmctx_vmtag_import_from(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_from())
    }
}
1025
impl<P: PtrSize> VMOffsets<P> {
    /// The byte offset of the reference-count field within a DRC GC object
    /// header (fixed at 8 bytes from the start of the header).
    #[inline]
    pub fn vm_drc_header_ref_count(&self) -> u32 {
        8
    }
}
1036
1037impl<P: PtrSize> VMOffsets<P> {
1041 #[inline]
1043 pub fn vm_gc_ref_activation_table_next(&self) -> u32 {
1044 0
1045 }
1046
1047 #[inline]
1049 pub fn vm_gc_ref_activation_table_end(&self) -> u32 {
1050 self.pointer_size().into()
1051 }
1052}
1053
/// Magic value stored at the start of a `VMContext`: the little-endian
/// bytes of the ASCII string "core", used as a sanity check on pointers.
pub const VMCONTEXT_MAGIC: u32 = u32::from_le_bytes(*b"core");

/// Magic value for array-call host function contexts: the little-endian
/// bytes of the ASCII string "ACHF", distinguishing them from `VMContext`s.
pub const VM_ARRAY_CALL_HOST_FUNC_MAGIC: u32 = u32::from_le_bytes(*b"ACHF");
1064
#[cfg(test)]
mod tests {
    use crate::vmoffsets::align;

    /// Check that `align` both produces multiples of the requested width
    /// and rounds *up* to the nearest such multiple.
    #[test]
    fn alignment() {
        fn is_aligned(x: u32) -> bool {
            x % 16 == 0
        }
        assert!(is_aligned(align(0, 16)));
        assert!(is_aligned(align(32, 16)));
        assert!(is_aligned(align(33, 16)));
        assert!(is_aligned(align(31, 16)));

        // The divisibility checks alone would pass for a function that
        // always returned 0, so also pin the exact expected values:
        // already-aligned inputs are unchanged and everything else rounds
        // up to the next multiple.
        assert_eq!(align(0, 16), 0);
        assert_eq!(align(1, 16), 16);
        assert_eq!(align(16, 16), 16);
        assert_eq!(align(31, 16), 32);
        assert_eq!(align(32, 16), 32);
        assert_eq!(align(33, 16), 48);
    }
}