1use crate::{
37 DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, DefinedTagIndex, FuncIndex,
38 FuncRefIndex, GlobalIndex, MemoryIndex, Module, OwnedMemoryIndex, TableIndex, TagIndex,
39};
40use cranelift_entity::packed_option::ReservedValue;
41
/// Number of 4-byte context slots reserved in the store context for
/// component-model data; bounds the index accepted by
/// `PtrSize::vmstore_context_component_context_slot`.
pub const NUM_COMPONENT_CONTEXT_SLOTS: usize = 2;
45
/// Narrows a `usize` to `u32`, panicking if the value does not fit.
#[cfg(target_pointer_width = "32")]
fn cast_to_u32(sz: usize) -> u32 {
    // On a 32-bit host `usize` is exactly 32 bits wide, so this
    // conversion can never fail.
    let narrowed = u32::try_from(sz);
    narrowed.unwrap()
}
/// Narrows a `usize` to `u32`, panicking with a diagnostic on overflow.
#[cfg(target_pointer_width = "64")]
fn cast_to_u32(sz: usize) -> u32 {
    // A 64-bit `usize` can exceed `u32::MAX`, so surface a clear
    // message for implausibly large inputs.
    let narrowed = u32::try_from(sz);
    narrowed.expect("overflow in cast from usize to u32")
}
54
/// Rounds `offset` up to the nearest multiple of `width`.
///
/// `width` must be non-zero; callers in this file pass pointer sizes
/// and small power-of-two alignments.
#[inline]
fn align(offset: u32, width: u32) -> u32 {
    let bumped = offset + (width - 1);
    (bumped / width) * width
}
60
/// Precomputed offsets of the fields and variable-length regions of a
/// `VMContext`, parameterized over the target's pointer size `P`.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsets<P> {
    /// The pointer-size description for the compilation target.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of imported tags in the module.
    pub num_imported_tags: u32,
    /// The number of defined tables in the module.
    pub num_defined_tables: u32,
    /// The number of defined memories in the module.
    pub num_defined_memories: u32,
    /// The number of memories owned by this instance (defined and
    /// non-shared; see `VMOffsets::new`).
    pub num_owned_memories: u32,
    /// The number of defined globals in the module.
    pub num_defined_globals: u32,
    /// The number of defined tags in the module.
    pub num_defined_tags: u32,
    /// The number of functions that escape (need a `VMFuncRef` slot).
    pub num_escaped_funcs: u32,

    // Byte offsets of each dynamic region within the `VMContext`,
    // computed by the `From<VMOffsetsFields>` impl below.
    imported_functions: u32,
    imported_tables: u32,
    imported_memories: u32,
    imported_globals: u32,
    imported_tags: u32,
    defined_tables: u32,
    defined_memories: u32,
    owned_memories: u32,
    defined_globals: u32,
    defined_tags: u32,
    defined_func_refs: u32,
    // Total size in bytes of the whole `VMContext`.
    size: u32,
}
105
/// Describes the target's pointer size; every offset below is derived
/// from that single value plus fixed field sizes.
pub trait PtrSize {
    /// The size of a pointer in bytes on the target.
    fn size(&self) -> u8;

    /// Offset of the store-context pointer in a context header: the
    /// 4-byte magic comes first, rounded up to pointer alignment.
    fn vmcontext_store_context(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the builtin-functions pointer, immediately after the
    /// store-context pointer.
    fn vmcontext_builtin_functions(&self) -> u8 {
        self.vmcontext_store_context() + self.size()
    }

    /// Offset of the `array_call` field in a `VMFuncRef` (field 0).
    #[inline]
    fn vm_func_ref_array_call(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `wasm_call` field in a `VMFuncRef` (field 1).
    #[inline]
    fn vm_func_ref_wasm_call(&self) -> u8 {
        1 * self.size()
    }

    /// Offset of the `type_index` field in a `VMFuncRef` (field 2).
    #[inline]
    fn vm_func_ref_type_index(&self) -> u8 {
        2 * self.size()
    }

    /// Offset of the `vmctx` field in a `VMFuncRef` (field 3).
    #[inline]
    fn vm_func_ref_vmctx(&self) -> u8 {
        3 * self.size()
    }

    /// Total size of a `VMFuncRef`: four pointer-sized fields.
    #[inline]
    fn size_of_vm_func_ref(&self) -> u8 {
        4 * self.size()
    }

    /// Size of a global-definition slot: a fixed 16 bytes regardless of
    /// pointer size (presumably sized to hold a v128 value — TODO
    /// confirm against the runtime's `VMGlobalDefinition`).
    #[inline]
    fn size_of_vmglobal_definition(&self) -> u8 {
        16
    }

    /// Size of a tag-definition slot: a single 4-byte field.
    #[inline]
    fn size_of_vmtag_definition(&self) -> u8 {
        4
    }

    /// The largest value size storable inline; bounded by the
    /// global-definition slot size above.
    #[inline]
    fn maximum_value_size(&self) -> u8 {
        self.size_of_vmglobal_definition()
    }

    // ---- `VMStoreContext` field offsets. The first three fields each
    // occupy 8 bytes, as established by the `+ 8` chain below. ----

    /// Offset of the fuel-consumed counter (first field, 8 bytes).
    #[inline]
    fn vmstore_context_fuel_consumed(&self) -> u8 {
        0
    }

    /// Offset of the epoch deadline, after the 8-byte fuel counter.
    #[inline]
    fn vmstore_context_epoch_deadline(&self) -> u8 {
        self.vmstore_context_fuel_consumed() + 8
    }

    /// Offset of the execution version, after the 8-byte deadline.
    #[inline]
    fn vmstore_context_execution_version(&self) -> u8 {
        self.vmstore_context_epoch_deadline() + 8
    }

    /// Offset of the stack limit, after the 8-byte execution-version
    /// slot.
    #[inline]
    fn vmstore_context_stack_limit(&self) -> u8 {
        self.vmstore_context_execution_version() + 8
    }

    /// Offset of the inline GC-heap `VMMemoryDefinition`.
    #[inline]
    fn vmstore_context_gc_heap(&self) -> u8 {
        self.vmstore_context_stack_limit() + self.size()
    }

    /// Offset of the GC heap's `base` pointer (the base field of the
    /// inline memory definition above).
    fn vmstore_context_gc_heap_base(&self) -> u8 {
        let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_base();
        debug_assert!(offset < self.vmstore_context_last_wasm_exit_trampoline_fp());
        offset
    }

    /// Offset of the GC heap's `current_length` field.
    fn vmstore_context_gc_heap_current_length(&self) -> u8 {
        let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_current_length();
        debug_assert!(offset < self.vmstore_context_last_wasm_exit_trampoline_fp());
        offset
    }

    /// Offset of the last-wasm-exit trampoline frame pointer, directly
    /// after the inline GC-heap memory definition.
    fn vmstore_context_last_wasm_exit_trampoline_fp(&self) -> u8 {
        self.vmstore_context_gc_heap() + self.size_of_vmmemory_definition()
    }

    /// Offset of the last wasm exit program counter (pointer-sized).
    fn vmstore_context_last_wasm_exit_pc(&self) -> u8 {
        self.vmstore_context_last_wasm_exit_trampoline_fp() + self.size()
    }

    /// Offset of the last wasm entry stack pointer (pointer-sized).
    fn vmstore_context_last_wasm_entry_sp(&self) -> u8 {
        self.vmstore_context_last_wasm_exit_pc() + self.size()
    }

    /// Offset of the last wasm entry frame pointer (pointer-sized).
    fn vmstore_context_last_wasm_entry_fp(&self) -> u8 {
        self.vmstore_context_last_wasm_entry_sp() + self.size()
    }

    /// Offset of the last wasm entry trap handler (pointer-sized).
    fn vmstore_context_last_wasm_entry_trap_handler(&self) -> u8 {
        self.vmstore_context_last_wasm_entry_fp() + self.size()
    }

    /// Offset of the stack chain, after the trap-handler pointer.
    fn vmstore_context_stack_chain(&self) -> u8 {
        self.vmstore_context_last_wasm_entry_trap_handler() + self.size()
    }

    /// Offset of the store-data pointer, after the stack chain.
    fn vmstore_context_store_data(&self) -> u8 {
        self.vmstore_context_stack_chain() + self.size_of_vmstack_chain()
    }

    /// Offset of the async guard range, after the store-data pointer.
    fn vmstore_context_async_guard_range(&self) -> u8 {
        self.vmstore_context_store_data() + self.size()
    }

    /// Offset of component-context slot `i`. The guard range occupies
    /// two pointers; each slot is 4 bytes and `i` must be below
    /// `NUM_COMPONENT_CONTEXT_SLOTS`.
    fn vmstore_context_component_context_slot(&self, i: u8) -> u8 {
        assert!(usize::from(i) < NUM_COMPONENT_CONTEXT_SLOTS);
        let base = self.vmstore_context_async_guard_range() + 2 * self.size();
        let slot_size = 4;
        base + i * slot_size
    }

    // ---- `VMMemoryDefinition`: two pointer-sized fields. ----

    /// Offset of the `base` field (field 0).
    #[inline]
    fn vmmemory_definition_base(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `current_length` field (field 1).
    #[inline]
    fn vmmemory_definition_current_length(&self) -> u8 {
        1 * self.size()
    }

    /// Total size of a `VMMemoryDefinition`.
    #[inline]
    fn size_of_vmmemory_definition(&self) -> u8 {
        2 * self.size()
    }

    /// Size of a pointer to a `VMMemoryDefinition`.
    #[inline]
    fn size_of_vmmemory_pointer(&self) -> u8 {
        self.size()
    }

    /// Offset of the func-ref field in an array-call host function
    /// context: the 4-byte magic, rounded up to pointer alignment.
    fn vmarray_call_host_func_context_func_ref(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Size of a stack chain: two pointer-sized fields.
    fn size_of_vmstack_chain(&self) -> u8 {
        2 * self.size()
    }

    // ---- `VMStackLimits`: two pointer-sized fields. ----

    /// Offset of the `stack_limit` field (field 0).
    fn vmstack_limits_stack_limit(&self) -> u8 {
        0
    }

    /// Offset of the `last_wasm_entry_fp` field (field 1).
    fn vmstack_limits_last_wasm_entry_fp(&self) -> u8 {
        self.size()
    }

    // ---- `VMHostArray`: 4-byte length, 4-byte capacity, then a
    // pointer to the data. ----

    /// Offset of the `length` field.
    fn vmhostarray_length(&self) -> u8 {
        0
    }

    /// Offset of the `capacity` field.
    fn vmhostarray_capacity(&self) -> u8 {
        4
    }

    /// Offset of the `data` pointer.
    fn vmhostarray_data(&self) -> u8 {
        8
    }

    /// Total size of a `VMHostArray`.
    fn size_of_vmhostarray(&self) -> u8 {
        8 + self.size()
    }

    // ---- `VMCommonStackInformation`. ----

    /// Offset of the embedded `VMStackLimits` (field 0).
    fn vmcommon_stack_information_limits(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `state` field, after the two-pointer limits.
    fn vmcommon_stack_information_state(&self) -> u8 {
        2 * self.size()
    }

    /// Offset of the handlers array: the 4-byte state, rounded up to
    /// pointer alignment.
    fn vmcommon_stack_information_handlers(&self) -> u8 {
        u8::try_from(align(
            self.vmcommon_stack_information_state() as u32 + 4,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the first-switch-handler index, after the handlers
    /// array.
    fn vmcommon_stack_information_first_switch_handler_index(&self) -> u8 {
        self.vmcommon_stack_information_handlers() + self.size_of_vmhostarray()
    }

    /// Total size: the 4-byte index above, rounded up to pointer
    /// alignment.
    fn size_of_vmcommon_stack_information(&self) -> u8 {
        u8::try_from(align(
            self.vmcommon_stack_information_first_switch_handler_index() as u32 + 4,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    // ---- `VMContObj`. ----

    /// Offset of the continuation-reference pointer (field 0).
    fn vmcontobj_contref(&self) -> u8 {
        0
    }

    /// Offset of the revision counter, after the contref pointer.
    fn vmcontobj_revision(&self) -> u8 {
        self.size()
    }

    /// Total size, rounded up to pointer alignment.
    /// NOTE(review): this adds the *host's* `usize` size for the
    /// revision field, mixing host and target widths — presumably host
    /// and target pointer sizes agree wherever this is used; confirm.
    fn size_of_vmcontobj(&self) -> u8 {
        u8::try_from(align(
            u32::from(self.vmcontobj_revision())
                + u32::try_from(core::mem::size_of::<usize>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    // ---- `VMContRef`. ----

    /// Offset of the embedded common stack information (field 0).
    fn vmcontref_common_stack_information(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the parent chain, after the stack information,
    /// rounded up to pointer alignment.
    fn vmcontref_parent_chain(&self) -> u8 {
        u8::try_from(align(
            (self.vmcontref_common_stack_information() + self.size_of_vmcommon_stack_information())
                as u32,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the last-ancestor pointer; the parent chain occupies
    /// two pointers.
    fn vmcontref_last_ancestor(&self) -> u8 {
        self.vmcontref_parent_chain() + 2 * self.size()
    }

    /// Offset of the revision counter (pointer-sized predecessor).
    fn vmcontref_revision(&self) -> u8 {
        self.vmcontref_last_ancestor() + self.size()
    }

    /// Offset of the stack field.
    fn vmcontref_stack(&self) -> u8 {
        self.vmcontref_revision() + self.size()
    }

    /// Offset of the args array; the stack field occupies three
    /// pointers.
    fn vmcontref_args(&self) -> u8 {
        self.vmcontref_stack() + 3 * self.size()
    }

    /// Offset of the values array, after the args `VMHostArray`.
    fn vmcontref_values(&self) -> u8 {
        self.vmcontref_args() + self.size_of_vmhostarray()
    }

    // ---- Fixed header of a `VMContext`. ----

    /// Offset of the magic value (always first).
    #[inline]
    fn vmctx_magic(&self) -> u8 {
        0
    }

    /// Offset of the store-context pointer, one pointer after the
    /// magic.
    #[inline]
    fn vmctx_store_context(&self) -> u8 {
        self.vmctx_magic() + self.size()
    }

    /// Offset of the builtin-functions pointer.
    #[inline]
    fn vmctx_builtin_functions(&self) -> u8 {
        self.vmctx_store_context() + self.size()
    }

    /// Offset of the epoch pointer.
    #[inline]
    fn vmctx_epoch_ptr(&self) -> u8 {
        self.vmctx_builtin_functions() + self.size()
    }

    /// Offset of the GC-heap data pointer.
    #[inline]
    fn vmctx_gc_heap_data(&self) -> u8 {
        self.vmctx_epoch_ptr() + self.size()
    }

    // ---- `VMCopyingHeapData`: two 4-byte fields. ----

    /// Offset of the bump pointer.
    #[inline]
    fn vmcopying_heap_data_bump_ptr(&self) -> u8 {
        0
    }

    /// Offset of the active-space-end field.
    #[inline]
    fn vmcopying_heap_data_active_space_end(&self) -> u8 {
        4
    }

    /// Total size of a `VMCopyingHeapData`.
    #[inline]
    fn size_of_vmcopying_heap_data(&self) -> u8 {
        8
    }

    /// Alignment of a `VMCopyingHeapData`.
    #[inline]
    fn align_of_vmcopying_heap_data(&self) -> u8 {
        4
    }

    /// Offset of the type-IDs array pointer.
    #[inline]
    fn vmctx_type_ids_array(&self) -> u8 {
        self.vmctx_gc_heap_data() + self.size()
    }

    /// Offset where the dynamic, per-module data of the `VMContext`
    /// begins (everything laid out by `VMOffsets`).
    #[inline]
    fn vmctx_dynamic_data_start(&self) -> u8 {
        self.vmctx_type_ids_array() + self.size()
    }
}
522
/// A `PtrSize` implementation using the pointer size of the host that
/// this code is running on.
#[derive(Clone, Copy)]
pub struct HostPtr;
526
527impl PtrSize for HostPtr {
528 #[inline]
529 fn size(&self) -> u8 {
530 core::mem::size_of::<usize>() as u8
531 }
532}
533
// A bare `u8` can describe an arbitrary target pointer size (e.g. 4 or
// 8), allowing offsets to be computed for a target that differs from
// the host.
impl PtrSize for u8 {
    #[inline]
    fn size(&self) -> u8 {
        // The value itself is the pointer size in bytes.
        *self
    }
}
540
/// The input counts from which a `VMOffsets` is computed; see the
/// `From<VMOffsetsFields<P>>` impl below.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsetsFields<P> {
    /// The pointer-size description for the compilation target.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of imported tags in the module.
    pub num_imported_tags: u32,
    /// The number of defined tables in the module.
    pub num_defined_tables: u32,
    /// The number of defined memories in the module.
    pub num_defined_memories: u32,
    /// The number of owned (defined, non-shared) memories.
    pub num_owned_memories: u32,
    /// The number of defined globals in the module.
    pub num_defined_globals: u32,
    /// The number of defined tags in the module.
    pub num_defined_tags: u32,
    /// The number of escaped functions needing `VMFuncRef` slots.
    pub num_escaped_funcs: u32,
}
570
impl<P: PtrSize> VMOffsets<P> {
    /// Builds the offset table for `module` with pointer size `ptr`.
    pub fn new(ptr: P, module: &Module) -> Self {
        // Owned memories are the defined (non-imported) memories that
        // are not shared.
        let num_owned_memories = module
            .memories
            .iter()
            .skip(module.num_imported_memories)
            .filter(|p| !p.1.shared)
            .count()
            .try_into()
            .unwrap();
        VMOffsets::from(VMOffsetsFields {
            ptr,
            num_imported_functions: cast_to_u32(module.num_imported_funcs),
            num_imported_tables: cast_to_u32(module.num_imported_tables),
            num_imported_memories: cast_to_u32(module.num_imported_memories),
            num_imported_globals: cast_to_u32(module.num_imported_globals),
            num_imported_tags: cast_to_u32(module.num_imported_tags),
            num_defined_tables: cast_to_u32(module.num_defined_tables()),
            num_defined_memories: cast_to_u32(module.num_defined_memories()),
            num_owned_memories,
            num_defined_globals: cast_to_u32(module.globals.len() - module.num_imported_globals),
            num_defined_tags: cast_to_u32(module.tags.len() - module.num_imported_tags),
            num_escaped_funcs: cast_to_u32(module.num_escaped_funcs),
        })
    }

    /// The target's pointer size in bytes.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// Returns `(description, size-in-bytes)` pairs for each region of
    /// the `VMContext`, useful for diagnostics. Sizes are recovered by
    /// differencing consecutive region start offsets against the total.
    pub fn region_sizes(&self) -> impl Iterator<Item = (&str, u32)> {
        macro_rules! calculate_sizes {
            ($($name:ident: $desc:tt,)*) => {{
                // Destructure exhaustively so that adding a field to
                // `VMOffsets` forces this listing to be updated.
                let VMOffsets {
                    ptr: _,
                    num_imported_functions: _,
                    num_imported_tables: _,
                    num_imported_memories: _,
                    num_imported_globals: _,
                    num_imported_tags: _,
                    num_defined_tables: _,
                    num_defined_globals: _,
                    num_defined_memories: _,
                    num_defined_tags: _,
                    num_owned_memories: _,
                    num_escaped_funcs: _,

                    size,

                    $($name,)*
                } = *self;

                // Walk the regions from the end of the context
                // backwards: each region's size is the distance to the
                // previous region's start (`$name`s must be listed in
                // reverse layout order).
                let mut last = size;
                $(
                    assert!($name <= last);
                    let tmp = $name;
                    let $name = last - $name;
                    last = tmp;
                )*
                // Whatever remains at the front is the fixed header.
                assert_ne!(last, 0);
                IntoIterator::into_iter([
                    $(($desc, $name),)*
                    ("static vmctx data", last),
                ])
            }};
        }

        calculate_sizes! {
            defined_func_refs: "module functions",
            defined_tags: "defined tags",
            defined_globals: "defined globals",
            defined_tables: "defined tables",
            imported_tags: "imported tags",
            imported_globals: "imported globals",
            imported_tables: "imported tables",
            imported_functions: "imported functions",
            owned_memories: "owned memories",
            defined_memories: "defined memories",
            imported_memories: "imported memories",
        }
    }
}
668
impl<P: PtrSize> From<VMOffsetsFields<P>> for VMOffsets<P> {
    /// Lays out the dynamic regions of the `VMContext` one after
    /// another and records each region's starting offset. The layout
    /// order of the `fields!` invocation below is the ABI — do not
    /// reorder it.
    fn from(fields: VMOffsetsFields<P>) -> VMOffsets<P> {
        // Start with all offsets zeroed; they are filled in below.
        let mut ret = Self {
            ptr: fields.ptr,
            num_imported_functions: fields.num_imported_functions,
            num_imported_tables: fields.num_imported_tables,
            num_imported_memories: fields.num_imported_memories,
            num_imported_globals: fields.num_imported_globals,
            num_imported_tags: fields.num_imported_tags,
            num_defined_tables: fields.num_defined_tables,
            num_defined_memories: fields.num_defined_memories,
            num_owned_memories: fields.num_owned_memories,
            num_defined_globals: fields.num_defined_globals,
            num_defined_tags: fields.num_defined_tags,
            num_escaped_funcs: fields.num_escaped_funcs,
            imported_functions: 0,
            imported_tables: 0,
            imported_memories: 0,
            imported_globals: 0,
            imported_tags: 0,
            defined_tables: 0,
            defined_memories: 0,
            owned_memories: 0,
            defined_globals: 0,
            defined_tags: 0,
            defined_func_refs: 0,
            size: 0,
        };

        // Checked add: the total context size must not overflow `u32`.
        #[inline]
        fn cadd(count: u32, size: u32) -> u32 {
            count.checked_add(size).unwrap()
        }

        // Checked multiply for `count * per-entry-size`.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        // Dynamic data begins right after the fixed vmctx header.
        let mut next_field_offset = u32::from(ret.ptr.vmctx_dynamic_data_start());

        // `size(field) = n` records the current offset into `field`
        // then advances by `n`; `align(n)` rounds the cursor up.
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = cadd(next_field_offset, u32::from($size));
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        fields! {
            size(imported_memories)
                = cmul(ret.num_imported_memories, ret.size_of_vmmemory_import()),
            size(defined_memories)
                = cmul(ret.num_defined_memories, ret.ptr.size_of_vmmemory_pointer()),
            size(owned_memories)
                = cmul(ret.num_owned_memories, ret.ptr.size_of_vmmemory_definition()),
            size(imported_functions)
                = cmul(ret.num_imported_functions, ret.size_of_vmfunction_import()),
            size(imported_tables)
                = cmul(ret.num_imported_tables, ret.size_of_vmtable_import()),
            size(imported_globals)
                = cmul(ret.num_imported_globals, ret.size_of_vmglobal_import()),
            size(imported_tags)
                = cmul(ret.num_imported_tags, ret.size_of_vmtag_import()),
            size(defined_tables)
                = cmul(ret.num_defined_tables, ret.size_of_vmtable_definition()),
            // Global definitions are 16 bytes each and 16-byte aligned.
            align(16),
            size(defined_globals)
                = cmul(ret.num_defined_globals, ret.ptr.size_of_vmglobal_definition()),
            size(defined_tags)
                = cmul(ret.num_defined_tags, ret.ptr.size_of_vmtag_definition()),
            size(defined_func_refs) = cmul(
                ret.num_escaped_funcs,
                ret.ptr.size_of_vm_func_ref(),
            ),
        }

        ret.size = next_field_offset;

        return ret;
    }
}
760
761impl<P: PtrSize> VMOffsets<P> {
762 #[inline]
764 pub fn vmfunction_import_array_call(&self) -> u8 {
765 0 * self.pointer_size()
766 }
767
768 #[inline]
770 pub fn vmfunction_import_wasm_call(&self) -> u8 {
771 1 * self.pointer_size()
772 }
773
774 #[inline]
776 pub fn vmfunction_import_type_index(&self) -> u8 {
777 2 * self.pointer_size()
778 }
779
780 #[inline]
782 pub fn vmfunction_import_vmctx(&self) -> u8 {
783 3 * self.pointer_size()
784 }
785
786 #[inline]
788 pub fn size_of_vmfunction_import(&self) -> u8 {
789 4 * self.pointer_size()
790 }
791}
792
793impl<P: PtrSize> VMOffsets<P> {
795 pub fn size_of_vmfunction_body_ptr(&self) -> u8 {
797 1 * self.pointer_size()
798 }
799}
800
801impl<P: PtrSize> VMOffsets<P> {
803 #[inline]
805 pub fn vmtable_import_from(&self) -> u8 {
806 0 * self.pointer_size()
807 }
808
809 #[inline]
811 pub fn vmtable_import_vmctx(&self) -> u8 {
812 1 * self.pointer_size()
813 }
814
815 #[inline]
817 pub fn vmtable_import_index(&self) -> u8 {
818 2 * self.pointer_size()
819 }
820
821 #[inline]
823 pub fn size_of_vmtable_import(&self) -> u8 {
824 3 * self.pointer_size()
825 }
826}
827
828impl<P: PtrSize> VMOffsets<P> {
830 #[inline]
832 pub fn vmtable_definition_base(&self) -> u8 {
833 0 * self.pointer_size()
834 }
835
836 pub fn vmtable_definition_current_elements(&self) -> u8 {
838 1 * self.pointer_size()
839 }
840
841 #[inline]
843 pub fn size_of_vmtable_definition_current_elements(&self) -> u8 {
844 self.pointer_size()
845 }
846
847 #[inline]
849 pub fn size_of_vmtable_definition(&self) -> u8 {
850 2 * self.pointer_size()
851 }
852}
853
854impl<P: PtrSize> VMOffsets<P> {
856 #[inline]
858 pub fn vmmemory_import_from(&self) -> u8 {
859 0 * self.pointer_size()
860 }
861
862 #[inline]
864 pub fn vmmemory_import_vmctx(&self) -> u8 {
865 1 * self.pointer_size()
866 }
867
868 #[inline]
870 pub fn vmmemory_import_index(&self) -> u8 {
871 2 * self.pointer_size()
872 }
873
874 #[inline]
876 pub fn size_of_vmmemory_import(&self) -> u8 {
877 3 * self.pointer_size()
878 }
879}
880
881impl<P: PtrSize> VMOffsets<P> {
883 #[inline]
885 pub fn vmglobal_import_from(&self) -> u8 {
886 0 * self.pointer_size()
887 }
888
889 #[inline]
891 pub fn size_of_vmglobal_import(&self) -> u8 {
892 2 * self.pointer_size() + 8
894 }
895}
896
impl<P: PtrSize> VMOffsets<P> {
    /// The byte size of a `VMSharedTypeIndex`: a single `u32`.
    #[inline]
    pub fn size_of_vmshared_type_index(&self) -> u8 {
        4
    }
}
905
906impl<P: PtrSize> VMOffsets<P> {
908 #[inline]
910 pub fn vmtag_import_from(&self) -> u8 {
911 0 * self.pointer_size()
912 }
913
914 #[inline]
916 pub fn vmtag_import_vmctx(&self) -> u8 {
917 1 * self.pointer_size()
918 }
919
920 #[inline]
922 pub fn vmtag_import_index(&self) -> u8 {
923 2 * self.pointer_size()
924 }
925
926 #[inline]
928 pub fn size_of_vmtag_import(&self) -> u8 {
929 3 * self.pointer_size()
930 }
931}
932
// Accessors for the offsets of the dynamic regions within a
// `VMContext`, and for individual entries within those regions.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset where the imported-functions array begins.
    #[inline]
    pub fn vmctx_imported_functions_begin(&self) -> u32 {
        self.imported_functions
    }

    /// The offset where the imported-tables array begins.
    #[inline]
    pub fn vmctx_imported_tables_begin(&self) -> u32 {
        self.imported_tables
    }

    /// The offset where the imported-memories array begins.
    #[inline]
    pub fn vmctx_imported_memories_begin(&self) -> u32 {
        self.imported_memories
    }

    /// The offset where the imported-globals array begins.
    #[inline]
    pub fn vmctx_imported_globals_begin(&self) -> u32 {
        self.imported_globals
    }

    /// The offset where the imported-tags array begins.
    #[inline]
    pub fn vmctx_imported_tags_begin(&self) -> u32 {
        self.imported_tags
    }

    /// The offset where the defined-tables array begins.
    #[inline]
    pub fn vmctx_tables_begin(&self) -> u32 {
        self.defined_tables
    }

    /// The offset where the defined-memory pointers begin.
    #[inline]
    pub fn vmctx_memories_begin(&self) -> u32 {
        self.defined_memories
    }

    /// The offset where the owned-memory definitions begin.
    #[inline]
    pub fn vmctx_owned_memories_begin(&self) -> u32 {
        self.owned_memories
    }

    /// The offset where the defined-globals array begins.
    #[inline]
    pub fn vmctx_globals_begin(&self) -> u32 {
        self.defined_globals
    }

    /// The offset where the defined-tags array begins.
    #[inline]
    pub fn vmctx_tags_begin(&self) -> u32 {
        self.defined_tags
    }

    /// The offset where the `VMFuncRef` array begins.
    #[inline]
    pub fn vmctx_func_refs_begin(&self) -> u32 {
        self.defined_func_refs
    }

    /// The total byte size of the `VMContext`.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }

    /// The offset of the `VMFunctionImport` for imported function
    /// `index`. Panics if `index` is not an imported function.
    #[inline]
    pub fn vmctx_vmfunction_import(&self, index: FuncIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_functions);
        self.vmctx_imported_functions_begin()
            + index.as_u32() * u32::from(self.size_of_vmfunction_import())
    }

    /// The offset of the `VMTableImport` for imported table `index`.
    #[inline]
    pub fn vmctx_vmtable_import(&self, index: TableIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tables);
        self.vmctx_imported_tables_begin()
            + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// The offset of the `VMMemoryImport` for imported memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_import(&self, index: MemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_memories);
        self.vmctx_imported_memories_begin()
            + index.as_u32() * u32::from(self.size_of_vmmemory_import())
    }

    /// The offset of the `VMGlobalImport` for imported global `index`.
    #[inline]
    pub fn vmctx_vmglobal_import(&self, index: GlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_globals);
        self.vmctx_imported_globals_begin()
            + index.as_u32() * u32::from(self.size_of_vmglobal_import())
    }

    /// The offset of the `VMTagImport` for imported tag `index`.
    #[inline]
    pub fn vmctx_vmtag_import(&self, index: TagIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tags);
        self.vmctx_imported_tags_begin() + index.as_u32() * u32::from(self.size_of_vmtag_import())
    }

    /// The offset of the `VMTableDefinition` for defined table `index`.
    #[inline]
    pub fn vmctx_vmtable_definition(&self, index: DefinedTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tables);
        self.vmctx_tables_begin() + index.as_u32() * u32::from(self.size_of_vmtable_definition())
    }

    /// The offset of the memory-definition pointer for defined memory
    /// `index`.
    #[inline]
    pub fn vmctx_vmmemory_pointer(&self, index: DefinedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_memories);
        self.vmctx_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_pointer())
    }

    /// The offset of the `VMMemoryDefinition` for owned memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition(&self, index: OwnedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_owned_memories);
        self.vmctx_owned_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_definition())
    }

    /// The offset of the `VMGlobalDefinition` for defined global
    /// `index`.
    #[inline]
    pub fn vmctx_vmglobal_definition(&self, index: DefinedGlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_globals);
        self.vmctx_globals_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// The offset of the `VMTagDefinition` for defined tag `index`.
    #[inline]
    pub fn vmctx_vmtag_definition(&self, index: DefinedTagIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tags);
        self.vmctx_tags_begin() + index.as_u32() * u32::from(self.ptr.size_of_vmtag_definition())
    }

    /// The offset of the `VMFuncRef` for escaped function `index`.
    /// Panics on the reserved (unassigned) sentinel value.
    #[inline]
    pub fn vmctx_func_ref(&self, index: FuncRefIndex) -> u32 {
        assert!(!index.is_reserved_value());
        assert!(index.as_u32() < self.num_escaped_funcs);
        self.vmctx_func_refs_begin() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the `wasm_call` field of function import `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_wasm_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_wasm_call())
    }

    /// The offset of the `array_call` field of function import `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_array_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_array_call())
    }

    /// The offset of the `vmctx` field of function import `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_vmctx(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_vmctx())
    }

    /// The offset of the `from` field of table import `index`.
    #[inline]
    pub fn vmctx_vmtable_from(&self, index: TableIndex) -> u32 {
        self.vmctx_vmtable_import(index) + u32::from(self.vmtable_import_from())
    }

    /// The offset of the `base` field of table definition `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_base(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_base())
    }

    /// The offset of the `current_elements` field of table definition
    /// `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_current_elements(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_current_elements())
    }

    /// The offset of the `from` field of memory import `index`.
    #[inline]
    pub fn vmctx_vmmemory_import_from(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_from())
    }

    /// The offset of the `base` field of owned memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_base(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index) + u32::from(self.ptr.vmmemory_definition_base())
    }

    /// The offset of the `current_length` field of owned memory
    /// `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_current_length(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index)
            + u32::from(self.ptr.vmmemory_definition_current_length())
    }

    /// The offset of the `from` field of global import `index`.
    #[inline]
    pub fn vmctx_vmglobal_import_from(&self, index: GlobalIndex) -> u32 {
        self.vmctx_vmglobal_import(index) + u32::from(self.vmglobal_import_from())
    }

    /// The offset of the `from` field of tag import `index`.
    #[inline]
    pub fn vmctx_vmtag_import_from(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_from())
    }

    /// The offset of the `vmctx` field of tag import `index`.
    #[inline]
    pub fn vmctx_vmtag_import_vmctx(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_vmctx())
    }

    /// The offset of the `index` field of tag import `index`.
    #[inline]
    pub fn vmctx_vmtag_import_index(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_index())
    }
}
1173
// Offsets within a GC object header.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset of the header's `kind` word (always first).
    #[inline]
    pub fn vm_gc_header_kind(&self) -> u32 {
        0
    }

    /// The offset of the header's reserved bits. Same offset as
    /// `kind`, so the two presumably share the header's first 32-bit
    /// word — TODO confirm against the runtime's GC header definition.
    #[inline]
    pub fn vm_gc_header_reserved_bits(&self) -> u32 {
        self.vm_gc_header_kind()
    }

    /// The offset of the header's type field, 4 bytes after `kind`.
    #[inline]
    pub fn vm_gc_header_ty(&self) -> u32 {
        self.vm_gc_header_kind() + 4
    }
}
1195
// Offsets within a deferred-reference-counting (DRC) GC object header.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset of the reference count: 8 bytes in, presumably after
    /// the common GC header above — TODO confirm against the runtime
    /// struct.
    #[inline]
    pub fn vm_drc_header_ref_count(&self) -> u32 {
        8
    }

    /// The offset of the next-over-approximated-stack-root link, 8
    /// bytes after the reference count.
    #[inline]
    pub fn vm_drc_header_next_over_approximated_stack_root(&self) -> u32 {
        self.vm_drc_header_ref_count() + 8
    }
}
1212
/// Magic value stored at the start of a core-module `VMContext`: the
/// bytes "core" read as a little-endian `u32`.
pub const VMCONTEXT_MAGIC: u32 = u32::from_le_bytes(*b"core");

/// Magic value identifying an array-call host-function context: the
/// bytes "ACHF" read as a little-endian `u32`.
pub const VM_ARRAY_CALL_HOST_FUNC_MAGIC: u32 = u32::from_le_bytes(*b"ACHF");
1223
#[cfg(test)]
mod tests {
    use crate::vmoffsets::align;

    #[test]
    fn alignment() {
        // Every result of `align(_, 16)` must be a multiple of 16,
        // whether the input is already aligned, just below, or just
        // above an alignment boundary.
        for offset in [0, 31, 32, 33] {
            assert_eq!(align(offset, 16) % 16, 0);
        }
    }
}