1use crate::{
38 DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, DefinedTagIndex, FuncIndex,
39 FuncRefIndex, GlobalIndex, MemoryIndex, Module, OwnedMemoryIndex, TableIndex, TagIndex,
40};
41use cranelift_entity::packed_option::ReservedValue;
42
#[cfg(target_pointer_width = "32")]
fn cast_to_u32(sz: usize) -> u32 {
    // On a 32-bit host `usize` always fits in `u32`, so this conversion can
    // never fail.
    sz.try_into().unwrap()
}
#[cfg(target_pointer_width = "64")]
fn cast_to_u32(sz: usize) -> u32 {
    // On a 64-bit host the value may exceed `u32::MAX`; panic with a clear
    // message rather than silently truncating.
    sz.try_into().expect("overflow in cast from usize to u32")
}
51
/// Round `offset` up to the nearest multiple of `width`.
///
/// `width` must be non-zero. The intermediate `offset + (width - 1)` can
/// overflow `u32` for values near `u32::MAX`, matching the arithmetic of the
/// original formulation.
#[inline]
fn align(offset: u32, width: u32) -> u32 {
    let padded = offset + (width - 1);
    padded - padded % width
}
57
/// Offsets and sizes of the fields inside a `VMContext` for a particular
/// module, computed for a target whose pointer size is described by `P`.
///
/// The public `num_*` fields are counts copied from the module; the private
/// fields are byte offsets of each dynamically-sized region, filled in by the
/// `From<VMOffsetsFields>` impl.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsets<P> {
    /// The pointer-size abstraction (see `PtrSize`) for the target.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of imported tags in the module.
    pub num_imported_tags: u32,
    /// The number of defined tables in the module.
    pub num_defined_tables: u32,
    /// The number of defined memories in the module.
    pub num_defined_memories: u32,
    /// The number of defined, non-shared memories (those owned by this
    /// instance's `VMContext`).
    pub num_owned_memories: u32,
    /// The number of defined globals in the module.
    pub num_defined_globals: u32,
    /// The number of defined tags in the module.
    pub num_defined_tags: u32,
    /// The number of escaped functions, i.e. those needing a `VMFuncRef`
    /// slot in the `VMContext`.
    pub num_escaped_funcs: u32,

    // Byte offsets of each dynamically-sized region of the `VMContext`,
    // in no particular declaration order; `size` is the total byte size.
    // All are computed by `From<VMOffsetsFields>`.
    imported_functions: u32,
    imported_tables: u32,
    imported_memories: u32,
    imported_globals: u32,
    imported_tags: u32,
    defined_tables: u32,
    defined_memories: u32,
    owned_memories: u32,
    defined_globals: u32,
    defined_tags: u32,
    defined_func_refs: u32,
    size: u32,
}
102
/// Trait abstracting over the pointer width of the compilation target,
/// together with byte offsets of VM data structures that depend only on that
/// pointer width (not on any particular module).
pub trait PtrSize {
    /// The target's pointer size, in bytes.
    fn size(&self) -> u8;

    /// Offset of a store-context pointer that follows a 4-byte magic header:
    /// `size_of::<u32>()` rounded up to pointer alignment.
    fn vmcontext_store_context(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the builtin-functions pointer: one pointer-sized word after
    /// [`Self::vmcontext_store_context`].
    fn vmcontext_builtin_functions(&self) -> u8 {
        self.vmcontext_store_context() + self.size()
    }

    /// Offset of the `array_call` field of a `VMFuncRef` (word 0).
    #[inline]
    fn vm_func_ref_array_call(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `wasm_call` field of a `VMFuncRef` (word 1).
    #[inline]
    fn vm_func_ref_wasm_call(&self) -> u8 {
        1 * self.size()
    }

    /// Offset of the `type_index` field of a `VMFuncRef` (word 2).
    #[inline]
    fn vm_func_ref_type_index(&self) -> u8 {
        2 * self.size()
    }

    /// Offset of the `vmctx` field of a `VMFuncRef` (word 3).
    #[inline]
    fn vm_func_ref_vmctx(&self) -> u8 {
        3 * self.size()
    }

    /// The byte size of a `VMFuncRef`: four pointer-sized words.
    #[inline]
    fn size_of_vm_func_ref(&self) -> u8 {
        4 * self.size()
    }

    /// The byte size of a `VMGlobalDefinition`: a fixed 16 bytes regardless
    /// of pointer width (the `From` impl below also 16-aligns this region).
    #[inline]
    fn size_of_vmglobal_definition(&self) -> u8 {
        16
    }

    /// The byte size of a `VMTagDefinition`: a fixed 4 bytes.
    #[inline]
    fn size_of_vmtag_definition(&self) -> u8 {
        4
    }

    // Offsets within a `VMStoreContext`:

    /// Offset of the 8-byte fuel-consumed counter (the first field).
    #[inline]
    fn vmstore_context_fuel_consumed(&self) -> u8 {
        0
    }

    /// Offset of the 8-byte epoch deadline, immediately after the fuel
    /// counter.
    #[inline]
    fn vmstore_context_epoch_deadline(&self) -> u8 {
        self.vmstore_context_fuel_consumed() + 8
    }

    /// Offset of the pointer-sized stack limit.
    #[inline]
    fn vmstore_context_stack_limit(&self) -> u8 {
        self.vmstore_context_epoch_deadline() + 8
    }

    /// Offset of an inline `VMMemoryDefinition` describing the GC heap.
    #[inline]
    fn vmstore_context_gc_heap(&self) -> u8 {
        self.vmstore_context_stack_limit() + self.size()
    }

    /// Offset of the GC heap's base pointer (the `base` word of the inline
    /// memory definition).
    fn vmstore_context_gc_heap_base(&self) -> u8 {
        let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_base();
        // Sanity check: still within the GC-heap memory definition.
        debug_assert!(offset < self.vmstore_context_last_wasm_exit_fp());
        offset
    }

    /// Offset of the GC heap's current length (the `current_length` word of
    /// the inline memory definition).
    fn vmstore_context_gc_heap_current_length(&self) -> u8 {
        let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_current_length();
        // Sanity check: still within the GC-heap memory definition.
        debug_assert!(offset < self.vmstore_context_last_wasm_exit_fp());
        offset
    }

    /// Offset of the last-wasm-exit frame pointer, placed right after the
    /// GC heap's inline memory definition.
    fn vmstore_context_last_wasm_exit_fp(&self) -> u8 {
        self.vmstore_context_gc_heap() + self.size_of_vmmemory_definition()
    }

    /// Offset of the last-wasm-exit program counter.
    fn vmstore_context_last_wasm_exit_pc(&self) -> u8 {
        self.vmstore_context_last_wasm_exit_fp() + self.size()
    }

    /// Offset of the last-wasm-entry frame pointer.
    fn vmstore_context_last_wasm_entry_fp(&self) -> u8 {
        self.vmstore_context_last_wasm_exit_pc() + self.size()
    }

    // Offsets within a `VMMemoryDefinition`:

    /// Offset of the `base` pointer (word 0).
    #[inline]
    fn vmmemory_definition_base(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `current_length` field (word 1).
    #[inline]
    fn vmmemory_definition_current_length(&self) -> u8 {
        1 * self.size()
    }

    /// The byte size of a `VMMemoryDefinition`: two pointer-sized words.
    #[inline]
    fn size_of_vmmemory_definition(&self) -> u8 {
        2 * self.size()
    }

    /// The byte size of a pointer to a `VMMemoryDefinition`.
    #[inline]
    fn size_of_vmmemory_pointer(&self) -> u8 {
        self.size()
    }

    /// Offset of the `VMFuncRef` inside an array-call host function context:
    /// the 4-byte magic rounded up to pointer alignment.
    fn vmarray_call_host_func_context_func_ref(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    // Offsets of the fixed-size prefix of a `VMContext`:

    /// Offset of the 32-bit magic cookie (see `VMCONTEXT_MAGIC`), always at
    /// the very start of the `VMContext`.
    #[inline]
    fn vmctx_magic(&self) -> u8 {
        0
    }

    /// Offset of the store-context pointer; the 4-byte magic occupies a full
    /// pointer-sized slot for alignment.
    #[inline]
    fn vmctx_store_context(&self) -> u8 {
        self.vmctx_magic() + self.size()
    }

    /// Offset of the builtin-functions array pointer.
    #[inline]
    fn vmctx_builtin_functions(&self) -> u8 {
        self.vmctx_store_context() + self.size()
    }

    /// Offset of the callee pointer.
    #[inline]
    fn vmctx_callee(&self) -> u8 {
        self.vmctx_builtin_functions() + self.size()
    }

    /// Offset of the epoch-counter pointer.
    #[inline]
    fn vmctx_epoch_ptr(&self) -> u8 {
        self.vmctx_callee() + self.size()
    }

    /// Offset of the GC-heap data pointer.
    #[inline]
    fn vmctx_gc_heap_data(&self) -> u8 {
        self.vmctx_epoch_ptr() + self.size()
    }

    /// Offset of the type-IDs array pointer.
    #[inline]
    fn vmctx_type_ids_array(&self) -> u8 {
        self.vmctx_gc_heap_data() + self.size()
    }

    /// Offset where the dynamically-sized, per-module portion of the
    /// `VMContext` begins; everything before this is the fixed-size prefix
    /// laid out by the methods above.
    #[inline]
    fn vmctx_dynamic_data_start(&self) -> u8 {
        self.vmctx_type_ids_array() + self.size()
    }
}
314
/// Marker type whose `PtrSize` impl reports the pointer size of the *host*
/// (the machine this code runs on) rather than a cross-compilation target.
#[derive(Clone, Copy)]
pub struct HostPtr;
318
impl PtrSize for HostPtr {
    #[inline]
    fn size(&self) -> u8 {
        // The host's `usize` width in bytes; always small enough for `u8`.
        core::mem::size_of::<usize>() as u8
    }
}
325
impl PtrSize for u8 {
    #[inline]
    fn size(&self) -> u8 {
        // A bare `u8` is interpreted directly as a pointer size in bytes,
        // allowing offsets for an arbitrary (cross-compiled) target.
        *self
    }
}
332
/// The raw inputs — pointer size plus per-module counts — from which a
/// [`VMOffsets`] is computed via its `From` impl.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsetsFields<P> {
    /// The pointer-size abstraction (see `PtrSize`) for the target.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of imported tags in the module.
    pub num_imported_tags: u32,
    /// The number of defined tables in the module.
    pub num_defined_tables: u32,
    /// The number of defined memories in the module.
    pub num_defined_memories: u32,
    /// The number of defined, non-shared memories in the module.
    pub num_owned_memories: u32,
    /// The number of defined globals in the module.
    pub num_defined_globals: u32,
    /// The number of defined tags in the module.
    pub num_defined_tags: u32,
    /// The number of escaped functions (those needing a `VMFuncRef`).
    pub num_escaped_funcs: u32,
}
362
impl<P: PtrSize> VMOffsets<P> {
    /// Return a new `VMOffsets` instance for the given pointer size and
    /// module.
    pub fn new(ptr: P, module: &Module) -> Self {
        // Owned memories are the defined (non-imported), non-shared ones;
        // shared memories live elsewhere and are only referenced by pointer.
        let num_owned_memories = module
            .memories
            .iter()
            .skip(module.num_imported_memories)
            .filter(|p| !p.1.shared)
            .count()
            .try_into()
            .unwrap();
        VMOffsets::from(VMOffsetsFields {
            ptr,
            num_imported_functions: cast_to_u32(module.num_imported_funcs),
            num_imported_tables: cast_to_u32(module.num_imported_tables),
            num_imported_memories: cast_to_u32(module.num_imported_memories),
            num_imported_globals: cast_to_u32(module.num_imported_globals),
            num_imported_tags: cast_to_u32(module.num_imported_tags),
            num_defined_tables: cast_to_u32(module.num_defined_tables()),
            num_defined_memories: cast_to_u32(module.num_defined_memories()),
            num_owned_memories,
            num_defined_globals: cast_to_u32(module.globals.len() - module.num_imported_globals),
            num_defined_tags: cast_to_u32(module.tags.len() - module.num_imported_tags),
            num_escaped_funcs: cast_to_u32(module.num_escaped_funcs),
        })
    }

    /// The target pointer size, in bytes.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// Returns `(description, size-in-bytes)` pairs for each region of the
    /// `VMContext`, in layout order, ending with the fixed-size
    /// "static vmctx data" prefix.
    pub fn region_sizes(&self) -> impl Iterator<Item = (&str, u32)> {
        macro_rules! calculate_sizes {
            ($($name:ident: $desc:tt,)*) => {{
                // Exhaustively destructure so that adding a field to
                // `VMOffsets` forces this accounting to be updated.
                let VMOffsets {
                    ptr: _,
                    num_imported_functions: _,
                    num_imported_tables: _,
                    num_imported_memories: _,
                    num_imported_globals: _,
                    num_imported_tags: _,
                    num_defined_tables: _,
                    num_defined_globals: _,
                    num_defined_memories: _,
                    num_defined_tags: _,
                    num_owned_memories: _,
                    num_escaped_funcs: _,

                    size,

                    $($name,)*
                } = *self;

                // The regions are listed below in *reverse* layout order, so
                // each region's size is the previous (higher) offset minus
                // this region's offset.
                let mut last = size;
                $(
                    assert!($name <= last);
                    let tmp = $name;
                    let $name = last - $name;
                    last = tmp;
                )*
                // Whatever remains below the first dynamic region is the
                // non-empty fixed-size portion of the `VMContext`.
                assert_ne!(last, 0);
                IntoIterator::into_iter([
                    $(($desc, $name),)*
                    ("static vmctx data", last),
                ])
            }};
        }

        // Reverse of the layout order established in `From<VMOffsetsFields>`.
        calculate_sizes! {
            defined_func_refs: "module functions",
            defined_tags: "defined tags",
            defined_globals: "defined globals",
            defined_tables: "defined tables",
            imported_tags: "imported tags",
            imported_globals: "imported globals",
            imported_tables: "imported tables",
            imported_functions: "imported functions",
            owned_memories: "owned memories",
            defined_memories: "defined memories",
            imported_memories: "imported memories",
        }
    }
}
460
impl<P: PtrSize> From<VMOffsetsFields<P>> for VMOffsets<P> {
    fn from(fields: VMOffsetsFields<P>) -> VMOffsets<P> {
        // Copy the counts and zero all offsets; the offsets are filled in by
        // the `fields!` invocation below.
        let mut ret = Self {
            ptr: fields.ptr,
            num_imported_functions: fields.num_imported_functions,
            num_imported_tables: fields.num_imported_tables,
            num_imported_memories: fields.num_imported_memories,
            num_imported_globals: fields.num_imported_globals,
            num_imported_tags: fields.num_imported_tags,
            num_defined_tables: fields.num_defined_tables,
            num_defined_memories: fields.num_defined_memories,
            num_owned_memories: fields.num_owned_memories,
            num_defined_globals: fields.num_defined_globals,
            num_defined_tags: fields.num_defined_tags,
            num_escaped_funcs: fields.num_escaped_funcs,
            imported_functions: 0,
            imported_tables: 0,
            imported_memories: 0,
            imported_globals: 0,
            imported_tags: 0,
            defined_tables: 0,
            defined_memories: 0,
            owned_memories: 0,
            defined_globals: 0,
            defined_tags: 0,
            defined_func_refs: 0,
            size: 0,
        };

        // Checked arithmetic: a module large enough to overflow these 32-bit
        // offsets panics instead of silently producing a corrupt layout.
        #[inline]
        fn cadd(count: u32, size: u32) -> u32 {
            count.checked_add(size).unwrap()
        }

        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        // Dynamic data starts immediately after the fixed-size prefix.
        let mut next_field_offset = u32::from(ret.ptr.vmctx_dynamic_data_start());

        // `size(field) = expr` records the running offset into `ret.field`
        // then advances it by `expr` bytes; `align(n)` rounds the running
        // offset up to an `n`-byte boundary.
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = cadd(next_field_offset, u32::from($size));
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        // The authoritative layout order of the dynamic `VMContext` regions;
        // `region_sizes` above lists the same regions in reverse.
        fields! {
            size(imported_memories)
                = cmul(ret.num_imported_memories, ret.size_of_vmmemory_import()),
            size(defined_memories)
                = cmul(ret.num_defined_memories, ret.ptr.size_of_vmmemory_pointer()),
            size(owned_memories)
                = cmul(ret.num_owned_memories, ret.ptr.size_of_vmmemory_definition()),
            size(imported_functions)
                = cmul(ret.num_imported_functions, ret.size_of_vmfunction_import()),
            size(imported_tables)
                = cmul(ret.num_imported_tables, ret.size_of_vmtable()),
            size(imported_globals)
                = cmul(ret.num_imported_globals, ret.size_of_vmglobal_import()),
            size(imported_tags)
                = cmul(ret.num_imported_tags, ret.size_of_vmtag_import()),
            size(defined_tables)
                = cmul(ret.num_defined_tables, ret.size_of_vmtable_definition()),
            // `VMGlobalDefinition`s are 16 bytes each and laid out on a
            // 16-byte boundary.
            align(16),
            size(defined_globals)
                = cmul(ret.num_defined_globals, ret.ptr.size_of_vmglobal_definition()),
            size(defined_tags)
                = cmul(ret.num_defined_tags, ret.ptr.size_of_vmtag_definition()),
            size(defined_func_refs) = cmul(
                ret.num_escaped_funcs,
                ret.ptr.size_of_vm_func_ref(),
            ),
        }

        ret.size = next_field_offset;

        return ret;
    }
}
552
553impl<P: PtrSize> VMOffsets<P> {
554 #[inline]
556 pub fn vmfunction_import_wasm_call(&self) -> u8 {
557 0 * self.pointer_size()
558 }
559
560 #[inline]
562 pub fn vmfunction_import_array_call(&self) -> u8 {
563 1 * self.pointer_size()
564 }
565
566 #[inline]
568 pub fn vmfunction_import_vmctx(&self) -> u8 {
569 2 * self.pointer_size()
570 }
571
572 #[inline]
574 pub fn size_of_vmfunction_import(&self) -> u8 {
575 3 * self.pointer_size()
576 }
577}
578
579impl<P: PtrSize> VMOffsets<P> {
581 pub fn size_of_vmfunction_body_ptr(&self) -> u8 {
583 1 * self.pointer_size()
584 }
585}
586
587impl<P: PtrSize> VMOffsets<P> {
589 #[inline]
591 pub fn vmtable_from(&self) -> u8 {
592 0 * self.pointer_size()
593 }
594
595 #[inline]
597 pub fn vmtable_vmctx(&self) -> u8 {
598 1 * self.pointer_size()
599 }
600
601 #[inline]
603 pub fn size_of_vmtable(&self) -> u8 {
604 2 * self.pointer_size()
605 }
606}
607
608impl<P: PtrSize> VMOffsets<P> {
610 #[inline]
612 pub fn vmtable_definition_base(&self) -> u8 {
613 0 * self.pointer_size()
614 }
615
616 pub fn vmtable_definition_current_elements(&self) -> u8 {
618 1 * self.pointer_size()
619 }
620
621 #[inline]
623 pub fn size_of_vmtable_definition_current_elements(&self) -> u8 {
624 self.pointer_size()
625 }
626
627 #[inline]
629 pub fn size_of_vmtable_definition(&self) -> u8 {
630 2 * self.pointer_size()
631 }
632}
633
634impl<P: PtrSize> VMOffsets<P> {
636 #[inline]
638 pub fn vmmemory_import_from(&self) -> u8 {
639 0 * self.pointer_size()
640 }
641
642 #[inline]
644 pub fn vmmemory_import_vmctx(&self) -> u8 {
645 1 * self.pointer_size()
646 }
647
648 #[inline]
650 pub fn size_of_vmmemory_import(&self) -> u8 {
651 3 * self.pointer_size()
652 }
653}
654
655impl<P: PtrSize> VMOffsets<P> {
657 #[inline]
659 pub fn vmglobal_import_from(&self) -> u8 {
660 0 * self.pointer_size()
661 }
662
663 #[inline]
665 pub fn size_of_vmglobal_import(&self) -> u8 {
666 1 * self.pointer_size()
667 }
668}
669
impl<P: PtrSize> VMOffsets<P> {
    /// The byte size of a `VMSharedTypeIndex`: a 32-bit index, independent
    /// of pointer width.
    #[inline]
    pub fn size_of_vmshared_type_index(&self) -> u8 {
        4
    }
}
678
679impl<P: PtrSize> VMOffsets<P> {
681 #[inline]
683 pub fn vmtag_import_from(&self) -> u8 {
684 0 * self.pointer_size()
685 }
686
687 #[inline]
689 pub fn size_of_vmtag_import(&self) -> u8 {
690 1 * self.pointer_size()
691 }
692}
693
impl<P: PtrSize> VMOffsets<P> {
    /// Offset where the imported-functions array begins.
    #[inline]
    pub fn vmctx_imported_functions_begin(&self) -> u32 {
        self.imported_functions
    }

    /// Offset where the imported-tables array begins.
    #[inline]
    pub fn vmctx_imported_tables_begin(&self) -> u32 {
        self.imported_tables
    }

    /// Offset where the imported-memories array begins.
    #[inline]
    pub fn vmctx_imported_memories_begin(&self) -> u32 {
        self.imported_memories
    }

    /// Offset where the imported-globals array begins.
    #[inline]
    pub fn vmctx_imported_globals_begin(&self) -> u32 {
        self.imported_globals
    }

    /// Offset where the imported-tags array begins.
    #[inline]
    pub fn vmctx_imported_tags_begin(&self) -> u32 {
        self.imported_tags
    }

    /// Offset where the defined-tables array begins.
    #[inline]
    pub fn vmctx_tables_begin(&self) -> u32 {
        self.defined_tables
    }

    /// Offset where the defined-memories (pointer) array begins.
    #[inline]
    pub fn vmctx_memories_begin(&self) -> u32 {
        self.defined_memories
    }

    /// Offset where the owned-memory definitions begin.
    #[inline]
    pub fn vmctx_owned_memories_begin(&self) -> u32 {
        self.owned_memories
    }

    /// Offset where the defined-globals array begins.
    #[inline]
    pub fn vmctx_globals_begin(&self) -> u32 {
        self.defined_globals
    }

    /// Offset where the defined-tags array begins.
    #[inline]
    pub fn vmctx_tags_begin(&self) -> u32 {
        self.defined_tags
    }

    /// Offset where the `VMFuncRef` array for escaped functions begins.
    #[inline]
    pub fn vmctx_func_refs_begin(&self) -> u32 {
        self.defined_func_refs
    }

    /// Total byte size of the `VMContext`.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }

    /// Offset of the `VMFunctionImport` for imported function `index`.
    /// Panics if `index` is not an imported function.
    #[inline]
    pub fn vmctx_vmfunction_import(&self, index: FuncIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_functions);
        self.vmctx_imported_functions_begin()
            + index.as_u32() * u32::from(self.size_of_vmfunction_import())
    }

    /// Offset of the `VMTable` import for table `index`.
    #[inline]
    pub fn vmctx_vmtable_import(&self, index: TableIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tables);
        self.vmctx_imported_tables_begin() + index.as_u32() * u32::from(self.size_of_vmtable())
    }

    /// Offset of the `VMMemoryImport` for memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_import(&self, index: MemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_memories);
        self.vmctx_imported_memories_begin()
            + index.as_u32() * u32::from(self.size_of_vmmemory_import())
    }

    /// Offset of the `VMGlobalImport` for global `index`.
    #[inline]
    pub fn vmctx_vmglobal_import(&self, index: GlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_globals);
        self.vmctx_imported_globals_begin()
            + index.as_u32() * u32::from(self.size_of_vmglobal_import())
    }

    /// Offset of the `VMTagImport` for tag `index`.
    #[inline]
    pub fn vmctx_vmtag_import(&self, index: TagIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tags);
        self.vmctx_imported_tags_begin() + index.as_u32() * u32::from(self.size_of_vmtag_import())
    }

    /// Offset of the `VMTableDefinition` for defined table `index`.
    #[inline]
    pub fn vmctx_vmtable_definition(&self, index: DefinedTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tables);
        self.vmctx_tables_begin() + index.as_u32() * u32::from(self.size_of_vmtable_definition())
    }

    /// Offset of the memory-definition *pointer* for defined memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_pointer(&self, index: DefinedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_memories);
        self.vmctx_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_pointer())
    }

    /// Offset of the inline `VMMemoryDefinition` for owned memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition(&self, index: OwnedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_owned_memories);
        self.vmctx_owned_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_definition())
    }

    /// Offset of the `VMGlobalDefinition` for defined global `index`.
    #[inline]
    pub fn vmctx_vmglobal_definition(&self, index: DefinedGlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_globals);
        self.vmctx_globals_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// Offset of the `VMTagDefinition` for defined tag `index`.
    #[inline]
    pub fn vmctx_vmtag_definition(&self, index: DefinedTagIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tags);
        self.vmctx_tags_begin() + index.as_u32() * u32::from(self.ptr.size_of_vmtag_definition())
    }

    /// Offset of the `VMFuncRef` for escaped function `index`. Panics on the
    /// reserved (sentinel) index or an out-of-range index.
    #[inline]
    pub fn vmctx_func_ref(&self, index: FuncRefIndex) -> u32 {
        assert!(!index.is_reserved_value());
        assert!(index.as_u32() < self.num_escaped_funcs);
        self.vmctx_func_refs_begin() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// Offset of the `wasm_call` field within the import for function
    /// `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_wasm_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_wasm_call())
    }

    /// Offset of the `array_call` field within the import for function
    /// `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_array_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_array_call())
    }

    /// Offset of the `vmctx` field within the import for function `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_vmctx(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_vmctx())
    }

    /// Offset of the `from` field within the table import for `index`.
    #[inline]
    pub fn vmctx_vmtable_from(&self, index: TableIndex) -> u32 {
        self.vmctx_vmtable_import(index) + u32::from(self.vmtable_from())
    }

    /// Offset of the `base` field within the table definition for `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_base(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_base())
    }

    /// Offset of the `current_elements` field within the table definition
    /// for `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_current_elements(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_current_elements())
    }

    /// Offset of the `from` field within the memory import for `index`.
    #[inline]
    pub fn vmctx_vmmemory_import_from(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_from())
    }

    /// Offset of the `vmctx` field within the memory import for `index`.
    #[inline]
    pub fn vmctx_vmmemory_import_vmctx(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_vmctx())
    }

    /// Offset of the `base` field within the owned memory definition for
    /// `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_base(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index) + u32::from(self.ptr.vmmemory_definition_base())
    }

    /// Offset of the `current_length` field within the owned memory
    /// definition for `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_current_length(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index)
            + u32::from(self.ptr.vmmemory_definition_current_length())
    }

    /// Offset of the `from` field within the global import for `index`.
    #[inline]
    pub fn vmctx_vmglobal_import_from(&self, index: GlobalIndex) -> u32 {
        self.vmctx_vmglobal_import(index) + u32::from(self.vmglobal_import_from())
    }

    /// Offset of the `from` field within the tag import for `index`.
    #[inline]
    pub fn vmctx_vmtag_import_from(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_from())
    }
}
927
impl<P: PtrSize> VMOffsets<P> {
    /// Offset of the reference count within a DRC GC object header.
    /// NOTE(review): the hard-coded 8 presumably skips a preceding 8-byte
    /// header word — confirm against the `VMDrcHeader` layout.
    #[inline]
    pub fn vm_drc_header_ref_count(&self) -> u32 {
        8
    }
}
938
939impl<P: PtrSize> VMOffsets<P> {
943 #[inline]
945 pub fn vm_gc_ref_activation_table_next(&self) -> u32 {
946 0
947 }
948
949 #[inline]
951 pub fn vm_gc_ref_activation_table_end(&self) -> u32 {
952 self.pointer_size().into()
953 }
954}
955
/// Magic value written at the start of every `VMContext` (the bytes `b"core"`
/// read little-endian), used as a runtime sanity check on context pointers.
pub const VMCONTEXT_MAGIC: u32 = u32::from_le_bytes(*b"core");

/// Magic value (the bytes `b"ACHF"` read little-endian) identifying an
/// array-call host function context, distinguishing it from a `VMContext`.
pub const VM_ARRAY_CALL_HOST_FUNC_MAGIC: u32 = u32::from_le_bytes(*b"ACHF");
966
#[cfg(test)]
mod tests {
    use crate::vmoffsets::align;

    /// `align` must round *up* to the nearest multiple of `width`.
    ///
    /// The previous version only asserted divisibility by 16, which a
    /// broken implementation returning a constant 0 would also satisfy;
    /// assert the exact expected values instead.
    #[test]
    fn alignment() {
        // Already-aligned inputs are unchanged.
        assert_eq!(align(0, 16), 0);
        assert_eq!(align(32, 16), 32);
        // Unaligned inputs round up, never down.
        assert_eq!(align(31, 16), 32);
        assert_eq!(align(33, 16), 48);
        // A non-16 width also works.
        assert_eq!(align(1, 8), 8);
    }
}