1use crate::{
41 DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, DefinedTagIndex, FuncIndex,
42 FuncRefIndex, GlobalIndex, MemoryIndex, Module, OwnedMemoryIndex, TableIndex, TagIndex,
43};
44use cranelift_entity::packed_option::ReservedValue;
45
/// Convert a `usize` to a `u32`, panicking on overflow.
///
/// On a 32-bit target `usize` is at most 32 bits wide, so the conversion
/// is infallible in practice; the `expect` documents the invariant and
/// keeps both cfg variants' failure behavior identical.
#[cfg(target_pointer_width = "32")]
fn cast_to_u32(sz: usize) -> u32 {
    u32::try_from(sz).expect("overflow in cast from usize to u32")
}
/// Convert a `usize` to a `u32`, panicking on overflow.
///
/// On a 64-bit target this can genuinely overflow for sizes above
/// `u32::MAX`; the message identifies the failing conversion.
#[cfg(target_pointer_width = "64")]
fn cast_to_u32(sz: usize) -> u32 {
    u32::try_from(sz).expect("overflow in cast from usize to u32")
}
54
/// Round `offset` up to the next multiple of `width`.
///
/// `width` must be non-zero; a value that is already a multiple of
/// `width` (including zero) is returned unchanged.
#[inline]
fn align(offset: u32, width: u32) -> u32 {
    // Bump past any partial chunk, then truncate back down to a multiple.
    let bumped = offset + (width - 1);
    bumped / width * width
}
60
/// Offsets into a module instance's `VMContext` and related structures,
/// parameterized over the target pointer size `P` (see `PtrSize`).
#[derive(Debug, Clone, Copy)]
pub struct VMOffsets<P> {
    /// The target's pointer-size abstraction.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of imported tags in the module.
    pub num_imported_tags: u32,
    /// The number of tables defined by the module.
    pub num_defined_tables: u32,
    /// The number of memories defined by the module.
    pub num_defined_memories: u32,
    /// The number of defined, non-shared memories — those whose
    /// `VMMemoryDefinition` lives inside this vmctx (see `VMOffsets::new`).
    pub num_owned_memories: u32,
    /// The number of globals defined by the module.
    pub num_defined_globals: u32,
    /// The number of tags defined by the module.
    pub num_defined_tags: u32,
    /// The number of escaped functions — each gets a `VMFuncRef` slot in
    /// the vmctx (see the `defined_func_refs` region below).
    pub num_escaped_funcs: u32,

    // Precomputed byte offsets (from the start of the vmctx) of each
    // variable-length region; filled in by `From<VMOffsetsFields>`.
    imported_functions: u32,
    imported_tables: u32,
    imported_memories: u32,
    imported_globals: u32,
    imported_tags: u32,
    defined_tables: u32,
    defined_memories: u32,
    owned_memories: u32,
    defined_globals: u32,
    defined_tags: u32,
    defined_func_refs: u32,
    // Total size of the vmctx in bytes (end of the last region).
    size: u32,
}
105
/// Abstraction over the pointer size of the *target* for which offsets
/// are computed; this may differ from the host's pointer size (e.g. when
/// cross-compiling).
pub trait PtrSize {
    /// The target's pointer size, in bytes.
    fn size(&self) -> u8;

    /// Byte offset of the runtime-limits pointer in a context header:
    /// the first pointer-aligned slot after a leading `u32` magic field.
    fn vmcontext_runtime_limits(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Byte offset of the builtin-functions pointer: the next
    /// pointer-sized slot after the runtime limits.
    fn vmcontext_builtin_functions(&self) -> u8 {
        self.vmcontext_runtime_limits() + self.size()
    }

    /// Offset of the `array_call` field of a `VMFuncRef`: the first of
    /// four consecutive pointer-sized fields.
    #[inline]
    fn vm_func_ref_array_call(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `wasm_call` field of a `VMFuncRef`.
    #[inline]
    fn vm_func_ref_wasm_call(&self) -> u8 {
        1 * self.size()
    }

    /// Offset of the `type_index` field of a `VMFuncRef`. It occupies a
    /// full pointer-sized slot even though a shared type index itself is
    /// 4 bytes (see `size_of_vmshared_type_index`).
    #[inline]
    fn vm_func_ref_type_index(&self) -> u8 {
        2 * self.size()
    }

    /// Offset of the `vmctx` field of a `VMFuncRef`.
    #[inline]
    fn vm_func_ref_vmctx(&self) -> u8 {
        3 * self.size()
    }

    /// Total byte size of a `VMFuncRef`: four pointer-sized fields.
    #[inline]
    fn size_of_vm_func_ref(&self) -> u8 {
        4 * self.size()
    }

    /// Byte size of a `VMGlobalDefinition`: 16 bytes regardless of
    /// pointer size — presumably sized for the largest value type (a
    /// 128-bit vector); confirm against the runtime struct definition.
    #[inline]
    fn size_of_vmglobal_definition(&self) -> u8 {
        16
    }

    /// Byte size of a `VMTagDefinition`: a single 32-bit value.
    #[inline]
    fn size_of_vmtag_definition(&self) -> u8 {
        4
    }

    /// Offset of the `fuel_consumed` field of the runtime limits:
    /// first field in the struct.
    #[inline]
    fn vmruntime_limits_fuel_consumed(&self) -> u8 {
        0
    }

    /// Offset of the `epoch_deadline` field: 8 bytes after
    /// `fuel_consumed` (which is therefore a 64-bit field).
    #[inline]
    fn vmruntime_limits_epoch_deadline(&self) -> u8 {
        self.vmruntime_limits_fuel_consumed() + 8
    }

    /// Offset of the `stack_limit` field: 8 bytes after the 64-bit
    /// epoch deadline.
    #[inline]
    fn vmruntime_limits_stack_limit(&self) -> u8 {
        self.vmruntime_limits_epoch_deadline() + 8
    }

    /// Offset of the `last_wasm_exit_fp` field: one pointer after
    /// `stack_limit` (so `stack_limit` is pointer-sized).
    fn vmruntime_limits_last_wasm_exit_fp(&self) -> u8 {
        self.vmruntime_limits_stack_limit() + self.size()
    }

    /// Offset of the `last_wasm_exit_pc` field.
    fn vmruntime_limits_last_wasm_exit_pc(&self) -> u8 {
        self.vmruntime_limits_last_wasm_exit_fp() + self.size()
    }

    /// Offset of the `last_wasm_entry_fp` field.
    fn vmruntime_limits_last_wasm_entry_fp(&self) -> u8 {
        self.vmruntime_limits_last_wasm_exit_pc() + self.size()
    }

    /// Offset of the `base` field of a `VMMemoryDefinition`: the first
    /// of two pointer-sized fields.
    #[inline]
    fn vmmemory_definition_base(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `current_length` field of a `VMMemoryDefinition`.
    #[inline]
    fn vmmemory_definition_current_length(&self) -> u8 {
        1 * self.size()
    }

    /// Total byte size of a `VMMemoryDefinition`: two pointers.
    #[inline]
    fn size_of_vmmemory_definition(&self) -> u8 {
        2 * self.size()
    }

    /// Byte size of a pointer to a `VMMemoryDefinition`.
    #[inline]
    fn size_of_vmmemory_pointer(&self) -> u8 {
        self.size()
    }

    /// Offset of the `func_ref` field of an array-call host function
    /// context: the first pointer-aligned slot after a leading 32-bit
    /// magic (see `VM_ARRAY_CALL_HOST_FUNC_MAGIC`).
    fn vmarray_call_host_func_context_func_ref(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the `magic` field of a `VMContext` — always zero, so
    /// the magic can be validated before anything else about the pointer
    /// is trusted.
    #[inline]
    fn vmctx_magic(&self) -> u8 {
        0
    }

    /// Offset of the runtime-limits pointer in the `VMContext`: one full
    /// pointer after `magic` (so the 4-byte magic is padded out to a
    /// pointer-sized slot on 64-bit targets).
    #[inline]
    fn vmctx_runtime_limits(&self) -> u8 {
        self.vmctx_magic() + self.size()
    }

    /// Offset of the builtin-functions pointer in the `VMContext`.
    #[inline]
    fn vmctx_builtin_functions(&self) -> u8 {
        self.vmctx_runtime_limits() + self.size()
    }

    /// Offset of the `callee` pointer in the `VMContext`.
    #[inline]
    fn vmctx_callee(&self) -> u8 {
        self.vmctx_builtin_functions() + self.size()
    }

    /// Offset of the epoch pointer in the `VMContext`.
    #[inline]
    fn vmctx_epoch_ptr(&self) -> u8 {
        self.vmctx_callee() + self.size()
    }

    /// Offset of the GC-heap base pointer in the `VMContext`.
    #[inline]
    fn vmctx_gc_heap_base(&self) -> u8 {
        self.vmctx_epoch_ptr() + self.size()
    }

    /// Offset of the GC-heap bound in the `VMContext`.
    #[inline]
    fn vmctx_gc_heap_bound(&self) -> u8 {
        self.vmctx_gc_heap_base() + self.size()
    }

    /// Offset of the GC-heap data pointer in the `VMContext`.
    #[inline]
    fn vmctx_gc_heap_data(&self) -> u8 {
        self.vmctx_gc_heap_bound() + self.size()
    }

    /// Offset of the type-ids array pointer in the `VMContext`.
    #[inline]
    fn vmctx_type_ids_array(&self) -> u8 {
        self.vmctx_gc_heap_data() + self.size()
    }

    /// Offset of the stack-chain pointer in the `VMContext`.
    #[inline]
    fn vmctx_stack_chain(&self) -> u8 {
        self.vmctx_type_ids_array() + self.size()
    }

    /// Offset at which the vmctx's dynamically-sized data (the regions
    /// laid out by `VMOffsets`) begins: immediately after the last
    /// static header field above.
    #[inline]
    fn vmctx_dynamic_data_start(&self) -> u8 {
        self.vmctx_stack_chain() + self.size()
    }
}
319
/// A `PtrSize` implementation that uses the host machine's own pointer
/// size (the common, non-cross-compiling case).
#[derive(Clone, Copy)]
pub struct HostPtr;
323
324impl PtrSize for HostPtr {
325 #[inline]
326 fn size(&self) -> u8 {
327 core::mem::size_of::<usize>() as u8
328 }
329}
330
/// A `PtrSize` whose pointer width is the `u8` value itself, for targets
/// whose pointer size is chosen dynamically.
impl PtrSize for u8 {
    #[inline]
    fn size(&self) -> u8 {
        *self
    }
}
337
/// The input counts used to construct a `VMOffsets`; see the identically
/// named public fields on `VMOffsets` for their meanings.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsetsFields<P> {
    /// The target's pointer-size abstraction.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of imported tags in the module.
    pub num_imported_tags: u32,
    /// The number of tables defined by the module.
    pub num_defined_tables: u32,
    /// The number of memories defined by the module.
    pub num_defined_memories: u32,
    /// The number of defined, non-shared memories owned by the instance.
    pub num_owned_memories: u32,
    /// The number of globals defined by the module.
    pub num_defined_globals: u32,
    /// The number of tags defined by the module.
    pub num_defined_tags: u32,
    /// The number of escaped functions needing a `VMFuncRef` slot.
    pub num_escaped_funcs: u32,
}
367
impl<P: PtrSize> VMOffsets<P> {
    /// Build the offsets for `module` with the given target pointer size.
    pub fn new(ptr: P, module: &Module) -> Self {
        // "Owned" memories are the defined, non-shared ones: they get a
        // full `VMMemoryDefinition` embedded in this vmctx rather than
        // only a pointer to one elsewhere.
        let num_owned_memories = module
            .memories
            .iter()
            .skip(module.num_imported_memories)
            .filter(|p| !p.1.shared)
            .count()
            .try_into()
            .unwrap();
        VMOffsets::from(VMOffsetsFields {
            ptr,
            num_imported_functions: cast_to_u32(module.num_imported_funcs),
            num_imported_tables: cast_to_u32(module.num_imported_tables),
            num_imported_memories: cast_to_u32(module.num_imported_memories),
            num_imported_globals: cast_to_u32(module.num_imported_globals),
            num_imported_tags: cast_to_u32(module.num_imported_tags),
            num_defined_tables: cast_to_u32(module.num_defined_tables()),
            num_defined_memories: cast_to_u32(module.num_defined_memories()),
            num_owned_memories,
            // Totals minus imports give the defined counts.
            num_defined_globals: cast_to_u32(module.globals.len() - module.num_imported_globals),
            num_defined_tags: cast_to_u32(module.tags.len() - module.num_imported_tags),
            num_escaped_funcs: cast_to_u32(module.num_escaped_funcs),
        })
    }

    /// The target's pointer size, in bytes.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// Returns an iterator of `(description, size-in-bytes)` for each
    /// region of the vmctx, ending with the static header
    /// (`"static vmctx data"`).
    pub fn region_sizes(&self) -> impl Iterator<Item = (&str, u32)> {
        // Each region's size is the gap between its start offset and the
        // start of the region after it, so the macro walks the regions in
        // reverse layout order, subtracting successive offsets from
        // `last` (which starts at the total vmctx size).
        macro_rules! calculate_sizes {
            ($($name:ident: $desc:tt,)*) => {{
                // Exhaustive destructure: adding a field to `VMOffsets`
                // forces this list to be revisited at compile time.
                let VMOffsets {
                    ptr: _,
                    num_imported_functions: _,
                    num_imported_tables: _,
                    num_imported_memories: _,
                    num_imported_globals: _,
                    num_imported_tags: _,
                    num_defined_tables: _,
                    num_defined_globals: _,
                    num_defined_memories: _,
                    num_defined_tags: _,
                    num_owned_memories: _,
                    num_escaped_funcs: _,

                    size,

                    $($name,)*
                } = *self;

                let mut last = size;
                $(
                    // Offsets must be monotonically non-increasing when
                    // visited in reverse layout order.
                    assert!($name <= last);
                    let tmp = $name;
                    let $name = last - $name;
                    last = tmp;
                )*
                // Whatever remains below the first region is the static
                // vmctx header, which is never empty.
                assert_ne!(last, 0);
                IntoIterator::into_iter([
                    $(($desc, $name),)*
                    ("static vmctx data", last),
                ])
            }};
        }

        // Listed in reverse layout order (the reverse of the `fields!`
        // invocation in `From<VMOffsetsFields>`).
        calculate_sizes! {
            defined_func_refs: "module functions",
            defined_tags: "defined tags",
            defined_globals: "defined globals",
            defined_tables: "defined tables",
            imported_tags: "imported tags",
            imported_globals: "imported globals",
            imported_tables: "imported tables",
            imported_functions: "imported functions",
            owned_memories: "owned memories",
            defined_memories: "defined memories",
            imported_memories: "imported memories",
        }
    }
}
465
impl<P: PtrSize> From<VMOffsetsFields<P>> for VMOffsets<P> {
    /// Lay out the vmctx: compute the byte offset of every
    /// variable-length region from the counts in `fields`, using checked
    /// arithmetic so a pathological module panics rather than producing a
    /// wrapped (and therefore wrong) layout.
    fn from(fields: VMOffsetsFields<P>) -> VMOffsets<P> {
        // Start with all offsets zeroed; they are filled in below.
        let mut ret = Self {
            ptr: fields.ptr,
            num_imported_functions: fields.num_imported_functions,
            num_imported_tables: fields.num_imported_tables,
            num_imported_memories: fields.num_imported_memories,
            num_imported_globals: fields.num_imported_globals,
            num_imported_tags: fields.num_imported_tags,
            num_defined_tables: fields.num_defined_tables,
            num_defined_memories: fields.num_defined_memories,
            num_owned_memories: fields.num_owned_memories,
            num_defined_globals: fields.num_defined_globals,
            num_defined_tags: fields.num_defined_tags,
            num_escaped_funcs: fields.num_escaped_funcs,
            imported_functions: 0,
            imported_tables: 0,
            imported_memories: 0,
            imported_globals: 0,
            imported_tags: 0,
            defined_tables: 0,
            defined_memories: 0,
            owned_memories: 0,
            defined_globals: 0,
            defined_tags: 0,
            defined_func_refs: 0,
            size: 0,
        };

        /// Checked addition; overflow here is a bug or a hostile module.
        #[inline]
        fn cadd(count: u32, size: u32) -> u32 {
            count.checked_add(size).unwrap()
        }

        /// Checked `count * size`, widening the element size to `u32`.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        // The dynamic regions start right after the static vmctx header.
        let mut next_field_offset = u32::from(ret.ptr.vmctx_dynamic_data_start());

        // `size(field) = n` records the running offset into `ret.field`
        // then advances it by `n`; `align(n)` rounds the running offset
        // up to a multiple of `n`.
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = cadd(next_field_offset, u32::from($size));
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        fields! {
            size(imported_memories)
                = cmul(ret.num_imported_memories, ret.size_of_vmmemory_import()),
            size(defined_memories)
                = cmul(ret.num_defined_memories, ret.ptr.size_of_vmmemory_pointer()),
            size(owned_memories)
                = cmul(ret.num_owned_memories, ret.ptr.size_of_vmmemory_definition()),
            size(imported_functions)
                = cmul(ret.num_imported_functions, ret.size_of_vmfunction_import()),
            size(imported_tables)
                = cmul(ret.num_imported_tables, ret.size_of_vmtable_import()),
            size(imported_globals)
                = cmul(ret.num_imported_globals, ret.size_of_vmglobal_import()),
            size(imported_tags)
                = cmul(ret.num_imported_tags, ret.size_of_vmtag_import()),
            size(defined_tables)
                = cmul(ret.num_defined_tables, ret.size_of_vmtable_definition()),
            // Global definitions are 16 bytes each (see
            // `size_of_vmglobal_definition`), so 16-align their region.
            align(16),
            size(defined_globals)
                = cmul(ret.num_defined_globals, ret.ptr.size_of_vmglobal_definition()),
            size(defined_tags)
                = cmul(ret.num_defined_tags, ret.ptr.size_of_vmtag_definition()),
            size(defined_func_refs) = cmul(
                ret.num_escaped_funcs,
                ret.ptr.size_of_vm_func_ref(),
            ),
        }

        // The running offset past the final region is the vmctx's size.
        ret.size = next_field_offset;

        return ret;
    }
}
557
558impl<P: PtrSize> VMOffsets<P> {
559 #[inline]
561 pub fn vmfunction_import_wasm_call(&self) -> u8 {
562 0 * self.pointer_size()
563 }
564
565 #[inline]
567 pub fn vmfunction_import_array_call(&self) -> u8 {
568 1 * self.pointer_size()
569 }
570
571 #[inline]
573 pub fn vmfunction_import_vmctx(&self) -> u8 {
574 2 * self.pointer_size()
575 }
576
577 #[inline]
579 pub fn size_of_vmfunction_import(&self) -> u8 {
580 3 * self.pointer_size()
581 }
582}
583
584impl<P: PtrSize> VMOffsets<P> {
586 pub fn size_of_vmfunction_body_ptr(&self) -> u8 {
588 1 * self.pointer_size()
589 }
590}
591
592impl<P: PtrSize> VMOffsets<P> {
594 #[inline]
596 pub fn vmtable_import_from(&self) -> u8 {
597 0 * self.pointer_size()
598 }
599
600 #[inline]
602 pub fn vmtable_import_vmctx(&self) -> u8 {
603 1 * self.pointer_size()
604 }
605
606 #[inline]
608 pub fn size_of_vmtable_import(&self) -> u8 {
609 2 * self.pointer_size()
610 }
611}
612
613impl<P: PtrSize> VMOffsets<P> {
615 #[inline]
617 pub fn vmtable_definition_base(&self) -> u8 {
618 0 * self.pointer_size()
619 }
620
621 pub fn vmtable_definition_current_elements(&self) -> u8 {
623 1 * self.pointer_size()
624 }
625
626 #[inline]
628 pub fn size_of_vmtable_definition_current_elements(&self) -> u8 {
629 self.pointer_size()
630 }
631
632 #[inline]
634 pub fn size_of_vmtable_definition(&self) -> u8 {
635 2 * self.pointer_size()
636 }
637}
638
639impl<P: PtrSize> VMOffsets<P> {
641 #[inline]
643 pub fn vmmemory_import_from(&self) -> u8 {
644 0 * self.pointer_size()
645 }
646
647 #[inline]
649 pub fn vmmemory_import_vmctx(&self) -> u8 {
650 1 * self.pointer_size()
651 }
652
653 #[inline]
655 pub fn size_of_vmmemory_import(&self) -> u8 {
656 3 * self.pointer_size()
657 }
658}
659
660impl<P: PtrSize> VMOffsets<P> {
662 #[inline]
664 pub fn vmglobal_import_from(&self) -> u8 {
665 0 * self.pointer_size()
666 }
667
668 #[inline]
670 pub fn size_of_vmglobal_import(&self) -> u8 {
671 1 * self.pointer_size()
672 }
673}
674
// Size of a `VMSharedTypeIndex`.
impl<P: PtrSize> VMOffsets<P> {
    /// The byte size of a `VMSharedTypeIndex`: a 32-bit index,
    /// independent of the target's pointer size.
    #[inline]
    pub fn size_of_vmshared_type_index(&self) -> u8 {
        4
    }
}
683
684impl<P: PtrSize> VMOffsets<P> {
686 #[inline]
688 pub fn vmtag_import_from(&self) -> u8 {
689 0 * self.pointer_size()
690 }
691
692 #[inline]
694 pub fn size_of_vmtag_import(&self) -> u8 {
695 1 * self.pointer_size()
696 }
697}
698
// Offsets of the variable-length regions within this module's vmctx, and
// of individual elements/fields within those regions.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset in the vmctx of the `VMFunctionImport` array.
    #[inline]
    pub fn vmctx_imported_functions_begin(&self) -> u32 {
        self.imported_functions
    }

    /// The offset in the vmctx of the `VMTableImport` array.
    #[inline]
    pub fn vmctx_imported_tables_begin(&self) -> u32 {
        self.imported_tables
    }

    /// The offset in the vmctx of the `VMMemoryImport` array.
    #[inline]
    pub fn vmctx_imported_memories_begin(&self) -> u32 {
        self.imported_memories
    }

    /// The offset in the vmctx of the `VMGlobalImport` array.
    #[inline]
    pub fn vmctx_imported_globals_begin(&self) -> u32 {
        self.imported_globals
    }

    /// The offset in the vmctx of the `VMTagImport` array.
    #[inline]
    pub fn vmctx_imported_tags_begin(&self) -> u32 {
        self.imported_tags
    }

    /// The offset in the vmctx of the defined-tables array
    /// (`VMTableDefinition` elements).
    #[inline]
    pub fn vmctx_tables_begin(&self) -> u32 {
        self.defined_tables
    }

    /// The offset in the vmctx of the defined-memories array (pointers
    /// to `VMMemoryDefinition`s).
    #[inline]
    pub fn vmctx_memories_begin(&self) -> u32 {
        self.defined_memories
    }

    /// The offset in the vmctx of the owned-memories array (inline
    /// `VMMemoryDefinition`s).
    #[inline]
    pub fn vmctx_owned_memories_begin(&self) -> u32 {
        self.owned_memories
    }

    /// The offset in the vmctx of the defined-globals array
    /// (`VMGlobalDefinition` elements, 16-byte aligned).
    #[inline]
    pub fn vmctx_globals_begin(&self) -> u32 {
        self.defined_globals
    }

    /// The offset in the vmctx of the defined-tags array
    /// (`VMTagDefinition` elements).
    #[inline]
    pub fn vmctx_tags_begin(&self) -> u32 {
        self.defined_tags
    }

    /// The offset in the vmctx of the `VMFuncRef` array for escaped
    /// functions.
    #[inline]
    pub fn vmctx_func_refs_begin(&self) -> u32 {
        self.defined_func_refs
    }

    /// The total byte size of this module's vmctx.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }

    /// The offset of the `VMFunctionImport` for imported function
    /// `index`; panics if `index` is not an imported function.
    #[inline]
    pub fn vmctx_vmfunction_import(&self, index: FuncIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_functions);
        self.vmctx_imported_functions_begin()
            + index.as_u32() * u32::from(self.size_of_vmfunction_import())
    }

    /// The offset of the `VMTableImport` for imported table `index`;
    /// panics if out of range.
    #[inline]
    pub fn vmctx_vmtable_import(&self, index: TableIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tables);
        self.vmctx_imported_tables_begin()
            + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// The offset of the `VMMemoryImport` for imported memory `index`;
    /// panics if out of range.
    #[inline]
    pub fn vmctx_vmmemory_import(&self, index: MemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_memories);
        self.vmctx_imported_memories_begin()
            + index.as_u32() * u32::from(self.size_of_vmmemory_import())
    }

    /// The offset of the `VMGlobalImport` for imported global `index`;
    /// panics if out of range.
    #[inline]
    pub fn vmctx_vmglobal_import(&self, index: GlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_globals);
        self.vmctx_imported_globals_begin()
            + index.as_u32() * u32::from(self.size_of_vmglobal_import())
    }

    /// The offset of the `VMTagImport` for imported tag `index`;
    /// panics if out of range.
    #[inline]
    pub fn vmctx_vmtag_import(&self, index: TagIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tags);
        self.vmctx_imported_tags_begin() + index.as_u32() * u32::from(self.size_of_vmtag_import())
    }

    /// The offset of the `VMTableDefinition` for defined table `index`;
    /// panics if out of range.
    #[inline]
    pub fn vmctx_vmtable_definition(&self, index: DefinedTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tables);
        self.vmctx_tables_begin() + index.as_u32() * u32::from(self.size_of_vmtable_definition())
    }

    /// The offset of the `VMMemoryDefinition` *pointer* for defined
    /// memory `index`; panics if out of range.
    #[inline]
    pub fn vmctx_vmmemory_pointer(&self, index: DefinedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_memories);
        self.vmctx_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_pointer())
    }

    /// The offset of the inline `VMMemoryDefinition` for owned memory
    /// `index`; panics if out of range.
    #[inline]
    pub fn vmctx_vmmemory_definition(&self, index: OwnedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_owned_memories);
        self.vmctx_owned_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_definition())
    }

    /// The offset of the `VMGlobalDefinition` for defined global
    /// `index`; panics if out of range.
    #[inline]
    pub fn vmctx_vmglobal_definition(&self, index: DefinedGlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_globals);
        self.vmctx_globals_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// The offset of the `VMTagDefinition` for defined tag `index`;
    /// panics if out of range.
    #[inline]
    pub fn vmctx_vmtag_definition(&self, index: DefinedTagIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tags);
        self.vmctx_tags_begin() + index.as_u32() * u32::from(self.ptr.size_of_vmtag_definition())
    }

    /// The offset of the `VMFuncRef` for escaped function `index`;
    /// panics on a reserved or out-of-range index.
    #[inline]
    pub fn vmctx_func_ref(&self, index: FuncRefIndex) -> u32 {
        assert!(!index.is_reserved_value());
        assert!(index.as_u32() < self.num_escaped_funcs);
        self.vmctx_func_refs_begin() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the `wasm_call` field of imported function `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_wasm_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_wasm_call())
    }

    /// The offset of the `array_call` field of imported function `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_array_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_array_call())
    }

    /// The offset of the `vmctx` field of imported function `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_vmctx(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_vmctx())
    }

    /// The offset of the `from` field of imported table `index`.
    #[inline]
    pub fn vmctx_vmtable_import_from(&self, index: TableIndex) -> u32 {
        self.vmctx_vmtable_import(index) + u32::from(self.vmtable_import_from())
    }

    /// The offset of the `base` field of defined table `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_base(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_base())
    }

    /// The offset of the `current_elements` field of defined table
    /// `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_current_elements(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_current_elements())
    }

    /// The offset of the `from` field of imported memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_import_from(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_from())
    }

    /// The offset of the `vmctx` field of imported memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_import_vmctx(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_vmctx())
    }

    /// The offset of the `base` field of owned memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_base(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index) + u32::from(self.ptr.vmmemory_definition_base())
    }

    /// The offset of the `current_length` field of owned memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_current_length(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index)
            + u32::from(self.ptr.vmmemory_definition_current_length())
    }

    /// The offset of the `from` field of imported global `index`.
    #[inline]
    pub fn vmctx_vmglobal_import_from(&self, index: GlobalIndex) -> u32 {
        self.vmctx_vmglobal_import(index) + u32::from(self.vmglobal_import_from())
    }

    /// The offset of the `from` field of imported tag `index`.
    #[inline]
    pub fn vmctx_vmtag_import_from(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_from())
    }
}
932
// Offsets within the DRC (deferred reference counting) GC object header.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset of the reference count within a DRC header. The fixed
    /// value 8 implies an 8-byte field precedes it — presumably a common
    /// GC object header; confirm against the GC runtime's struct layout.
    #[inline]
    pub fn vm_drc_header_ref_count(&self) -> u32 {
        8
    }
}
943
944impl<P: PtrSize> VMOffsets<P> {
948 #[inline]
950 pub fn vm_gc_ref_activation_table_next(&self) -> u32 {
951 0
952 }
953
954 #[inline]
956 pub fn vm_gc_ref_activation_table_end(&self) -> u32 {
957 self.pointer_size().into()
958 }
959}
960
/// Magic value stored in the `magic` field at vmctx offset 0 (see
/// `PtrSize::vmctx_magic`); spells `"core"` when read as little-endian
/// bytes, letting host code sanity-check a purported vmctx pointer.
pub const VMCONTEXT_MAGIC: u32 = u32::from_le_bytes(*b"core");

/// Magic value (`"ACHF"` little-endian) for an array-call host function
/// context, distinguishing it from a regular `VMContext`; the context's
/// func-ref field follows it (see
/// `PtrSize::vmarray_call_host_func_context_func_ref`).
pub const VM_ARRAY_CALL_HOST_FUNC_MAGIC: u32 = u32::from_le_bytes(*b"ACHF");
971
#[cfg(test)]
mod tests {
    use crate::vmoffsets::align;

    /// `align` must round up to the next multiple of `width` and leave
    /// already-aligned values (including zero) unchanged.
    #[test]
    fn alignment() {
        // Assert exact values: the previous `x % 16 == 0` predicate was
        // too weak — it would have accepted any multiple of 16, e.g. a
        // buggy `align(33, 16) == 16`.
        assert_eq!(align(0, 16), 0);
        assert_eq!(align(32, 16), 32);
        assert_eq!(align(33, 16), 48);
        assert_eq!(align(31, 16), 32);
        // Widths other than 16 behave the same way.
        assert_eq!(align(1, 8), 8);
        assert_eq!(align(24, 8), 24);
    }
}