1use crate::{
40 DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, DefinedTagIndex, FuncIndex,
41 FuncRefIndex, GlobalIndex, MemoryIndex, Module, OwnedMemoryIndex, TableIndex, TagIndex,
42};
43use cranelift_entity::packed_option::ReservedValue;
44
/// Convert a `usize` count of bytes into the `u32` space used by
/// `VMOffsets`.
///
/// On 32-bit targets a `usize` always fits in a `u32`, so this conversion
/// is infallible in practice.
#[cfg(target_pointer_width = "32")]
fn cast_to_u32(sz: usize) -> u32 {
    match u32::try_from(sz) {
        Ok(n) => n,
        // A 32-bit `usize` always fits in `u32`.
        Err(e) => panic!("{:?}", e),
    }
}

/// Convert a `usize` count of bytes into the `u32` space used by
/// `VMOffsets`, panicking if the value exceeds `u32::MAX` (possible on
/// 64-bit targets).
#[cfg(target_pointer_width = "64")]
fn cast_to_u32(sz: usize) -> u32 {
    let converted = u32::try_from(sz);
    converted.expect("overflow in cast from usize to u32")
}
53
/// Round `offset` up to the nearest multiple of `width`.
///
/// `width` must be non-zero, and `offset + (width - 1)` must not overflow
/// `u32` for the result to be meaningful.
#[inline]
fn align(offset: u32, width: u32) -> u32 {
    let bumped = offset + (width - 1);
    (bumped / width) * width
}
59
/// Precomputed offsets of the regions of a `VMContext` for a particular
/// module.
///
/// The public `num_*` fields record how many entities of each kind the
/// module declares; the private fields record the byte offset at which each
/// entity array begins (filled in by the `From<VMOffsetsFields>` impl).
/// `P` abstracts over the target's pointer size.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsets<P> {
    /// The target pointer size (see `PtrSize`).
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of imported tags in the module.
    pub num_imported_tags: u32,
    /// The number of defined tables in the module.
    pub num_defined_tables: u32,
    /// The number of defined memories in the module.
    pub num_defined_memories: u32,
    /// The number of memories owned by the module instance (defined and
    /// not shared).
    pub num_owned_memories: u32,
    /// The number of defined globals in the module.
    pub num_defined_globals: u32,
    /// The number of defined tags in the module.
    pub num_defined_tags: u32,
    /// The number of escaped functions in the module — those needing a
    /// `VMFuncRef` slot.
    pub num_escaped_funcs: u32,

    // Precalculated region start offsets, in `VMContext` layout order.
    // Computed by `From<VMOffsetsFields>`; consumed by the `vmctx_*_begin`
    // accessors and `region_sizes`.
    imported_functions: u32,
    imported_tables: u32,
    imported_memories: u32,
    imported_globals: u32,
    imported_tags: u32,
    defined_tables: u32,
    defined_memories: u32,
    owned_memories: u32,
    defined_globals: u32,
    defined_tags: u32,
    defined_func_refs: u32,
    // Total size, in bytes, of the whole `VMContext` allocation.
    size: u32,
}
104
/// Trait for abstracting over the target's pointer width: implemented by
/// `HostPtr` (native width) and by `u8` (an explicitly-specified width,
/// e.g. for cross-compilation). All offset arithmetic below is driven by
/// `size()`.
pub trait PtrSize {
    /// The size, in bytes, of a pointer on the target.
    fn size(&self) -> u8;

    /// Offset of the store-context pointer: a leading `u32` rounded up to
    /// pointer alignment.
    /// NOTE(review): presumably the layout of a host-func-context-style
    /// header (cf. `vmarray_call_host_func_context_func_ref`) — confirm
    /// against the runtime's context struct definitions.
    fn vmcontext_store_context(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the builtin-functions pointer, one pointer after the
    /// store context.
    fn vmcontext_builtin_functions(&self) -> u8 {
        self.vmcontext_store_context() + self.size()
    }

    /// Offset of the `array_call` field in a `VMFuncRef` (pointer slot 0).
    #[inline]
    fn vm_func_ref_array_call(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `wasm_call` field in a `VMFuncRef` (pointer slot 1).
    #[inline]
    fn vm_func_ref_wasm_call(&self) -> u8 {
        1 * self.size()
    }

    /// Offset of the `type_index` field in a `VMFuncRef` (pointer slot 2).
    #[inline]
    fn vm_func_ref_type_index(&self) -> u8 {
        2 * self.size()
    }

    /// Offset of the `vmctx` field in a `VMFuncRef` (pointer slot 3).
    #[inline]
    fn vm_func_ref_vmctx(&self) -> u8 {
        3 * self.size()
    }

    /// Total size of a `VMFuncRef`: four pointer-sized slots.
    #[inline]
    fn size_of_vm_func_ref(&self) -> u8 {
        4 * self.size()
    }

    /// Size (and alignment) of a `VMGlobalDefinition`.
    /// NOTE(review): hard-coded 16 — presumably sized to hold a v128 value;
    /// confirm against the runtime's global definition type.
    #[inline]
    fn size_of_vmglobal_definition(&self) -> u8 {
        16
    }

    /// Size of a `VMTagDefinition`.
    /// NOTE(review): hard-coded 4 — presumably a `u32`-backed value;
    /// confirm against the runtime's tag definition type.
    #[inline]
    fn size_of_vmtag_definition(&self) -> u8 {
        4
    }

    // Offsets within the `VMStoreContext` structure. The first three fields
    // are fixed 8-byte quantities; the remainder are pointer-sized.

    /// Offset of the `fuel_consumed` field (a `u64`) in `VMStoreContext`.
    #[inline]
    fn vmstore_context_fuel_consumed(&self) -> u8 {
        0
    }

    /// Offset of the `epoch_deadline` field (a `u64`) in `VMStoreContext`.
    #[inline]
    fn vmstore_context_epoch_deadline(&self) -> u8 {
        self.vmstore_context_fuel_consumed() + 8
    }

    /// Offset of the `stack_limit` field in `VMStoreContext`.
    #[inline]
    fn vmstore_context_stack_limit(&self) -> u8 {
        self.vmstore_context_epoch_deadline() + 8
    }

    /// Offset of the `last_wasm_exit_fp` field in `VMStoreContext`.
    fn vmstore_context_last_wasm_exit_fp(&self) -> u8 {
        self.vmstore_context_stack_limit() + self.size()
    }

    /// Offset of the `last_wasm_exit_pc` field in `VMStoreContext`.
    fn vmstore_context_last_wasm_exit_pc(&self) -> u8 {
        self.vmstore_context_last_wasm_exit_fp() + self.size()
    }

    /// Offset of the `last_wasm_entry_fp` field in `VMStoreContext`.
    fn vmstore_context_last_wasm_entry_fp(&self) -> u8 {
        self.vmstore_context_last_wasm_exit_pc() + self.size()
    }

    // Offsets within the `VMMemoryDefinition` structure: two pointer-sized
    // fields, `base` then `current_length`.

    /// Offset of the `base` field in `VMMemoryDefinition`.
    #[inline]
    fn vmmemory_definition_base(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `current_length` field in `VMMemoryDefinition`.
    #[inline]
    fn vmmemory_definition_current_length(&self) -> u8 {
        1 * self.size()
    }

    /// Total size of a `VMMemoryDefinition`.
    #[inline]
    fn size_of_vmmemory_definition(&self) -> u8 {
        2 * self.size()
    }

    /// Size of a pointer to a `VMMemoryDefinition`.
    #[inline]
    fn size_of_vmmemory_pointer(&self) -> u8 {
        self.size()
    }

    /// Offset of the `func_ref` field in `VMArrayCallHostFuncContext`:
    /// a `u32` magic header rounded up to pointer alignment.
    fn vmarray_call_host_func_context_func_ref(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    // Offsets of the fixed-position fields at the start of a `VMContext`.
    // The `magic` is a `u32` but occupies a full pointer-sized slot so that
    // every subsequent field stays pointer-aligned.

    /// Offset of the `magic` field (see `VMCONTEXT_MAGIC`).
    #[inline]
    fn vmctx_magic(&self) -> u8 {
        0
    }

    /// Offset of the store-context pointer in `VMContext`.
    #[inline]
    fn vmctx_store_context(&self) -> u8 {
        self.vmctx_magic() + self.size()
    }

    /// Offset of the builtin-functions pointer in `VMContext`.
    #[inline]
    fn vmctx_builtin_functions(&self) -> u8 {
        self.vmctx_store_context() + self.size()
    }

    /// Offset of the `callee` pointer in `VMContext`.
    #[inline]
    fn vmctx_callee(&self) -> u8 {
        self.vmctx_builtin_functions() + self.size()
    }

    /// Offset of the epoch pointer in `VMContext`.
    #[inline]
    fn vmctx_epoch_ptr(&self) -> u8 {
        self.vmctx_callee() + self.size()
    }

    /// Offset of the GC heap base pointer in `VMContext`.
    #[inline]
    fn vmctx_gc_heap_base(&self) -> u8 {
        self.vmctx_epoch_ptr() + self.size()
    }

    /// Offset of the GC heap bound in `VMContext`.
    #[inline]
    fn vmctx_gc_heap_bound(&self) -> u8 {
        self.vmctx_gc_heap_base() + self.size()
    }

    /// Offset of the GC heap data pointer in `VMContext`.
    #[inline]
    fn vmctx_gc_heap_data(&self) -> u8 {
        self.vmctx_gc_heap_bound() + self.size()
    }

    /// Offset of the type-ids array pointer in `VMContext`.
    #[inline]
    fn vmctx_type_ids_array(&self) -> u8 {
        self.vmctx_gc_heap_data() + self.size()
    }

    /// Offset at which the per-module dynamic data (the entity arrays laid
    /// out by `VMOffsets`) begins, immediately after the static header.
    #[inline]
    fn vmctx_dynamic_data_start(&self) -> u8 {
        self.vmctx_type_ids_array() + self.size()
    }
}
308
309#[derive(Clone, Copy)]
311pub struct HostPtr;
312
313impl PtrSize for HostPtr {
314 #[inline]
315 fn size(&self) -> u8 {
316 core::mem::size_of::<usize>() as u8
317 }
318}
319
320impl PtrSize for u8 {
321 #[inline]
322 fn size(&self) -> u8 {
323 *self
324 }
325}
326
/// The input counts used to build a `VMOffsets`: how many entities of each
/// kind a module declares, plus the target pointer size.
///
/// Converted into a fully-computed `VMOffsets` via `From`.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsetsFields<P> {
    /// The target pointer size (see `PtrSize`).
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of imported tags in the module.
    pub num_imported_tags: u32,
    /// The number of defined tables in the module.
    pub num_defined_tables: u32,
    /// The number of defined memories in the module.
    pub num_defined_memories: u32,
    /// The number of memories owned by the module instance.
    pub num_owned_memories: u32,
    /// The number of defined globals in the module.
    pub num_defined_globals: u32,
    /// The number of defined tags in the module.
    pub num_defined_tags: u32,
    /// The number of escaped functions in the module needing a `VMFuncRef`.
    pub num_escaped_funcs: u32,
}
356
impl<P: PtrSize> VMOffsets<P> {
    /// Return a new `VMOffsets` for the given pointer size, with entity
    /// counts taken from `module`.
    pub fn new(ptr: P, module: &Module) -> Self {
        // Owned memories are the defined (non-imported), non-shared ones;
        // shared memories live outside this instance's `VMContext`.
        // NOTE(review): `p.1` is the memory descriptor of the (index, entry)
        // pair yielded by `memories.iter()` — confirm the `shared` field
        // against the `Module` definition.
        let num_owned_memories = module
            .memories
            .iter()
            .skip(module.num_imported_memories)
            .filter(|p| !p.1.shared)
            .count()
            .try_into()
            .unwrap();
        VMOffsets::from(VMOffsetsFields {
            ptr,
            num_imported_functions: cast_to_u32(module.num_imported_funcs),
            num_imported_tables: cast_to_u32(module.num_imported_tables),
            num_imported_memories: cast_to_u32(module.num_imported_memories),
            num_imported_globals: cast_to_u32(module.num_imported_globals),
            num_imported_tags: cast_to_u32(module.num_imported_tags),
            num_defined_tables: cast_to_u32(module.num_defined_tables()),
            num_defined_memories: cast_to_u32(module.num_defined_memories()),
            num_owned_memories,
            num_defined_globals: cast_to_u32(module.globals.len() - module.num_imported_globals),
            num_defined_tags: cast_to_u32(module.tags.len() - module.num_imported_tags),
            num_escaped_funcs: cast_to_u32(module.num_escaped_funcs),
        })
    }

    /// Returns the size, in bytes, of the target's pointer type.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// Returns an iterator of `(description, size-in-bytes)` pairs for each
    /// region of the `VMContext`, in layout order, ending with the static
    /// header's size. Intended for debugging/statistics.
    pub fn region_sizes(&self) -> impl Iterator<Item = (&str, u32)> {
        macro_rules! calculate_sizes {
            ($($name:ident: $desc:tt,)*) => {{
                // Exhaustively destructure `self` so that adding a field to
                // `VMOffsets` without updating this method is a compile
                // error.
                let VMOffsets {
                    ptr: _,
                    num_imported_functions: _,
                    num_imported_tables: _,
                    num_imported_memories: _,
                    num_imported_globals: _,
                    num_imported_tags: _,
                    num_defined_tables: _,
                    num_defined_globals: _,
                    num_defined_memories: _,
                    num_defined_tags: _,
                    num_owned_memories: _,
                    num_escaped_funcs: _,

                    size,

                    $($name,)*
                } = *self;

                // Walk the regions from the end of the context backwards:
                // each region's size is the distance from its start offset
                // to the start of the region after it (`last`). The macro
                // arguments below are therefore listed in REVERSE layout
                // order.
                let mut last = size;
                $(
                    assert!($name <= last);
                    let tmp = $name;
                    let $name = last - $name;
                    last = tmp;
                )*
                // Whatever precedes the first region is the static header,
                // which is never empty.
                assert_ne!(last, 0);
                IntoIterator::into_iter([
                    $(($desc, $name),)*
                    ("static vmctx data", last),
                ])
            }};
        }

        calculate_sizes! {
            defined_func_refs: "module functions",
            defined_tags: "defined tags",
            defined_globals: "defined globals",
            defined_tables: "defined tables",
            imported_tags: "imported tags",
            imported_globals: "imported globals",
            imported_tables: "imported tables",
            imported_functions: "imported functions",
            owned_memories: "owned memories",
            defined_memories: "defined memories",
            imported_memories: "imported memories",
        }
    }
}
454
455impl<P: PtrSize> From<VMOffsetsFields<P>> for VMOffsets<P> {
456 fn from(fields: VMOffsetsFields<P>) -> VMOffsets<P> {
457 let mut ret = Self {
458 ptr: fields.ptr,
459 num_imported_functions: fields.num_imported_functions,
460 num_imported_tables: fields.num_imported_tables,
461 num_imported_memories: fields.num_imported_memories,
462 num_imported_globals: fields.num_imported_globals,
463 num_imported_tags: fields.num_imported_tags,
464 num_defined_tables: fields.num_defined_tables,
465 num_defined_memories: fields.num_defined_memories,
466 num_owned_memories: fields.num_owned_memories,
467 num_defined_globals: fields.num_defined_globals,
468 num_defined_tags: fields.num_defined_tags,
469 num_escaped_funcs: fields.num_escaped_funcs,
470 imported_functions: 0,
471 imported_tables: 0,
472 imported_memories: 0,
473 imported_globals: 0,
474 imported_tags: 0,
475 defined_tables: 0,
476 defined_memories: 0,
477 owned_memories: 0,
478 defined_globals: 0,
479 defined_tags: 0,
480 defined_func_refs: 0,
481 size: 0,
482 };
483
484 #[inline]
489 fn cadd(count: u32, size: u32) -> u32 {
490 count.checked_add(size).unwrap()
491 }
492
493 #[inline]
494 fn cmul(count: u32, size: u8) -> u32 {
495 count.checked_mul(u32::from(size)).unwrap()
496 }
497
498 let mut next_field_offset = u32::from(ret.ptr.vmctx_dynamic_data_start());
499
500 macro_rules! fields {
501 (size($field:ident) = $size:expr, $($rest:tt)*) => {
502 ret.$field = next_field_offset;
503 next_field_offset = cadd(next_field_offset, u32::from($size));
504 fields!($($rest)*);
505 };
506 (align($align:expr), $($rest:tt)*) => {
507 next_field_offset = align(next_field_offset, $align);
508 fields!($($rest)*);
509 };
510 () => {};
511 }
512
513 fields! {
514 size(imported_memories)
515 = cmul(ret.num_imported_memories, ret.size_of_vmmemory_import()),
516 size(defined_memories)
517 = cmul(ret.num_defined_memories, ret.ptr.size_of_vmmemory_pointer()),
518 size(owned_memories)
519 = cmul(ret.num_owned_memories, ret.ptr.size_of_vmmemory_definition()),
520 size(imported_functions)
521 = cmul(ret.num_imported_functions, ret.size_of_vmfunction_import()),
522 size(imported_tables)
523 = cmul(ret.num_imported_tables, ret.size_of_vmtable()),
524 size(imported_globals)
525 = cmul(ret.num_imported_globals, ret.size_of_vmglobal_import()),
526 size(imported_tags)
527 = cmul(ret.num_imported_tags, ret.size_of_vmtag_import()),
528 size(defined_tables)
529 = cmul(ret.num_defined_tables, ret.size_of_vmtable_definition()),
530 align(16),
531 size(defined_globals)
532 = cmul(ret.num_defined_globals, ret.ptr.size_of_vmglobal_definition()),
533 size(defined_tags)
534 = cmul(ret.num_defined_tags, ret.ptr.size_of_vmtag_definition()),
535 size(defined_func_refs) = cmul(
536 ret.num_escaped_funcs,
537 ret.ptr.size_of_vm_func_ref(),
538 ),
539 }
540
541 ret.size = next_field_offset;
542
543 return ret;
544 }
545}
546
547impl<P: PtrSize> VMOffsets<P> {
548 #[inline]
550 pub fn vmfunction_import_wasm_call(&self) -> u8 {
551 0 * self.pointer_size()
552 }
553
554 #[inline]
556 pub fn vmfunction_import_array_call(&self) -> u8 {
557 1 * self.pointer_size()
558 }
559
560 #[inline]
562 pub fn vmfunction_import_vmctx(&self) -> u8 {
563 2 * self.pointer_size()
564 }
565
566 #[inline]
568 pub fn size_of_vmfunction_import(&self) -> u8 {
569 3 * self.pointer_size()
570 }
571}
572
573impl<P: PtrSize> VMOffsets<P> {
575 pub fn size_of_vmfunction_body_ptr(&self) -> u8 {
577 1 * self.pointer_size()
578 }
579}
580
581impl<P: PtrSize> VMOffsets<P> {
583 #[inline]
585 pub fn vmtable_from(&self) -> u8 {
586 0 * self.pointer_size()
587 }
588
589 #[inline]
591 pub fn vmtable_vmctx(&self) -> u8 {
592 1 * self.pointer_size()
593 }
594
595 #[inline]
597 pub fn size_of_vmtable(&self) -> u8 {
598 2 * self.pointer_size()
599 }
600}
601
602impl<P: PtrSize> VMOffsets<P> {
604 #[inline]
606 pub fn vmtable_definition_base(&self) -> u8 {
607 0 * self.pointer_size()
608 }
609
610 pub fn vmtable_definition_current_elements(&self) -> u8 {
612 1 * self.pointer_size()
613 }
614
615 #[inline]
617 pub fn size_of_vmtable_definition_current_elements(&self) -> u8 {
618 self.pointer_size()
619 }
620
621 #[inline]
623 pub fn size_of_vmtable_definition(&self) -> u8 {
624 2 * self.pointer_size()
625 }
626}
627
628impl<P: PtrSize> VMOffsets<P> {
630 #[inline]
632 pub fn vmmemory_import_from(&self) -> u8 {
633 0 * self.pointer_size()
634 }
635
636 #[inline]
638 pub fn vmmemory_import_vmctx(&self) -> u8 {
639 1 * self.pointer_size()
640 }
641
642 #[inline]
644 pub fn size_of_vmmemory_import(&self) -> u8 {
645 3 * self.pointer_size()
646 }
647}
648
649impl<P: PtrSize> VMOffsets<P> {
651 #[inline]
653 pub fn vmglobal_import_from(&self) -> u8 {
654 0 * self.pointer_size()
655 }
656
657 #[inline]
659 pub fn size_of_vmglobal_import(&self) -> u8 {
660 1 * self.pointer_size()
661 }
662}
663
impl<P: PtrSize> VMOffsets<P> {
    /// Return the size of a `VMSharedTypeIndex`.
    ///
    /// NOTE(review): hard-coded 4 — presumably a `u32`-backed index type
    /// declared elsewhere; keep in sync if that type ever changes.
    #[inline]
    pub fn size_of_vmshared_type_index(&self) -> u8 {
        4
    }
}
672
673impl<P: PtrSize> VMOffsets<P> {
675 #[inline]
677 pub fn vmtag_import_from(&self) -> u8 {
678 0 * self.pointer_size()
679 }
680
681 #[inline]
683 pub fn size_of_vmtag_import(&self) -> u8 {
684 1 * self.pointer_size()
685 }
686}
687
/// Offsets within the `VMContext` itself: where each entity region begins
/// and where an individual entity within a region lives. All offsets are
/// relative to the start of the `VMContext`.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset of the `VMFunctionImport`s array.
    #[inline]
    pub fn vmctx_imported_functions_begin(&self) -> u32 {
        self.imported_functions
    }

    /// The offset of the `VMTable` imports array.
    #[inline]
    pub fn vmctx_imported_tables_begin(&self) -> u32 {
        self.imported_tables
    }

    /// The offset of the `VMMemoryImport`s array.
    #[inline]
    pub fn vmctx_imported_memories_begin(&self) -> u32 {
        self.imported_memories
    }

    /// The offset of the `VMGlobalImport`s array.
    #[inline]
    pub fn vmctx_imported_globals_begin(&self) -> u32 {
        self.imported_globals
    }

    /// The offset of the `VMTagImport`s array.
    #[inline]
    pub fn vmctx_imported_tags_begin(&self) -> u32 {
        self.imported_tags
    }

    /// The offset of the `VMTableDefinition`s array.
    #[inline]
    pub fn vmctx_tables_begin(&self) -> u32 {
        self.defined_tables
    }

    /// The offset of the memory-definition pointers array.
    #[inline]
    pub fn vmctx_memories_begin(&self) -> u32 {
        self.defined_memories
    }

    /// The offset of the owned `VMMemoryDefinition`s array.
    #[inline]
    pub fn vmctx_owned_memories_begin(&self) -> u32 {
        self.owned_memories
    }

    /// The offset of the `VMGlobalDefinition`s array.
    #[inline]
    pub fn vmctx_globals_begin(&self) -> u32 {
        self.defined_globals
    }

    /// The offset of the `VMTagDefinition`s array.
    #[inline]
    pub fn vmctx_tags_begin(&self) -> u32 {
        self.defined_tags
    }

    /// The offset of the `VMFuncRef`s array for escaped functions.
    #[inline]
    pub fn vmctx_func_refs_begin(&self) -> u32 {
        self.defined_func_refs
    }

    /// The total size, in bytes, of the `VMContext` allocation.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }

    // Per-entity offsets: region start + index * per-entity size. Each
    // asserts the index is within the module's declared count.

    /// The offset of the `VMFunctionImport` for `index`.
    #[inline]
    pub fn vmctx_vmfunction_import(&self, index: FuncIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_functions);
        self.vmctx_imported_functions_begin()
            + index.as_u32() * u32::from(self.size_of_vmfunction_import())
    }

    /// The offset of the `VMTable` import for `index`.
    #[inline]
    pub fn vmctx_vmtable_import(&self, index: TableIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tables);
        self.vmctx_imported_tables_begin() + index.as_u32() * u32::from(self.size_of_vmtable())
    }

    /// The offset of the `VMMemoryImport` for `index`.
    #[inline]
    pub fn vmctx_vmmemory_import(&self, index: MemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_memories);
        self.vmctx_imported_memories_begin()
            + index.as_u32() * u32::from(self.size_of_vmmemory_import())
    }

    /// The offset of the `VMGlobalImport` for `index`.
    #[inline]
    pub fn vmctx_vmglobal_import(&self, index: GlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_globals);
        self.vmctx_imported_globals_begin()
            + index.as_u32() * u32::from(self.size_of_vmglobal_import())
    }

    /// The offset of the `VMTagImport` for `index`.
    #[inline]
    pub fn vmctx_vmtag_import(&self, index: TagIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tags);
        self.vmctx_imported_tags_begin() + index.as_u32() * u32::from(self.size_of_vmtag_import())
    }

    /// The offset of the `VMTableDefinition` for defined table `index`.
    #[inline]
    pub fn vmctx_vmtable_definition(&self, index: DefinedTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tables);
        self.vmctx_tables_begin() + index.as_u32() * u32::from(self.size_of_vmtable_definition())
    }

    /// The offset of the memory-definition pointer for defined memory
    /// `index`.
    #[inline]
    pub fn vmctx_vmmemory_pointer(&self, index: DefinedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_memories);
        self.vmctx_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_pointer())
    }

    /// The offset of the owned `VMMemoryDefinition` for `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition(&self, index: OwnedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_owned_memories);
        self.vmctx_owned_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_definition())
    }

    /// The offset of the `VMGlobalDefinition` for defined global `index`.
    #[inline]
    pub fn vmctx_vmglobal_definition(&self, index: DefinedGlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_globals);
        self.vmctx_globals_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// The offset of the `VMTagDefinition` for defined tag `index`.
    #[inline]
    pub fn vmctx_vmtag_definition(&self, index: DefinedTagIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tags);
        self.vmctx_tags_begin() + index.as_u32() * u32::from(self.ptr.size_of_vmtag_definition())
    }

    /// The offset of the `VMFuncRef` for escaped function `index`.
    /// Panics on the reserved (sentinel) index.
    #[inline]
    pub fn vmctx_func_ref(&self, index: FuncRefIndex) -> u32 {
        assert!(!index.is_reserved_value());
        assert!(index.as_u32() < self.num_escaped_funcs);
        self.vmctx_func_refs_begin() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    // Composite accessors: entity offset + field offset within the entity.

    /// The offset of the `wasm_call` field in the `VMFunctionImport` for
    /// `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_wasm_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_wasm_call())
    }

    /// The offset of the `array_call` field in the `VMFunctionImport` for
    /// `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_array_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_array_call())
    }

    /// The offset of the `vmctx` field in the `VMFunctionImport` for
    /// `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_vmctx(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_vmctx())
    }

    /// The offset of the `from` field in the `VMTable` import for `index`.
    #[inline]
    pub fn vmctx_vmtable_from(&self, index: TableIndex) -> u32 {
        self.vmctx_vmtable_import(index) + u32::from(self.vmtable_from())
    }

    /// The offset of the `base` field in the `VMTableDefinition` for
    /// `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_base(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_base())
    }

    /// The offset of the `current_elements` field in the
    /// `VMTableDefinition` for `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_current_elements(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_current_elements())
    }

    /// The offset of the `from` field in the `VMMemoryImport` for `index`.
    #[inline]
    pub fn vmctx_vmmemory_import_from(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_from())
    }

    /// The offset of the `vmctx` field in the `VMMemoryImport` for `index`.
    #[inline]
    pub fn vmctx_vmmemory_import_vmctx(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_vmctx())
    }

    /// The offset of the `base` field in the owned `VMMemoryDefinition`
    /// for `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_base(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index) + u32::from(self.ptr.vmmemory_definition_base())
    }

    /// The offset of the `current_length` field in the owned
    /// `VMMemoryDefinition` for `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_current_length(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index)
            + u32::from(self.ptr.vmmemory_definition_current_length())
    }

    /// The offset of the `from` field in the `VMGlobalImport` for `index`.
    #[inline]
    pub fn vmctx_vmglobal_import_from(&self, index: GlobalIndex) -> u32 {
        self.vmctx_vmglobal_import(index) + u32::from(self.vmglobal_import_from())
    }

    /// The offset of the `from` field in the `VMTagImport` for `index`.
    #[inline]
    pub fn vmctx_vmtag_import_from(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_from())
    }
}
921
impl<P: PtrSize> VMOffsets<P> {
    /// The offset of the reference count within a DRC GC header.
    ///
    /// NOTE(review): hard-coded 8 — presumably the ref count sits after an
    /// 8-byte header word; confirm against the DRC collector's header
    /// layout elsewhere in the runtime.
    #[inline]
    pub fn vm_drc_header_ref_count(&self) -> u32 {
        8
    }
}
932
/// Offsets within a GC-ref activation table: a `next` pointer followed by
/// an `end` pointer.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset of the `next` field (first slot).
    #[inline]
    pub fn vm_gc_ref_activation_table_next(&self) -> u32 {
        0
    }

    /// The offset of the `end` field, one pointer after `next`.
    #[inline]
    pub fn vm_gc_ref_activation_table_end(&self) -> u32 {
        self.pointer_size().into()
    }
}
949
/// Magic value stored at the `vmctx_magic` offset of a `VMContext`: the
/// little-endian `u32` encoding of the ASCII bytes `"core"`.
pub const VMCONTEXT_MAGIC: u32 = u32::from_le_bytes(*b"core");

/// Magic value identifying an array-call host-function context: the
/// little-endian `u32` encoding of the ASCII bytes `"ACHF"`.
pub const VM_ARRAY_CALL_HOST_FUNC_MAGIC: u32 = u32::from_le_bytes(*b"ACHF");
960
#[cfg(test)]
mod tests {
    use crate::vmoffsets::align;

    /// `align` must return a multiple of `width` that is the smallest one
    /// >= `offset`.
    #[test]
    fn alignment() {
        fn is_aligned(x: u32) -> bool {
            x % 16 == 0
        }
        assert!(is_aligned(align(0, 16)));
        assert!(is_aligned(align(32, 16)));
        assert!(is_aligned(align(33, 16)));
        assert!(is_aligned(align(31, 16)));

        // Pin exact values: divisibility alone would not catch an
        // implementation that rounded DOWN (e.g. align(33, 16) == 32 is
        // divisible by 16 but wrong).
        assert_eq!(align(0, 16), 0);
        assert_eq!(align(32, 16), 32);
        assert_eq!(align(33, 16), 48);
        assert_eq!(align(31, 16), 32);
        assert_eq!(align(1, 8), 8);
    }
}